From de036e1f08dad4c5d0c6641f1942750558e3e787 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Fri, 4 Dec 2020 02:36:13 +0800 Subject: [PATCH 001/135] feat(std/wasi): add support for initializing reactors (#8603) This adds another entry point to Context called initialize for spinning up style modules. Reactors are modules that don't have a main function and basically run forever in the background. --- std/wasi/snapshot_preview1.ts | 33 +++++++++++++++++++++++ std/wasi/snapshot_preview1_test.ts | 42 ++++++++++++++++++++++++++++++ 2 files changed, 75 insertions(+) diff --git a/std/wasi/snapshot_preview1.ts b/std/wasi/snapshot_preview1.ts index eb296c47db1619..4c974fe1dd46c4 100644 --- a/std/wasi/snapshot_preview1.ts +++ b/std/wasi/snapshot_preview1.ts @@ -1588,4 +1588,37 @@ export default class Context { _start(); } + + /** + * Attempt to initialize instance as a reactor by invoking its _initialize() export. + * + * If instance contains a _start() export, then an exception is thrown. + * + * The instance must also have a WebAssembly.Memory export named "memory" + * which will be used as the address space, if it does not an error will be + * thrown. + */ + initialize(instance: WebAssembly.Instance) { + const { _start, _initialize, memory } = instance.exports; + + if (!(memory instanceof WebAssembly.Memory)) { + throw new TypeError("WebAsembly.instance must provide a memory export"); + } + + this.memory = memory; + + if (typeof _start == "function") { + throw new TypeError( + "WebAssembly.Instance export _start must not be a function", + ); + } + + if (typeof _initialize != "function") { + throw new TypeError( + "WebAsembly.instance export _initialize must be a function", + ); + } + + _initialize(); + } } diff --git a/std/wasi/snapshot_preview1_test.ts b/std/wasi/snapshot_preview1_test.ts index 44877117cc0178..d7e29e19597bcc 100644 --- a/std/wasi/snapshot_preview1_test.ts +++ b/std/wasi/snapshot_preview1_test.ts @@ -180,3 +180,45 @@ Deno.test("context_start", function () { "export _start must be a function", ); }); + +Deno.test("context_initialize", function () { + assertThrows( + () => { + const context = new Context({}); + context.initialize({ + exports: { + _initialize() {}, + }, + }); + }, + TypeError, + "must provide a memory export", + ); + + assertThrows( + () => { + const context = new Context({}); + context.initialize({ + exports: { + _start() {}, + memory: new WebAssembly.Memory({ initial: 1 }), + }, + }); + }, + TypeError, + "export _start must not be a function", + ); + + assertThrows( + () => { + const context = new Context({}); + context.initialize({ + exports: { + memory: new WebAssembly.Memory({ initial: 1 }), + }, + }); + }, + TypeError, + "export _initialize must be a function", + ); +}); From 0ea1c6f5b07e18dad60bca5f1c7631be5b5005b3 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Fri, 4 Dec 2020 02:57:35 +0800 Subject: [PATCH 002/135] feat(std/wasi): add return on exit option (#8605) This adds an exitOnReturn option to context making it possible to unwind the stack on the exit(2) syscall instead of delegating to it directly. Use case is being able to treat WASI execution contexts as children that don't kill the parent on exit. 
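
As a rough usage sketch (mirroring the test added in this patch, and assuming
`instance` is an already instantiated WASI WebAssembly.Instance), a caller can
treat guest exit as a catchable condition instead of terminating the host:

    import Context, { ExitStatus } from "./snapshot_preview1.ts";

    const context = new Context({ exitOnReturn: false });
    try {
      // `instance` is assumed to be provided by the caller.
      context.start(instance);
    } catch (err) {
      if (err instanceof ExitStatus) {
        console.log(`guest exited with code ${err.code}`);
      }
    }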
--- std/wasi/snapshot_preview1.ts | 21 ++++++++++++++++++++- std/wasi/snapshot_preview1_test.ts | 22 ++++++++++++++++++++-- 2 files changed, 40 insertions(+), 3 deletions(-) diff --git a/std/wasi/snapshot_preview1.ts b/std/wasi/snapshot_preview1.ts index 4c974fe1dd46c4..30756fe829c275 100644 --- a/std/wasi/snapshot_preview1.ts +++ b/std/wasi/snapshot_preview1.ts @@ -207,6 +207,10 @@ function syscall(target: T) { try { return target(...args); } catch (err) { + if (err instanceof ExitStatus) { + throw err; + } + switch (err.name) { case "NotFound": return ERRNO_NOENT; @@ -266,15 +270,25 @@ interface FileDescriptor { entries?: Deno.DirEntry[]; } +export class ExitStatus { + code: number; + + constructor(code: number) { + this.code = code; + } +} + export interface ContextOptions { args?: string[]; env?: { [key: string]: string | undefined }; preopens?: { [key: string]: string }; + exitOnReturn?: boolean; } export default class Context { args: string[]; env: { [key: string]: string | undefined }; + exitOnReturn: boolean; memory: WebAssembly.Memory; fds: FileDescriptor[]; @@ -284,6 +298,7 @@ export default class Context { constructor(options: ContextOptions) { this.args = options.args ? options.args : []; this.env = options.env ? options.env : {}; + this.exitOnReturn = options.exitOnReturn ?? true; this.memory = null!; this.fds = [ @@ -1497,7 +1512,11 @@ export default class Context { "proc_exit": syscall(( rval: number, ): never => { - Deno.exit(rval); + if (this.exitOnReturn) { + Deno.exit(rval); + } + + throw new ExitStatus(rval); }), "proc_raise": syscall(( diff --git a/std/wasi/snapshot_preview1_test.ts b/std/wasi/snapshot_preview1_test.ts index d7e29e19597bcc..01b91bc3e48f70 100644 --- a/std/wasi/snapshot_preview1_test.ts +++ b/std/wasi/snapshot_preview1_test.ts @@ -1,6 +1,6 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
-import Context from "./snapshot_preview1.ts"; -import { assertEquals, assertThrows } from "../testing/asserts.ts"; +import Context, { ExitStatus } from "./snapshot_preview1.ts"; +import { assert, assertEquals, assertThrows } from "../testing/asserts.ts"; import { copy } from "../fs/mod.ts"; import * as path from "../path/mod.ts"; @@ -179,6 +179,24 @@ Deno.test("context_start", function () { TypeError, "export _start must be a function", ); + + try { + const context = new Context({ + exitOnReturn: false, + }); + context.start({ + exports: { + _start() { + const exit = context.exports["proc_exit"] as CallableFunction; + exit(0); + }, + memory: new WebAssembly.Memory({ initial: 1 }), + }, + }); + } catch (err) { + assert(err instanceof ExitStatus); + assertEquals(err.code, 0); + } }); Deno.test("context_initialize", function () { From e6b90beb3d515129b842f9b9a620aebcd88b522a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Fri, 4 Dec 2020 13:21:38 +0100 Subject: [PATCH 003/135] upgrade: swc_bundler 0.17.5 (#8588) --- Cargo.lock | 4 ++-- cli/Cargo.toml | 2 +- cli/tests/bundle/fixture02.out | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f77d7262df14bc..a34c10ed920da9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2461,9 +2461,9 @@ dependencies = [ [[package]] name = "swc_bundler" -version = "0.17.3" +version = "0.17.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06641b7b0664e6f7552b7e52ae3bc3d8fa47988811b7d02670e40d522296138e" +checksum = "88b07ac843906ddcc5f2e17ad47f80c14c7c276146e6c4e7355530ae116caf07" dependencies = [ "anyhow", "crc", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 13bd91ddb1b0d2..a8d6e9c2b68d72 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -65,7 +65,7 @@ serde = { version = "1.0.116", features = ["derive"] } shell-escape = "0.1.5" sys-info = "0.7.0" sourcemap = "6.0.1" -swc_bundler = "0.17.3" +swc_bundler = "0.17.5" swc_common = { version = "0.10.6", features = ["sourcemap"] } swc_ecmascript = { version = "0.14.4", features = ["codegen", "dep_graph", "parser", "react", "transforms", "visit"] } tempfile = "3.1.0" diff --git a/cli/tests/bundle/fixture02.out b/cli/tests/bundle/fixture02.out index a0028918352857..bd3e349bdf0be5 100644 --- a/cli/tests/bundle/fixture02.out +++ b/cli/tests/bundle/fixture02.out @@ -3,6 +3,7 @@ const mod = function() { const c1 = c; class C { } + const __default = C; return { c, default: C From ae21a9569b87411f863fa5194c873be69d8bee93 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Fri, 4 Dec 2020 20:53:16 +0800 Subject: [PATCH 004/135] refactor(std/wasi): prefer nullish coalescing (#8607) --- std/wasi/snapshot_preview1.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/std/wasi/snapshot_preview1.ts b/std/wasi/snapshot_preview1.ts index 30756fe829c275..98850fc1f2524b 100644 --- a/std/wasi/snapshot_preview1.ts +++ b/std/wasi/snapshot_preview1.ts @@ -296,8 +296,8 @@ export default class Context { exports: Record; constructor(options: ContextOptions) { - this.args = options.args ? options.args : []; - this.env = options.env ? options.env : {}; + this.args = options.args ?? []; + this.env = options.env ?? {}; this.exitOnReturn = options.exitOnReturn ?? 
true; this.memory = null!; From 71ef5a9cd33390f7f41b80cdb57862b7f7507f76 Mon Sep 17 00:00:00 2001 From: Benjamin Gruenbaum Date: Fri, 4 Dec 2020 19:47:08 +0200 Subject: [PATCH 005/135] feat(op_crates/web) EventTarget signal support (#8616) Fixes: https://github.com/denoland/deno/issues/8606 --- op_crates/web/01_event.js | 16 ++++++++++++++-- op_crates/web/event_test.js | 20 ++++++++++++++++++++ std/wasi/testdata | 2 +- 3 files changed, 35 insertions(+), 3 deletions(-) diff --git a/op_crates/web/01_event.js b/op_crates/web/01_event.js index ed76a4281aff39..35b269ea4bf057 100644 --- a/op_crates/web/01_event.js +++ b/op_crates/web/01_event.js @@ -693,7 +693,7 @@ for (let i = 0; i < handlers.length; i++) { const listener = handlers[i]; - let capture, once, passive; + let capture, once, passive, signal; if (typeof listener.options === "boolean") { capture = listener.options; once = false; @@ -895,7 +895,19 @@ return; } } - + if (options?.signal) { + const signal = options?.signal; + if (signal.aborted) { + // If signal is not null and its aborted flag is set, then return. + return; + } else { + // If listener’s signal is not null, then add the following abort + // abort steps to it: Remove an event listener. + signal.addEventListener("abort", () => { + this.removeEventListener(type, callback, options); + }); + } + } listeners[type].push({ callback, options }); } diff --git a/op_crates/web/event_test.js b/op_crates/web/event_test.js index 8107f3bca8149a..00459c442cd2ca 100644 --- a/op_crates/web/event_test.js +++ b/op_crates/web/event_test.js @@ -106,6 +106,25 @@ function eventIsTrustedGetterName() { assert(e.message.includes("not a constructor")); } } +function eventAbortSignal() { + let count = 0; + function handler() { + count++; + } + const et = new EventTarget(); + const controller = new AbortController(); + et.addEventListener("test", handler, { signal: controller.signal }); + et.dispatchEvent(new Event("test")); + assert(count === 1); + et.dispatchEvent(new Event("test")); + assert(count === 2); + controller.abort(); + et.dispatchEvent(new Event("test")); + assert(count === 2); + et.addEventListener("test", handler, { signal: controller.signal }); + et.dispatchEvent(new Event("test")); + assert(count === 2); +} function main() { eventInitializedWithType(); eventInitializedWithTypeAndDict(); @@ -116,6 +135,7 @@ function main() { eventInitializedWithNonStringType(); eventIsTrusted(); eventIsTrustedGetterName(); + eventAbortSignal(); } main(); diff --git a/std/wasi/testdata b/std/wasi/testdata index 8f49014513d950..4c7517f6cc5aa3 160000 --- a/std/wasi/testdata +++ b/std/wasi/testdata @@ -1 +1 @@ -Subproject commit 8f49014513d9508f0495977be44ec60c6f4c8e06 +Subproject commit 4c7517f6cc5aa3bd7cf405be7dfb8ec1cac6d2de From 300b39b889313aec387a2a8746add652bdee407c Mon Sep 17 00:00:00 2001 From: Paco Date: Sat, 5 Dec 2020 15:43:46 +0100 Subject: [PATCH 006/135] docs(core): Fix typo in load_module's docstring (#8564) --- core/runtime.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/runtime.rs b/core/runtime.rs index 5044072b65b878..c03ee9d6fcf9dc 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -1269,7 +1269,7 @@ impl JsRuntime { Ok(()) } - /// Asynchronously load specified module and all of it's dependencies + /// Asynchronously load specified module and all of its dependencies /// /// User must call `JsRuntime::mod_evaluate` with returned `ModuleId` /// manually after load is finished. 
From 4f0dc2c51df966dc7bfd1148aee5d311edafbc47 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Sat, 5 Dec 2020 22:47:01 +0800 Subject: [PATCH 007/135] refactor(std/wasi): prefer explicit encoder/decoder names (#8622) This renames a couple of identifiers from the ambigious name "text" to the more explicit textEncoder and textDecoder depending on what they are. --- std/wasi/snapshot_preview1.ts | 54 +++++++++++++++++------------------ 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/std/wasi/snapshot_preview1.ts b/std/wasi/snapshot_preview1.ts index 98850fc1f2524b..225458c66bc02d 100644 --- a/std/wasi/snapshot_preview1.ts +++ b/std/wasi/snapshot_preview1.ts @@ -341,7 +341,7 @@ export default class Context { argvBufferOffset: number, ): number => { const args = this.args; - const text = new TextEncoder(); + const textEncoder = new TextEncoder(); const memoryData = new Uint8Array(this.memory.buffer); const memoryView = new DataView(this.memory.buffer); @@ -349,7 +349,7 @@ export default class Context { memoryView.setUint32(argvOffset, argvBufferOffset, true); argvOffset += 4; - const data = text.encode(`${arg}\0`); + const data = textEncoder.encode(`${arg}\0`); memoryData.set(data, argvBufferOffset); argvBufferOffset += data.length; } @@ -362,14 +362,14 @@ export default class Context { argvBufferSizeOffset: number, ): number => { const args = this.args; - const text = new TextEncoder(); + const textEncoder = new TextEncoder(); const memoryView = new DataView(this.memory.buffer); memoryView.setUint32(argcOffset, args.length, true); memoryView.setUint32( argvBufferSizeOffset, args.reduce(function (acc, arg) { - return acc + text.encode(`${arg}\0`).length; + return acc + textEncoder.encode(`${arg}\0`).length; }, 0), true, ); @@ -382,7 +382,7 @@ export default class Context { environBufferOffset: number, ): number => { const entries = Object.entries(this.env); - const text = new TextEncoder(); + const textEncoder = new TextEncoder(); const memoryData = new Uint8Array(this.memory.buffer); const memoryView = new DataView(this.memory.buffer); @@ -390,7 +390,7 @@ export default class Context { memoryView.setUint32(environOffset, environBufferOffset, true); environOffset += 4; - const data = text.encode(`${key}=${value}\0`); + const data = textEncoder.encode(`${key}=${value}\0`); memoryData.set(data, environBufferOffset); environBufferOffset += data.length; } @@ -403,14 +403,14 @@ export default class Context { environBufferSizeOffset: number, ): number => { const entries = Object.entries(this.env); - const text = new TextEncoder(); + const textEncoder = new TextEncoder(); const memoryView = new DataView(this.memory.buffer); memoryView.setUint32(environcOffset, entries.length, true); memoryView.setUint32( environBufferSizeOffset, entries.reduce(function (acc, [key, value]) { - return acc + text.encode(`${key}=${value}\0`).length; + return acc + textEncoder.encode(`${key}=${value}\0`).length; }, 0), true, ); @@ -1021,9 +1021,9 @@ export default class Context { return ERRNO_INVAL; } - const text = new TextDecoder(); + const textDecoder = new TextDecoder(); const data = new Uint8Array(this.memory.buffer, pathOffset, pathLength); - const path = resolve(entry.path!, text.decode(data)); + const path = resolve(entry.path!, textDecoder.decode(data)); Deno.mkdirSync(path); @@ -1046,9 +1046,9 @@ export default class Context { return ERRNO_INVAL; } - const text = new TextDecoder(); + const textDecoder = new TextDecoder(); const data = new Uint8Array(this.memory.buffer, pathOffset, pathLength); - 
const path = resolve(entry.path!, text.decode(data)); + const path = resolve(entry.path!, textDecoder.decode(data)); const memoryView = new DataView(this.memory.buffer); @@ -1140,9 +1140,9 @@ export default class Context { return ERRNO_INVAL; } - const text = new TextDecoder(); + const textDecoder = new TextDecoder(); const data = new Uint8Array(this.memory.buffer, pathOffset, pathLength); - const path = resolve(entry.path!, text.decode(data)); + const path = resolve(entry.path!, textDecoder.decode(data)); if ((fstflags & FSTFLAGS_ATIM_NOW) == FSTFLAGS_ATIM_NOW) { atim = BigInt(Date.now()) * BigInt(1e6); @@ -1176,19 +1176,19 @@ export default class Context { return ERRNO_INVAL; } - const text = new TextDecoder(); + const textDecoder = new TextDecoder(); const oldData = new Uint8Array( this.memory.buffer, oldPathOffset, oldPathLength, ); - const oldPath = resolve(oldEntry.path!, text.decode(oldData)); + const oldPath = resolve(oldEntry.path!, textDecoder.decode(oldData)); const newData = new Uint8Array( this.memory.buffer, newPathOffset, newPathLength, ); - const newPath = resolve(newEntry.path!, text.decode(newData)); + const newPath = resolve(newEntry.path!, textDecoder.decode(newData)); Deno.linkSync(oldPath, newPath); @@ -1392,9 +1392,9 @@ export default class Context { return ERRNO_INVAL; } - const text = new TextDecoder(); + const textDecoder = new TextDecoder(); const data = new Uint8Array(this.memory.buffer, pathOffset, pathLength); - const path = resolve(entry.path!, text.decode(data)); + const path = resolve(entry.path!, textDecoder.decode(data)); if (!Deno.statSync(path).isDirectory) { return ERRNO_NOTDIR; @@ -1423,19 +1423,19 @@ export default class Context { return ERRNO_INVAL; } - const text = new TextDecoder(); + const textDecoder = new TextDecoder(); const oldData = new Uint8Array( this.memory.buffer, oldPathOffset, oldPathLength, ); - const oldPath = resolve(oldEntry.path!, text.decode(oldData)); + const oldPath = resolve(oldEntry.path!, textDecoder.decode(oldData)); const newData = new Uint8Array( this.memory.buffer, newPathOffset, newPathLength, ); - const newPath = resolve(newEntry.path!, text.decode(newData)); + const newPath = resolve(newEntry.path!, textDecoder.decode(newData)); Deno.renameSync(oldPath, newPath); @@ -1458,19 +1458,19 @@ export default class Context { return ERRNO_INVAL; } - const text = new TextDecoder(); + const textDecoder = new TextDecoder(); const oldData = new Uint8Array( this.memory.buffer, oldPathOffset, oldPathLength, ); - const oldPath = text.decode(oldData); + const oldPath = textDecoder.decode(oldData); const newData = new Uint8Array( this.memory.buffer, newPathOffset, newPathLength, ); - const newPath = resolve(entry.path!, text.decode(newData)); + const newPath = resolve(entry.path!, textDecoder.decode(newData)); Deno.symlinkSync(oldPath, newPath); @@ -1491,9 +1491,9 @@ export default class Context { return ERRNO_INVAL; } - const text = new TextDecoder(); + const textDecoder = new TextDecoder(); const data = new Uint8Array(this.memory.buffer, pathOffset, pathLength); - const path = resolve(entry.path!, text.decode(data)); + const path = resolve(entry.path!, textDecoder.decode(data)); Deno.removeSync(path); From d72310f139661263fdb4d77c3e311130ac23d971 Mon Sep 17 00:00:00 2001 From: Yuki Tanaka Date: Sat, 5 Dec 2020 23:49:05 +0900 Subject: [PATCH 008/135] fix(std/encoding): Rewrite toml parser not to use eval() (#8624) --- std/encoding/testdata/string.toml | 2 + std/encoding/toml.ts | 148 +++++++++++++++++++++++------- 
std/encoding/toml_test.ts | 11 +++ 3 files changed, 126 insertions(+), 35 deletions(-) diff --git a/std/encoding/testdata/string.toml b/std/encoding/testdata/string.toml index 05f425f1109348..640717d0ebe604 100644 --- a/std/encoding/testdata/string.toml +++ b/std/encoding/testdata/string.toml @@ -32,3 +32,5 @@ trimmed in raw strings. withApostrophe = "What if it's not?" withSemicolon = "const message = 'hello world';" withHexNumberLiteral = "Prevent bug from stripping string here ->0xabcdef" +withUnicodeChar1 = "\u3042" +withUnicodeChar2 = "Deno\U01F995" diff --git a/std/encoding/toml.ts b/std/encoding/toml.ts index add9880ce7857b..a397a7bd91a041 100644 --- a/std/encoding/toml.ts +++ b/std/encoding/toml.ts @@ -264,42 +264,43 @@ class Parser { const value = this._parseData(line.slice(idx + 1)); return new KeyValuePair(key, value); } - // TODO (zekth) Need refactor using ACC _parseData(dataString: string): unknown { dataString = dataString.trim(); + switch (dataString[0]) { + case '"': + case "'": + return this._parseString(dataString); + case "[": + case "{": + return this._parseInlineTableOrArray(dataString); + default: { + // Strip a comment. + const match = /#.*$/.exec(dataString); + if (match) { + dataString = dataString.slice(0, match.index).trim(); + } - if (this._isDate(dataString)) { - return new Date(dataString.split("#")[0].trim()); - } - - if (this._isLocalTime(dataString)) { - return eval(`"${dataString.split("#")[0].trim()}"`); - } - - const cut3 = dataString.substring(0, 3).toLowerCase(); - const cut4 = dataString.substring(0, 4).toLowerCase(); - if (cut3 === "inf" || cut4 === "+inf") { - return Infinity; - } - if (cut4 === "-inf") { - return -Infinity; - } - - if (cut3 === "nan" || cut4 === "+nan" || cut4 === "-nan") { - return NaN; - } - - // If binary / octal / hex - const hex = /^(0(?:x|o|b)[0-9a-f_]*)[^#]/gi.exec(dataString); - if (hex && hex[0]) { - return hex[0].trim(); - } - - const testNumber = this._isParsableNumber(dataString); - if (testNumber && !isNaN(testNumber as number)) { - return testNumber; + switch (dataString) { + case "true": + return true; + case "false": + return false; + case "inf": + case "+inf": + return Infinity; + case "-inf": + return -Infinity; + case "nan": + case "+nan": + case "-nan": + return NaN; + default: + return this._parseNumberOrDate(dataString); + } + } } - + } + _parseInlineTableOrArray(dataString: string): unknown { const invalidArr = /,\]/g.exec(dataString); if (invalidArr) { dataString = dataString.replace(/,]/g, "]"); @@ -320,7 +321,10 @@ class Parser { } return JSON.parse(dataString); } - + throw new TOMLError("Malformed inline table or array literal"); + } + _parseString(dataString: string): string { + const quote = dataString[0]; // Handle First and last EOL for multiline strings if (dataString.startsWith(`"\\n`)) { dataString = dataString.replace(`"\\n`, `"`); @@ -332,14 +336,88 @@ class Parser { } else if (dataString.endsWith(`\\n'`)) { dataString = dataString.replace(`\\n'`, `'`); } - return eval(dataString); + let value = ""; + for (let i = 1; i < dataString.length; i++) { + switch (dataString[i]) { + case "\\": + i++; + // See https://toml.io/en/v1.0.0-rc.3#string + switch (dataString[i]) { + case "b": + value += "\b"; + break; + case "t": + value += "\t"; + break; + case "n": + value += "\n"; + break; + case "f": + value += "\f"; + break; + case "r": + value += "\r"; + break; + case "u": + case "U": { + // Unicode character + const codePointLen = dataString[i] === "u" ? 
4 : 6; + const codePoint = parseInt( + "0x" + dataString.slice(i + 1, i + 1 + codePointLen), + 16, + ); + value += String.fromCodePoint(codePoint); + i += codePointLen; + break; + } + case "\\": + value += "\\"; + break; + default: + value += dataString[i]; + break; + } + break; + case quote: + if (dataString[i - 1] !== "\\") { + return value; + } + break; + default: + value += dataString[i]; + break; + } + } + throw new TOMLError("Incomplete string literal"); + } + _parseNumberOrDate(dataString: string): unknown { + if (this._isDate(dataString)) { + return new Date(dataString); + } + + if (this._isLocalTime(dataString)) { + return dataString; + } + + // If binary / octal / hex + const hex = /^(0(?:x|o|b)[0-9a-f_]*)/gi.exec(dataString); + if (hex && hex[0]) { + return hex[0].trim(); + } + + const testNumber = this._isParsableNumber(dataString); + if (testNumber !== false && !isNaN(testNumber as number)) { + return testNumber; + } + + return String(dataString); } _isLocalTime(str: string): boolean { const reg = /(\d{2}):(\d{2}):(\d{2})/; return reg.test(str); } _isParsableNumber(dataString: string): number | boolean { - const m = /((?:\+|-|)[0-9_\.e+\-]*)[^#]/i.exec(dataString.trim()); + const m = /((?:\+|-|)[0-9_\.e+\-]*)[^#]/i.exec(dataString); if (!m) { return false; } else { diff --git a/std/encoding/toml_test.ts b/std/encoding/toml_test.ts index 8e25d61ea1a70a..870793844c9e75 100644 --- a/std/encoding/toml_test.ts +++ b/std/encoding/toml_test.ts @@ -32,6 +32,8 @@ Deno.test({ withSemicolon: `const message = 'hello world';`, withHexNumberLiteral: "Prevent bug from stripping string here ->0xabcdef", + withUnicodeChar1: "あ", + withUnicodeChar2: "Deno🦕", }, }; const actual = parseFile(path.join(testdataDir, "string.toml")); @@ -466,3 +468,12 @@ Deno.test({ assertEquals(actual, expected); }, }); + +Deno.test({ + name: "[TOML] Parse malformed local time as String (#8433)", + fn(): void { + const expected = { sign: "2020-01-01x" }; + const actual = parse(`sign='2020-01-01x'`); + assertEquals(actual, expected); + }, +}); From 362be01abe572d68b34e31b361addb860841115b Mon Sep 17 00:00:00 2001 From: Liam Murphy <43807659+Liamolucko@users.noreply.github.com> Date: Sun, 6 Dec 2020 02:16:07 +1100 Subject: [PATCH 009/135] feat(std/node): Add "setImmediate" and "clearImmediate" to global scope (#8566) Co-authored-by: Ben Noordhuis --- std/node/global.d.ts | 5 +++++ std/node/global.ts | 15 +++++++++++++++ std/node/global_test.ts | 19 +++++++++++++++++++ 3 files changed, 39 insertions(+) diff --git a/std/node/global.d.ts b/std/node/global.d.ts index b02a682c6210f0..6e0378b9e03893 100644 --- a/std/node/global.d.ts +++ b/std/node/global.d.ts @@ -1,6 +1,7 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
import { process as processModule } from "./process.ts"; import { Buffer as bufferModule } from "./buffer.ts"; +import timers from "./timers.ts"; // d.ts files allow us to declare Buffer as a value and as a type // type something = Buffer | something_else; is quite common @@ -8,6 +9,8 @@ import { Buffer as bufferModule } from "./buffer.ts"; type GlobalType = { process: typeof processModule; Buffer: typeof bufferModule; + setImmediate: typeof timers.setImmediate; + clearImmediate: typeof timers.clearImmediate; }; declare global { @@ -23,6 +26,8 @@ declare global { var process: typeof processModule; var Buffer: typeof bufferModule; type Buffer = bufferModule; + var setImmediate: typeof timers.setImmediate; + var clearImmediate: typeof timers.clearImmediate; } export {}; diff --git a/std/node/global.ts b/std/node/global.ts index d72760089247d9..585ba149014246 100644 --- a/std/node/global.ts +++ b/std/node/global.ts @@ -2,6 +2,7 @@ /// import { process as processModule } from "./process.ts"; import { Buffer as bufferModule } from "./buffer.ts"; +import timers from "./timers.ts"; Object.defineProperty(globalThis, "global", { value: globalThis, @@ -24,4 +25,18 @@ Object.defineProperty(globalThis, "Buffer", { configurable: true, }); +Object.defineProperty(globalThis, "setImmediate", { + value: timers.setImmediate, + enumerable: true, + writable: true, + configurable: true, +}); + +Object.defineProperty(globalThis, "clearImmediate", { + value: timers.clearImmediate, + enumerable: true, + writable: true, + configurable: true, +}); + export {}; diff --git a/std/node/global_test.ts b/std/node/global_test.ts index 9ce0b21325483f..60c997daf49375 100644 --- a/std/node/global_test.ts +++ b/std/node/global_test.ts @@ -2,6 +2,7 @@ import "./global.ts"; import { assert, assertStrictEquals } from "../testing/asserts.ts"; import { Buffer as BufferModule } from "./buffer.ts"; import processModule from "./process.ts"; +import timers from "./timers.ts"; // Definitions for this are quite delicate // This ensures modifications to the global namespace don't break on TypeScript @@ -50,3 +51,21 @@ Deno.test("process is correctly defined", () => { assertStrictEquals(window.process, processModule); assert(window.process.arch); }); + +Deno.test("setImmediate is correctly defined", () => { + // deno-lint-ignore no-undef + assertStrictEquals(setImmediate, timers.setImmediate); + // deno-lint-ignore no-undef + assertStrictEquals(global.setImmediate, timers.setImmediate); + assertStrictEquals(globalThis.setImmediate, timers.setImmediate); + assertStrictEquals(window.setImmediate, timers.setImmediate); +}); + +Deno.test("clearImmediate is correctly defined", () => { + // deno-lint-ignore no-undef + assertStrictEquals(clearImmediate, timers.clearImmediate); + // deno-lint-ignore no-undef + assertStrictEquals(global.clearImmediate, timers.clearImmediate); + assertStrictEquals(globalThis.clearImmediate, timers.clearImmediate); + assertStrictEquals(window.clearImmediate, timers.clearImmediate); +}); From 2d5c742cf608b2ce0c5a51fb80f34cfd8ffd2e83 Mon Sep 17 00:00:00 2001 From: fabiante Date: Sat, 5 Dec 2020 17:10:10 +0100 Subject: [PATCH 010/135] docs(tools): add documentation for deno compile cmd (#8615) --- docs/toc.json | 1 + docs/tools.md | 1 + docs/tools/compiler.md | 18 ++++++++++++++++++ 3 files changed, 20 insertions(+) create mode 100644 docs/tools/compiler.md diff --git a/docs/toc.json b/docs/toc.json index ded25ce97a5ea4..cb3011d73e4636 100644 --- a/docs/toc.json +++ b/docs/toc.json @@ -69,6 +69,7 @@ "formatter": 
"Formatter", "repl": "Read-eval-print-loop", "bundler": "Bundler", + "compiler": "Compiling executables", "documentation_generator": "Documentation generator", "dependency_inspector": "Dependency inspector", "linter": "Linter" diff --git a/docs/tools.md b/docs/tools.md index 5945ee01596656..87a3c936b3ac97 100644 --- a/docs/tools.md +++ b/docs/tools.md @@ -4,6 +4,7 @@ Deno provides some built in tooling that is useful when working with JavaScript and TypeScript: - [bundler (`deno bundle`)](./tools/bundler.md) +- [compiling executables (`deno compile`)](./tools/compiler.md) - [dependency inspector (`deno info`)](./tools/dependency_inspector.md) - [documentation generator (`deno doc`)](./tools/documentation_generator.md) - [formatter (`deno fmt`)](./tools/formatter.md) diff --git a/docs/tools/compiler.md b/docs/tools/compiler.md new file mode 100644 index 00000000000000..34dbbdcc84ef55 --- /dev/null +++ b/docs/tools/compiler.md @@ -0,0 +1,18 @@ +## Compiling Executables + +> Since the compile functionality is relatively new, the `--unstable` flag has +> to be set in order for the command to work. + +`deno compile [SRC] [OUT]` will compile the script into a self contained +executable. + +``` +> deno compile --unstable https://deno.land/std/http/file_server.ts +``` + +If you omit the `OUT` parameter, the name of the executable file will be +inferred. + +### Cross Compilation + +Cross compiling binaries for different platforms is not currently possible. From c10280214e5e15fb31b83368082916b9f25470f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Canales?= Date: Sat, 5 Dec 2020 17:41:16 +0100 Subject: [PATCH 011/135] fix(std/bufio): Remove '\r' at the end of Windows lines (#8447) Co-authored-by: Nayeem Rahman --- std/io/bufio.ts | 10 +++++++++- std/io/bufio_test.ts | 12 ++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/std/io/bufio.ts b/std/io/bufio.ts index 954fc5eee19470..df3ecb002930ae 100644 --- a/std/io/bufio.ts +++ b/std/io/bufio.ts @@ -706,5 +706,13 @@ export async function* readStringDelim( export async function* readLines( reader: Reader, ): AsyncIterableIterator { - yield* readStringDelim(reader, "\n"); + for await (let chunk of readStringDelim(reader, "\n")) { + // Finding a CR at the end of the line is evidence of a + // "\r\n" at the end of the line. The "\r" part should be + // removed too. 
+ if (chunk.endsWith("\r")) { + chunk = chunk.slice(0, -1); + } + yield chunk; + } } diff --git a/std/io/bufio_test.ts b/std/io/bufio_test.ts index 03f699d50797f5..804d59e99f41da 100644 --- a/std/io/bufio_test.ts +++ b/std/io/bufio_test.ts @@ -436,6 +436,10 @@ Deno.test("readStringDelimAndLines", async function (): Promise { assertEquals(chunks_, ["Hello World", "Hello World 2", "Hello World 3"]); const linesData = new Deno.Buffer(enc.encode("0\n1\n2\n3\n4\n5\n6\n7\n8\n9")); + // consider data with windows newlines too + const linesDataWindows = new Deno.Buffer( + enc.encode("0\r\n1\r\n2\r\n3\r\n4\r\n5\r\n6\r\n7\r\n8\r\n9"), + ); const lines_ = []; for await (const l of readLines(linesData)) { @@ -444,6 +448,14 @@ Deno.test("readStringDelimAndLines", async function (): Promise { assertEquals(lines_.length, 10); assertEquals(lines_, ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]); + + // Now test for "windows" lines + lines_.length = 0; + for await (const l of readLines(linesDataWindows)) { + lines_.push(l); + } + assertEquals(lines_.length, 10); + assertEquals(lines_, ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]); }); Deno.test( From 0b37a79060e48c1fe6936c6e642044e9b20b9bb5 Mon Sep 17 00:00:00 2001 From: Yosi Pramajaya Date: Sun, 6 Dec 2020 21:51:13 +0700 Subject: [PATCH 012/135] BREAKING(std/bytes): Adjust APIs based on std-wg discussion (#8612) --- std/bytes/README.md | 77 ++++++++++++++++----------- std/bytes/mod.ts | 85 ++++++++++++++++++++---------- std/bytes/test.ts | 117 +++++++++++++++++++++++++++++++----------- std/io/bufio.ts | 12 ++--- std/io/bufio_test.ts | 4 +- std/mime/multipart.ts | 26 +++++----- std/ws/test.ts | 2 +- 7 files changed, 215 insertions(+), 108 deletions(-) diff --git a/std/bytes/README.md b/std/bytes/README.md index 537c33473d3251..bfe40034406456 100644 --- a/std/bytes/README.md +++ b/std/bytes/README.md @@ -6,67 +6,77 @@ bytes module is made to provide helpers to manipulation of bytes slice. All the following functions are exposed in `mod.ts`. -## findIndex +## indexOf -Find first index of binary pattern from given binary array. +Find first index of binary pattern from given binary array, or -1 if it is not +present. ```typescript -import { findIndex } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; +import { indexOf } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; -findIndex( +indexOf( new Uint8Array([1, 2, 0, 1, 2, 0, 1, 2, 0, 1, 3]), new Uint8Array([0, 1, 2]), -); +); // => returns 2 -// => returns 2 +indexOf( + new Uint8Array([1, 2, 0, 1, 2, 0, 1, 2, 0, 1, 3]), + new Uint8Array([0, 1, 2]), + 3, +); // => returns 5 ``` -## findLastIndex +## lastIndexOf -Find last index of binary pattern from given binary array. +Find last index of binary pattern from given binary array, or -1 if it is not +present. ```typescript -import { findLastIndex } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; +import { lastIndexOf } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; -findLastIndex( - new Uint8Array([0, 1, 2, 0, 1, 2, 0, 1, 3]), +lastIndexOf( + new Uint8Array([0, 1, 2, 3, 3, 0, 1, 2]), new Uint8Array([0, 1, 2]), -); +); // => returns 5 -// => returns 3 +lastIndexOf( + new Uint8Array([0, 1, 2, 3, 3, 0, 1, 2]), + new Uint8Array([0, 1, 2]), + 3, +); // => returns 0 ``` -## equal +## equals Check whether given binary arrays are equal to each other. 
```typescript -import { equal } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; +import { equals } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; -equal(new Uint8Array([0, 1, 2, 3]), new Uint8Array([0, 1, 2, 3])); // returns true -equal(new Uint8Array([0, 1, 2, 3]), new Uint8Array([0, 1, 2, 4])); // returns false +equals(new Uint8Array([0, 1, 2, 3]), new Uint8Array([0, 1, 2, 3])); // returns true +equals(new Uint8Array([0, 1, 2, 3]), new Uint8Array([0, 1, 2, 4])); // returns false ``` -## hasPrefix +## startsWith -Check whether binary array has binary prefix. +Check whether binary array starts with prefix. ```typescript -import { hasPrefix } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; +import { startsWith } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; -hasPrefix(new Uint8Array([0, 1, 2]), new Uint8Array([0, 1])); // returns true -hasPrefix(new Uint8Array([0, 1, 2]), new Uint8Array([1, 2])); // returns false +startsWith(new Uint8Array([0, 1, 2]), new Uint8Array([0, 1])); // returns true +startsWith(new Uint8Array([0, 1, 2]), new Uint8Array([1, 2])); // returns false ``` -## hasSuffix +## endsWith Check whether binary array ends with suffix. ```typescript -import { hasSuffix } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; +import { endsWith } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; -hasSuffix(new Uint8Array([0, 1, 2]), new Uint8Array([0, 1])); // returns false -hasSuffix(new Uint8Array([0, 1, 2]), new Uint8Array([1, 2])); // returns true +endsWith(new Uint8Array([0, 1, 2]), new Uint8Array([0, 1])); // returns false +endsWith(new Uint8Array([0, 1, 2]), new Uint8Array([1, 2])); // returns true ``` ## repeat @@ -81,12 +91,19 @@ repeat(new Uint8Array([1]), 3); // returns Uint8Array(3) [ 1, 1, 1 ] ## concat -Concatenate two binary arrays and return new one. +Concatenate multiple binary arrays and return new one. ```typescript import { concat } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; concat(new Uint8Array([1, 2]), new Uint8Array([3, 4])); // returns Uint8Array(4) [ 1, 2, 3, 4 ] + +concat( + new Uint8Array([1, 2]), + new Uint8Array([3, 4]), + new Uint8Array([5, 6]), + new Uint8Array([7, 8]), +); // => returns Uint8Array(8) [ 1, 2, 3, 4, 5, 6, 7, 8 ] ``` ## contains @@ -107,14 +124,14 @@ contains( ); // => returns false ``` -## copyBytes +## copy Copy bytes from one binary array to another. ```typescript -import { copyBytes } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; +import { copy } from "https://deno.land/std@$STD_VERSION/bytes/mod.ts"; const dst = new Uint8Array(4); const src = Uint8Array.of(1, 2, 3, 4); -const len = copyBytes(src, dest); // returns len = 4 +const len = copy(src, dest); // returns len = 4 ``` diff --git a/std/bytes/mod.ts b/std/bytes/mod.ts index 8ae697c2986855..585aef2e98bb37 100644 --- a/std/bytes/mod.ts +++ b/std/bytes/mod.ts @@ -1,12 +1,23 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -/** Find first index of binary pattern from a. If not found, then return -1 +/** Find first index of binary pattern from source. 
If not found, then return -1 * @param source source array * @param pat pattern to find in source array + * @param start the index to start looking in the source */ -export function findIndex(source: Uint8Array, pat: Uint8Array): number { +export function indexOf( + source: Uint8Array, + pat: Uint8Array, + start = 0, +): number { + if (start >= source.length) { + return -1; + } + if (start < 0) { + start = 0; + } const s = pat[0]; - for (let i = 0; i < source.length; i++) { + for (let i = start; i < source.length; i++) { if (source[i] !== s) continue; const pin = i; let matched = 1; @@ -25,13 +36,24 @@ export function findIndex(source: Uint8Array, pat: Uint8Array): number { return -1; } -/** Find last index of binary pattern from a. If not found, then return -1. +/** Find last index of binary pattern from source. If not found, then return -1. * @param source source array * @param pat pattern to find in source array + * @param start the index to start looking in the source */ -export function findLastIndex(source: Uint8Array, pat: Uint8Array): number { +export function lastIndexOf( + source: Uint8Array, + pat: Uint8Array, + start = source.length - 1, +): number { + if (start < 0) { + return -1; + } + if (start >= source.length) { + start = source.length - 1; + } const e = pat[pat.length - 1]; - for (let i = source.length - 1; i >= 0; i--) { + for (let i = start; i >= 0; i--) { if (source[i] !== e) continue; const pin = i; let matched = 1; @@ -51,22 +73,22 @@ export function findLastIndex(source: Uint8Array, pat: Uint8Array): number { } /** Check whether binary arrays are equal to each other. - * @param source first array to check equality - * @param match second array to check equality + * @param a first array to check equality + * @param b second array to check equality */ -export function equal(source: Uint8Array, match: Uint8Array): boolean { - if (source.length !== match.length) return false; - for (let i = 0; i < match.length; i++) { - if (source[i] !== match[i]) return false; +export function equals(a: Uint8Array, b: Uint8Array): boolean { + if (a.length !== b.length) return false; + for (let i = 0; i < b.length; i++) { + if (a[i] !== b[i]) return false; } return true; } /** Check whether binary array starts with prefix. - * @param source srouce array + * @param source source array * @param prefix prefix array to check in source */ -export function hasPrefix(source: Uint8Array, prefix: Uint8Array): boolean { +export function startsWith(source: Uint8Array, prefix: Uint8Array): boolean { for (let i = 0, max = prefix.length; i < max; i++) { if (source[i] !== prefix[i]) return false; } @@ -77,7 +99,7 @@ export function hasPrefix(source: Uint8Array, prefix: Uint8Array): boolean { * @param source source array * @param suffix suffix array to check in source */ -export function hasSuffix(source: Uint8Array, suffix: Uint8Array): boolean { +export function endsWith(source: Uint8Array, suffix: Uint8Array): boolean { for ( let srci = source.length - 1, sfxi = suffix.length - 1; sfxi >= 0; @@ -91,6 +113,7 @@ export function hasSuffix(source: Uint8Array, suffix: Uint8Array): boolean { /** Repeat bytes. returns a new byte slice consisting of `count` copies of `b`. * @param origin The origin bytes * @param count The count you want to repeat. 
+ * @throws `RangeError` When count is negative */ export function repeat(origin: Uint8Array, count: number): Uint8Array { if (count === 0) { @@ -98,7 +121,7 @@ export function repeat(origin: Uint8Array, count: number): Uint8Array { } if (count < 0) { - throw new Error("bytes: negative repeat count"); + throw new RangeError("bytes: negative repeat count"); } else if ((origin.length * count) / count !== origin.length) { throw new Error("bytes: repeat count causes overflow"); } @@ -111,23 +134,31 @@ export function repeat(origin: Uint8Array, count: number): Uint8Array { const nb = new Uint8Array(origin.length * count); - let bp = copyBytes(origin, nb); + let bp = copy(origin, nb); for (; bp < nb.length; bp *= 2) { - copyBytes(nb.slice(0, bp), nb, bp); + copy(nb.slice(0, bp), nb, bp); } return nb; } -/** Concatenate two binary arrays and return new one. - * @param origin origin array to concatenate - * @param b array to concatenate with origin +/** Concatenate multiple binary arrays and return new one. + * @param buf binary arrays to concatenate */ -export function concat(origin: Uint8Array, b: Uint8Array): Uint8Array { - const output = new Uint8Array(origin.length + b.length); - output.set(origin, 0); - output.set(b, origin.length); +export function concat(...buf: Uint8Array[]): Uint8Array { + let length = 0; + for (const b of buf) { + length += b.length; + } + + const output = new Uint8Array(length); + let index = 0; + for (const b of buf) { + output.set(b, index); + index += b.length; + } + return output; } @@ -136,7 +167,7 @@ export function concat(origin: Uint8Array, b: Uint8Array): Uint8Array { * @param pat patter array */ export function contains(source: Uint8Array, pat: Uint8Array): boolean { - return findIndex(source, pat) != -1; + return indexOf(source, pat) != -1; } /** @@ -148,7 +179,7 @@ export function contains(source: Uint8Array, pat: Uint8Array): boolean { * @param off Offset into `dst` at which to begin writing values from `src`. 
* @return number of bytes copied */ -export function copyBytes(src: Uint8Array, dst: Uint8Array, off = 0): number { +export function copy(src: Uint8Array, dst: Uint8Array, off = 0): number { off = Math.max(0, Math.min(off, dst.byteLength)); const dstBytesAvailable = dst.byteLength - off; if (src.byteLength > dstBytesAvailable) { diff --git a/std/bytes/test.ts b/std/bytes/test.ts index 46f81919356fb2..3aea709652ce42 100644 --- a/std/bytes/test.ts +++ b/std/bytes/test.ts @@ -3,60 +3,106 @@ import { concat, contains, - copyBytes, - equal, - findIndex, - findLastIndex, - hasPrefix, - hasSuffix, + copy, + endsWith, + equals, + indexOf, + lastIndexOf, repeat, + startsWith, } from "./mod.ts"; import { assert, assertEquals, assertThrows } from "../testing/asserts.ts"; import { decode, encode } from "../encoding/utf8.ts"; -Deno.test("[bytes] findIndex1", () => { - const i = findIndex( +Deno.test("[bytes] indexOf1", () => { + const i = indexOf( new Uint8Array([1, 2, 0, 1, 2, 0, 1, 2, 0, 1, 3]), new Uint8Array([0, 1, 2]), ); assertEquals(i, 2); }); -Deno.test("[bytes] findIndex2", () => { - const i = findIndex(new Uint8Array([0, 0, 1]), new Uint8Array([0, 1])); +Deno.test("[bytes] indexOf2", () => { + const i = indexOf(new Uint8Array([0, 0, 1]), new Uint8Array([0, 1])); assertEquals(i, 1); }); -Deno.test("[bytes] findIndex3", () => { - const i = findIndex(encode("Deno"), encode("D")); +Deno.test("[bytes] indexOf3", () => { + const i = indexOf(encode("Deno"), encode("D")); assertEquals(i, 0); }); -Deno.test("[bytes] findLastIndex1", () => { - const i = findLastIndex( +Deno.test("[bytes] indexOf4", () => { + const i = indexOf(new Uint8Array(), new Uint8Array([0, 1])); + assertEquals(i, -1); +}); + +Deno.test("[bytes] indexOf with start index", () => { + const i = indexOf( + new Uint8Array([0, 1, 2, 0, 1, 2]), + new Uint8Array([0, 1]), + 1, + ); + assertEquals(i, 3); +}); + +Deno.test("[bytes] indexOf with start index 2", () => { + const i = indexOf( + new Uint8Array([0, 1, 2, 0, 1, 2]), + new Uint8Array([0, 1]), + 7, + ); + assertEquals(i, -1); +}); + +Deno.test("[bytes] lastIndexOf1", () => { + const i = lastIndexOf( new Uint8Array([0, 1, 2, 0, 1, 2, 0, 1, 3]), new Uint8Array([0, 1, 2]), ); assertEquals(i, 3); }); -Deno.test("[bytes] findLastIndex2", () => { - const i = findLastIndex(new Uint8Array([0, 1, 1]), new Uint8Array([0, 1])); +Deno.test("[bytes] lastIndexOf2", () => { + const i = lastIndexOf(new Uint8Array([0, 1, 1]), new Uint8Array([0, 1])); + assertEquals(i, 0); +}); + +Deno.test("[bytes] lastIndexOf3", () => { + const i = lastIndexOf(new Uint8Array(), new Uint8Array([0, 1])); + assertEquals(i, -1); +}); + +Deno.test("[bytes] lastIndexOf with start index", () => { + const i = lastIndexOf( + new Uint8Array([0, 1, 2, 0, 1, 2]), + new Uint8Array([0, 1]), + 2, + ); assertEquals(i, 0); }); -Deno.test("[bytes] equal", () => { - const v = equal(new Uint8Array([0, 1, 2, 3]), new Uint8Array([0, 1, 2, 3])); +Deno.test("[bytes] lastIndexOf with start index 2", () => { + const i = lastIndexOf( + new Uint8Array([0, 1, 2, 0, 1, 2]), + new Uint8Array([0, 1]), + -1, + ); + assertEquals(i, -1); +}); + +Deno.test("[bytes] equals", () => { + const v = equals(new Uint8Array([0, 1, 2, 3]), new Uint8Array([0, 1, 2, 3])); assertEquals(v, true); }); -Deno.test("[bytes] hasPrefix", () => { - const v = hasPrefix(new Uint8Array([0, 1, 2]), new Uint8Array([0, 1])); +Deno.test("[bytes] startsWith", () => { + const v = startsWith(new Uint8Array([0, 1, 2]), new Uint8Array([0, 1])); assertEquals(v, true); }); 
-Deno.test("[bytes] hasSuffix", () => { - const v = hasSuffix(new Uint8Array([0, 1, 2]), new Uint8Array([1, 2])); +Deno.test("[bytes] endsWith", () => { + const v = endsWith(new Uint8Array([0, 1, 2]), new Uint8Array([1, 2])); assertEquals(v, true); }); @@ -111,7 +157,20 @@ Deno.test("[bytes] concat empty arrays", () => { assert(u2 !== joined); }); -Deno.test("[bytes] contain", () => { +Deno.test("[bytes] concat multiple arrays", () => { + const u1 = encode("Hello "); + const u2 = encode("W"); + const u3 = encode("o"); + const u4 = encode("r"); + const u5 = encode("l"); + const u6 = encode("d"); + const joined = concat(u1, u2, u3, u4, u5, u6); + assertEquals(decode(joined), "Hello World"); + assert(u1 !== joined); + assert(u2 !== joined); +}); + +Deno.test("[bytes] contains", () => { const source = encode("deno.land"); const pattern = encode("deno"); assert(contains(source, pattern)); @@ -119,36 +178,36 @@ Deno.test("[bytes] contain", () => { assert(contains(new Uint8Array([0, 1, 2, 3]), new Uint8Array([2, 3]))); }); -Deno.test("[io/tuil] copyBytes", function (): void { +Deno.test("[bytes] copy", function (): void { const dst = new Uint8Array(4); dst.fill(0); let src = Uint8Array.of(1, 2); - let len = copyBytes(src, dst, 0); + let len = copy(src, dst, 0); assert(len === 2); assertEquals(dst, Uint8Array.of(1, 2, 0, 0)); dst.fill(0); src = Uint8Array.of(1, 2); - len = copyBytes(src, dst, 1); + len = copy(src, dst, 1); assert(len === 2); assertEquals(dst, Uint8Array.of(0, 1, 2, 0)); dst.fill(0); src = Uint8Array.of(1, 2, 3, 4, 5); - len = copyBytes(src, dst); + len = copy(src, dst); assert(len === 4); assertEquals(dst, Uint8Array.of(1, 2, 3, 4)); dst.fill(0); src = Uint8Array.of(1, 2); - len = copyBytes(src, dst, 100); + len = copy(src, dst, 100); assert(len === 0); assertEquals(dst, Uint8Array.of(0, 0, 0, 0)); dst.fill(0); src = Uint8Array.of(3, 4); - len = copyBytes(src, dst, -2); + len = copy(src, dst, -2); assert(len === 2); assertEquals(dst, Uint8Array.of(3, 4, 0, 0)); }); diff --git a/std/io/bufio.ts b/std/io/bufio.ts index df3ecb002930ae..da44729b2c8424 100644 --- a/std/io/bufio.ts +++ b/std/io/bufio.ts @@ -7,7 +7,7 @@ type Reader = Deno.Reader; type Writer = Deno.Writer; type WriterSync = Deno.WriterSync; -import { copyBytes } from "../bytes/mod.ts"; +import { copy } from "../bytes/mod.ts"; import { assert } from "../_util/assert.ts"; const DEFAULT_BUF_SIZE = 4096; @@ -150,7 +150,7 @@ export class BufReader implements Reader { } // copy as much as we can - const copied = copyBytes(this.buf.subarray(this.r, this.w), p, 0); + const copied = copy(this.buf.subarray(this.r, this.w), p, 0); this.r += copied; // this.lastByte = this.buf[this.r - 1]; // this.lastCharSize = -1; @@ -502,7 +502,7 @@ export class BufWriter extends AbstractBufBase implements Writer { throw e; } } else { - numBytesWritten = copyBytes(data, this.buf, this.usedBufferBytes); + numBytesWritten = copy(data, this.buf, this.usedBufferBytes); this.usedBufferBytes += numBytesWritten; await this.flush(); } @@ -510,7 +510,7 @@ export class BufWriter extends AbstractBufBase implements Writer { data = data.subarray(numBytesWritten); } - numBytesWritten = copyBytes(data, this.buf, this.usedBufferBytes); + numBytesWritten = copy(data, this.buf, this.usedBufferBytes); this.usedBufferBytes += numBytesWritten; totalBytesWritten += numBytesWritten; return totalBytesWritten; @@ -595,7 +595,7 @@ export class BufWriterSync extends AbstractBufBase implements WriterSync { throw e; } } else { - numBytesWritten = copyBytes(data, this.buf, 
this.usedBufferBytes); + numBytesWritten = copy(data, this.buf, this.usedBufferBytes); this.usedBufferBytes += numBytesWritten; this.flush(); } @@ -603,7 +603,7 @@ export class BufWriterSync extends AbstractBufBase implements WriterSync { data = data.subarray(numBytesWritten); } - numBytesWritten = copyBytes(data, this.buf, this.usedBufferBytes); + numBytesWritten = copy(data, this.buf, this.usedBufferBytes); this.usedBufferBytes += numBytesWritten; totalBytesWritten += numBytesWritten; return totalBytesWritten; diff --git a/std/io/bufio_test.ts b/std/io/bufio_test.ts index 804d59e99f41da..3cba3b704a37ff 100644 --- a/std/io/bufio_test.ts +++ b/std/io/bufio_test.ts @@ -17,7 +17,7 @@ import { import * as iotest from "./_iotest.ts"; import { StringReader } from "./readers.ts"; import { StringWriter } from "./writers.ts"; -import { copyBytes } from "../bytes/mod.ts"; +import { copy } from "../bytes/mod.ts"; const encoder = new TextEncoder(); @@ -201,7 +201,7 @@ class TestReader implements Deno.Reader { if (nread === 0) { return Promise.resolve(null); } - copyBytes(this.data, buf as Uint8Array); + copy(this.data, buf as Uint8Array); this.data = this.data.subarray(nread); return Promise.resolve(nread); } diff --git a/std/mime/multipart.ts b/std/mime/multipart.ts index 9fd46b56109b6a..22e4e72e2b8e6e 100644 --- a/std/mime/multipart.ts +++ b/std/mime/multipart.ts @@ -1,5 +1,5 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -import { equal, findIndex, findLastIndex, hasPrefix } from "../bytes/mod.ts"; +import { equals, indexOf, lastIndexOf, startsWith } from "../bytes/mod.ts"; import { copyN } from "../io/ioutil.ts"; import { MultiReader } from "../io/readers.ts"; import { extname } from "../path/mod.ts"; @@ -101,7 +101,7 @@ export function scanUntilBoundary( ): number | null { if (total === 0) { // At beginning of body, allow dashBoundary. - if (hasPrefix(buf, dashBoundary)) { + if (startsWith(buf, dashBoundary)) { switch (matchAfterPrefix(buf, dashBoundary, eof)) { case -1: return dashBoundary.length; @@ -111,13 +111,13 @@ export function scanUntilBoundary( return null; } } - if (hasPrefix(dashBoundary, buf)) { + if (startsWith(dashBoundary, buf)) { return 0; } } // Search for "\n--boundary". - const i = findIndex(buf, newLineDashBoundary); + const i = indexOf(buf, newLineDashBoundary); if (i >= 0) { switch (matchAfterPrefix(buf.slice(i), newLineDashBoundary, eof)) { case -1: @@ -128,15 +128,15 @@ export function scanUntilBoundary( return i > 0 ? i : null; } } - if (hasPrefix(newLineDashBoundary, buf)) { + if (startsWith(newLineDashBoundary, buf)) { return 0; } // Otherwise, anything up to the final \n is not part of the boundary and so // must be part of the body. Also, if the section from the final \n onward is // not a prefix of the boundary, it too must be part of the body. 
- const j = findLastIndex(buf, newLineDashBoundary.slice(0, 1)); - if (j >= 0 && hasPrefix(newLineDashBoundary, buf.slice(j))) { + const j = lastIndexOf(buf, newLineDashBoundary.slice(0, 1)); + if (j >= 0 && startsWith(newLineDashBoundary, buf.slice(j))) { return j; } @@ -364,7 +364,7 @@ export class MultipartReader { if (this.currentPart) { this.currentPart.close(); } - if (equal(this.dashBoundary, encoder.encode("--"))) { + if (equals(this.dashBoundary, encoder.encode("--"))) { throw new Error("boundary is empty"); } let expectNewPart = false; @@ -393,7 +393,7 @@ export class MultipartReader { if (this.partsRead === 0) { continue; } - if (equal(line, this.newLine)) { + if (equals(line, this.newLine)) { expectNewPart = true; continue; } @@ -402,19 +402,19 @@ export class MultipartReader { } private isFinalBoundary(line: Uint8Array): boolean { - if (!hasPrefix(line, this.dashBoundaryDash)) { + if (!startsWith(line, this.dashBoundaryDash)) { return false; } const rest = line.slice(this.dashBoundaryDash.length, line.length); - return rest.length === 0 || equal(skipLWSPChar(rest), this.newLine); + return rest.length === 0 || equals(skipLWSPChar(rest), this.newLine); } private isBoundaryDelimiterLine(line: Uint8Array): boolean { - if (!hasPrefix(line, this.dashBoundary)) { + if (!startsWith(line, this.dashBoundary)) { return false; } const rest = line.slice(this.dashBoundary.length); - return equal(skipLWSPChar(rest), this.newLine); + return equals(skipLWSPChar(rest), this.newLine); } } diff --git a/std/ws/test.ts b/std/ws/test.ts index 9b7e7a710ca010..cef4ff8ff25f9c 100644 --- a/std/ws/test.ts +++ b/std/ws/test.ts @@ -327,7 +327,7 @@ Deno.test({ assertEquals(decode(second.payload), "second"); assertEquals(ping.opcode, OpCode.Ping); assertEquals(third.opcode, OpCode.BinaryFrame); - assertEquals(bytes.equal(third.payload, new Uint8Array([3])), true); + assertEquals(bytes.equals(third.payload, new Uint8Array([3])), true); }, }); From 5bff1c050bb45999aa8dd4bc63c636de5cf7654d Mon Sep 17 00:00:00 2001 From: Justin Mayhew Date: Sun, 6 Dec 2020 12:23:56 -0400 Subject: [PATCH 013/135] docs(examples): Synchronize links in TOC and index (#8635) --- docs/examples.md | 26 +++++++++++++------------- docs/toc.json | 2 +- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/docs/examples.md b/docs/examples.md index 6fa8b8ef59db51..450f600ab831d5 100644 --- a/docs/examples.md +++ b/docs/examples.md @@ -5,19 +5,19 @@ more about the runtime. 
## Basic -- [Hello World](./examples/hello_world.md) -- [Import and Export Modules](./examples/import_export.md) -- [How to Manage Dependencies](./examples/manage_dependencies.md) -- [Fetch Data](./examples/fetch_data.md) -- [Read and Write Files](./examples/read_write_files.md) +- [Hello world](./examples/hello_world.md) +- [Import and export modules](./examples/import_export.md) +- [Manage dependencies](./examples/manage_dependencies.md) +- [Fetch data](./examples/fetch_data.md) +- [Read and write files](./examples/read_write_files.md) ## Advanced -- [Unix Cat](./examples/unix_cat.md) -- [File Server](./examples/file_server.md) -- [TCP Echo](./examples/tcp_echo.md) -- [Subprocess](./examples/subprocess.md) -- [Permissions](./examples/permissions.md) -- [OS Signals](./examples/os_signals.md) -- [File System Events](./examples/file_system_events.md) -- [Testing If Main](./examples/testing_if_main.md) +- [Unix cat program](./examples/unix_cat.md) +- [HTTP web server](./examples/http_server.md) +- [File server](./examples/file_server.md) +- [TCP echo server](./examples/tcp_echo.md) +- [Creating a subprocess](./examples/subprocess.md) +- [OS signals](./examples/os_signals.md) +- [File system events](./examples/file_system_events.md) +- [Module metadata](./examples/module_metadata.md) diff --git a/docs/toc.json b/docs/toc.json index cb3011d73e4636..df2c004c5d6bb9 100644 --- a/docs/toc.json +++ b/docs/toc.json @@ -51,7 +51,7 @@ "file_server": "File server", "tcp_echo": "TCP echo server", "subprocess": "Creating a subprocess", - "os_signals": "OS Signals", + "os_signals": "OS signals", "file_system_events": "File system events", "module_metadata": "Module metadata" } From 7135d34ccab7629da57c75ee239fcb0dda733eae Mon Sep 17 00:00:00 2001 From: crowlKats <13135287+crowlKats@users.noreply.github.com> Date: Sun, 6 Dec 2020 18:19:21 +0100 Subject: [PATCH 014/135] refactor(cli): remove Option from Flags.v8_flags (#8633) --- cli/flags.rs | 40 +++++++++++++++------------------------- cli/main.rs | 4 ++-- cli/tools/installer.rs | 4 ++-- 3 files changed, 19 insertions(+), 29 deletions(-) diff --git a/cli/flags.rs b/cli/flags.rs index c5c6f47f16dbe8..5ff21971d280ee 100644 --- a/cli/flags.rs +++ b/cli/flags.rs @@ -11,11 +11,6 @@ use std::net::SocketAddr; use std::path::PathBuf; use std::str::FromStr; -/// Creates vector of strings, Vec -macro_rules! 
svec { - ($($x:expr),*) => (vec![$($x.to_string()),*]); -} - #[derive(Clone, Debug, PartialEq)] pub enum DenoSubcommand { Bundle { @@ -129,7 +124,7 @@ pub struct Flags { pub repl: bool, pub seed: Option, pub unstable: bool, - pub v8_flags: Option>, + pub v8_flags: Vec, pub version: bool, pub watch: bool, pub write_allowlist: Vec, @@ -1465,8 +1460,7 @@ fn v8_flags_arg<'a, 'b>() -> Arg<'a, 'b> { fn v8_flags_arg_parse(flags: &mut Flags, matches: &ArgMatches) { if let Some(v8_flags) = matches.values_of("v8-flags") { - let s: Vec = v8_flags.map(String::from).collect(); - flags.v8_flags = Some(s); + flags.v8_flags = v8_flags.map(String::from).collect(); } } @@ -1501,16 +1495,7 @@ fn seed_arg_parse(flags: &mut Flags, matches: &ArgMatches) { let seed = seed_string.parse::().unwrap(); flags.seed = Some(seed); - let v8_seed_flag = format!("--random-seed={}", seed); - - match flags.v8_flags { - Some(ref mut v8_flags) => { - v8_flags.push(v8_seed_flag); - } - None => { - flags.v8_flags = Some(svec![v8_seed_flag]); - } - } + flags.v8_flags.push(format!("--random-seed={}", seed)); } } @@ -1631,6 +1616,11 @@ pub fn resolve_urls(urls: Vec) -> Vec { mod tests { use super::*; + /// Creates vector of strings, Vec + macro_rules! svec { + ($($x:expr),*) => (vec![$($x.to_string()),*]); +} + #[test] fn global_flags() { #[rustfmt::skip] @@ -1752,7 +1742,7 @@ mod tests { subcommand: DenoSubcommand::Run { script: "_".to_string(), }, - v8_flags: Some(svec!["--help"]), + v8_flags: svec!["--help"], ..Flags::default() } ); @@ -1769,7 +1759,7 @@ mod tests { subcommand: DenoSubcommand::Run { script: "script.ts".to_string(), }, - v8_flags: Some(svec!["--expose-gc", "--gc-stats=1"]), + v8_flags: svec!["--expose-gc", "--gc-stats=1"], ..Flags::default() } ); @@ -2256,7 +2246,7 @@ mod tests { lock_write: true, ca_file: Some("example.crt".to_string()), cached_only: true, - v8_flags: Some(svec!["--help", "--random-seed=1"]), + v8_flags: svec!["--help", "--random-seed=1"], seed: Some(1), inspect: Some("127.0.0.1:9229".parse().unwrap()), allow_net: true, @@ -2340,7 +2330,7 @@ mod tests { lock_write: true, ca_file: Some("example.crt".to_string()), cached_only: true, - v8_flags: Some(svec!["--help", "--random-seed=1"]), + v8_flags: svec!["--help", "--random-seed=1"], seed: Some(1), inspect: Some("127.0.0.1:9229".parse().unwrap()), allow_net: true, @@ -2681,7 +2671,7 @@ mod tests { script: "script.ts".to_string(), }, seed: Some(250_u64), - v8_flags: Some(svec!["--random-seed=250"]), + v8_flags: svec!["--random-seed=250"], ..Flags::default() } ); @@ -2704,7 +2694,7 @@ mod tests { script: "script.ts".to_string(), }, seed: Some(250_u64), - v8_flags: Some(svec!["--expose-gc", "--random-seed=250"]), + v8_flags: svec!["--expose-gc", "--random-seed=250"], ..Flags::default() } ); @@ -2756,7 +2746,7 @@ mod tests { lock_write: true, ca_file: Some("example.crt".to_string()), cached_only: true, - v8_flags: Some(svec!["--help", "--random-seed=1"]), + v8_flags: svec!["--help", "--random-seed=1"], seed: Some(1), inspect: Some("127.0.0.1:9229".parse().unwrap()), allow_net: true, diff --git a/cli/main.rs b/cli/main.rs index 916248e4c84332..e297d0c4c7de02 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -1051,8 +1051,8 @@ pub fn main() { } let flags = flags::flags_from_vec(args); - if let Some(ref v8_flags) = flags.v8_flags { - init_v8_flags(v8_flags); + if !flags.v8_flags.is_empty() { + init_v8_flags(&*flags.v8_flags); } init_logger(flags.log_level); diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index f2f5562c2ceb48..ec527949d1f38a 
100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -227,8 +227,8 @@ pub fn install( executable_args.push("--cached_only".to_string()); } - if let Some(v8_flags) = flags.v8_flags { - executable_args.push(format!("--v8-flags={}", v8_flags.join(","))); + if !flags.v8_flags.is_empty() { + executable_args.push(format!("--v8-flags={}", flags.v8_flags.join(","))); } if let Some(seed) = flags.seed { From c0ccbcdaeee04407b2198557cdc55ee4adf1ee7e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 7 Dec 2020 04:30:40 +0100 Subject: [PATCH 015/135] refactor(cli): Reorganize worker code, use stronger memory ordering (#8638) --- cli/ops/websocket.rs | 4 +- cli/ops/worker_host.rs | 284 +++++++++++++---------------------------- cli/web_worker.rs | 180 ++++++++++++++++++-------- cli/worker.rs | 3 +- 4 files changed, 218 insertions(+), 253 deletions(-) diff --git a/cli/ops/websocket.rs b/cli/ops/websocket.rs index 40f5fd53bf84af..c04c3b476b064e 100644 --- a/cli/ops/websocket.rs +++ b/cli/ops/websocket.rs @@ -93,8 +93,8 @@ pub async fn op_ws_create( } let ca_file = { - let cli_state = super::global_state2(&state); - cli_state.flags.ca_file.clone() + let program_state = super::global_state2(&state); + program_state.flags.ca_file.clone() }; let uri: Uri = args.url.parse()?; let mut request = Request::builder().method(Method::GET).uri(&uri); diff --git a/cli/ops/worker_host.rs b/cli/ops/worker_host.rs index c464e6df2f2174..6a2d799682e384 100644 --- a/cli/ops/worker_host.rs +++ b/cli/ops/worker_host.rs @@ -1,10 +1,7 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -use crate::colors; -use crate::ops::io::get_stdio; use crate::permissions::Permissions; -use crate::program_state::ProgramState; -use crate::tokio_util::create_basic_runtime; +use crate::web_worker::run_web_worker; use crate::web_worker::WebWorker; use crate::web_worker::WebWorkerHandle; use crate::web_worker::WorkerEvent; @@ -12,7 +9,6 @@ use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::error::JsError; use deno_core::futures::channel::mpsc; -use deno_core::futures::future::FutureExt; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; @@ -25,7 +21,6 @@ use std::cell::RefCell; use std::collections::HashMap; use std::convert::From; use std::rc::Rc; -use std::sync::Arc; use std::thread::JoinHandle; #[derive(Deserialize)] @@ -68,151 +63,13 @@ pub fn init( ); } -pub type WorkersTable = HashMap, WebWorkerHandle)>; -pub type WorkerId = u32; - -fn create_web_worker( - worker_id: u32, - name: String, - program_state: &Arc, - permissions: Permissions, - specifier: ModuleSpecifier, - has_deno_namespace: bool, -) -> Result { - let mut worker = WebWorker::new( - name.clone(), - permissions, - specifier, - program_state.clone(), - has_deno_namespace, - ); - - if has_deno_namespace { - let state = worker.js_runtime.op_state(); - let mut state = state.borrow_mut(); - let (stdin, stdout, stderr) = get_stdio(); - if let Some(stream) = stdin { - state.resource_table.add("stdin", Box::new(stream)); - } - if let Some(stream) = stdout { - state.resource_table.add("stdout", Box::new(stream)); - } - if let Some(stream) = stderr { - state.resource_table.add("stderr", Box::new(stream)); - } - } - - // Instead of using name for log we use `worker-${id}` because - // WebWorkers can have empty string as name. 
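Editorial note: patch 014 above drops the Option wrapper around the v8 flags so that an empty vector simply means "no flags set"; call sites such as the installer hunk then only need is_empty() and join(). The following is a minimal std-only sketch of that pattern, with an illustrative Flags struct rather than the real one:

```rust
// Illustrative sketch: an empty Vec can stand in for Option::None,
// so call sites test is_empty() instead of unwrapping an Option.
#[derive(Default, Debug)]
struct Flags {
    v8_flags: Vec<String>,
    seed: Option<u64>,
}

fn apply_seed(flags: &mut Flags, seed: u64) {
    flags.seed = Some(seed);
    // Before the refactor this needed a match on Some/None to either
    // push into an existing Vec or allocate a new one.
    flags.v8_flags.push(format!("--random-seed={}", seed));
}

fn to_cli_args(flags: &Flags) -> Vec<String> {
    let mut args = Vec::new();
    if !flags.v8_flags.is_empty() {
        args.push(format!("--v8-flags={}", flags.v8_flags.join(",")));
    }
    args
}

fn main() {
    let mut flags = Flags::default();
    apply_seed(&mut flags, 250);
    assert_eq!(to_cli_args(&flags), vec!["--v8-flags=--random-seed=250"]);
    println!("{:?}", flags);
}
```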
- let script = format!( - "bootstrap.workerRuntime(\"{}\", {}, \"worker-{}\")", - name, worker.has_deno_namespace, worker_id - ); - worker.execute(&script)?; - - Ok(worker) +pub struct WorkerThread { + join_handle: JoinHandle>, + worker_handle: WebWorkerHandle, } -// TODO(bartlomieju): check if order of actions is aligned to Worker spec -fn run_worker_thread( - worker_id: u32, - name: String, - program_state: &Arc, - permissions: Permissions, - specifier: ModuleSpecifier, - has_deno_namespace: bool, - maybe_source_code: Option, -) -> Result<(JoinHandle<()>, WebWorkerHandle), AnyError> { - let program_state = program_state.clone(); - let (handle_sender, handle_receiver) = - std::sync::mpsc::sync_channel::>(1); - - let builder = - std::thread::Builder::new().name(format!("deno-worker-{}", worker_id)); - let join_handle = builder.spawn(move || { - // Any error inside this block is terminal: - // - JS worker is useless - meaning it throws an exception and can't do anything else, - // all action done upon it should be noops - // - newly spawned thread exits - let result = create_web_worker( - worker_id, - name, - &program_state, - permissions, - specifier.clone(), - has_deno_namespace, - ); - - if let Err(err) = result { - handle_sender.send(Err(err)).unwrap(); - return; - } - - let mut worker = result.unwrap(); - let name = worker.name.to_string(); - // Send thread safe handle to newly created worker to host thread - handle_sender.send(Ok(worker.thread_safe_handle())).unwrap(); - drop(handle_sender); - - // At this point the only method of communication with host - // is using `worker.internal_channels`. - // - // Host can already push messages and interact with worker. - // - // Next steps: - // - create tokio runtime - // - load provided module or code - // - start driving worker's event loop - - let mut rt = create_basic_runtime(); - - // TODO: run with using select with terminate - - // Execute provided source code immediately - let result = if let Some(source_code) = maybe_source_code { - worker.execute(&source_code) - } else { - // TODO(bartlomieju): add "type": "classic", ie. ability to load - // script instead of module - let load_future = worker.execute_module(&specifier).boxed_local(); - - rt.block_on(load_future) - }; - - let mut sender = worker.internal_channels.sender.clone(); - - // If sender is closed it means that worker has already been closed from - // within using "globalThis.close()" - if sender.is_closed() { - return; - } - - if let Err(e) = result { - eprintln!( - "{}: Uncaught (in worker \"{}\") {}", - colors::red_bold("error"), - name, - e.to_string().trim_start_matches("Uncaught "), - ); - sender - .try_send(WorkerEvent::TerminalError(e)) - .expect("Failed to post message to host"); - - // Failure to execute script is a terminal error, bye, bye. - return; - } - - // TODO(bartlomieju): this thread should return result of event loop - // that means that we should store JoinHandle to thread to ensure - // that it actually terminates. 
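Editorial note: the removed TODO above points out that the worker thread should hand its event-loop result back to the host. The replacement code in the hunks that follow stores a JoinHandle whose value is itself a Result, so join() surfaces panics while the inner Result carries event-loop failures. A minimal std-only sketch of that shape, with a String error standing in for AnyError:

```rust
use std::thread::{self, JoinHandle};

// Simplified stand-in for the real WorkerThread, which pairs a join
// handle with a thread-safe worker handle.
struct WorkerThread {
    join_handle: JoinHandle<Result<(), String>>,
}

fn spawn_worker(fail: bool) -> WorkerThread {
    let join_handle = thread::spawn(move || {
        // The closure's return value is carried back through join(),
        // so event-loop errors are propagated instead of panicking here.
        if fail {
            Err("event loop error".to_string())
        } else {
            Ok(())
        }
    });
    WorkerThread { join_handle }
}

fn main() {
    let ok = spawn_worker(false);
    // First expect: the thread did not panic. Second expect: the event
    // loop itself did not report an error.
    ok.join_handle
        .join()
        .expect("panic in worker thread")
        .expect("error in worker event loop");

    let failing = spawn_worker(true);
    let result = failing.join_handle.join().expect("panic in worker thread");
    assert!(result.is_err());
}
```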
- rt.block_on(worker.run_event_loop()) - .expect("Panic in event loop"); - debug!("Worker thread shuts down {}", &name); - })?; - - let worker_handle = handle_receiver.recv().unwrap()?; - Ok((join_handle, worker_handle)) -} +pub type WorkersTable = HashMap; +pub type WorkerId = u32; #[derive(Deserialize)] #[serde(rename_all = "camelCase")] @@ -249,22 +106,53 @@ fn op_create_worker( let module_specifier = ModuleSpecifier::resolve_url(&specifier)?; let worker_name = args_name.unwrap_or_else(|| "".to_string()); - let cli_state = super::program_state(state); + let program_state = super::program_state(state); + + let (handle_sender, handle_receiver) = + std::sync::mpsc::sync_channel::>(1); + + // Setup new thread + let thread_builder = + std::thread::Builder::new().name(format!("deno-worker-{}", worker_id)); + + // Spawn it + let join_handle = thread_builder.spawn(move || { + // Any error inside this block is terminal: + // - JS worker is useless - meaning it throws an exception and can't do anything else, + // all action done upon it should be noops + // - newly spawned thread exits + let worker = WebWorker::new( + worker_name, + permissions, + module_specifier.clone(), + program_state, + use_deno_namespace, + worker_id, + ); + + // Send thread safe handle to newly created worker to host thread + handle_sender.send(Ok(worker.thread_safe_handle())).unwrap(); + drop(handle_sender); + + // At this point the only method of communication with host + // is using `worker.internal_channels`. + // + // Host can already push messages and interact with worker. + run_web_worker(worker, module_specifier, maybe_source_code) + })?; + + let worker_handle = handle_receiver.recv().unwrap()?; + + let worker_thread = WorkerThread { + join_handle, + worker_handle, + }; - let (join_handle, worker_handle) = run_worker_thread( - worker_id, - worker_name, - &cli_state, - permissions, - module_specifier, - use_deno_namespace, - maybe_source_code, - )?; // At this point all interactions with worker happen using thread - // safe handler returned from previous function call + // safe handler returned from previous function calls state .borrow_mut::() - .insert(worker_id, (join_handle, worker_handle)); + .insert(worker_id, worker_thread); Ok(json!({ "id": worker_id })) } @@ -281,12 +169,16 @@ fn op_host_terminate_worker( ) -> Result { let args: WorkerArgs = serde_json::from_value(args)?; let id = args.id as u32; - let (join_handle, worker_handle) = state + let worker_thread = state .borrow_mut::() .remove(&id) .expect("No worker handle found"); - worker_handle.terminate(); - join_handle.join().expect("Panic in worker thread"); + worker_thread.worker_handle.terminate(); + worker_thread + .join_handle + .join() + .expect("Panic in worker thread") + .expect("Panic in worker event loop"); Ok(json!({})) } @@ -330,6 +222,22 @@ fn serialize_worker_event(event: WorkerEvent) -> Value { } } +/// Try to remove worker from workers table - NOTE: `Worker.terminate()` +/// might have been called already meaning that we won't find worker in +/// table - in that case ignore. 
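Editorial note: the try_remove_and_close helper introduced in the hunk that follows must tolerate a worker that Worker.terminate() already removed from the table. The property it relies on is that removal from a HashMap is naturally idempotent; a tiny std-only sketch with an illustrative value type:

```rust
use std::collections::HashMap;

type WorkerId = u32;

fn try_remove(workers: &mut HashMap<WorkerId, String>, id: WorkerId) {
    // `remove` returns None if the entry is already gone, so calling this
    // after terminate() has already cleaned up is a harmless no-op.
    if let Some(worker) = workers.remove(&id) {
        println!("closed {}", worker);
    }
}

fn main() {
    let mut workers = HashMap::new();
    workers.insert(1, "worker-1".to_string());
    try_remove(&mut workers, 1); // removes and closes
    try_remove(&mut workers, 1); // already removed: ignored
    assert!(workers.is_empty());
}
```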
+fn try_remove_and_close(state: Rc>, id: u32) { + let mut s = state.borrow_mut(); + let workers = s.borrow_mut::(); + if let Some(mut worker_thread) = workers.remove(&id) { + worker_thread.worker_handle.sender.close_channel(); + worker_thread + .join_handle + .join() + .expect("Worker thread panicked") + .expect("Panic in worker event loop"); + } +} + /// Get message from guest worker as host async fn op_host_get_message( state: Rc>, @@ -344,41 +252,25 @@ async fn op_host_get_message( let workers_table = s.borrow::(); let maybe_handle = workers_table.get(&id); if let Some(handle) = maybe_handle { - handle.1.clone() + handle.worker_handle.clone() } else { // If handle was not found it means worker has already shutdown return Ok(json!({ "type": "close" })); } }; - let response = match worker_handle.get_event().await? { - Some(event) => { - // Terminal error means that worker should be removed from worker table. - if let WorkerEvent::TerminalError(_) = &event { - let mut s = state.borrow_mut(); - if let Some((join_handle, mut worker_handle)) = - s.borrow_mut::().remove(&id) - { - worker_handle.sender.close_channel(); - join_handle.join().expect("Worker thread panicked"); - }; - } - serialize_worker_event(event) + let maybe_event = worker_handle.get_event().await?; + if let Some(event) = maybe_event { + // Terminal error means that worker should be removed from worker table. + if let WorkerEvent::TerminalError(_) = &event { + try_remove_and_close(state, id); } - None => { - // Worker shuts down - let mut s = state.borrow_mut(); - let workers = s.borrow_mut::(); - // Try to remove worker from workers table - NOTE: `Worker.terminate()` might have been called - // already meaning that we won't find worker in table - in that case ignore. - if let Some((join_handle, mut worker_handle)) = workers.remove(&id) { - worker_handle.sender.close_channel(); - join_handle.join().expect("Worker thread panicked"); - } - json!({ "type": "close" }) - } - }; - Ok(response) + return Ok(serialize_worker_event(event)); + } + + // If there was no event from worker it means it has already been closed. + try_remove_and_close(state, id); + Ok(json!({ "type": "close" })) } /// Post message to guest worker as host @@ -393,8 +285,10 @@ fn op_host_post_message( let msg = Vec::from(&*data[0]).into_boxed_slice(); debug!("post message to worker {}", id); - let workers = state.borrow::(); - let worker_handle = workers[&id].1.clone(); - worker_handle.post_message(msg)?; + let worker_thread = state + .borrow::() + .get(&id) + .expect("No worker handle found"); + worker_thread.worker_handle.post_message(msg)?; Ok(json!({})) } diff --git a/cli/web_worker.rs b/cli/web_worker.rs index 12b79cb2d6e99f..ddce8666e7c0ad 100644 --- a/cli/web_worker.rs +++ b/cli/web_worker.rs @@ -10,6 +10,7 @@ use crate::ops; use crate::permissions::Permissions; use crate::program_state::ProgramState; use crate::source_maps::apply_source_map; +use crate::tokio_util::create_basic_runtime; use deno_core::error::AnyError; use deno_core::futures::channel::mpsc; use deno_core::futures::future::poll_fn; @@ -77,7 +78,7 @@ impl WebWorkerHandle { // This function can be called multiple times by whomever holds // the handle. However only a single "termination" should occur so // we need a guard here. 
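Editorial note: the guard described in the comment above, together with the Relaxed to SeqCst change in the diff lines that follow, relies on AtomicBool::swap returning the previous value, so only the first caller actually performs termination no matter how many race. A std-only sketch, with the isolate work replaced by a boolean return:

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;

// swap(true) returns the old value, so exactly one caller observes
// `false` and runs the one-time termination work.
fn terminate(flag: &AtomicBool) -> bool {
    let already_terminated = flag.swap(true, Ordering::SeqCst);
    !already_terminated
}

fn main() {
    let terminated = Arc::new(AtomicBool::new(false));
    let handles: Vec<_> = (0..4)
        .map(|_| {
            let flag = Arc::clone(&terminated);
            thread::spawn(move || terminate(&flag))
        })
        .collect();
    let winners = handles
        .into_iter()
        .map(|h| h.join().unwrap())
        .filter(|won| *won)
        .count();
    // Only one thread actually performed the termination.
    assert_eq!(winners, 1);
    println!("winners: {}", winners);
}
```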
- let already_terminated = self.terminated.swap(true, Ordering::Relaxed); + let already_terminated = self.terminated.swap(true, Ordering::SeqCst); if !already_terminated { self.isolate_handle.terminate_execution(); @@ -134,6 +135,7 @@ impl WebWorker { main_module: ModuleSpecifier, program_state: Arc, has_deno_namespace: bool, + worker_id: u32, ) -> Self { let module_loader = CliModuleLoader::new_for_worker(); let global_state_ = program_state.clone(); @@ -173,7 +175,7 @@ impl WebWorker { inspector, internal_channels, js_runtime, - name, + name: name.clone(), waker: AtomicWaker::new(), event_loop_idle: false, terminate_rx, @@ -223,9 +225,32 @@ impl WebWorker { ops::signal::init(js_runtime); ops::tls::init(js_runtime); ops::tty::init(js_runtime); + + let op_state = js_runtime.op_state(); + let mut op_state = op_state.borrow_mut(); + let (stdin, stdout, stderr) = ops::io::get_stdio(); + if let Some(stream) = stdin { + op_state.resource_table.add("stdin", Box::new(stream)); + } + if let Some(stream) = stdout { + op_state.resource_table.add("stdout", Box::new(stream)); + } + if let Some(stream) = stderr { + op_state.resource_table.add("stderr", Box::new(stream)); + } } } + // Instead of using name for log we use `worker-${id}` because + // WebWorkers can have empty string as name. + let script = format!( + "bootstrap.workerRuntime(\"{}\", {}, \"worker-{}\")", + name, worker.has_deno_namespace, worker_id + ); + worker + .execute(&script) + .expect("Failed to execute worker bootstrap script"); + worker } @@ -250,13 +275,15 @@ impl WebWorker { self.handle.clone() } + pub fn has_been_terminated(&self) -> bool { + self.handle.terminated.load(Ordering::SeqCst) + } + pub fn poll_event_loop( &mut self, cx: &mut Context, ) -> Poll> { - let terminated = self.handle.terminated.load(Ordering::Relaxed); - - if terminated { + if self.has_been_terminated() { return Poll::Ready(Ok(())); } @@ -267,28 +294,20 @@ impl WebWorker { self.waker.register(cx.waker()); self.js_runtime.poll_event_loop(cx) }; - match poll_result { - Poll::Ready(r) => { - let terminated = self.handle.terminated.load(Ordering::Relaxed); - if terminated { - return Poll::Ready(Ok(())); - } - if let Err(e) = r { - eprintln!( - "{}: Uncaught (in worker \"{}\") {}", - colors::red_bold("error"), - self.name.to_string(), - e.to_string().trim_start_matches("Uncaught "), - ); - let mut sender = self.internal_channels.sender.clone(); - sender - .try_send(WorkerEvent::Error(e)) - .expect("Failed to post message to host"); - } - self.event_loop_idle = true; + if let Poll::Ready(r) = poll_result { + if self.has_been_terminated() { + return Poll::Ready(Ok(())); } - Poll::Pending => {} + + if let Err(e) = r { + print_worker_error(e.to_string(), &self.name); + let mut sender = self.internal_channels.sender.clone(); + sender + .try_send(WorkerEvent::Error(e)) + .expect("Failed to post message to host"); + } + self.event_loop_idle = true; } } @@ -298,33 +317,32 @@ impl WebWorker { return Poll::Ready(Ok(())); } - if let Poll::Ready(r) = self.internal_channels.receiver.poll_next_unpin(cx) - { - match r { - Some(msg) => { - let msg = String::from_utf8(msg.to_vec()).unwrap(); - let script = format!("workerMessageRecvCallback({})", msg); - - if let Err(e) = self.execute(&script) { - // If execution was terminated during message callback then - // just ignore it - if self.handle.terminated.load(Ordering::Relaxed) { - return Poll::Ready(Ok(())); - } + let maybe_msg_poll_result = + self.internal_channels.receiver.poll_next_unpin(cx); - // Otherwise forward error to 
host - let mut sender = self.internal_channels.sender.clone(); - sender - .try_send(WorkerEvent::Error(e)) - .expect("Failed to post message to host"); - } + if let Poll::Ready(maybe_msg) = maybe_msg_poll_result { + let msg = + maybe_msg.expect("Received `None` instead of message in worker"); + let msg = String::from_utf8(msg.to_vec()).unwrap(); + let script = format!("workerMessageRecvCallback({})", msg); - // Let event loop be polled again - self.event_loop_idle = false; - self.waker.wake(); + if let Err(e) = self.execute(&script) { + // If execution was terminated during message callback then + // just ignore it + if self.has_been_terminated() { + return Poll::Ready(Ok(())); } - None => unreachable!(), + + // Otherwise forward error to host + let mut sender = self.internal_channels.sender.clone(); + sender + .try_send(WorkerEvent::Error(e)) + .expect("Failed to post message to host"); } + + // Let event loop be polled again + self.event_loop_idle = false; + self.waker.wake(); } Poll::Pending @@ -343,6 +361,63 @@ impl Drop for WebWorker { } } +fn print_worker_error(error_str: String, name: &str) { + eprintln!( + "{}: Uncaught (in worker \"{}\") {}", + colors::red_bold("error"), + name, + error_str.trim_start_matches("Uncaught "), + ); +} + +/// This function should be called from a thread dedicated to this worker. +// TODO(bartlomieju): check if order of actions is aligned to Worker spec +pub fn run_web_worker( + mut worker: WebWorker, + specifier: ModuleSpecifier, + maybe_source_code: Option, +) -> Result<(), AnyError> { + let name = worker.name.to_string(); + + let mut rt = create_basic_runtime(); + + // TODO(bartlomieju): run following block using "select!" + // with terminate + + // Execute provided source code immediately + let result = if let Some(source_code) = maybe_source_code { + worker.execute(&source_code) + } else { + // TODO(bartlomieju): add "type": "classic", ie. ability to load + // script instead of module + let load_future = worker.execute_module(&specifier).boxed_local(); + + rt.block_on(load_future) + }; + + let mut sender = worker.internal_channels.sender.clone(); + + // If sender is closed it means that worker has already been closed from + // within using "globalThis.close()" + if sender.is_closed() { + return Ok(()); + } + + if let Err(e) = result { + print_worker_error(e.to_string(), &name); + sender + .try_send(WorkerEvent::TerminalError(e)) + .expect("Failed to post message to host"); + + // Failure to execute script is a terminal error, bye, bye. 
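Editorial note: run_web_worker above checks sender.is_closed() before reporting a terminal error, because the worker may already have been closed from within via globalThis.close(). std::sync::mpsc has no is_closed(), so this sketch expresses the same "host already hung up, stop reporting" intent through the error returned by send(); names are illustrative:

```rust
use std::sync::mpsc;

// Stand-in for reporting a terminal error back to the host. A failed send
// is the signal that the receiving side has already gone away.
fn report_error(sender: &mpsc::Sender<String>, message: String) {
    if sender.send(message).is_err() {
        // Host hung up; nothing left to do, so the error is dropped on purpose.
        eprintln!("host already closed, dropping error");
    }
}

fn main() {
    let (tx, rx) = mpsc::channel();
    report_error(&tx, "first error".to_string());
    assert_eq!(rx.recv().unwrap(), "first error");

    drop(rx); // simulate the host side going away
    report_error(&tx, "second error".to_string()); // silently ignored
}
```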
+ return Ok(()); + } + + let result = rt.block_on(worker.run_event_loop()); + debug!("Worker thread shuts down {}", &name); + result +} + #[cfg(test)] mod tests { use super::*; @@ -354,17 +429,14 @@ mod tests { let main_module = ModuleSpecifier::resolve_url_or_path("./hello.js").unwrap(); let program_state = ProgramState::mock(vec!["deno".to_string()], None); - let mut worker = WebWorker::new( + WebWorker::new( "TEST".to_string(), Permissions::allow_all(), main_module, program_state, false, - ); - worker - .execute("bootstrap.workerRuntime(\"TEST\", false)") - .unwrap(); - worker + 1, + ) } #[tokio::test] diff --git a/cli/worker.rs b/cli/worker.rs index 3068ab1f799e62..c2ed8871b400da 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -7,7 +7,6 @@ use crate::js; use crate::metrics::Metrics; use crate::module_loader::CliModuleLoader; use crate::ops; -use crate::ops::io::get_stdio; use crate::permissions::Permissions; use crate::program_state::ProgramState; use crate::source_maps::apply_source_map; @@ -148,7 +147,7 @@ impl MainWorker { let op_state = js_runtime.op_state(); let mut op_state = op_state.borrow_mut(); let t = &mut op_state.resource_table; - let (stdin, stdout, stderr) = get_stdio(); + let (stdin, stdout, stderr) = ops::io::get_stdio(); if let Some(stream) = stdin { t.add("stdin", Box::new(stream)); } From c8e9b2654ec0d54c77bb3f49fa31c3986203d517 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 7 Dec 2020 11:03:03 +0100 Subject: [PATCH 016/135] refactor(cli): Simplify choosing type lib in CliModuleLoader (#8637) --- cli/module_loader.rs | 62 ++++++++++++++++++++++---------------------- cli/web_worker.rs | 2 +- cli/worker.rs | 3 +-- 3 files changed, 33 insertions(+), 34 deletions(-) diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 1715f7f6b73230..9dda2c24a5ab0b 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -22,23 +22,37 @@ pub struct CliModuleLoader { /// import map file will be resolved and set. 
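Editorial note: patch 016 above moves the type-library decision into the loader constructors, so the unstable flag is consulted once instead of on every dynamic import. A compact sketch of that construction-time choice, using an illustrative Loader struct rather than the real CliModuleLoader; the hunks that follow show the actual change:

```rust
// Pick the type library once, when the loader is constructed, instead of
// re-deriving it from flags on every load.
#[derive(Debug, Clone, PartialEq)]
enum TypeLib {
    DenoWindow,
    DenoWorker,
    UnstableDenoWindow,
    UnstableDenoWorker,
}

struct Loader {
    lib: TypeLib,
}

impl Loader {
    fn new(unstable: bool) -> Self {
        let lib = if unstable {
            TypeLib::UnstableDenoWindow
        } else {
            TypeLib::DenoWindow
        };
        Loader { lib }
    }

    fn new_for_worker(unstable: bool) -> Self {
        let lib = if unstable {
            TypeLib::UnstableDenoWorker
        } else {
            TypeLib::DenoWorker
        };
        Loader { lib }
    }
}

fn main() {
    assert_eq!(Loader::new(true).lib, TypeLib::UnstableDenoWindow);
    assert_eq!(Loader::new_for_worker(false).lib, TypeLib::DenoWorker);
    println!("{:?}", Loader::new(false).lib);
}
```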
pub import_map: Option, pub lib: TypeLib, - pub is_main: bool, + pub program_state: Arc, } impl CliModuleLoader { - pub fn new(maybe_import_map: Option) -> Rc { + pub fn new(program_state: Arc) -> Rc { + let lib = if program_state.flags.unstable { + TypeLib::UnstableDenoWindow + } else { + TypeLib::DenoWindow + }; + + let import_map = program_state.maybe_import_map.clone(); + Rc::new(CliModuleLoader { - import_map: maybe_import_map, - lib: TypeLib::DenoWindow, - is_main: true, + import_map, + lib, + program_state, }) } - pub fn new_for_worker() -> Rc { + pub fn new_for_worker(program_state: Arc) -> Rc { + let lib = if program_state.flags.unstable { + TypeLib::UnstableDenoWorker + } else { + TypeLib::DenoWorker + }; + Rc::new(CliModuleLoader { import_map: None, - lib: TypeLib::DenoWorker, - is_main: false, + lib, + program_state, }) } } @@ -46,18 +60,13 @@ impl CliModuleLoader { impl ModuleLoader for CliModuleLoader { fn resolve( &self, - op_state: Rc>, + _op_state: Rc>, specifier: &str, referrer: &str, is_main: bool, ) -> Result { - let program_state = { - let state = op_state.borrow(); - state.borrow::>().clone() - }; - // FIXME(bartlomieju): hacky way to provide compatibility with repl - let referrer = if referrer.is_empty() && program_state.flags.repl { + let referrer = if referrer.is_empty() && self.program_state.flags.repl { "" } else { referrer @@ -80,19 +89,18 @@ impl ModuleLoader for CliModuleLoader { fn load( &self, - op_state: Rc>, + _op_state: Rc>, module_specifier: &ModuleSpecifier, maybe_referrer: Option, _is_dynamic: bool, ) -> Pin> { let module_specifier = module_specifier.to_owned(); let module_url_specified = module_specifier.to_string(); - let program_state = { - let state = op_state.borrow(); - state.borrow::>().clone() - }; + let program_state = self.program_state.clone(); - // TODO(@kitsonk) this shouldn't be async + // NOTE: this block is async only because of `deno_core` + // interface requirements; module was already loaded + // when constructing module graph during call to `prepare_load`. 
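Editorial note: the NOTE above explains that load() is async only to satisfy the deno_core interface; the expensive work already happened when the module graph was built in prepare_load. A synchronous, std-only sketch of that prepare/load split, with a HashMap standing in for the compiled-module cache and illustrative names throughout:

```rust
use std::collections::HashMap;

// prepare() does the (potentially slow) work up front, so load() is just a
// cache lookup by the time the runtime asks for a module.
struct Loader {
    cache: HashMap<String, String>,
}

impl Loader {
    fn new() -> Self {
        Loader { cache: HashMap::new() }
    }

    fn prepare(&mut self, specifier: &str) {
        // Stand-in for building the module graph and compiling sources.
        self.cache
            .insert(specifier.to_string(), format!("code for {}", specifier));
    }

    fn load(&self, specifier: &str) -> Result<&str, String> {
        self.cache
            .get(specifier)
            .map(String::as_str)
            .ok_or_else(|| format!("{} was not prepared", specifier))
    }
}

fn main() {
    let mut loader = Loader::new();
    loader.prepare("file:///main.ts");
    assert_eq!(
        loader.load("file:///main.ts").unwrap(),
        "code for file:///main.ts"
    );
    assert!(loader.load("file:///other.ts").is_err());
}
```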
let fut = async move { let compiled_module = program_state .fetch_compiled_module(module_specifier, maybe_referrer)?; @@ -117,21 +125,13 @@ impl ModuleLoader for CliModuleLoader { is_dynamic: bool, ) -> Pin>>> { let specifier = specifier.clone(); + let program_state = self.program_state.clone(); let maybe_import_map = self.import_map.clone(); let state = op_state.borrow(); // The permissions that should be applied to any dynamically imported module let dynamic_permissions = state.borrow::().clone(); - let program_state = state.borrow::>().clone(); - let lib = if program_state.flags.unstable { - if self.lib == TypeLib::DenoWindow { - TypeLib::UnstableDenoWindow - } else { - TypeLib::UnstableDenoWorker - } - } else { - self.lib.clone() - }; + let lib = self.lib.clone(); drop(state); // TODO(bartlomieju): `prepare_module_load` should take `load_id` param diff --git a/cli/web_worker.rs b/cli/web_worker.rs index ddce8666e7c0ad..44806d520c4aca 100644 --- a/cli/web_worker.rs +++ b/cli/web_worker.rs @@ -137,7 +137,7 @@ impl WebWorker { has_deno_namespace: bool, worker_id: u32, ) -> Self { - let module_loader = CliModuleLoader::new_for_worker(); + let module_loader = CliModuleLoader::new_for_worker(program_state.clone()); let global_state_ = program_state.clone(); let js_error_create_fn = Box::new(move |core_js_error| { diff --git a/cli/worker.rs b/cli/worker.rs index c2ed8871b400da..dda26291a41cde 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -45,8 +45,7 @@ impl MainWorker { main_module: ModuleSpecifier, permissions: Permissions, ) -> Self { - let module_loader = - CliModuleLoader::new(program_state.maybe_import_map.clone()); + let module_loader = CliModuleLoader::new(program_state.clone()); let global_state_ = program_state.clone(); From 301d3e4b6849d24154ac2d65c00a9b30223d000e Mon Sep 17 00:00:00 2001 From: Kitson Kelly Date: Mon, 7 Dec 2020 21:46:39 +1100 Subject: [PATCH 017/135] feat: add mvp language server (#8515) Resolves #8400 --- Cargo.lock | 66 +- cli/Cargo.toml | 22 +- cli/ast.rs | 2 +- cli/file_fetcher.rs | 4 +- cli/flags.rs | 38 +- cli/http_cache.rs | 2 +- cli/lsp/README.md | 23 + cli/lsp/analysis.rs | 324 +++++ cli/lsp/capabilities.rs | 59 + cli/lsp/config.rs | 49 + cli/lsp/diagnostics.rs | 268 ++++ cli/lsp/dispatch.rs | 185 +++ cli/lsp/handlers.rs | 266 ++++ cli/lsp/lsp_extensions.rs | 26 + cli/lsp/memory_cache.rs | 126 ++ cli/lsp/mod.rs | 415 +++++++ cli/lsp/sources.rs | 372 ++++++ cli/lsp/state.rs | 292 +++++ cli/lsp/text.rs | 514 ++++++++ cli/lsp/tsc.rs | 1210 +++++++++++++++++++ cli/lsp/utils.rs | 114 ++ cli/main.rs | 6 + cli/module_graph.rs | 14 +- cli/tests/integration_tests.rs | 2 +- cli/tests/lsp/did_open_notification.json | 12 + cli/tests/lsp/exit_notification.json | 5 + cli/tests/lsp/hover_request.json | 14 + cli/tests/lsp/initialize_request.json | 23 + cli/tests/lsp/initialized_notification.json | 5 + cli/tests/lsp/shutdown_request.json | 6 + cli/tests/lsp_tests.rs | 88 ++ cli/tests/type_directives_01.ts.out | 2 +- cli/tests/type_directives_02.ts.out | 2 +- cli/tools/lint.rs | 2 +- cli/tsc.rs | 6 +- cli/tsc/99_main_compiler.js | 254 +++- cli/tsc/compiler.d.ts | 103 ++ cli/tsc_config.rs | 4 +- 38 files changed, 4878 insertions(+), 47 deletions(-) create mode 100644 cli/lsp/README.md create mode 100644 cli/lsp/analysis.rs create mode 100644 cli/lsp/capabilities.rs create mode 100644 cli/lsp/config.rs create mode 100644 cli/lsp/diagnostics.rs create mode 100644 cli/lsp/dispatch.rs create mode 100644 cli/lsp/handlers.rs create mode 100644 cli/lsp/lsp_extensions.rs create 
mode 100644 cli/lsp/memory_cache.rs create mode 100644 cli/lsp/mod.rs create mode 100644 cli/lsp/sources.rs create mode 100644 cli/lsp/state.rs create mode 100644 cli/lsp/text.rs create mode 100644 cli/lsp/tsc.rs create mode 100644 cli/lsp/utils.rs create mode 100644 cli/tests/lsp/did_open_notification.json create mode 100644 cli/tests/lsp/exit_notification.json create mode 100644 cli/tests/lsp/hover_request.json create mode 100644 cli/tests/lsp/initialize_request.json create mode 100644 cli/tests/lsp/initialized_notification.json create mode 100644 cli/tests/lsp/shutdown_request.json create mode 100644 cli/tests/lsp_tests.rs create mode 100644 cli/tsc/compiler.d.ts diff --git a/Cargo.lock b/Cargo.lock index a34c10ed920da9..224d0f322b73fe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -366,10 +366,20 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" dependencies = [ - "crossbeam-utils", + "crossbeam-utils 0.7.2", "maybe-uninit", ] +[[package]] +name = "crossbeam-channel" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils 0.8.1", +] + [[package]] name = "crossbeam-utils" version = "0.7.2" @@ -381,6 +391,17 @@ dependencies = [ "lazy_static", ] +[[package]] +name = "crossbeam-utils" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d" +dependencies = [ + "autocfg 1.0.1", + "cfg-if 1.0.0", + "lazy_static", +] + [[package]] name = "darling" version = "0.10.2" @@ -437,6 +458,7 @@ dependencies = [ "bytes 0.5.6", "chrono", "clap", + "crossbeam-channel 0.5.0", "deno_core", "deno_crypto", "deno_doc", @@ -457,9 +479,12 @@ dependencies = [ "lazy_static", "libc", "log", + "lsp-server", + "lsp-types", "nix", "notify", "os_pipe", + "percent-encoding", "regex", "ring", "rustyline", @@ -1314,6 +1339,32 @@ dependencies = [ "cfg-if 0.1.10", ] +[[package]] +name = "lsp-server" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69b18dfe0e4a380b872aa79d8e0ee6c3d7a9682466e84b83ad807c88b3545f79" +dependencies = [ + "crossbeam-channel 0.5.0", + "log", + "serde", + "serde_json", +] + +[[package]] +name = "lsp-types" +version = "0.84.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b95be71fe205e44de754185bcf86447b65813ce1ceb298f8d3793ade5fff08d" +dependencies = [ + "base64 0.12.3", + "bitflags", + "serde", + "serde_json", + "serde_repr", + "url", +] + [[package]] name = "matches" version = "0.1.8" @@ -1513,7 +1564,7 @@ checksum = "77d03607cf88b4b160ba0e9ed425fff3cee3b55ac813f0c685b3a3772da37d0e" dependencies = [ "anymap", "bitflags", - "crossbeam-channel", + "crossbeam-channel 0.4.4", "filetime", "fsevent", "fsevent-sys", @@ -2282,6 +2333,17 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_repr" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dc6b7951b17b051f3210b063f12cc17320e2fe30ae05b0fe2a3abb068551c76" +dependencies = [ + "proc-macro2 1.0.24", + "quote 1.0.7", + "syn 1.0.48", +] + [[package]] name = "serde_urlencoded" version = "0.6.1" diff --git a/cli/Cargo.toml b/cli/Cargo.toml index a8d6e9c2b68d72..95ffac7fed4ad0 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -32,22 +32,24 @@ winres 
= "0.1.11" winapi = "0.3.9" [dependencies] -deno_crypto = { path = "../op_crates/crypto", version = "0.3.0" } deno_core = { path = "../core", version = "0.69.0" } +deno_crypto = { path = "../op_crates/crypto", version = "0.3.0" } deno_doc = "0.1.17" +deno_fetch = { path = "../op_crates/fetch", version = "0.12.0" } deno_lint = "0.2.12" deno_web = { path = "../op_crates/web", version = "0.20.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.12.0" } atty = "0.2.14" base64 = "0.12.3" bytes = "0.5.6" byteorder = "1.3.4" clap = "2.33.3" +crossbeam-channel = "0.5.0" dissimilar = "1.0.2" dlopen = "0.1.8" -encoding_rs = "0.8.24" dprint-plugin-typescript = "0.35.0" +encoding_rs = "0.8.24" +env_logger = "0.7.1" filetime = "0.2.12" http = "0.2.1" indexmap = "1.6.0" @@ -55,31 +57,33 @@ jsonc-parser = "0.14.0" lazy_static = "1.4.0" libc = "0.2.77" log = "0.4.11" -env_logger = "0.7.1" +lsp-server = "0.5.0" +lsp-types = { version = "0.84.0", features = ["proposed"] } notify = "5.0.0-pre.3" +percent-encoding = "2.1.0" regex = "1.3.9" ring = "0.16.15" rustyline = { version = "7.0.0", default-features = false } rustyline-derive = "0.4.0" +semver-parser = "0.9.0" serde = { version = "1.0.116", features = ["derive"] } shell-escape = "0.1.5" -sys-info = "0.7.0" sourcemap = "6.0.1" swc_bundler = "0.17.5" swc_common = { version = "0.10.6", features = ["sourcemap"] } swc_ecmascript = { version = "0.14.4", features = ["codegen", "dep_graph", "parser", "react", "transforms", "visit"] } +sys-info = "0.7.0" tempfile = "3.1.0" termcolor = "1.1.0" tokio = { version = "0.2.22", features = ["full"] } tokio-rustls = "0.14.1" # Keep in-sync with warp. tokio-tungstenite = "0.11.0" -webpki = "0.21.3" -webpki-roots = "=0.19.0" # Pinned to v0.19.0 to match 'reqwest'. +uuid = { version = "0.8.1", features = ["v4"] } walkdir = "2.3.1" warp = { version = "0.2.5", features = ["tls"] } -semver-parser = "0.9.0" -uuid = { version = "0.8.1", features = ["v4"] } +webpki = "0.21.3" +webpki-roots = "=0.19.0" # Pinned to v0.19.0 to match 'reqwest'. [target.'cfg(windows)'.dependencies] winapi = { version = "0.3.9", features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] } diff --git a/cli/ast.rs b/cli/ast.rs index 10d7b53830feb9..ef64683106770a 100644 --- a/cli/ast.rs +++ b/cli/ast.rs @@ -354,7 +354,7 @@ impl ParsedModule { } } -fn parse_with_source_map( +pub fn parse_with_source_map( specifier: &str, source: &str, media_type: &MediaType, diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index 0d11852d1f9c37..5b2f6f74cb2330 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -131,7 +131,7 @@ fn fetch_local(specifier: &ModuleSpecifier) -> Result { /// Given a vector of bytes and optionally a charset, decode the bytes to a /// string. -fn get_source_from_bytes( +pub fn get_source_from_bytes( bytes: Vec, maybe_charset: Option, ) -> Result { @@ -161,7 +161,7 @@ fn get_validated_scheme( /// Resolve a media type and optionally the charset from a module specifier and /// the value of a content type header. 
-fn map_content_type( +pub fn map_content_type( specifier: &ModuleSpecifier, maybe_content_type: Option, ) -> (MediaType, Option) { diff --git a/cli/flags.rs b/cli/flags.rs index 5ff21971d280ee..2210d756560e85 100644 --- a/cli/flags.rs +++ b/cli/flags.rs @@ -17,6 +17,9 @@ pub enum DenoSubcommand { source_file: String, out_file: Option, }, + Cache { + files: Vec, + }, Compile { source_file: String, output: Option, @@ -35,9 +38,6 @@ pub enum DenoSubcommand { code: String, as_typescript: bool, }, - Cache { - files: Vec, - }, Fmt { check: bool, files: Vec, @@ -54,6 +54,7 @@ pub enum DenoSubcommand { root: Option, force: bool, }, + LanguageServer, Lint { files: Vec, ignore: Vec, @@ -293,6 +294,8 @@ pub fn flags_from_vec_safe(args: Vec) -> clap::Result { lint_parse(&mut flags, m); } else if let Some(m) = matches.subcommand_matches("compile") { compile_parse(&mut flags, m); + } else if let Some(m) = matches.subcommand_matches("lsp") { + language_server_parse(&mut flags, m); } else { repl_parse(&mut flags, &matches); } @@ -349,6 +352,7 @@ If the flag is set, restrict these messages to errors.", .subcommand(fmt_subcommand()) .subcommand(info_subcommand()) .subcommand(install_subcommand()) + .subcommand(language_server_subcommand()) .subcommand(lint_subcommand()) .subcommand(repl_subcommand()) .subcommand(run_subcommand()) @@ -685,6 +689,10 @@ fn doc_parse(flags: &mut Flags, matches: &clap::ArgMatches) { }; } +fn language_server_parse(flags: &mut Flags, _matches: &clap::ArgMatches) { + flags.subcommand = DenoSubcommand::LanguageServer; +} + fn lint_parse(flags: &mut Flags, matches: &clap::ArgMatches) { let files = match matches.values_of("files") { Some(f) => f.map(PathBuf::from).collect(), @@ -1076,6 +1084,18 @@ Show documentation for runtime built-ins: ) } +fn language_server_subcommand<'a, 'b>() -> App<'a, 'b> { + SubCommand::with_name("lsp") + .setting(AppSettings::Hidden) + .about("Start the language server") + .long_about( + r#"Start the Deno language server which will take input +from stdin and provide output to stdout. + deno lsp +"#, + ) +} + fn lint_subcommand<'a, 'b>() -> App<'a, 'b> { SubCommand::with_name("lint") .about("Lint source files") @@ -1952,6 +1972,18 @@ mod tests { ); } + #[test] + fn language_server() { + let r = flags_from_vec_safe(svec!["deno", "lsp"]); + assert_eq!( + r.unwrap(), + Flags { + subcommand: DenoSubcommand::LanguageServer, + ..Flags::default() + } + ); + } + #[test] fn lint() { let r = flags_from_vec_safe(svec![ diff --git a/cli/http_cache.rs b/cli/http_cache.rs index 9cf2adc1a56771..dd5f4dc3fd8f9c 100644 --- a/cli/http_cache.rs +++ b/cli/http_cache.rs @@ -72,7 +72,7 @@ pub fn url_to_filename(url: &Url) -> PathBuf { cache_filename } -#[derive(Clone)] +#[derive(Debug, Clone, Default)] pub struct HttpCache { pub location: PathBuf, } diff --git a/cli/lsp/README.md b/cli/lsp/README.md new file mode 100644 index 00000000000000..dcc9532733f4bf --- /dev/null +++ b/cli/lsp/README.md @@ -0,0 +1,23 @@ +# Deno Language Server + +The Deno Language Server provides a server implementation of the +[Language Server Protocol](https://microsoft.github.io/language-server-protocol/) +which is specifically tailored to provide a _Deno_ view of code. It is +integrated into the command line and can be started via the `lsp` sub-command. + +> :warning: The Language Server is highly experimental and far from feature +> complete. + +This document gives an overview of the structure of the language server. 
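Editorial note: the deno lsp subcommand described above takes Language Server Protocol traffic on stdin and answers on stdout, where every JSON-RPC message is framed by a Content-Length header counted in bytes. A minimal framing sketch; the body string here is hand-written for illustration, not produced by the actual server:

```rust
use std::io::{self, Write};

// Frame a JSON-RPC message the way the Language Server Protocol expects:
// a Content-Length header (in bytes), a blank line, then the JSON body.
fn write_lsp_frame(out: &mut impl Write, body: &str) -> io::Result<()> {
    write!(out, "Content-Length: {}\r\n\r\n{}", body.len(), body)
}

fn main() -> io::Result<()> {
    // Example body; a real client would send an initialize request with
    // full capabilities before anything else.
    let body = r#"{"jsonrpc":"2.0","id":1,"method":"shutdown"}"#;
    let mut buf = Vec::new();
    write_lsp_frame(&mut buf, body)?;
    io::stdout().write_all(&buf)?;
    Ok(())
}
```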
+ +## Acknowledgement + +The structure of the language server was heavily influenced and adapted from +[`rust-analyzer`](https://rust-analyzer.github.io/). + +## Structure + +When the language server is started, a `ServerState` instance is created which +holds all the state of the language server, as well as provides the +infrastructure for receiving and sending notifications and requests from a +language server client. diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs new file mode 100644 index 00000000000000..370b41c45f9a89 --- /dev/null +++ b/cli/lsp/analysis.rs @@ -0,0 +1,324 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +use crate::ast; +use crate::import_map::ImportMap; +use crate::media_type::MediaType; +use crate::module_graph::parse_deno_types; +use crate::module_graph::parse_ts_reference; +use crate::module_graph::TypeScriptReference; +use crate::tools::lint::create_linter; + +use deno_core::error::AnyError; +use deno_core::ModuleSpecifier; +use deno_lint::rules; +use lsp_types::Position; +use lsp_types::Range; +use std::cell::RefCell; +use std::collections::HashMap; +use std::rc::Rc; + +/// Category of self-generated diagnostic messages (those not coming from) +/// TypeScript. +pub enum Category { + /// A lint diagnostic, where the first element is the message. + Lint { + message: String, + code: String, + hint: Option, + }, +} + +/// A structure to hold a reference to a diagnostic message. +pub struct Reference { + category: Category, + range: Range, +} + +fn as_lsp_range(range: &deno_lint::diagnostic::Range) -> Range { + Range { + start: Position { + line: (range.start.line - 1) as u32, + character: range.start.col as u32, + }, + end: Position { + line: (range.end.line - 1) as u32, + character: range.end.col as u32, + }, + } +} + +pub fn get_lint_references( + specifier: &ModuleSpecifier, + media_type: &MediaType, + source_code: &str, +) -> Result, AnyError> { + let syntax = ast::get_syntax(media_type); + let lint_rules = rules::get_recommended_rules(); + let mut linter = create_linter(syntax, lint_rules); + // TODO(@kitsonk) we should consider caching the swc source file versions for + // reuse by other processes + let (_, lint_diagnostics) = + linter.lint(specifier.to_string(), source_code.to_string())?; + + Ok( + lint_diagnostics + .into_iter() + .map(|d| Reference { + category: Category::Lint { + message: d.message, + code: d.code, + hint: d.hint, + }, + range: as_lsp_range(&d.range), + }) + .collect(), + ) +} + +pub fn references_to_diagnostics( + references: Vec, +) -> Vec { + references + .into_iter() + .map(|r| match r.category { + Category::Lint { message, code, .. 
} => lsp_types::Diagnostic { + range: r.range, + severity: Some(lsp_types::DiagnosticSeverity::Warning), + code: Some(lsp_types::NumberOrString::String(code)), + code_description: None, + // TODO(@kitsonk) this won't make sense for every diagnostic + source: Some("deno-lint".to_string()), + message, + related_information: None, + tags: None, // we should tag unused code + data: None, + }, + }) + .collect() +} + +#[derive(Debug, Default, Clone, PartialEq, Eq)] +pub struct Dependency { + pub is_dynamic: bool, + pub maybe_code: Option, + pub maybe_type: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ResolvedImport { + Resolved(ModuleSpecifier), + Err(String), +} + +pub fn resolve_import( + specifier: &str, + referrer: &ModuleSpecifier, + maybe_import_map: Option>>, +) -> ResolvedImport { + let maybe_mapped = if let Some(import_map) = maybe_import_map { + if let Ok(maybe_specifier) = + import_map.borrow().resolve(specifier, referrer.as_str()) + { + maybe_specifier + } else { + None + } + } else { + None + }; + let remapped = maybe_mapped.is_some(); + let specifier = if let Some(remapped) = maybe_mapped { + remapped + } else { + match ModuleSpecifier::resolve_import(specifier, referrer.as_str()) { + Ok(resolved) => resolved, + Err(err) => return ResolvedImport::Err(err.to_string()), + } + }; + let referrer_scheme = referrer.as_url().scheme(); + let specifier_scheme = specifier.as_url().scheme(); + if referrer_scheme == "https" && specifier_scheme == "http" { + return ResolvedImport::Err( + "Modules imported via https are not allowed to import http modules." + .to_string(), + ); + } + if (referrer_scheme == "https" || referrer_scheme == "http") + && !(specifier_scheme == "https" || specifier_scheme == "http") + && !remapped + { + return ResolvedImport::Err("Remote modules are not allowed to import local modules. Consider using a dynamic import instead.".to_string()); + } + + ResolvedImport::Resolved(specifier) +} + +// TODO(@kitsonk) a lot of this logic is duplicated in module_graph.rs in +// Module::parse() and should be refactored out to a common function. +pub fn analyze_dependencies( + specifier: &ModuleSpecifier, + source: &str, + media_type: &MediaType, + maybe_import_map: Option>>, +) -> Option<(HashMap, Option)> { + let specifier_str = specifier.to_string(); + let source_map = Rc::new(swc_common::SourceMap::default()); + let mut maybe_type = None; + if let Ok(parsed_module) = + ast::parse_with_source_map(&specifier_str, source, &media_type, source_map) + { + let mut dependencies = HashMap::::new(); + + // Parse leading comments for supported triple slash references. 
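Editorial note: the dependency analysis continued below scans a module's leading comments for triple-slash reference directives and @deno-types pragmas before walking the ES imports. The real code delegates to regex-based helpers in module_graph; the following is a simplified string-matching stand-in that only handles well-formed comments, with hypothetical helper names:

```rust
// Naive extraction of the quoted value that follows a pragma key such as
// `path=`, `types=` or `@deno-types=` inside a comment's text.
fn extract_quoted(comment: &str, key: &str) -> Option<String> {
    let start = comment.find(key)? + key.len();
    let rest = &comment[start..];
    let open = rest.find('"')? + 1;
    let close = rest[open..].find('"')? + open;
    Some(rest[open..close].to_string())
}

fn main() {
    // Comment text as the analysis sees it (the leading `//` is stripped).
    let path_ref = r#"/ <reference path="./lib.deno.d.ts" />"#;
    let types_ref = r#"/ <reference types="./mod.d.ts" />"#;
    let deno_types = r#" @deno-types="https://deno.land/x/types/react/index.d.ts""#;

    assert_eq!(
        extract_quoted(path_ref, "path="),
        Some("./lib.deno.d.ts".to_string())
    );
    assert_eq!(
        extract_quoted(types_ref, "types="),
        Some("./mod.d.ts".to_string())
    );
    assert_eq!(
        extract_quoted(deno_types, "@deno-types="),
        Some("https://deno.land/x/types/react/index.d.ts".to_string())
    );
}
```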
+ for comment in parsed_module.get_leading_comments().iter() { + if let Some(ts_reference) = parse_ts_reference(&comment.text) { + match ts_reference { + TypeScriptReference::Path(import) => { + let dep = dependencies.entry(import.clone()).or_default(); + let resolved_import = + resolve_import(&import, specifier, maybe_import_map.clone()); + dep.maybe_code = Some(resolved_import); + } + TypeScriptReference::Types(import) => { + let resolved_import = + resolve_import(&import, specifier, maybe_import_map.clone()); + if media_type == &MediaType::JavaScript + || media_type == &MediaType::JSX + { + maybe_type = Some(resolved_import) + } else { + let dep = dependencies.entry(import).or_default(); + dep.maybe_type = Some(resolved_import); + } + } + } + } + } + + // Parse ES and type only imports + let descriptors = parsed_module.analyze_dependencies(); + for desc in descriptors.into_iter().filter(|desc| { + desc.kind != swc_ecmascript::dep_graph::DependencyKind::Require + }) { + let resolved_import = + resolve_import(&desc.specifier, specifier, maybe_import_map.clone()); + + // Check for `@deno-types` pragmas that effect the import + let maybe_resolved_type_import = + if let Some(comment) = desc.leading_comments.last() { + if let Some(deno_types) = parse_deno_types(&comment.text).as_ref() { + Some(resolve_import( + deno_types, + specifier, + maybe_import_map.clone(), + )) + } else { + None + } + } else { + None + }; + + let dep = dependencies.entry(desc.specifier.to_string()).or_default(); + dep.is_dynamic = desc.is_dynamic; + match desc.kind { + swc_ecmascript::dep_graph::DependencyKind::ExportType + | swc_ecmascript::dep_graph::DependencyKind::ImportType => { + dep.maybe_type = Some(resolved_import) + } + _ => dep.maybe_code = Some(resolved_import), + } + if maybe_resolved_type_import.is_some() && dep.maybe_type.is_none() { + dep.maybe_type = maybe_resolved_type_import; + } + } + + Some((dependencies, maybe_type)) + } else { + None + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_as_lsp_range() { + let fixture = deno_lint::diagnostic::Range { + start: deno_lint::diagnostic::Position { + line: 1, + col: 2, + byte_pos: 23, + }, + end: deno_lint::diagnostic::Position { + line: 2, + col: 0, + byte_pos: 33, + }, + }; + let actual = as_lsp_range(&fixture); + assert_eq!( + actual, + lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 2, + }, + end: lsp_types::Position { + line: 1, + character: 0, + }, + } + ); + } + + #[test] + fn test_analyze_dependencies() { + let specifier = + ModuleSpecifier::resolve_url("file:///a.ts").expect("bad specifier"); + let source = r#"import { + Application, + Context, + Router, + Status, + } from "https://deno.land/x/oak@v6.3.2/mod.ts"; + + // @deno-types="https://deno.land/x/types/react/index.d.ts"; + import * as React from "https://cdn.skypack.dev/react"; + "#; + let actual = + analyze_dependencies(&specifier, source, &MediaType::TypeScript, None); + assert!(actual.is_some()); + let (actual, maybe_type) = actual.unwrap(); + assert!(maybe_type.is_none()); + assert_eq!(actual.len(), 2); + assert_eq!( + actual.get("https://cdn.skypack.dev/react").cloned(), + Some(Dependency { + is_dynamic: false, + maybe_code: Some(ResolvedImport::Resolved( + ModuleSpecifier::resolve_url("https://cdn.skypack.dev/react") + .unwrap() + )), + maybe_type: Some(ResolvedImport::Resolved( + ModuleSpecifier::resolve_url( + "https://deno.land/x/types/react/index.d.ts" + ) + .unwrap() + )), + }) + ); + assert_eq!( + 
actual.get("https://deno.land/x/oak@v6.3.2/mod.ts").cloned(), + Some(Dependency { + is_dynamic: false, + maybe_code: Some(ResolvedImport::Resolved( + ModuleSpecifier::resolve_url("https://deno.land/x/oak@v6.3.2/mod.ts") + .unwrap() + )), + maybe_type: None, + }) + ); + } +} diff --git a/cli/lsp/capabilities.rs b/cli/lsp/capabilities.rs new file mode 100644 index 00000000000000..cf8f150cac6e43 --- /dev/null +++ b/cli/lsp/capabilities.rs @@ -0,0 +1,59 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +///! +///! Provides information about what capabilities that are supported by the +///! language server, which helps determine what messages are sent from the +///! client. +///! +use lsp_types::ClientCapabilities; +use lsp_types::HoverProviderCapability; +use lsp_types::OneOf; +use lsp_types::SaveOptions; +use lsp_types::ServerCapabilities; +use lsp_types::TextDocumentSyncCapability; +use lsp_types::TextDocumentSyncKind; +use lsp_types::TextDocumentSyncOptions; + +pub fn server_capabilities( + _client_capabilities: &ClientCapabilities, +) -> ServerCapabilities { + ServerCapabilities { + text_document_sync: Some(TextDocumentSyncCapability::Options( + TextDocumentSyncOptions { + open_close: Some(true), + change: Some(TextDocumentSyncKind::Incremental), + will_save: None, + will_save_wait_until: None, + save: Some(SaveOptions::default().into()), + }, + )), + hover_provider: Some(HoverProviderCapability::Simple(true)), + completion_provider: None, + signature_help_provider: None, + declaration_provider: None, + definition_provider: Some(OneOf::Left(true)), + type_definition_provider: None, + implementation_provider: None, + references_provider: Some(OneOf::Left(true)), + document_highlight_provider: Some(OneOf::Left(true)), + document_symbol_provider: None, + workspace_symbol_provider: None, + code_action_provider: None, + code_lens_provider: None, + document_formatting_provider: Some(OneOf::Left(true)), + document_range_formatting_provider: None, + document_on_type_formatting_provider: None, + selection_range_provider: None, + semantic_highlighting: None, + folding_range_provider: None, + rename_provider: None, + document_link_provider: None, + color_provider: None, + execute_command_provider: None, + workspace: None, + call_hierarchy_provider: None, + semantic_tokens_provider: None, + on_type_rename_provider: None, + experimental: None, + } +} diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs new file mode 100644 index 00000000000000..ebc145708920e5 --- /dev/null +++ b/cli/lsp/config.rs @@ -0,0 +1,49 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
+ +use deno_core::error::AnyError; +use deno_core::serde::Deserialize; +use deno_core::serde_json; +use deno_core::serde_json::Value; + +#[derive(Debug, Clone, Default)] +pub struct ClientCapabilities { + pub status_notification: bool, +} + +#[derive(Debug, Clone, Default, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WorkspaceSettings { + pub enable: bool, + pub config: Option, + pub import_map: Option, + pub lint: bool, + pub unstable: bool, +} + +#[derive(Debug, Clone, Default)] +pub struct Config { + pub client_capabilities: ClientCapabilities, + pub settings: WorkspaceSettings, +} + +impl Config { + pub fn update(&mut self, value: Value) -> Result<(), AnyError> { + let settings: WorkspaceSettings = serde_json::from_value(value)?; + self.settings = settings; + Ok(()) + } + + #[allow(clippy::redundant_closure_call)] + pub fn update_capabilities( + &mut self, + capabilities: &lsp_types::ClientCapabilities, + ) { + if let Some(experimental) = &capabilities.experimental { + let get_bool = + |k: &str| experimental.get(k).and_then(|it| it.as_bool()) == Some(true); + + self.client_capabilities.status_notification = + get_bool("statusNotification"); + } + } +} diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs new file mode 100644 index 00000000000000..a7f027c1bbd200 --- /dev/null +++ b/cli/lsp/diagnostics.rs @@ -0,0 +1,268 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +use super::analysis::get_lint_references; +use super::analysis::references_to_diagnostics; +use super::memory_cache::FileId; +use super::state::ServerStateSnapshot; +use super::tsc; + +use crate::diagnostics; +use crate::media_type::MediaType; + +use deno_core::error::AnyError; +use deno_core::serde_json; +use deno_core::serde_json::Value; +use deno_core::url::Url; +use deno_core::JsRuntime; +use std::collections::HashMap; +use std::collections::HashSet; +use std::mem; + +impl<'a> From<&'a diagnostics::DiagnosticCategory> + for lsp_types::DiagnosticSeverity +{ + fn from(category: &'a diagnostics::DiagnosticCategory) -> Self { + match category { + diagnostics::DiagnosticCategory::Error => { + lsp_types::DiagnosticSeverity::Error + } + diagnostics::DiagnosticCategory::Warning => { + lsp_types::DiagnosticSeverity::Warning + } + diagnostics::DiagnosticCategory::Suggestion => { + lsp_types::DiagnosticSeverity::Hint + } + diagnostics::DiagnosticCategory::Message => { + lsp_types::DiagnosticSeverity::Information + } + } + } +} + +impl<'a> From<&'a diagnostics::Position> for lsp_types::Position { + fn from(pos: &'a diagnostics::Position) -> Self { + Self { + line: pos.line as u32, + character: pos.character as u32, + } + } +} + +fn to_lsp_range( + start: &diagnostics::Position, + end: &diagnostics::Position, +) -> lsp_types::Range { + lsp_types::Range { + start: start.into(), + end: end.into(), + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub enum DiagnosticSource { + Lint, + TypeScript, +} + +#[derive(Debug, Default, Clone)] +pub struct DiagnosticCollection { + map: HashMap<(FileId, DiagnosticSource), Vec>, + versions: HashMap, + changes: HashSet, +} + +impl DiagnosticCollection { + pub fn set( + &mut self, + file_id: FileId, + source: DiagnosticSource, + version: Option, + diagnostics: Vec, + ) { + self.map.insert((file_id, source), diagnostics); + if let Some(version) = version { + self.versions.insert(file_id, version); + } + self.changes.insert(file_id); + } + + pub fn diagnostics_for( + &self, + file_id: FileId, + source: DiagnosticSource, + ) -> impl 
Iterator { + self.map.get(&(file_id, source)).into_iter().flatten() + } + + pub fn get_version(&self, file_id: &FileId) -> Option { + self.versions.get(file_id).cloned() + } + + pub fn take_changes(&mut self) -> Option> { + if self.changes.is_empty() { + return None; + } + Some(mem::take(&mut self.changes)) + } +} + +pub type DiagnosticVec = Vec<(FileId, Option, Vec)>; + +pub fn generate_linting_diagnostics( + state: &ServerStateSnapshot, +) -> DiagnosticVec { + if !state.config.settings.lint { + return Vec::new(); + } + let mut diagnostics = Vec::new(); + let file_cache = state.file_cache.read().unwrap(); + for (specifier, doc_data) in state.doc_data.iter() { + let file_id = file_cache.lookup(specifier).unwrap(); + let version = doc_data.version; + let current_version = state.diagnostics.get_version(&file_id); + if version != current_version { + let media_type = MediaType::from(specifier); + if let Ok(source_code) = file_cache.get_contents(file_id) { + if let Ok(references) = + get_lint_references(specifier, &media_type, &source_code) + { + if !references.is_empty() { + diagnostics.push(( + file_id, + version, + references_to_diagnostics(references), + )); + } else { + diagnostics.push((file_id, version, Vec::new())); + } + } + } else { + error!("Missing file contents for: {}", specifier); + } + } + } + + diagnostics +} + +type TsDiagnostics = Vec; + +fn get_diagnostic_message(diagnostic: &diagnostics::Diagnostic) -> String { + if let Some(message) = diagnostic.message_text.clone() { + message + } else if let Some(message_chain) = diagnostic.message_chain.clone() { + message_chain.format_message(0) + } else { + "[missing message]".to_string() + } +} + +fn to_lsp_related_information( + related_information: &Option>, +) -> Option> { + if let Some(related) = related_information { + Some( + related + .iter() + .filter_map(|ri| { + if let (Some(source), Some(start), Some(end)) = + (&ri.source, &ri.start, &ri.end) + { + let uri = Url::parse(&source).unwrap(); + Some(lsp_types::DiagnosticRelatedInformation { + location: lsp_types::Location { + uri, + range: to_lsp_range(start, end), + }, + message: get_diagnostic_message(&ri), + }) + } else { + None + } + }) + .collect(), + ) + } else { + None + } +} + +fn ts_json_to_diagnostics( + value: Value, +) -> Result, AnyError> { + let ts_diagnostics: TsDiagnostics = serde_json::from_value(value)?; + Ok( + ts_diagnostics + .iter() + .filter_map(|d| { + if let (Some(start), Some(end)) = (&d.start, &d.end) { + Some(lsp_types::Diagnostic { + range: to_lsp_range(start, end), + severity: Some((&d.category).into()), + code: Some(lsp_types::NumberOrString::Number(d.code as i32)), + code_description: None, + source: Some("deno-ts".to_string()), + message: get_diagnostic_message(d), + related_information: to_lsp_related_information( + &d.related_information, + ), + tags: match d.code { + // These are codes that indicate the variable is unused. 
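Editorial note: the codes matched in the lines that follow (6133, 6192, 6196) are TypeScript's "declared but never used" diagnostics, and tagging them as Unnecessary lets editors fade the affected range. A small sketch of the same mapping with an illustrative tag enum:

```rust
// A handful of TypeScript diagnostic codes mean "declared but never used";
// those get the Unnecessary tag so editors can render the range as faded.
#[derive(Debug, PartialEq)]
enum DiagnosticTag {
    Unnecessary,
}

fn tags_for_code(code: u32) -> Option<Vec<DiagnosticTag>> {
    match code {
        6133 | 6192 | 6196 => Some(vec![DiagnosticTag::Unnecessary]),
        _ => None,
    }
}

fn main() {
    assert_eq!(
        tags_for_code(6133),
        Some(vec![DiagnosticTag::Unnecessary])
    );
    // Any other code maps to no tags at all.
    assert_eq!(tags_for_code(1234), None);
}
```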
+ 6133 | 6192 | 6196 => { + Some(vec![lsp_types::DiagnosticTag::Unnecessary]) + } + _ => None, + }, + data: None, + }) + } else { + None + } + }) + .collect(), + ) +} + +pub fn generate_ts_diagnostics( + state: &ServerStateSnapshot, + runtime: &mut JsRuntime, +) -> Result { + if !state.config.settings.enable { + return Ok(Vec::new()); + } + let mut diagnostics = Vec::new(); + let file_cache = state.file_cache.read().unwrap(); + for (specifier, doc_data) in state.doc_data.iter() { + let file_id = file_cache.lookup(specifier).unwrap(); + let version = doc_data.version; + let current_version = state.diagnostics.get_version(&file_id); + if version != current_version { + // TODO(@kitsonk): consider refactoring to get all diagnostics in one shot + // for a file. + let request_semantic_diagnostics = + tsc::RequestMethod::GetSemanticDiagnostics(specifier.clone()); + let mut ts_diagnostics = ts_json_to_diagnostics(tsc::request( + runtime, + state, + request_semantic_diagnostics, + )?)?; + let request_suggestion_diagnostics = + tsc::RequestMethod::GetSuggestionDiagnostics(specifier.clone()); + ts_diagnostics.append(&mut ts_json_to_diagnostics(tsc::request( + runtime, + state, + request_suggestion_diagnostics, + )?)?); + let request_syntactic_diagnostics = + tsc::RequestMethod::GetSyntacticDiagnostics(specifier.clone()); + ts_diagnostics.append(&mut ts_json_to_diagnostics(tsc::request( + runtime, + state, + request_syntactic_diagnostics, + )?)?); + diagnostics.push((file_id, version, ts_diagnostics)); + } + } + + Ok(diagnostics) +} diff --git a/cli/lsp/dispatch.rs b/cli/lsp/dispatch.rs new file mode 100644 index 00000000000000..774bdcef9bbb09 --- /dev/null +++ b/cli/lsp/dispatch.rs @@ -0,0 +1,185 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
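
The `DiagnosticCollection` above keys each batch of diagnostics by `(FileId, DiagnosticSource)`, remembers which document version a batch was computed for, and tracks which files changed so only those need to be republished. A rough, self-contained sketch of that bookkeeping pattern follows; the `Store`, `Source`, and `main` names are illustrative stand-ins, not the actual `cli/lsp` types.

```rust
use std::collections::{HashMap, HashSet};
use std::mem;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
enum Source {
    Lint,
    TypeScript,
}

#[derive(Default)]
struct Store {
    map: HashMap<(FileId, Source), Vec<String>>,
    versions: HashMap<FileId, i32>,
    changes: HashSet<FileId>,
}

impl Store {
    /// Record a new batch of diagnostics and mark the file as changed.
    fn set(&mut self, file: FileId, source: Source, version: Option<i32>, diags: Vec<String>) {
        self.map.insert((file, source), diags);
        if let Some(version) = version {
            self.versions.insert(file, version);
        }
        self.changes.insert(file);
    }

    /// The document version the stored diagnostics were computed against, if any.
    fn version(&self, file: FileId) -> Option<i32> {
        self.versions.get(&file).copied()
    }

    /// Drain the set of files whose diagnostics changed since the last call.
    fn take_changes(&mut self) -> Option<HashSet<FileId>> {
        if self.changes.is_empty() {
            return None;
        }
        Some(mem::take(&mut self.changes))
    }
}

fn main() {
    let mut store = Store::default();
    let file = FileId(0);
    store.set(file, Source::Lint, Some(1), vec!["unused variable `x`".into()]);
    // A later pass can skip work if the stored version already matches the document.
    assert_eq!(store.version(file), Some(1));
    // Only files returned by take_changes() need to be republished to the client.
    assert_eq!(store.take_changes().unwrap().len(), 1);
    assert!(store.take_changes().is_none());
}
```

That version comparison is what lets `generate_linting_diagnostics` above skip files whose diagnostics are already current.
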
+ +use super::state::ServerState; +use super::state::ServerStateSnapshot; +use super::state::Task; +use super::utils::from_json; +use super::utils::is_canceled; + +use deno_core::error::custom_error; +use deno_core::error::AnyError; +use lsp_server::ErrorCode; +use lsp_server::Notification; +use lsp_server::Request; +use lsp_server::RequestId; +use lsp_server::Response; +use serde::de::DeserializeOwned; +use serde::Serialize; +use std::fmt; +use std::panic; + +pub struct NotificationDispatcher<'a> { + pub notification: Option, + pub server_state: &'a mut ServerState, +} + +impl<'a> NotificationDispatcher<'a> { + pub fn on( + &mut self, + f: fn(&mut ServerState, N::Params) -> Result<(), AnyError>, + ) -> Result<&mut Self, AnyError> + where + N: lsp_types::notification::Notification + 'static, + N::Params: DeserializeOwned + Send + 'static, + { + let notification = match self.notification.take() { + Some(it) => it, + None => return Ok(self), + }; + let params = match notification.extract::(N::METHOD) { + Ok(it) => it, + Err(notification) => { + self.notification = Some(notification); + return Ok(self); + } + }; + f(self.server_state, params)?; + Ok(self) + } + + pub fn finish(&mut self) { + if let Some(notification) = &self.notification { + if !notification.method.starts_with("$/") { + error!("unhandled notification: {:?}", notification); + } + } + } +} + +fn result_to_response( + id: RequestId, + result: Result, +) -> Response +where + R: lsp_types::request::Request + 'static, + R::Params: DeserializeOwned + 'static, + R::Result: Serialize + 'static, +{ + match result { + Ok(response) => Response::new_ok(id, &response), + Err(err) => { + if is_canceled(&*err) { + Response::new_err( + id, + ErrorCode::ContentModified as i32, + "content modified".to_string(), + ) + } else { + Response::new_err(id, ErrorCode::InternalError as i32, err.to_string()) + } + } + } +} + +pub struct RequestDispatcher<'a> { + pub request: Option, + pub server_state: &'a mut ServerState, +} + +impl<'a> RequestDispatcher<'a> { + pub fn finish(&mut self) { + if let Some(request) = self.request.take() { + error!("unknown request: {:?}", request); + let response = Response::new_err( + request.id, + ErrorCode::MethodNotFound as i32, + "unknown request".to_string(), + ); + self.server_state.respond(response); + } + } + + /// Handle a request which will respond to the LSP client asynchronously via + /// a spawned thread. + pub fn on( + &mut self, + f: fn(ServerStateSnapshot, R::Params) -> Result, + ) -> &mut Self + where + R: lsp_types::request::Request + 'static, + R::Params: DeserializeOwned + Send + fmt::Debug + 'static, + R::Result: Serialize + 'static, + { + let (id, params) = match self.parse::() { + Some(it) => it, + None => return self, + }; + self.server_state.spawn({ + let state = self.server_state.snapshot(); + move || { + let result = f(state, params); + Task::Response(result_to_response::(id, result)) + } + }); + + self + } + + /// Handle a request which will respond synchronously, returning a result if + /// the request cannot be handled or has issues. 
+ pub fn on_sync( + &mut self, + f: fn(&mut ServerState, R::Params) -> Result, + ) -> Result<&mut Self, AnyError> + where + R: lsp_types::request::Request + 'static, + R::Params: DeserializeOwned + panic::UnwindSafe + fmt::Debug + 'static, + R::Result: Serialize + 'static, + { + let (id, params) = match self.parse::() { + Some(it) => it, + None => return Ok(self), + }; + let state = panic::AssertUnwindSafe(&mut *self.server_state); + + let response = panic::catch_unwind(move || { + let result = f(state.0, params); + result_to_response::(id, result) + }) + .map_err(|_err| { + custom_error( + "SyncTaskPanic", + format!("sync task {:?} panicked", R::METHOD), + ) + })?; + self.server_state.respond(response); + Ok(self) + } + + fn parse(&mut self) -> Option<(RequestId, R::Params)> + where + R: lsp_types::request::Request + 'static, + R::Params: DeserializeOwned + 'static, + { + let request = match &self.request { + Some(request) if request.method == R::METHOD => { + self.request.take().unwrap() + } + _ => return None, + }; + + let response = from_json(R::METHOD, request.params); + match response { + Ok(params) => Some((request.id, params)), + Err(err) => { + let response = Response::new_err( + request.id, + ErrorCode::InvalidParams as i32, + err.to_string(), + ); + self.server_state.respond(response); + None + } + } + } +} diff --git a/cli/lsp/handlers.rs b/cli/lsp/handlers.rs new file mode 100644 index 00000000000000..6dd7321c795653 --- /dev/null +++ b/cli/lsp/handlers.rs @@ -0,0 +1,266 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +use super::lsp_extensions; +use super::state::ServerState; +use super::state::ServerStateSnapshot; +use super::text; +use super::tsc; +use super::utils; + +use deno_core::error::custom_error; +use deno_core::error::AnyError; +use deno_core::serde_json; +use deno_core::ModuleSpecifier; +use dprint_plugin_typescript as dprint; +use lsp_types::DocumentFormattingParams; +use lsp_types::DocumentHighlight; +use lsp_types::DocumentHighlightParams; +use lsp_types::GotoDefinitionParams; +use lsp_types::GotoDefinitionResponse; +use lsp_types::Hover; +use lsp_types::HoverParams; +use lsp_types::Location; +use lsp_types::ReferenceParams; +use lsp_types::TextEdit; +use std::path::PathBuf; + +fn get_line_index( + state: &mut ServerState, + specifier: &ModuleSpecifier, +) -> Result, AnyError> { + let line_index = if specifier.as_url().scheme() == "asset" { + if let Some(source) = tsc::get_asset(specifier.as_url().path()) { + text::index_lines(source) + } else { + return Err(custom_error( + "NotFound", + format!("asset source missing: {}", specifier), + )); + } + } else { + let file_cache = state.file_cache.read().unwrap(); + if let Some(file_id) = file_cache.lookup(specifier) { + let file_text = file_cache.get_contents(file_id)?; + text::index_lines(&file_text) + } else { + let mut sources = state.sources.write().unwrap(); + if let Some(line_index) = sources.get_line_index(specifier) { + line_index + } else { + return Err(custom_error( + "NotFound", + format!("source for specifier not found: {}", specifier), + )); + } + } + }; + Ok(line_index) +} + +pub fn handle_formatting( + state: ServerStateSnapshot, + params: DocumentFormattingParams, +) -> Result>, AnyError> { + let specifier = utils::normalize_url(params.text_document.uri.clone()); + let file_cache = state.file_cache.read().unwrap(); + let file_id = file_cache.lookup(&specifier).unwrap(); + let file_text = file_cache.get_contents(file_id)?; + + let file_path = if let Ok(file_path) = 
params.text_document.uri.to_file_path() + { + file_path + } else { + PathBuf::from(params.text_document.uri.path()) + }; + let config = dprint::configuration::ConfigurationBuilder::new() + .deno() + .build(); + + // TODO(@kitsonk) this could be handled better in `cli/tools/fmt.rs` in the + // future. + let new_text = dprint::format_text(&file_path, &file_text, &config) + .map_err(|e| custom_error("FormatError", e))?; + + let text_edits = text::get_edits(&file_text, &new_text); + if text_edits.is_empty() { + Ok(None) + } else { + Ok(Some(text_edits)) + } +} + +pub fn handle_document_highlight( + state: &mut ServerState, + params: DocumentHighlightParams, +) -> Result>, AnyError> { + let specifier = utils::normalize_url( + params.text_document_position_params.text_document.uri, + ); + let line_index = get_line_index(state, &specifier)?; + let server_state = state.snapshot(); + let files_to_search = vec![specifier.clone()]; + let maybe_document_highlights: Option> = + serde_json::from_value(tsc::request( + &mut state.ts_runtime, + &server_state, + tsc::RequestMethod::GetDocumentHighlights(( + specifier, + text::to_char_pos( + &line_index, + params.text_document_position_params.position, + ), + files_to_search, + )), + )?)?; + + if let Some(document_highlights) = maybe_document_highlights { + Ok(Some( + document_highlights + .into_iter() + .map(|dh| dh.to_highlight(&line_index)) + .flatten() + .collect(), + )) + } else { + Ok(None) + } +} + +pub fn handle_goto_definition( + state: &mut ServerState, + params: GotoDefinitionParams, +) -> Result, AnyError> { + let specifier = utils::normalize_url( + params.text_document_position_params.text_document.uri, + ); + let line_index = get_line_index(state, &specifier)?; + let server_state = state.snapshot(); + let maybe_definition: Option = + serde_json::from_value(tsc::request( + &mut state.ts_runtime, + &server_state, + tsc::RequestMethod::GetDefinition(( + specifier, + text::to_char_pos( + &line_index, + params.text_document_position_params.position, + ), + )), + )?)?; + + if let Some(definition) = maybe_definition { + Ok( + definition + .to_definition(&line_index, |s| get_line_index(state, &s).unwrap()), + ) + } else { + Ok(None) + } +} + +pub fn handle_hover( + state: &mut ServerState, + params: HoverParams, +) -> Result, AnyError> { + let specifier = utils::normalize_url( + params.text_document_position_params.text_document.uri, + ); + let line_index = get_line_index(state, &specifier)?; + let server_state = state.snapshot(); + let maybe_quick_info: Option = + serde_json::from_value(tsc::request( + &mut state.ts_runtime, + &server_state, + tsc::RequestMethod::GetQuickInfo(( + specifier, + text::to_char_pos( + &line_index, + params.text_document_position_params.position, + ), + )), + )?)?; + + if let Some(quick_info) = maybe_quick_info { + Ok(Some(quick_info.to_hover(&line_index))) + } else { + Ok(None) + } +} + +pub fn handle_references( + state: &mut ServerState, + params: ReferenceParams, +) -> Result>, AnyError> { + let specifier = + utils::normalize_url(params.text_document_position.text_document.uri); + let line_index = get_line_index(state, &specifier)?; + let server_state = state.snapshot(); + let maybe_references: Option> = + serde_json::from_value(tsc::request( + &mut state.ts_runtime, + &server_state, + tsc::RequestMethod::GetReferences(( + specifier, + text::to_char_pos(&line_index, params.text_document_position.position), + )), + )?)?; + + if let Some(references) = maybe_references { + let mut results = Vec::new(); + for reference in 
references { + if !params.context.include_declaration && reference.is_definition { + continue; + } + let reference_specifier = + ModuleSpecifier::resolve_url(&reference.file_name).unwrap(); + let line_index = get_line_index(state, &reference_specifier)?; + results.push(reference.to_location(&line_index)); + } + + Ok(Some(results)) + } else { + Ok(None) + } +} + +pub fn handle_virtual_text_document( + state: ServerStateSnapshot, + params: lsp_extensions::VirtualTextDocumentParams, +) -> Result { + let specifier = utils::normalize_url(params.text_document.uri); + let url = specifier.as_url(); + let contents = if url.as_str() == "deno:///status.md" { + let file_cache = state.file_cache.read().unwrap(); + format!( + r#"# Deno Language Server Status + +- Documents in memory: {} + +"#, + file_cache.len() + ) + } else { + match url.scheme() { + "asset" => { + if let Some(text) = tsc::get_asset(url.path()) { + text.to_string() + } else { + error!("Missing asset: {}", specifier); + "".to_string() + } + } + _ => { + let mut sources = state.sources.write().unwrap(); + if let Some(text) = sources.get_text(&specifier) { + text + } else { + return Err(custom_error( + "NotFound", + format!("The cached sources was not found: {}", specifier), + )); + } + } + } + }; + Ok(contents) +} diff --git a/cli/lsp/lsp_extensions.rs b/cli/lsp/lsp_extensions.rs new file mode 100644 index 00000000000000..eb0a62464d4977 --- /dev/null +++ b/cli/lsp/lsp_extensions.rs @@ -0,0 +1,26 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +///! +///! Extensions to the language service protocol that are specific to Deno. +///! +use deno_core::serde::Deserialize; +use deno_core::serde::Serialize; +use lsp_types::request::Request; +use lsp_types::TextDocumentIdentifier; + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct VirtualTextDocumentParams { + pub text_document: TextDocumentIdentifier, +} + +/// Request a _virtual_ text document from the server. Used for example to +/// provide a status document of the language server which can be viewed in the +/// IDE. +pub enum VirtualTextDocument {} + +impl Request for VirtualTextDocument { + type Params = VirtualTextDocumentParams; + type Result = String; + const METHOD: &'static str = "deno/virtualTextDocument"; +} diff --git a/cli/lsp/memory_cache.rs b/cli/lsp/memory_cache.rs new file mode 100644 index 00000000000000..75c5bdb2511139 --- /dev/null +++ b/cli/lsp/memory_cache.rs @@ -0,0 +1,126 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
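
The `memory_cache` module that begins here interns module specifiers into dense `FileId` values so that file contents and change records can live in plain vectors. A standalone sketch of the same interning pattern follows, assuming `String` keys in place of `ModuleSpecifier`; the `Interner` name is illustrative, not the patch's API.

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

/// Maps each distinct key to a small, stable integer id and back.
#[derive(Default)]
struct Interner {
    map: HashMap<String, FileId>,
    vec: Vec<String>,
}

impl Interner {
    fn get(&self, key: &str) -> Option<FileId> {
        self.map.get(key).copied()
    }

    /// Return the existing id for `key`, or allocate the next one.
    fn intern(&mut self, key: String) -> FileId {
        if let Some(id) = self.get(&key) {
            return id;
        }
        let id = FileId(self.vec.len() as u32);
        self.map.insert(key.clone(), id);
        self.vec.push(key);
        id
    }

    fn lookup(&self, id: FileId) -> &str {
        &self.vec[id.0 as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("file:///a.ts".to_string());
    let b = interner.intern("file:///b.ts".to_string());
    // Interning the same key twice yields the same id.
    assert_eq!(a, interner.intern("file:///a.ts".to_string()));
    assert_ne!(a, b);
    assert_eq!(interner.lookup(b), "file:///b.ts");
}
```
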
+ +use deno_core::error::AnyError; +use deno_core::ModuleSpecifier; +use std::collections::HashMap; +use std::fmt; +use std::mem; + +#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub struct FileId(pub u32); + +#[derive(Eq, PartialEq, Copy, Clone, Debug)] +pub enum ChangeKind { + Create, + Modify, + Delete, +} + +pub struct ChangedFile { + pub change_kind: ChangeKind, + pub file_id: FileId, +} + +#[derive(Default)] +struct SpecifierInterner { + map: HashMap, + vec: Vec, +} + +impl SpecifierInterner { + pub fn get(&self, specifier: &ModuleSpecifier) -> Option { + self.map.get(specifier).copied() + } + + pub fn intern(&mut self, specifier: ModuleSpecifier) -> FileId { + if let Some(id) = self.get(&specifier) { + return id; + } + let id = FileId(self.vec.len() as u32); + self.map.insert(specifier.clone(), id); + self.vec.push(specifier); + id + } + + pub fn lookup(&self, id: FileId) -> &ModuleSpecifier { + &self.vec[id.0 as usize] + } +} + +#[derive(Default)] +pub struct MemoryCache { + data: Vec>>, + interner: SpecifierInterner, + changes: Vec, +} + +impl MemoryCache { + fn alloc_file_id(&mut self, specifier: ModuleSpecifier) -> FileId { + let file_id = self.interner.intern(specifier); + let idx = file_id.0 as usize; + let len = self.data.len().max(idx + 1); + self.data.resize_with(len, || None); + file_id + } + + fn get(&self, file_id: FileId) -> &Option> { + &self.data[file_id.0 as usize] + } + + pub fn get_contents(&self, file_id: FileId) -> Result { + String::from_utf8(self.get(file_id).as_deref().unwrap().to_vec()) + .map_err(|err| err.into()) + } + + fn get_mut(&mut self, file_id: FileId) -> &mut Option> { + &mut self.data[file_id.0 as usize] + } + + pub fn get_specifier(&self, file_id: FileId) -> &ModuleSpecifier { + self.interner.lookup(file_id) + } + + pub fn len(&self) -> usize { + self.data.len() + } + + pub fn lookup(&self, specifier: &ModuleSpecifier) -> Option { + self + .interner + .get(specifier) + .filter(|&it| self.get(it).is_some()) + } + + pub fn set_contents( + &mut self, + specifier: ModuleSpecifier, + contents: Option>, + ) { + let file_id = self.alloc_file_id(specifier); + let change_kind = match (self.get(file_id), &contents) { + (None, None) => return, + (None, Some(_)) => ChangeKind::Create, + (Some(_), None) => ChangeKind::Delete, + (Some(old), Some(new)) if old == new => return, + (Some(_), Some(_)) => ChangeKind::Modify, + }; + + *self.get_mut(file_id) = contents; + self.changes.push(ChangedFile { + file_id, + change_kind, + }) + } + + pub fn take_changes(&mut self) -> Vec { + mem::take(&mut self.changes) + } +} + +impl fmt::Debug for MemoryCache { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("MemoryCache") + .field("no_files", &self.data.len()) + .finish() + } +} diff --git a/cli/lsp/mod.rs b/cli/lsp/mod.rs new file mode 100644 index 00000000000000..c26c5d89e08e36 --- /dev/null +++ b/cli/lsp/mod.rs @@ -0,0 +1,415 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
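
The server entry point in `mod.rs`, which follows, drives everything from a single loop: client messages and the results of background work both arrive as events, so handlers spawned onto worker threads simply send a `Task` back to the loop when they finish. A minimal model of that pattern using only the standard library is sketched below; the patch itself uses `crossbeam_channel` and `tokio::task::spawn_blocking`, and the names here are illustrative.

```rust
use std::sync::mpsc;
use std::thread;

enum Event {
    ClientMessage(String),
    TaskDone(String),
}

fn main() {
    let (tx, rx) = mpsc::channel::<Event>();

    // Simulate an incoming client message.
    tx.send(Event::ClientMessage("textDocument/hover".into())).unwrap();

    // Dispatch the expensive part to a worker and keep the loop responsive.
    let worker_tx = tx.clone();
    thread::spawn(move || {
        // Pretend this is a long-running request handler.
        let response = "hover result".to_string();
        // The result re-enters the main loop as an ordinary event.
        worker_tx.send(Event::TaskDone(response)).unwrap();
    });

    drop(tx); // the loop below ends once every sender is gone

    for event in rx {
        match event {
            Event::ClientMessage(method) => println!("handling {}", method),
            Event::TaskDone(result) => println!("responding with {:?}", result),
        }
    }
}
```
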
+ +mod analysis; +mod capabilities; +mod config; +mod diagnostics; +mod dispatch; +mod handlers; +mod lsp_extensions; +mod memory_cache; +mod sources; +mod state; +mod text; +mod tsc; +mod utils; + +use config::Config; +use diagnostics::DiagnosticSource; +use dispatch::NotificationDispatcher; +use dispatch::RequestDispatcher; +use state::DocumentData; +use state::Event; +use state::ServerState; +use state::Status; +use state::Task; +use text::apply_content_changes; + +use crate::tsc_config::TsConfig; + +use crossbeam_channel::Receiver; +use deno_core::error::custom_error; +use deno_core::error::AnyError; +use deno_core::serde_json; +use deno_core::serde_json::json; +use lsp_server::Connection; +use lsp_server::ErrorCode; +use lsp_server::Message; +use lsp_server::Notification; +use lsp_server::Request; +use lsp_server::RequestId; +use lsp_server::Response; +use lsp_types::notification::Notification as _; +use lsp_types::Diagnostic; +use lsp_types::InitializeParams; +use lsp_types::InitializeResult; +use lsp_types::ServerInfo; +use std::env; +use std::time::Instant; + +pub fn start() -> Result<(), AnyError> { + info!("Starting Deno language server..."); + + let (connection, io_threads) = Connection::stdio(); + let (initialize_id, initialize_params) = connection.initialize_start()?; + let initialize_params: InitializeParams = + serde_json::from_value(initialize_params)?; + + let capabilities = + capabilities::server_capabilities(&initialize_params.capabilities); + + let version = format!( + "{} ({}, {})", + crate::version::deno(), + env!("PROFILE"), + env!("TARGET") + ); + + info!(" version: {}", version); + + let initialize_result = InitializeResult { + capabilities, + server_info: Some(ServerInfo { + name: "deno-language-server".to_string(), + version: Some(version), + }), + }; + let initialize_result = serde_json::to_value(initialize_result)?; + + connection.initialize_finish(initialize_id, initialize_result)?; + + if let Some(client_info) = initialize_params.client_info { + info!( + "Connected to \"{}\" {}", + client_info.name, + client_info.version.unwrap_or_default() + ); + } + + let mut config = Config::default(); + if let Some(value) = initialize_params.initialization_options { + config.update(value)?; + } + config.update_capabilities(&initialize_params.capabilities); + + let mut server_state = state::ServerState::new(connection.sender, config); + let state = server_state.snapshot(); + + // TODO(@kitsonk) need to make this configurable, respect unstable + let ts_config = TsConfig::new(json!({ + "allowJs": true, + "experimentalDecorators": true, + "isolatedModules": true, + "lib": ["deno.ns", "deno.window"], + "module": "esnext", + "noEmit": true, + "strict": true, + "target": "esnext", + })); + tsc::request( + &mut server_state.ts_runtime, + &state, + tsc::RequestMethod::Configure(ts_config), + )?; + + // listen for events and run the main loop + server_state.run(connection.receiver)?; + + io_threads.join()?; + info!("Stop language server"); + Ok(()) +} + +impl ServerState { + fn handle_event(&mut self, event: Event) -> Result<(), AnyError> { + let received = Instant::now(); + debug!("handle_event({:?})", event); + + match event { + Event::Message(message) => match message { + Message::Request(request) => self.on_request(request, received)?, + Message::Notification(notification) => { + self.on_notification(notification)? 
+ } + Message::Response(response) => self.complete_request(response), + }, + Event::Task(mut task) => loop { + match task { + Task::Response(response) => self.respond(response), + Task::Diagnostics((source, diagnostics_per_file)) => { + for (file_id, version, diagnostics) in diagnostics_per_file { + self.diagnostics.set( + file_id, + source.clone(), + version, + diagnostics, + ); + } + } + } + + task = match self.task_receiver.try_recv() { + Ok(task) => task, + Err(_) => break, + }; + }, + } + + // process server sent notifications, like diagnostics + // TODO(@kitsonk) currently all of these refresh all open documents, though + // in a lot of cases, like linting, we would only care about the files + // themselves that have changed + if self.process_changes() { + debug!("process changes"); + let state = self.snapshot(); + self.spawn(move || { + let diagnostics = diagnostics::generate_linting_diagnostics(&state); + Task::Diagnostics((DiagnosticSource::Lint, diagnostics)) + }); + // TODO(@kitsonk) isolates do not have Send to be safely sent between + // threads, so I am not sure this is the best way to handle queuing up of + // getting the diagnostics from the isolate. + let state = self.snapshot(); + let diagnostics = + diagnostics::generate_ts_diagnostics(&state, &mut self.ts_runtime)?; + self.spawn(move || { + Task::Diagnostics((DiagnosticSource::TypeScript, diagnostics)) + }); + } + + // process any changes to the diagnostics + if let Some(diagnostic_changes) = self.diagnostics.take_changes() { + debug!("diagnostics have changed"); + let state = self.snapshot(); + for file_id in diagnostic_changes { + let file_cache = state.file_cache.read().unwrap(); + // TODO(@kitsonk) not totally happy with the way we collect and store + // different types of diagnostics and offer them up to the client, we + // do need to send "empty" vectors though when a particular feature is + // disabled, otherwise the client will not clear down previous + // diagnostics + let mut diagnostics: Vec = if state.config.settings.lint { + self + .diagnostics + .diagnostics_for(file_id, DiagnosticSource::Lint) + .cloned() + .collect() + } else { + vec![] + }; + if state.config.settings.enable { + diagnostics.extend( + self + .diagnostics + .diagnostics_for(file_id, DiagnosticSource::TypeScript) + .cloned(), + ); + } + let specifier = file_cache.get_specifier(file_id); + let uri = specifier.as_url().clone(); + let version = if let Some(doc_data) = self.doc_data.get(specifier) { + doc_data.version + } else { + None + }; + self.send_notification::( + lsp_types::PublishDiagnosticsParams { + uri, + diagnostics, + version, + }, + ); + } + } + + Ok(()) + } + + fn on_notification( + &mut self, + notification: Notification, + ) -> Result<(), AnyError> { + NotificationDispatcher { + notification: Some(notification), + server_state: self, + } + // TODO(@kitsonk) this is just stubbed out and we don't currently actually + // cancel in progress work, though most of our work isn't long running + .on::(|state, params| { + let id: RequestId = match params.id { + lsp_types::NumberOrString::Number(id) => id.into(), + lsp_types::NumberOrString::String(id) => id.into(), + }; + state.cancel(id); + Ok(()) + })? 
+ .on::(|state, params| { + if params.text_document.uri.scheme() == "deno" { + // we can ignore virtual text documents opening, as they don't need to + // be tracked in memory, as they are static assets that won't change + // already managed by the language service + return Ok(()); + } + let specifier = utils::normalize_url(params.text_document.uri); + if state + .doc_data + .insert( + specifier.clone(), + DocumentData::new( + specifier.clone(), + params.text_document.version, + ¶ms.text_document.text, + None, + ), + ) + .is_some() + { + error!("duplicate DidOpenTextDocument: {}", specifier); + } + state + .file_cache + .write() + .unwrap() + .set_contents(specifier, Some(params.text_document.text.into_bytes())); + + Ok(()) + })? + .on::(|state, params| { + let specifier = utils::normalize_url(params.text_document.uri); + let mut file_cache = state.file_cache.write().unwrap(); + let file_id = file_cache.lookup(&specifier).unwrap(); + let mut content = file_cache.get_contents(file_id)?; + apply_content_changes(&mut content, params.content_changes); + let doc_data = state.doc_data.get_mut(&specifier).unwrap(); + doc_data.update(params.text_document.version, &content, None); + file_cache.set_contents(specifier, Some(content.into_bytes())); + + Ok(()) + })? + .on::(|state, params| { + if params.text_document.uri.scheme() == "deno" { + // we can ignore virtual text documents opening, as they don't need to + // be tracked in memory, as they are static assets that won't change + // already managed by the language service + return Ok(()); + } + let specifier = utils::normalize_url(params.text_document.uri); + if state.doc_data.remove(&specifier).is_none() { + error!("orphaned document: {}", specifier); + } + // TODO(@kitsonk) should we do garbage collection on the diagnostics? + + Ok(()) + })? + .on::(|_state, _params| { + // nothing to do yet... cleanup things? + + Ok(()) + })? + .on::(|state, _params| { + state.send_request::( + lsp_types::ConfigurationParams { + items: vec![lsp_types::ConfigurationItem { + scope_uri: None, + section: Some("deno".to_string()), + }], + }, + |state, response| { + let Response { error, result, .. } = response; + + match (error, result) { + (Some(err), _) => { + error!("failed to fetch the extension settings: {:?}", err); + } + (None, Some(config)) => { + if let Some(config) = config.get(0) { + if let Err(err) = state.config.update(config.clone()) { + error!("failed to update settings: {}", err); + } + } + } + (None, None) => { + error!("received empty extension settings from the client"); + } + } + }, + ); + + Ok(()) + })? + .finish(); + + Ok(()) + } + + fn on_request( + &mut self, + request: Request, + received: Instant, + ) -> Result<(), AnyError> { + self.register_request(&request, received); + + if self.shutdown_requested { + self.respond(Response::new_err( + request.id, + ErrorCode::InvalidRequest as i32, + "Shutdown already requested".to_string(), + )); + return Ok(()); + } + + if self.status == Status::Loading && request.method != "shutdown" { + self.respond(Response::new_err( + request.id, + ErrorCode::ContentModified as i32, + "Deno Language Server is still loading...".to_string(), + )); + return Ok(()); + } + + RequestDispatcher { + request: Some(request), + server_state: self, + } + .on_sync::(|s, ()| { + s.shutdown_requested = true; + Ok(()) + })? + .on_sync::( + handlers::handle_document_highlight, + )? + .on_sync::( + handlers::handle_goto_definition, + )? + .on_sync::(handlers::handle_hover)? + .on_sync::(handlers::handle_references)? 
+ .on::(handlers::handle_formatting) + .on::( + handlers::handle_virtual_text_document, + ) + .finish(); + + Ok(()) + } + + /// Start consuming events from the provided receiver channel. + pub fn run(mut self, inbox: Receiver) -> Result<(), AnyError> { + // currently we don't need to do any other loading or tasks, so as soon as + // we run we are "ready" + self.transition(Status::Ready); + + while let Some(event) = self.next_event(&inbox) { + if let Event::Message(Message::Notification(notification)) = &event { + if notification.method == lsp_types::notification::Exit::METHOD { + return Ok(()); + } + } + self.handle_event(event)? + } + + Err(custom_error( + "ClientError", + "Client exited without proper shutdown sequence.", + )) + } +} diff --git a/cli/lsp/sources.rs b/cli/lsp/sources.rs new file mode 100644 index 00000000000000..4f80044a29b0d7 --- /dev/null +++ b/cli/lsp/sources.rs @@ -0,0 +1,372 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +use super::analysis; +use super::text; + +use crate::file_fetcher::get_source_from_bytes; +use crate::file_fetcher::map_content_type; +use crate::http_cache; +use crate::http_cache::HttpCache; +use crate::media_type::MediaType; +use crate::text_encoding; + +use deno_core::serde_json; +use deno_core::ModuleSpecifier; +use std::collections::HashMap; +use std::fs; +use std::path::Path; +use std::path::PathBuf; +use std::time::SystemTime; + +#[derive(Debug, Clone, Default)] +struct Metadata { + dependencies: Option>, + maybe_types: Option, + media_type: MediaType, + source: String, + version: String, +} + +#[derive(Debug, Clone, Default)] +pub struct Sources { + http_cache: HttpCache, + metadata: HashMap, + redirects: HashMap, + remotes: HashMap, +} + +impl Sources { + pub fn new(location: &Path) -> Self { + Self { + http_cache: HttpCache::new(location), + ..Default::default() + } + } + + pub fn contains(&mut self, specifier: &ModuleSpecifier) -> bool { + if let Some(specifier) = self.resolve_specifier(specifier) { + if self.get_metadata(&specifier).is_some() { + return true; + } + } + false + } + + pub fn get_length(&mut self, specifier: &ModuleSpecifier) -> Option { + let specifier = self.resolve_specifier(specifier)?; + let metadata = self.get_metadata(&specifier)?; + Some(metadata.source.chars().count()) + } + + pub fn get_line_index( + &mut self, + specifier: &ModuleSpecifier, + ) -> Option> { + let specifier = self.resolve_specifier(specifier)?; + let metadata = self.get_metadata(&specifier)?; + Some(text::index_lines(&metadata.source)) + } + + pub fn get_media_type( + &mut self, + specifier: &ModuleSpecifier, + ) -> Option { + let specifier = self.resolve_specifier(specifier)?; + let metadata = self.get_metadata(&specifier)?; + Some(metadata.media_type) + } + + fn get_metadata(&mut self, specifier: &ModuleSpecifier) -> Option { + if let Some(metadata) = self.metadata.get(specifier).cloned() { + if let Some(current_version) = self.get_script_version(specifier) { + if metadata.version == current_version { + return Some(metadata); + } + } + } + let version = self.get_script_version(specifier)?; + let path = self.get_path(specifier)?; + if let Ok(bytes) = fs::read(path) { + if specifier.as_url().scheme() == "file" { + let charset = text_encoding::detect_charset(&bytes).to_string(); + if let Ok(source) = get_source_from_bytes(bytes, Some(charset)) { + let media_type = MediaType::from(specifier); + let mut maybe_types = None; + let dependencies = if let Some((dependencies, mt)) = + analysis::analyze_dependencies( + 
&specifier, + &source, + &media_type, + None, + ) { + maybe_types = mt; + Some(dependencies) + } else { + None + }; + let metadata = Metadata { + dependencies, + maybe_types, + media_type, + source, + version, + }; + self.metadata.insert(specifier.clone(), metadata.clone()); + Some(metadata) + } else { + None + } + } else { + let headers = self.get_remote_headers(specifier)?; + let maybe_content_type = headers.get("content-type").cloned(); + let (media_type, maybe_charset) = + map_content_type(specifier, maybe_content_type); + if let Ok(source) = get_source_from_bytes(bytes, maybe_charset) { + let mut maybe_types = + if let Some(types) = headers.get("x-typescript-types") { + Some(analysis::resolve_import(types, &specifier, None)) + } else { + None + }; + let dependencies = if let Some((dependencies, mt)) = + analysis::analyze_dependencies( + &specifier, + &source, + &media_type, + None, + ) { + if maybe_types.is_none() { + maybe_types = mt; + } + Some(dependencies) + } else { + None + }; + let metadata = Metadata { + dependencies, + maybe_types, + media_type, + source, + version, + }; + self.metadata.insert(specifier.clone(), metadata.clone()); + Some(metadata) + } else { + None + } + } + } else { + None + } + } + + fn get_path(&mut self, specifier: &ModuleSpecifier) -> Option { + let specifier = self.resolve_specifier(specifier)?; + if specifier.as_url().scheme() == "file" { + if let Ok(path) = specifier.as_url().to_file_path() { + Some(path) + } else { + None + } + } else if let Some(path) = self.remotes.get(&specifier) { + Some(path.clone()) + } else { + let path = self.http_cache.get_cache_filename(&specifier.as_url()); + if path.is_file() { + self.remotes.insert(specifier.clone(), path.clone()); + Some(path) + } else { + None + } + } + } + + fn get_remote_headers( + &self, + specifier: &ModuleSpecifier, + ) -> Option> { + let cache_filename = self.http_cache.get_cache_filename(specifier.as_url()); + let metadata_path = http_cache::Metadata::filename(&cache_filename); + if let Ok(metadata) = fs::read_to_string(metadata_path) { + if let Ok(metadata) = + serde_json::from_str::<'_, http_cache::Metadata>(&metadata) + { + return Some(metadata.headers); + } + } + None + } + + pub fn get_script_version( + &mut self, + specifier: &ModuleSpecifier, + ) -> Option { + if let Some(path) = self.get_path(specifier) { + if let Ok(metadata) = fs::metadata(path) { + if let Ok(modified) = metadata.modified() { + return if let Ok(n) = modified.duration_since(SystemTime::UNIX_EPOCH) + { + Some(format!("{}", n.as_millis())) + } else { + Some("1".to_string()) + }; + } else { + return Some("1".to_string()); + } + } + } + None + } + + pub fn get_text(&mut self, specifier: &ModuleSpecifier) -> Option { + let specifier = self.resolve_specifier(specifier)?; + let metadata = self.get_metadata(&specifier)?; + Some(metadata.source) + } + + fn resolution_result( + &mut self, + resolved_specifier: &ModuleSpecifier, + ) -> Option<(ModuleSpecifier, MediaType)> { + let resolved_specifier = self.resolve_specifier(resolved_specifier)?; + let media_type = + if let Some(metadata) = self.metadata.get(&resolved_specifier) { + metadata.media_type + } else { + MediaType::from(&resolved_specifier) + }; + Some((resolved_specifier, media_type)) + } + + pub fn resolve_import( + &mut self, + specifier: &str, + referrer: &ModuleSpecifier, + ) -> Option<(ModuleSpecifier, MediaType)> { + let referrer = self.resolve_specifier(referrer)?; + let metadata = self.get_metadata(&referrer)?; + let dependencies = &metadata.dependencies?; + let 
dependency = dependencies.get(specifier)?; + if let Some(type_dependency) = &dependency.maybe_type { + if let analysis::ResolvedImport::Resolved(resolved_specifier) = + type_dependency + { + self.resolution_result(resolved_specifier) + } else { + None + } + } else { + let code_dependency = &dependency.maybe_code.clone()?; + if let analysis::ResolvedImport::Resolved(resolved_specifier) = + code_dependency + { + self.resolution_result(resolved_specifier) + } else { + None + } + } + } + + fn resolve_specifier( + &mut self, + specifier: &ModuleSpecifier, + ) -> Option { + if specifier.as_url().scheme() == "file" { + if let Ok(path) = specifier.as_url().to_file_path() { + if path.is_file() { + return Some(specifier.clone()); + } + } + } else { + if let Some(specifier) = self.redirects.get(specifier) { + return Some(specifier.clone()); + } + if let Some(redirect) = self.resolve_remote_specifier(specifier, 10) { + self.redirects.insert(specifier.clone(), redirect.clone()); + return Some(redirect); + } + } + None + } + + fn resolve_remote_specifier( + &self, + specifier: &ModuleSpecifier, + redirect_limit: isize, + ) -> Option { + let cached_filename = + self.http_cache.get_cache_filename(specifier.as_url()); + if redirect_limit >= 0 && cached_filename.is_file() { + if let Some(headers) = self.get_remote_headers(specifier) { + if let Some(redirect_to) = headers.get("location") { + if let Ok(redirect) = + ModuleSpecifier::resolve_import(redirect_to, specifier.as_str()) + { + return self + .resolve_remote_specifier(&redirect, redirect_limit - 1); + } + } else { + return Some(specifier.clone()); + } + } + } + None + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::env; + use tempfile::TempDir; + + fn setup() -> (Sources, PathBuf) { + let temp_dir = TempDir::new().expect("could not create temp dir"); + let location = temp_dir.path().join("deps"); + let sources = Sources::new(&location); + (sources, location) + } + + #[test] + fn test_sources_get_script_version() { + let (mut sources, _) = setup(); + let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); + let tests = c.join("tests"); + let specifier = ModuleSpecifier::resolve_path( + &tests.join("001_hello.js").to_string_lossy(), + ) + .unwrap(); + let actual = sources.get_script_version(&specifier); + assert!(actual.is_some()); + } + + #[test] + fn test_sources_get_text() { + let (mut sources, _) = setup(); + let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); + let tests = c.join("tests"); + let specifier = ModuleSpecifier::resolve_path( + &tests.join("001_hello.js").to_string_lossy(), + ) + .unwrap(); + let actual = sources.get_text(&specifier); + assert!(actual.is_some()); + let actual = actual.unwrap(); + assert_eq!(actual, "console.log(\"Hello World\");\n"); + } + + #[test] + fn test_sources_get_length() { + let (mut sources, _) = setup(); + let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); + let tests = c.join("tests"); + let specifier = ModuleSpecifier::resolve_path( + &tests.join("001_hello.js").to_string_lossy(), + ) + .unwrap(); + let actual = sources.get_length(&specifier); + assert!(actual.is_some()); + let actual = actual.unwrap(); + assert_eq!(actual, 28); + } +} diff --git a/cli/lsp/state.rs b/cli/lsp/state.rs new file mode 100644 index 00000000000000..18a1e4023dfe9c --- /dev/null +++ b/cli/lsp/state.rs @@ -0,0 +1,292 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
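
`Sources::get_script_version` above derives a cheap cache-invalidation token from a file's modification time, falling back to a constant when the timestamp is unavailable so callers always have something to compare. A standalone sketch of that idea follows; the `script_version` helper is illustrative, not the patch's API.

```rust
use std::fs;
use std::path::Path;
use std::time::SystemTime;

/// Millisecond mtime as a string, or "1" when it cannot be determined,
/// so callers always get *some* version to compare against.
fn script_version(path: &Path) -> Option<String> {
    let metadata = fs::metadata(path).ok()?;
    let version = metadata
        .modified()
        .ok()
        .and_then(|m| m.duration_since(SystemTime::UNIX_EPOCH).ok())
        .map(|d| d.as_millis().to_string())
        .unwrap_or_else(|| "1".to_string());
    Some(version)
}

fn main() {
    if let Some(version) = script_version(Path::new("Cargo.toml")) {
        println!("script version: {}", version);
    }
}
```
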
+ +use super::analysis; +use super::config::Config; +use super::diagnostics::DiagnosticCollection; +use super::diagnostics::DiagnosticSource; +use super::diagnostics::DiagnosticVec; +use super::memory_cache::MemoryCache; +use super::sources::Sources; +use super::tsc; +use super::utils::notification_is; + +use crate::deno_dir; +use crate::import_map::ImportMap; +use crate::media_type::MediaType; + +use crossbeam_channel::select; +use crossbeam_channel::unbounded; +use crossbeam_channel::Receiver; +use crossbeam_channel::Sender; +use deno_core::JsRuntime; +use deno_core::ModuleSpecifier; +use lsp_server::Message; +use lsp_server::Notification; +use lsp_server::Request; +use lsp_server::RequestId; +use lsp_server::Response; +use std::cell::RefCell; +use std::collections::HashMap; +use std::env; +use std::fmt; +use std::rc::Rc; +use std::sync::Arc; +use std::sync::RwLock; +use std::time::Instant; + +type ReqHandler = fn(&mut ServerState, Response); +type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>; + +pub enum Event { + Message(Message), + Task(Task), +} + +impl fmt::Debug for Event { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let debug_verbose_not = + |notification: &Notification, f: &mut fmt::Formatter| { + f.debug_struct("Notification") + .field("method", ¬ification.method) + .finish() + }; + + match self { + Event::Message(Message::Notification(notification)) => { + if notification_is::( + notification, + ) || notification_is::( + notification, + ) { + return debug_verbose_not(notification, f); + } + } + Event::Task(Task::Response(response)) => { + return f + .debug_struct("Response") + .field("id", &response.id) + .field("error", &response.error) + .finish(); + } + _ => (), + } + match self { + Event::Message(it) => fmt::Debug::fmt(it, f), + Event::Task(it) => fmt::Debug::fmt(it, f), + } + } +} + +#[derive(Eq, PartialEq, Copy, Clone)] +pub enum Status { + Loading, + Ready, +} + +impl Default for Status { + fn default() -> Self { + Status::Loading + } +} + +#[derive(Debug)] +pub enum Task { + Diagnostics((DiagnosticSource, DiagnosticVec)), + Response(Response), +} + +#[derive(Debug, Clone)] +pub struct DocumentData { + pub dependencies: Option>, + pub version: Option, + specifier: ModuleSpecifier, +} + +impl DocumentData { + pub fn new( + specifier: ModuleSpecifier, + version: i32, + source: &str, + maybe_import_map: Option>>, + ) -> Self { + let dependencies = if let Some((dependencies, _)) = + analysis::analyze_dependencies( + &specifier, + source, + &MediaType::from(&specifier), + maybe_import_map, + ) { + Some(dependencies) + } else { + None + }; + Self { + dependencies, + version: Some(version), + specifier, + } + } + + pub fn update( + &mut self, + version: i32, + source: &str, + maybe_import_map: Option>>, + ) { + self.dependencies = if let Some((dependencies, _)) = + analysis::analyze_dependencies( + &self.specifier, + source, + &MediaType::from(&self.specifier), + maybe_import_map, + ) { + Some(dependencies) + } else { + None + }; + self.version = Some(version) + } +} + +/// An immutable snapshot of the server state at a point in time. 
+#[derive(Debug, Clone, Default)] +pub struct ServerStateSnapshot { + pub config: Config, + pub diagnostics: DiagnosticCollection, + pub doc_data: HashMap, + pub file_cache: Arc>, + pub sources: Arc>, +} + +pub struct ServerState { + pub config: Config, + pub diagnostics: DiagnosticCollection, + pub doc_data: HashMap, + pub file_cache: Arc>, + req_queue: ReqQueue, + sender: Sender, + pub sources: Arc>, + pub shutdown_requested: bool, + pub status: Status, + task_sender: Sender, + pub task_receiver: Receiver, + pub ts_runtime: JsRuntime, +} + +impl ServerState { + pub fn new(sender: Sender, config: Config) -> Self { + let (task_sender, task_receiver) = unbounded(); + let custom_root = env::var("DENO_DIR").map(String::into).ok(); + let dir = + deno_dir::DenoDir::new(custom_root).expect("could not access DENO_DIR"); + let location = dir.root.join("deps"); + let sources = Sources::new(&location); + // TODO(@kitsonk) we need to allow displaying diagnostics here, but the + // current compiler snapshot sends them to stdio which would totally break + // the language server... + let ts_runtime = tsc::start(false).expect("could not start tsc"); + + Self { + config, + diagnostics: Default::default(), + doc_data: HashMap::new(), + file_cache: Arc::new(RwLock::new(Default::default())), + req_queue: Default::default(), + sender, + sources: Arc::new(RwLock::new(sources)), + shutdown_requested: false, + status: Default::default(), + task_receiver, + task_sender, + ts_runtime, + } + } + + pub fn cancel(&mut self, request_id: RequestId) { + if let Some(response) = self.req_queue.incoming.cancel(request_id) { + self.send(response.into()); + } + } + + pub fn complete_request(&mut self, response: Response) { + let handler = self.req_queue.outgoing.complete(response.id.clone()); + handler(self, response) + } + + pub fn next_event(&self, inbox: &Receiver) -> Option { + select! { + recv(inbox) -> msg => msg.ok().map(Event::Message), + recv(self.task_receiver) -> task => Some(Event::Task(task.unwrap())), + } + } + + /// Handle any changes and return a `bool` that indicates if there were + /// important changes to the state. 
+ pub fn process_changes(&mut self) -> bool { + let mut file_cache = self.file_cache.write().unwrap(); + let changed_files = file_cache.take_changes(); + // other processing of changed files should be done here as needed + !changed_files.is_empty() + } + + pub fn register_request(&mut self, request: &Request, received: Instant) { + self + .req_queue + .incoming + .register(request.id.clone(), (request.method.clone(), received)); + } + + pub fn respond(&mut self, response: Response) { + if let Some((_, _)) = self.req_queue.incoming.complete(response.id.clone()) + { + self.send(response.into()); + } + } + + fn send(&mut self, message: Message) { + self.sender.send(message).unwrap() + } + + pub fn send_notification( + &mut self, + params: N::Params, + ) { + let notification = Notification::new(N::METHOD.to_string(), params); + self.send(notification.into()); + } + + pub fn send_request( + &mut self, + params: R::Params, + handler: ReqHandler, + ) { + let request = + self + .req_queue + .outgoing + .register(R::METHOD.to_string(), params, handler); + self.send(request.into()); + } + + pub fn snapshot(&self) -> ServerStateSnapshot { + ServerStateSnapshot { + config: self.config.clone(), + diagnostics: self.diagnostics.clone(), + doc_data: self.doc_data.clone(), + file_cache: Arc::clone(&self.file_cache), + sources: Arc::clone(&self.sources), + } + } + + pub fn spawn(&mut self, task: F) + where + F: FnOnce() -> Task + Send + 'static, + { + let sender = self.task_sender.clone(); + tokio::task::spawn_blocking(move || sender.send(task()).unwrap()); + } + + pub fn transition(&mut self, new_status: Status) { + self.status = new_status; + } +} diff --git a/cli/lsp/text.rs b/cli/lsp/text.rs new file mode 100644 index 00000000000000..5bca534c1bc7d1 --- /dev/null +++ b/cli/lsp/text.rs @@ -0,0 +1,514 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +use deno_core::serde_json::json; +use deno_core::serde_json::Value; +use dissimilar::diff; +use dissimilar::Chunk; +use lsp_types::TextEdit; +use std::ops::Bound; +use std::ops::Range; +use std::ops::RangeBounds; + +// TODO(@kitson) in general all of these text handling routines don't handle +// JavaScript encoding in the same way and likely cause issues when trying to +// arbitrate between chars and Unicode graphemes. There be dragons. + +/// Generate a character position for the start of each line. 
For example: +/// +/// ```rust +/// let actual = index_lines("a\nb\n"); +/// assert_eq!(actual, vec![0, 2, 4]); +/// ``` +/// +pub fn index_lines(text: &str) -> Vec { + let mut indexes = vec![0_u32]; + for (i, c) in text.chars().enumerate() { + if c == '\n' { + indexes.push((i + 1) as u32); + } + } + indexes +} + +enum IndexValid { + All, + UpTo(u32), +} + +impl IndexValid { + fn covers(&self, line: u32) -> bool { + match *self { + IndexValid::UpTo(to) => to > line, + IndexValid::All => true, + } + } +} + +fn to_range(line_index: &[u32], range: lsp_types::Range) -> Range { + let start = + (line_index[range.start.line as usize] + range.start.character) as usize; + let end = + (line_index[range.end.line as usize] + range.end.character) as usize; + Range { start, end } +} + +pub fn to_position(line_index: &[u32], char_pos: u32) -> lsp_types::Position { + let mut line = 0_usize; + let mut line_start = 0_u32; + for (pos, v) in line_index.iter().enumerate() { + if char_pos < *v { + break; + } + line_start = *v; + line = pos; + } + + lsp_types::Position { + line: line as u32, + character: char_pos - line_start, + } +} + +pub fn to_char_pos(line_index: &[u32], position: lsp_types::Position) -> u32 { + if let Some(line_start) = line_index.get(position.line as usize) { + line_start + position.character + } else { + 0_u32 + } +} + +/// Apply a vector of document changes to the supplied string. +pub fn apply_content_changes( + content: &mut String, + content_changes: Vec, +) { + let mut line_index = index_lines(&content); + let mut index_valid = IndexValid::All; + for change in content_changes { + if let Some(range) = change.range { + if !index_valid.covers(range.start.line) { + line_index = index_lines(&content); + } + let range = to_range(&line_index, range); + content.replace_range(range, &change.text); + } else { + *content = change.text; + index_valid = IndexValid::UpTo(0); + } + } +} + +/// Compare two strings and return a vector of text edit records which are +/// supported by the Language Server Protocol. +pub fn get_edits(a: &str, b: &str) -> Vec { + let chunks = diff(a, b); + let mut text_edits = Vec::::new(); + let line_index = index_lines(a); + let mut iter = chunks.iter().peekable(); + let mut a_pos = 0_u32; + loop { + let chunk = iter.next(); + match chunk { + None => break, + Some(Chunk::Equal(e)) => { + a_pos += e.chars().count() as u32; + } + Some(Chunk::Delete(d)) => { + let start = to_position(&line_index, a_pos); + a_pos += d.chars().count() as u32; + let end = to_position(&line_index, a_pos); + let range = lsp_types::Range { start, end }; + match iter.peek() { + Some(Chunk::Insert(i)) => { + iter.next(); + text_edits.push(TextEdit { + range, + new_text: i.to_string(), + }); + } + _ => text_edits.push(TextEdit { + range, + new_text: "".to_string(), + }), + } + } + Some(Chunk::Insert(i)) => { + let pos = to_position(&line_index, a_pos); + let range = lsp_types::Range { + start: pos, + end: pos, + }; + text_edits.push(TextEdit { + range, + new_text: i.to_string(), + }); + } + } + } + + text_edits +} + +/// Convert a difference between two strings into a change range used by the +/// TypeScript Language Service. 
+pub fn get_range_change(a: &str, b: &str) -> Value { + let chunks = diff(a, b); + let mut iter = chunks.iter().peekable(); + let mut started = false; + let mut start = 0; + let mut end = 0; + let mut new_length = 0; + let mut equal = 0; + let mut a_pos = 0; + loop { + let chunk = iter.next(); + match chunk { + None => break, + Some(Chunk::Equal(e)) => { + a_pos += e.chars().count(); + equal += e.chars().count(); + } + Some(Chunk::Delete(d)) => { + if !started { + start = a_pos; + started = true; + equal = 0; + } + a_pos += d.chars().count(); + if started { + end = a_pos; + new_length += equal; + equal = 0; + } + } + Some(Chunk::Insert(i)) => { + if !started { + start = a_pos; + end = a_pos; + started = true; + equal = 0; + } else { + end += equal; + } + new_length += i.chars().count() + equal; + equal = 0; + } + } + } + + json!({ + "span": { + "start": start, + "length": end - start, + }, + "newLength": new_length, + }) +} + +/// Provide a slice of a string based on a character range. +pub fn slice(s: &str, range: impl RangeBounds) -> &str { + let start = match range.start_bound() { + Bound::Included(bound) | Bound::Excluded(bound) => *bound, + Bound::Unbounded => 0, + }; + let len = match range.end_bound() { + Bound::Included(bound) => *bound + 1, + Bound::Excluded(bound) => *bound, + Bound::Unbounded => s.len(), + } - start; + substring(s, start, start + len) +} + +/// Provide a substring based on the start and end character index positions. +pub fn substring(s: &str, start: usize, end: usize) -> &str { + let len = end - start; + let mut char_pos = 0; + let mut byte_start = 0; + let mut it = s.chars(); + loop { + if char_pos == start { + break; + } + if let Some(c) = it.next() { + char_pos += 1; + byte_start += c.len_utf8(); + } else { + break; + } + } + char_pos = 0; + let mut byte_end = byte_start; + loop { + if char_pos == len { + break; + } + if let Some(c) = it.next() { + char_pos += 1; + byte_end += c.len_utf8(); + } else { + break; + } + } + &s[byte_start..byte_end] +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_apply_content_changes() { + let mut content = "a\nb\nc\nd".to_string(); + let content_changes = vec![lsp_types::TextDocumentContentChangeEvent { + range: Some(lsp_types::Range { + start: lsp_types::Position { + line: 1, + character: 0, + }, + end: lsp_types::Position { + line: 1, + character: 1, + }, + }), + range_length: Some(1), + text: "e".to_string(), + }]; + apply_content_changes(&mut content, content_changes); + assert_eq!(content, "a\ne\nc\nd"); + } + + #[test] + fn test_get_edits() { + let a = "abcdefg"; + let b = "a\nb\nchije\nfg\n"; + let actual = get_edits(a, b); + assert_eq!( + actual, + vec![ + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 1 + }, + end: lsp_types::Position { + line: 0, + character: 5 + } + }, + new_text: "\nb\nchije\n".to_string() + }, + TextEdit { + range: lsp_types::Range { + start: lsp_types::Position { + line: 0, + character: 7 + }, + end: lsp_types::Position { + line: 0, + character: 7 + } + }, + new_text: "\n".to_string() + }, + ] + ); + } + + #[test] + fn test_get_range_change() { + let a = "abcdefg"; + let b = "abedcfg"; + let actual = get_range_change(a, b); + assert_eq!( + actual, + json!({ + "span": { + "start": 2, + "length": 3, + }, + "newLength": 3 + }) + ); + + let a = "abfg"; + let b = "abcdefg"; + let actual = get_range_change(a, b); + assert_eq!( + actual, + json!({ + "span": { + "start": 2, + "length": 0, + }, + "newLength": 3 + }) + ); + + let a = 
"abcdefg"; + let b = "abfg"; + let actual = get_range_change(a, b); + assert_eq!( + actual, + json!({ + "span": { + "start": 2, + "length": 3, + }, + "newLength": 0 + }) + ); + + let a = "abcdefg"; + let b = "abfghij"; + let actual = get_range_change(a, b); + assert_eq!( + actual, + json!({ + "span": { + "start": 2, + "length": 5, + }, + "newLength": 5 + }) + ); + + let a = "abcdefghijk"; + let b = "axcxexfxixk"; + let actual = get_range_change(a, b); + assert_eq!( + actual, + json!({ + "span": { + "start": 1, + "length": 9, + }, + "newLength": 9 + }) + ); + + let a = "abcde"; + let b = "ab(c)de"; + let actual = get_range_change(a, b); + assert_eq!( + actual, + json!({ + "span" : { + "start": 2, + "length": 1, + }, + "newLength": 3 + }) + ); + } + + #[test] + fn test_index_lines() { + let actual = index_lines("a\nb\r\nc"); + assert_eq!(actual, vec![0, 2, 5]); + } + + #[test] + fn test_to_position() { + let line_index = index_lines("a\nb\r\nc\n"); + assert_eq!( + to_position(&line_index, 6), + lsp_types::Position { + line: 2, + character: 1, + } + ); + assert_eq!( + to_position(&line_index, 0), + lsp_types::Position { + line: 0, + character: 0, + } + ); + assert_eq!( + to_position(&line_index, 3), + lsp_types::Position { + line: 1, + character: 1, + } + ); + } + + #[test] + fn test_to_position_mbc() { + let line_index = index_lines("y̆\n😱🦕\n🤯\n"); + assert_eq!( + to_position(&line_index, 0), + lsp_types::Position { + line: 0, + character: 0, + } + ); + assert_eq!( + to_position(&line_index, 2), + lsp_types::Position { + line: 0, + character: 2, + } + ); + assert_eq!( + to_position(&line_index, 3), + lsp_types::Position { + line: 1, + character: 0, + } + ); + assert_eq!( + to_position(&line_index, 4), + lsp_types::Position { + line: 1, + character: 1, + } + ); + assert_eq!( + to_position(&line_index, 5), + lsp_types::Position { + line: 1, + character: 2, + } + ); + assert_eq!( + to_position(&line_index, 6), + lsp_types::Position { + line: 2, + character: 0, + } + ); + assert_eq!( + to_position(&line_index, 7), + lsp_types::Position { + line: 2, + character: 1, + } + ); + assert_eq!( + to_position(&line_index, 8), + lsp_types::Position { + line: 3, + character: 0, + } + ); + } + + #[test] + fn test_substring() { + assert_eq!(substring("Deno", 1, 3), "en"); + assert_eq!(substring("y̆y̆", 2, 4), "y̆"); + // this doesn't work like JavaScript, as 🦕 is treated as a single char in + // Rust, but as two chars in JavaScript. + // assert_eq!(substring("🦕🦕", 2, 4), "🦕"); + } + + #[test] + fn test_slice() { + assert_eq!(slice("Deno", 1..3), "en"); + assert_eq!(slice("Deno", 1..=3), "eno"); + assert_eq!(slice("Deno Land", 1..), "eno Land"); + assert_eq!(slice("Deno", ..3), "Den"); + } +} diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs new file mode 100644 index 00000000000000..65f6ebbdb39957 --- /dev/null +++ b/cli/lsp/tsc.rs @@ -0,0 +1,1210 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
+ +use super::analysis::ResolvedImport; +use super::state::ServerStateSnapshot; +use super::text; +use super::utils; + +use crate::js; +use crate::media_type::MediaType; +use crate::tsc::ResolveArgs; +use crate::tsc_config::TsConfig; + +use deno_core::error::custom_error; +use deno_core::error::AnyError; +use deno_core::json_op_sync; +use deno_core::serde::Deserialize; +use deno_core::serde_json; +use deno_core::serde_json::json; +use deno_core::serde_json::Value; +use deno_core::JsRuntime; +use deno_core::ModuleSpecifier; +use deno_core::OpFn; +use deno_core::RuntimeOptions; +use regex::Captures; +use regex::Regex; +use std::borrow::Cow; +use std::collections::HashMap; + +/// Provide static assets for the language server. +/// +/// TODO(@kitsonk) this should be DRY'ed up with `cli/tsc.rs` and the +/// `cli/build.rs` +pub fn get_asset(asset: &str) -> Option<&'static str> { + macro_rules! inc { + ($e:expr) => { + Some(include_str!(concat!("../dts/", $e))) + }; + } + match asset { + // These are not included in the snapshot + "/lib.dom.d.ts" => inc!("lib.dom.d.ts"), + "/lib.dom.iterable.d.ts" => inc!("lib.dom.iterable.d.ts"), + "/lib.es6.d.ts" => inc!("lib.es6.d.ts"), + "/lib.es2016.full.d.ts" => inc!("lib.es2016.full.d.ts"), + "/lib.es2017.full.d.ts" => inc!("lib.es2017.full.d.ts"), + "/lib.es2018.full.d.ts" => inc!("lib.es2018.full.d.ts"), + "/lib.es2019.full.d.ts" => inc!("lib.es2019.full.d.ts"), + "/lib.es2020.full.d.ts" => inc!("lib.es2020.full.d.ts"), + "/lib.esnext.full.d.ts" => inc!("lib.esnext.full.d.ts"), + "/lib.scripthost.d.ts" => inc!("lib.scripthost.d.ts"), + "/lib.webworker.d.ts" => inc!("lib.webworker.d.ts"), + "/lib.webworker.importscripts.d.ts" => { + inc!("lib.webworker.importscripts.d.ts") + } + "/lib.webworker.iterable.d.ts" => inc!("lib.webworker.iterable.d.ts"), + // These come from op crates + // TODO(@kitsonk) these is even hackier than the rest of this... + "/lib.deno.web.d.ts" => { + Some(include_str!("../../op_crates/web/lib.deno_web.d.ts")) + } + "/lib.deno.fetch.d.ts" => { + Some(include_str!("../../op_crates/fetch/lib.deno_fetch.d.ts")) + } + // These are included in the snapshot for TypeScript, and could be retrieved + // from there? 
+ "/lib.d.ts" => inc!("lib.d.ts"), + "/lib.deno.ns.d.ts" => inc!("lib.deno.ns.d.ts"), + "/lib.deno.shared_globals.d.ts" => inc!("lib.deno.shared_globals.d.ts"), + "/lib.deno.unstable.d.ts" => inc!("lib.deno.unstable.d.ts"), + "/lib.deno.window.d.ts" => inc!("lib.deno.window.d.ts"), + "/lib.deno.worker.d.ts" => inc!("lib.deno.worker.d.ts"), + "/lib.es5.d.ts" => inc!("lib.es5.d.ts"), + "/lib.es2015.collection.d.ts" => inc!("lib.es2015.collection.d.ts"), + "/lib.es2015.core.d.ts" => inc!("lib.es2015.core.d.ts"), + "/lib.es2015.d.ts" => inc!("lib.es2015.d.ts"), + "/lib.es2015.generator.d.ts" => inc!("lib.es2015.generator.d.ts"), + "/lib.es2015.iterable.d.ts" => inc!("lib.es2015.iterable.d.ts"), + "/lib.es2015.promise.d.ts" => inc!("lib.es2015.promise.d.ts"), + "/lib.es2015.proxy.d.ts" => inc!("lib.es2015.proxy.d.ts"), + "/lib.es2015.reflect.d.ts" => inc!("lib.es2015.reflect.d.ts"), + "/lib.es2015.symbol.d.ts" => inc!("lib.es2015.symbol.d.ts"), + "/lib.es2015.symbol.wellknown.d.ts" => { + inc!("lib.es2015.symbol.wellknown.d.ts") + } + "/lib.es2016.array.include.d.ts" => inc!("lib.es2016.array.include.d.ts"), + "/lib.es2016.d.ts" => inc!("lib.es2016.d.ts"), + "/lib.es2017.d.ts" => inc!("lib.es2017.d.ts"), + "/lib.es2017.intl.d.ts" => inc!("lib.es2017.intl.d.ts"), + "/lib.es2017.object.d.ts" => inc!("lib.es2017.object.d.ts"), + "/lib.es2017.sharedmemory.d.ts" => inc!("lib.es2017.sharedmemory.d.ts"), + "/lib.es2017.string.d.ts" => inc!("lib.es2017.string.d.ts"), + "/lib.es2017.typedarrays.d.ts" => inc!("lib.es2017.typedarrays.d.ts"), + "/lib.es2018.asyncgenerator.d.ts" => inc!("lib.es2018.asyncgenerator.d.ts"), + "/lib.es2018.asynciterable.d.ts" => inc!("lib.es2018.asynciterable.d.ts"), + "/lib.es2018.d.ts" => inc!("lib.es2018.d.ts"), + "/lib.es2018.intl.d.ts" => inc!("lib.es2018.intl.d.ts"), + "/lib.es2018.promise.d.ts" => inc!("lib.es2018.promise.d.ts"), + "/lib.es2018.regexp.d.ts" => inc!("lib.es2018.regexp.d.ts"), + "/lib.es2019.array.d.ts" => inc!("lib.es2019.array.d.ts"), + "/lib.es2019.d.ts" => inc!("lib.es2019.d.ts"), + "/lib.es2019.object.d.ts" => inc!("lib.es2019.object.d.ts"), + "/lib.es2019.string.d.ts" => inc!("lib.es2019.string.d.ts"), + "/lib.es2019.symbol.d.ts" => inc!("lib.es2019.symbol.d.ts"), + "/lib.es2020.bigint.d.ts" => inc!("lib.es2020.bigint.d.ts"), + "/lib.es2020.d.ts" => inc!("lib.es2020.d.ts"), + "/lib.es2020.intl.d.ts" => inc!("lib.es2020.intl.d.ts"), + "/lib.es2020.promise.d.ts" => inc!("lib.es2020.promise.d.ts"), + "/lib.es2020.sharedmemory.d.ts" => inc!("lib.es2020.sharedmemory.d.ts"), + "/lib.es2020.string.d.ts" => inc!("lib.es2020.string.d.ts"), + "/lib.es2020.symbol.wellknown.d.ts" => { + inc!("lib.es2020.symbol.wellknown.d.ts") + } + "/lib.esnext.d.ts" => inc!("lib.esnext.d.ts"), + "/lib.esnext.intl.d.ts" => inc!("lib.esnext.intl.d.ts"), + "/lib.esnext.promise.d.ts" => inc!("lib.esnext.promise.d.ts"), + "/lib.esnext.string.d.ts" => inc!("lib.esnext.string.d.ts"), + "/lib.esnext.weakref.d.ts" => inc!("lib.esnext.weakref.d.ts"), + _ => None, + } +} + +fn display_parts_to_string( + maybe_parts: Option>, +) -> Option { + maybe_parts.map(|parts| { + parts + .into_iter() + .map(|p| p.text) + .collect::>() + .join("") + }) +} + +fn get_tag_body_text(tag: &JSDocTagInfo) -> Option { + tag.text.as_ref().map(|text| match tag.name.as_str() { + "example" => { + let caption_regex = + Regex::new(r"(.*?)\s*\r?\n((?:\s|\S)*)").unwrap(); + if caption_regex.is_match(&text) { + caption_regex + .replace(text, |c: &Captures| { + format!("{}\n\n{}", &c[1], make_codeblock(&c[2])) + }) + 
.to_string() + } else { + make_codeblock(text) + } + } + "author" => { + let email_match_regex = Regex::new(r"(.+)\s<([-.\w]+@[-.\w]+)>").unwrap(); + email_match_regex + .replace(text, |c: &Captures| format!("{} {}", &c[1], &c[2])) + .to_string() + } + "default" => make_codeblock(text), + _ => replace_links(text), + }) +} + +fn get_tag_documentation(tag: &JSDocTagInfo) -> String { + match tag.name.as_str() { + "augments" | "extends" | "param" | "template" => { + if let Some(text) = &tag.text { + let part_regex = Regex::new(r"^(\S+)\s*-?\s*").unwrap(); + let body: Vec<&str> = part_regex.split(&text).collect(); + if body.len() == 3 { + let param = body[1]; + let doc = body[2]; + let label = format!("*@{}* `{}`", tag.name, param); + if doc.is_empty() { + return label; + } + if doc.contains('\n') { + return format!("{} \n{}", label, replace_links(doc)); + } else { + return format!("{} - {}", label, replace_links(doc)); + } + } + } + } + _ => (), + } + let label = format!("*@{}*", tag.name); + let maybe_text = get_tag_body_text(tag); + if let Some(text) = maybe_text { + if text.contains('\n') { + format!("{} \n{}", label, text) + } else { + format!("{} - {}", label, text) + } + } else { + label + } +} + +fn make_codeblock(text: &str) -> String { + let codeblock_regex = Regex::new(r"^\s*[~`]{3}").unwrap(); + if codeblock_regex.is_match(text) { + text.to_string() + } else { + format!("```\n{}\n```", text) + } +} + +/// Replace JSDoc like links (`{@link http://example.com}`) with markdown links +fn replace_links(text: &str) -> String { + let jsdoc_links_regex = Regex::new(r"(?i)\{@(link|linkplain|linkcode) (https?://[^ |}]+?)(?:[| ]([^{}\n]+?))?\}").unwrap(); + jsdoc_links_regex + .replace_all(text, |c: &Captures| match &c[1] { + "linkcode" => format!( + "[`{}`]({})", + if c.get(3).is_none() { + &c[2] + } else { + c[3].trim() + }, + &c[2] + ), + _ => format!( + "[{}]({})", + if c.get(3).is_none() { + &c[2] + } else { + c[3].trim() + }, + &c[2] + ), + }) + .to_string() +} + +#[derive(Debug, Deserialize)] +pub enum ScriptElementKind { + #[serde(rename = "")] + Unknown, + #[serde(rename = "warning")] + Warning, + #[serde(rename = "keyword")] + Keyword, + #[serde(rename = "script")] + ScriptElement, + #[serde(rename = "module")] + ModuleElement, + #[serde(rename = "class")] + ClassElement, + #[serde(rename = "local class")] + LocalClassElement, + #[serde(rename = "interface")] + InterfaceElement, + #[serde(rename = "type")] + TypeElement, + #[serde(rename = "enum")] + EnumElement, + #[serde(rename = "enum member")] + EnumMemberElement, + #[serde(rename = "var")] + VariableElement, + #[serde(rename = "local var")] + LocalVariableElement, + #[serde(rename = "function")] + FunctionElement, + #[serde(rename = "local function")] + LocalFunctionElement, + #[serde(rename = "method")] + MemberFunctionElement, + #[serde(rename = "getter")] + MemberGetAccessorElement, + #[serde(rename = "setter")] + MemberSetAccessorElement, + #[serde(rename = "property")] + MemberVariableElement, + #[serde(rename = "constructor")] + ConstructorImplementationElement, + #[serde(rename = "call")] + CallSignatureElement, + #[serde(rename = "index")] + IndexSignatureElement, + #[serde(rename = "construct")] + ConstructSignatureElement, + #[serde(rename = "parameter")] + ParameterElement, + #[serde(rename = "type parameter")] + TypeParameterElement, + #[serde(rename = "primitive type")] + PrimitiveType, + #[serde(rename = "label")] + Label, + #[serde(rename = "alias")] + Alias, + #[serde(rename = "const")] + ConstElement, + 
#[serde(rename = "let")] + LetElement, + #[serde(rename = "directory")] + Directory, + #[serde(rename = "external module name")] + ExternalModuleName, + #[serde(rename = "JSX attribute")] + JsxAttribute, + #[serde(rename = "string")] + String, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TextSpan { + start: u32, + length: u32, +} + +impl TextSpan { + pub fn to_range(&self, line_index: &[u32]) -> lsp_types::Range { + lsp_types::Range { + start: text::to_position(line_index, self.start), + end: text::to_position(line_index, self.start + self.length), + } + } +} + +#[derive(Debug, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct SymbolDisplayPart { + text: String, + kind: String, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct JSDocTagInfo { + name: String, + text: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct QuickInfo { + kind: ScriptElementKind, + kind_modifiers: String, + text_span: TextSpan, + display_parts: Option>, + documentation: Option>, + tags: Option>, +} + +impl QuickInfo { + pub fn to_hover(&self, line_index: &[u32]) -> lsp_types::Hover { + let mut contents = Vec::::new(); + if let Some(display_string) = + display_parts_to_string(self.display_parts.clone()) + { + contents.push(lsp_types::MarkedString::from_language_code( + "typescript".to_string(), + display_string, + )); + } + if let Some(documentation) = + display_parts_to_string(self.documentation.clone()) + { + contents.push(lsp_types::MarkedString::from_markdown(documentation)); + } + if let Some(tags) = &self.tags { + let tags_preview = tags + .iter() + .map(get_tag_documentation) + .collect::>() + .join(" \n\n"); + if !tags_preview.is_empty() { + contents.push(lsp_types::MarkedString::from_markdown(format!( + "\n\n{}", + tags_preview + ))); + } + } + lsp_types::Hover { + contents: lsp_types::HoverContents::Array(contents), + range: Some(self.text_span.to_range(line_index)), + } + } +} + +#[derive(Debug, Deserialize)] +pub enum HighlightSpanKind { + #[serde(rename = "none")] + None, + #[serde(rename = "definition")] + Definition, + #[serde(rename = "reference")] + Reference, + #[serde(rename = "writtenReference")] + WrittenReference, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct HighlightSpan { + file_name: Option, + is_in_string: Option, + text_span: TextSpan, + context_span: Option, + kind: HighlightSpanKind, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DefinitionInfo { + kind: ScriptElementKind, + name: String, + container_kind: Option, + container_name: Option, + text_span: TextSpan, + pub file_name: String, + original_text_span: Option, + original_file_name: Option, + context_span: Option, + original_context_span: Option, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DefinitionInfoAndBoundSpan { + pub definitions: Option>, + text_span: TextSpan, +} + +impl DefinitionInfoAndBoundSpan { + pub fn to_definition( + &self, + line_index: &[u32], + mut index_provider: F, + ) -> Option + where + F: FnMut(ModuleSpecifier) -> Vec, + { + if let Some(definitions) = &self.definitions { + let location_links = definitions + .iter() + .map(|di| { + let target_specifier = + ModuleSpecifier::resolve_url(&di.file_name).unwrap(); + let target_line_index = index_provider(target_specifier); + let target_uri = utils::normalize_file_name(&di.file_name).unwrap(); + let (target_range, 
target_selection_range) = + if let Some(context_span) = &di.context_span { + ( + context_span.to_range(&target_line_index), + di.text_span.to_range(&target_line_index), + ) + } else { + ( + di.text_span.to_range(&target_line_index), + di.text_span.to_range(&target_line_index), + ) + }; + lsp_types::LocationLink { + origin_selection_range: Some(self.text_span.to_range(line_index)), + target_uri, + target_range, + target_selection_range, + } + }) + .collect(); + + Some(lsp_types::GotoDefinitionResponse::Link(location_links)) + } else { + None + } + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DocumentHighlights { + file_name: String, + highlight_spans: Vec, +} + +impl DocumentHighlights { + pub fn to_highlight( + &self, + line_index: &[u32], + ) -> Vec { + self + .highlight_spans + .iter() + .map(|hs| lsp_types::DocumentHighlight { + range: hs.text_span.to_range(line_index), + kind: match hs.kind { + HighlightSpanKind::WrittenReference => { + Some(lsp_types::DocumentHighlightKind::Write) + } + _ => Some(lsp_types::DocumentHighlightKind::Read), + }, + }) + .collect() + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ReferenceEntry { + is_write_access: bool, + pub is_definition: bool, + is_in_string: Option, + text_span: TextSpan, + pub file_name: String, + original_text_span: Option, + original_file_name: Option, + context_span: Option, + original_context_span: Option, +} + +impl ReferenceEntry { + pub fn to_location(&self, line_index: &[u32]) -> lsp_types::Location { + let uri = utils::normalize_file_name(&self.file_name).unwrap(); + lsp_types::Location { + uri, + range: self.text_span.to_range(line_index), + } + } +} + +#[derive(Debug, Clone, Deserialize)] +struct Response { + id: usize, + data: Value, +} + +struct State<'a> { + last_id: usize, + response: Option, + server_state: ServerStateSnapshot, + snapshots: HashMap<(Cow<'a, str>, Cow<'a, str>), String>, +} + +impl<'a> State<'a> { + fn new(server_state: ServerStateSnapshot) -> Self { + Self { + last_id: 1, + response: None, + server_state, + snapshots: Default::default(), + } + } +} + +/// If a snapshot is missing from the state cache, add it. +fn cache_snapshot( + state: &mut State, + specifier: String, + version: String, +) -> Result<(), AnyError> { + if !state + .snapshots + .contains_key(&(specifier.clone().into(), version.clone().into())) + { + let s = ModuleSpecifier::resolve_url(&specifier)?; + let file_cache = state.server_state.file_cache.read().unwrap(); + let file_id = file_cache.lookup(&s).unwrap(); + let content = file_cache.get_contents(file_id)?; + state + .snapshots + .insert((specifier.into(), version.into()), content); + } + Ok(()) +} + +fn op(op_fn: F) -> Box +where + F: Fn(&mut State, Value) -> Result + 'static, +{ + json_op_sync(move |s, args, _bufs| { + let state = s.borrow_mut::(); + op_fn(state, args) + }) +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct SourceSnapshotArgs { + specifier: String, + version: String, +} + +/// The language service is dropping a reference to a source file snapshot, and +/// we can drop our version of that document. 
+fn dispose(state: &mut State, args: Value) -> Result<Value, AnyError> {
+  let v: SourceSnapshotArgs = serde_json::from_value(args)?;
+  state
+    .snapshots
+    .remove(&(v.specifier.into(), v.version.into()));
+  Ok(json!(true))
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct GetChangeRangeArgs {
+  specifier: String,
+  old_length: u32,
+  old_version: String,
+  version: String,
+}
+
+/// The language service wants to compare an old snapshot with a new snapshot
+/// to determine what has changed in the source.
+fn get_change_range(state: &mut State, args: Value) -> Result<Value, AnyError> {
+  let v: GetChangeRangeArgs = serde_json::from_value(args.clone())?;
+  cache_snapshot(state, v.specifier.clone(), v.version.clone())?;
+  if let Some(current) = state
+    .snapshots
+    .get(&(v.specifier.clone().into(), v.version.into()))
+  {
+    if let Some(prev) = state
+      .snapshots
+      .get(&(v.specifier.clone().into(), v.old_version.clone().into()))
+    {
+      Ok(text::get_range_change(prev, current))
+    } else {
+      // when a local file is opened up in the editor, the compiler might
+      // already have a snapshot of it in memory, and will request it, but we
+      // now are working off in memory versions of the document, and so need
+      // to tell tsc to reset the whole document
+      Ok(json!({
+        "span": {
+          "start": 0,
+          "length": v.old_length,
+        },
+        "newLength": current.chars().count(),
+      }))
+    }
+  } else {
+    Err(custom_error(
+      "MissingSnapshot",
+      format!(
+        "The current snapshot version is missing.\n  Args: \"{}\"",
+        args
+      ),
+    ))
+  }
+}
+
+fn get_length(state: &mut State, args: Value) -> Result<Value, AnyError> {
+  let v: SourceSnapshotArgs = serde_json::from_value(args)?;
+  let specifier = ModuleSpecifier::resolve_url(&v.specifier)?;
+  if state.server_state.doc_data.contains_key(&specifier) {
+    cache_snapshot(state, v.specifier.clone(), v.version.clone())?;
+    let content = state
+      .snapshots
+      .get(&(v.specifier.into(), v.version.into()))
+      .unwrap();
+    Ok(json!(content.chars().count()))
+  } else {
+    let mut sources = state.server_state.sources.write().unwrap();
+    Ok(json!(sources.get_length(&specifier).unwrap()))
+  }
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct GetTextArgs {
+  specifier: String,
+  version: String,
+  start: usize,
+  end: usize,
+}
+
+fn get_text(state: &mut State, args: Value) -> Result<Value, AnyError> {
+  let v: GetTextArgs = serde_json::from_value(args)?;
+  let specifier = ModuleSpecifier::resolve_url(&v.specifier)?;
+  let content = if state.server_state.doc_data.contains_key(&specifier) {
+    cache_snapshot(state, v.specifier.clone(), v.version.clone())?;
+    state
+      .snapshots
+      .get(&(v.specifier.into(), v.version.into()))
+      .unwrap()
+      .clone()
+  } else {
+    let mut sources = state.server_state.sources.write().unwrap();
+    sources.get_text(&specifier).unwrap()
+  };
+  Ok(json!(text::slice(&content, v.start..v.end)))
+}
+
+fn resolve(state: &mut State, args: Value) -> Result<Value, AnyError> {
+  let v: ResolveArgs = serde_json::from_value(args)?;
+  let mut resolved = Vec::<Option<(String, String)>>::new();
+  let referrer = ModuleSpecifier::resolve_url(&v.base)?;
+  let mut sources = if let Ok(sources) = state.server_state.sources.write() {
+    sources
+  } else {
+    return Err(custom_error("Deadlock", "deadlock locking sources"));
+  };
+
+  if let Some(doc_data) = state.server_state.doc_data.get(&referrer) {
+    if let Some(dependencies) = &doc_data.dependencies {
+      for specifier in &v.specifiers {
+        if specifier.starts_with("asset:///") {
+          resolved.push(Some((
+            specifier.clone(),
+            MediaType::from(specifier).as_ts_extension(),
+          )))
+        } else if let 
Some(dependency) = dependencies.get(specifier) { + let resolved_import = + if let Some(resolved_import) = &dependency.maybe_type { + resolved_import.clone() + } else if let Some(resolved_import) = &dependency.maybe_code { + resolved_import.clone() + } else { + ResolvedImport::Err("missing dependency".to_string()) + }; + if let ResolvedImport::Resolved(resolved_specifier) = resolved_import + { + let media_type = if let Some(media_type) = + sources.get_media_type(&resolved_specifier) + { + media_type + } else { + MediaType::from(&resolved_specifier) + }; + resolved.push(Some(( + resolved_specifier.to_string(), + media_type.as_ts_extension(), + ))); + } else { + resolved.push(None); + } + } + } + } + } else if sources.contains(&referrer) { + for specifier in &v.specifiers { + if let Some((resolved_specifier, media_type)) = + sources.resolve_import(specifier, &referrer) + { + resolved.push(Some(( + resolved_specifier.to_string(), + media_type.as_ts_extension(), + ))); + } else { + resolved.push(None); + } + } + } else { + return Err(custom_error( + "NotFound", + "the referring specifier is unexpectedly missing", + )); + } + + Ok(json!(resolved)) +} + +fn respond(state: &mut State, args: Value) -> Result { + state.response = Some(serde_json::from_value(args)?); + Ok(json!(true)) +} + +fn script_names(state: &mut State, _args: Value) -> Result { + let script_names: Vec<&ModuleSpecifier> = + state.server_state.doc_data.keys().collect(); + Ok(json!(script_names)) +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct ScriptVersionArgs { + specifier: String, +} + +fn script_version(state: &mut State, args: Value) -> Result { + let v: ScriptVersionArgs = serde_json::from_value(args)?; + let specifier = ModuleSpecifier::resolve_url(&v.specifier)?; + let maybe_doc_data = state.server_state.doc_data.get(&specifier); + if let Some(doc_data) = maybe_doc_data { + if let Some(version) = doc_data.version { + return Ok(json!(version.to_string())); + } + } else { + let mut sources = state.server_state.sources.write().unwrap(); + if let Some(version) = sources.get_script_version(&specifier) { + return Ok(json!(version)); + } + } + + Ok(json!(None::)) +} + +/// Create and setup a JsRuntime based on a snapshot. It is expected that the +/// supplied snapshot is an isolate that contains the TypeScript language +/// server. +pub fn start(debug: bool) -> Result { + let mut runtime = JsRuntime::new(RuntimeOptions { + startup_snapshot: Some(js::compiler_isolate_init()), + ..Default::default() + }); + + { + let op_state = runtime.op_state(); + let mut op_state = op_state.borrow_mut(); + op_state.put(State::new(ServerStateSnapshot::default())); + } + + runtime.register_op("op_dispose", op(dispose)); + runtime.register_op("op_get_change_range", op(get_change_range)); + runtime.register_op("op_get_length", op(get_length)); + runtime.register_op("op_get_text", op(get_text)); + runtime.register_op("op_resolve", op(resolve)); + runtime.register_op("op_respond", op(respond)); + runtime.register_op("op_script_names", op(script_names)); + runtime.register_op("op_script_version", op(script_version)); + + let init_config = json!({ "debug": debug }); + let init_src = format!("globalThis.serverInit({});", init_config); + + runtime.execute("[native code]", &init_src)?; + Ok(runtime) +} + +/// Methods that are supported by the Language Service in the compiler isolate. +pub enum RequestMethod { + /// Configure the compilation settings for the server. 
+  Configure(TsConfig),
+  /// Return semantic diagnostics for a given file.
+  GetSemanticDiagnostics(ModuleSpecifier),
+  /// Return suggestion diagnostics for a given file.
+  GetSuggestionDiagnostics(ModuleSpecifier),
+  /// Return syntactic diagnostics for a given file.
+  GetSyntacticDiagnostics(ModuleSpecifier),
+  /// Return quick info at position (hover information).
+  GetQuickInfo((ModuleSpecifier, u32)),
+  /// Return document highlights at position.
+  GetDocumentHighlights((ModuleSpecifier, u32, Vec<String>)),
+  /// Get document references for a specific position.
+  GetReferences((ModuleSpecifier, u32)),
+  /// Get declaration information for a specific position.
+  GetDefinition((ModuleSpecifier, u32)),
+}
+
+impl RequestMethod {
+  pub fn to_value(&self, id: usize) -> Value {
+    match self {
+      RequestMethod::Configure(config) => json!({
+        "id": id,
+        "method": "configure",
+        "compilerOptions": config,
+      }),
+      RequestMethod::GetSemanticDiagnostics(specifier) => json!({
+        "id": id,
+        "method": "getSemanticDiagnostics",
+        "specifier": specifier,
+      }),
+      RequestMethod::GetSuggestionDiagnostics(specifier) => json!({
+        "id": id,
+        "method": "getSuggestionDiagnostics",
+        "specifier": specifier,
+      }),
+      RequestMethod::GetSyntacticDiagnostics(specifier) => json!({
+        "id": id,
+        "method": "getSyntacticDiagnostics",
+        "specifier": specifier,
+      }),
+      RequestMethod::GetQuickInfo((specifier, position)) => json!({
+        "id": id,
+        "method": "getQuickInfo",
+        "specifier": specifier,
+        "position": position,
+      }),
+      RequestMethod::GetDocumentHighlights((
+        specifier,
+        position,
+        files_to_search,
+      )) => json!({
+        "id": id,
+        "method": "getDocumentHighlights",
+        "specifier": specifier,
+        "position": position,
+        "filesToSearch": files_to_search,
+      }),
+      RequestMethod::GetReferences((specifier, position)) => json!({
+        "id": id,
+        "method": "getReferences",
+        "specifier": specifier,
+        "position": position,
+      }),
+      RequestMethod::GetDefinition((specifier, position)) => json!({
+        "id": id,
+        "method": "getDefinition",
+        "specifier": specifier,
+        "position": position,
+      }),
+    }
+  }
+}
+
+/// Send a request into a runtime and return the JSON value of the response.
+pub fn request( + runtime: &mut JsRuntime, + server_state: &ServerStateSnapshot, + method: RequestMethod, +) -> Result { + let id = { + let op_state = runtime.op_state(); + let mut op_state = op_state.borrow_mut(); + let state = op_state.borrow_mut::(); + state.server_state = server_state.clone(); + state.last_id += 1; + state.last_id + }; + let request_params = method.to_value(id); + let request_src = format!("globalThis.serverRequest({});", request_params); + runtime.execute("[native_code]", &request_src)?; + + let op_state = runtime.op_state(); + let mut op_state = op_state.borrow_mut(); + let state = op_state.borrow_mut::(); + + if let Some(response) = state.response.clone() { + state.response = None; + Ok(response.data) + } else { + Err(custom_error( + "RequestError", + "The response was not received for the request.", + )) + } +} + +#[cfg(test)] +mod tests { + use super::super::memory_cache::MemoryCache; + use super::super::state::DocumentData; + use super::*; + use std::collections::HashMap; + use std::sync::Arc; + use std::sync::RwLock; + + fn mock_server_state(sources: Vec<(&str, &str, i32)>) -> ServerStateSnapshot { + let mut doc_data = HashMap::new(); + let mut file_cache = MemoryCache::default(); + for (specifier, content, version) in sources { + let specifier = ModuleSpecifier::resolve_url(specifier) + .expect("failed to create specifier"); + doc_data.insert( + specifier.clone(), + DocumentData::new(specifier.clone(), version, content, None), + ); + file_cache.set_contents(specifier, Some(content.as_bytes().to_vec())); + } + let file_cache = Arc::new(RwLock::new(file_cache)); + ServerStateSnapshot { + config: Default::default(), + diagnostics: Default::default(), + doc_data, + file_cache, + sources: Default::default(), + } + } + + fn setup( + debug: bool, + config: Value, + sources: Vec<(&str, &str, i32)>, + ) -> (JsRuntime, ServerStateSnapshot) { + let server_state = mock_server_state(sources.clone()); + let mut runtime = start(debug).expect("could not start server"); + let ts_config = TsConfig::new(config); + assert_eq!( + request( + &mut runtime, + &server_state, + RequestMethod::Configure(ts_config) + ) + .expect("failed request"), + json!(true) + ); + (runtime, server_state) + } + + #[test] + fn test_replace_links() { + let actual = replace_links(r"test {@link http://deno.land/x/mod.ts} test"); + assert_eq!( + actual, + r"test [http://deno.land/x/mod.ts](http://deno.land/x/mod.ts) test" + ); + let actual = + replace_links(r"test {@link http://deno.land/x/mod.ts a link} test"); + assert_eq!(actual, r"test [a link](http://deno.land/x/mod.ts) test"); + let actual = + replace_links(r"test {@linkcode http://deno.land/x/mod.ts a link} test"); + assert_eq!(actual, r"test [`a link`](http://deno.land/x/mod.ts) test"); + } + + #[test] + fn test_project_configure() { + setup( + false, + json!({ + "target": "esnext", + "module": "esnext", + "noEmit": true, + }), + vec![], + ); + } + + #[test] + fn test_project_reconfigure() { + let (mut runtime, server_state) = setup( + false, + json!({ + "target": "esnext", + "module": "esnext", + "noEmit": true, + }), + vec![], + ); + let ts_config = TsConfig::new(json!({ + "target": "esnext", + "module": "esnext", + "noEmit": true, + "lib": ["deno.ns", "deno.worker"] + })); + let result = request( + &mut runtime, + &server_state, + RequestMethod::Configure(ts_config), + ); + assert!(result.is_ok()); + let response = result.unwrap(); + assert_eq!(response, json!(true)); + } + + #[test] + fn test_get_semantic_diagnostics() { + let (mut runtime, 
server_state) = setup( + false, + json!({ + "target": "esnext", + "module": "esnext", + "noEmit": true, + }), + vec![("file:///a.ts", r#"console.log("hello deno");"#, 1)], + ); + let specifier = ModuleSpecifier::resolve_url("file:///a.ts") + .expect("could not resolve url"); + let result = request( + &mut runtime, + &server_state, + RequestMethod::GetSemanticDiagnostics(specifier), + ); + assert!(result.is_ok()); + let response = result.unwrap(); + assert_eq!( + response, + json!([ + { + "start": { + "line": 0, + "character": 0, + }, + "end": { + "line": 0, + "character": 7 + }, + "fileName": "file:///a.ts", + "messageText": "Cannot find name 'console'. Do you need to change your target library? Try changing the `lib` compiler option to include 'dom'.", + "sourceLine": "console.log(\"hello deno\");", + "category": 1, + "code": 2584 + } + ]) + ); + } + + #[test] + fn test_module_resolution() { + let (mut runtime, server_state) = setup( + false, + json!({ + "target": "esnext", + "module": "esnext", + "lib": ["deno.ns", "deno.window"], + "noEmit": true, + }), + vec![( + "file:///a.ts", + r#" + import { B } from "https://deno.land/x/b/mod.ts"; + + const b = new B(); + + console.log(b); + "#, + 1, + )], + ); + let specifier = ModuleSpecifier::resolve_url("file:///a.ts") + .expect("could not resolve url"); + let result = request( + &mut runtime, + &server_state, + RequestMethod::GetSemanticDiagnostics(specifier), + ); + assert!(result.is_ok()); + let response = result.unwrap(); + assert_eq!(response, json!([])); + } + + #[test] + fn test_bad_module_specifiers() { + let (mut runtime, server_state) = setup( + false, + json!({ + "target": "esnext", + "module": "esnext", + "lib": ["deno.ns", "deno.window"], + "noEmit": true, + }), + vec![( + "file:///a.ts", + r#" + import { A } from "."; + "#, + 1, + )], + ); + let specifier = ModuleSpecifier::resolve_url("file:///a.ts") + .expect("could not resolve url"); + let result = request( + &mut runtime, + &server_state, + RequestMethod::GetSyntacticDiagnostics(specifier), + ); + assert!(result.is_ok()); + let response = result.unwrap(); + assert_eq!(response, json!([])); + } + + #[test] + fn test_remote_modules() { + let (mut runtime, server_state) = setup( + false, + json!({ + "target": "esnext", + "module": "esnext", + "lib": ["deno.ns", "deno.window"], + "noEmit": true, + }), + vec![( + "file:///a.ts", + r#" + import { B } from "https://deno.land/x/b/mod.ts"; + + const b = new B(); + + console.log(b); + "#, + 1, + )], + ); + let specifier = ModuleSpecifier::resolve_url("file:///a.ts") + .expect("could not resolve url"); + let result = request( + &mut runtime, + &server_state, + RequestMethod::GetSyntacticDiagnostics(specifier), + ); + assert!(result.is_ok()); + let response = result.unwrap(); + assert_eq!(response, json!([])); + } + + #[test] + fn test_partial_modules() { + let (mut runtime, server_state) = setup( + false, + json!({ + "target": "esnext", + "module": "esnext", + "lib": ["deno.ns", "deno.window"], + "noEmit": true, + }), + vec![( + "file:///a.ts", + r#" + import { + Application, + Context, + Router, + Status, + } from "https://deno.land/x/oak@v6.3.2/mod.ts"; + + import * as test from + "#, + 1, + )], + ); + let specifier = ModuleSpecifier::resolve_url("file:///a.ts") + .expect("could not resolve url"); + let result = request( + &mut runtime, + &server_state, + RequestMethod::GetSyntacticDiagnostics(specifier), + ); + println!("{:?}", result); + // assert!(result.is_ok()); + // let response = result.unwrap(); + // assert_eq!(response, 
json!([]));
+  }
+}
diff --git a/cli/lsp/utils.rs b/cli/lsp/utils.rs
new file mode 100644
index 00000000000000..0c3d5a635c9810
--- /dev/null
+++ b/cli/lsp/utils.rs
@@ -0,0 +1,114 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+
+use deno_core::error::custom_error;
+use deno_core::error::AnyError;
+use deno_core::serde_json::Value;
+use deno_core::url::Position;
+use deno_core::url::Url;
+use deno_core::ModuleSpecifier;
+use lsp_server::Notification;
+use serde::de::DeserializeOwned;
+use std::error::Error;
+use std::fmt;
+
+// TODO(@kitsonk) actually support cancellation requests from the
+// client.
+
+pub struct Canceled {
+  _private: (),
+}
+
+impl Canceled {
+  #[allow(unused)]
+  pub fn new() -> Self {
+    Self { _private: () }
+  }
+
+  #[allow(unused)]
+  pub fn throw() -> ! {
+    std::panic::resume_unwind(Box::new(Canceled::new()))
+  }
+}
+
+impl fmt::Display for Canceled {
+  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    write!(f, "cancelled")
+  }
+}
+
+impl fmt::Debug for Canceled {
+  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    write!(f, "Canceled")
+  }
+}
+
+impl Error for Canceled {}
+
+pub fn from_json<T: DeserializeOwned>(
+  what: &'static str,
+  json: Value,
+) -> Result<T, AnyError> {
+  let response = T::deserialize(&json).map_err(|err| {
+    custom_error(
+      "DeserializeFailed",
+      format!("Failed to deserialize {}: {}; {}", what, err, json),
+    )
+  })?;
+  Ok(response)
+}
+
+pub fn is_canceled(e: &(dyn Error + 'static)) -> bool {
+  e.downcast_ref::<Canceled>().is_some()
+}
+
+pub fn notification_is<N: lsp_types::notification::Notification>(
+  notification: &Notification,
+) -> bool {
+  notification.method == N::METHOD
+}
+
+/// Normalizes a file name returned from the TypeScript compiler into a URI that
+/// should be sent by the language server to the client.
+pub fn normalize_file_name(file_name: &str) -> Result<Url, AnyError> {
+  let specifier_str = if file_name.starts_with("file://") {
+    file_name.to_string()
+  } else {
+    format!("deno:///{}", file_name.replacen("://", "/", 1))
+  };
+  Url::parse(&specifier_str).map_err(|err| err.into())
+}
+
+/// Normalize URLs from the client, where "virtual" `deno:///` URLs are
+/// converted into proper module specifiers.
+pub fn normalize_url(url: Url) -> ModuleSpecifier {
+  if url.scheme() == "deno"
+    && (url.path().starts_with("/http") || url.path().starts_with("/asset"))
+  {
+    let specifier_str = url[Position::BeforePath..]
+ .replacen("/", "", 1) + .replacen("/", "://", 1); + if let Ok(specifier) = + percent_encoding::percent_decode_str(&specifier_str).decode_utf8() + { + if let Ok(specifier) = ModuleSpecifier::resolve_url(&specifier) { + return specifier; + } + } + } + ModuleSpecifier::from(url) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_normalize_url() { + let fixture = Url::parse("deno:///https/deno.land/x/mod.ts").unwrap(); + let actual = normalize_url(fixture); + assert_eq!( + actual, + ModuleSpecifier::resolve_url("https://deno.land/x/mod.ts").unwrap() + ); + } +} diff --git a/cli/main.rs b/cli/main.rs index e297d0c4c7de02..2e40df66bd75aa 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -28,6 +28,7 @@ mod info; mod inspector; mod js; mod lockfile; +mod lsp; mod media_type; mod metrics; mod module_graph; @@ -258,6 +259,10 @@ async fn install_command( tools::installer::install(flags, &module_url, args, name, root, force) } +async fn language_server_command() -> Result<(), AnyError> { + lsp::start() +} + async fn lint_command( flags: Flags, files: Vec, @@ -992,6 +997,7 @@ fn get_subcommand( } => { install_command(flags, module_url, args, name, root, force).boxed_local() } + DenoSubcommand::LanguageServer => language_server_command().boxed_local(), DenoSubcommand::Lint { files, rules, diff --git a/cli/module_graph.rs b/cli/module_graph.rs index 8c6f6955236231..4144ee5ee9df75 100644 --- a/cli/module_graph.rs +++ b/cli/module_graph.rs @@ -31,6 +31,8 @@ use crate::AnyError; use deno_core::error::Context; use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::stream::StreamExt; +use deno_core::serde::Deserialize; +use deno_core::serde::Deserializer; use deno_core::serde::Serialize; use deno_core::serde::Serializer; use deno_core::serde_json::json; @@ -38,8 +40,6 @@ use deno_core::serde_json::Value; use deno_core::ModuleResolutionError; use deno_core::ModuleSpecifier; use regex::Regex; -use serde::Deserialize; -use serde::Deserializer; use std::cell::RefCell; use std::collections::HashSet; use std::collections::{BTreeSet, HashMap}; @@ -182,14 +182,14 @@ impl swc_bundler::Load for BundleLoader<'_> { /// An enum which represents the parsed out values of references in source code. #[derive(Debug, Clone, Eq, PartialEq)] -enum TypeScriptReference { +pub enum TypeScriptReference { Path(String), Types(String), } /// Determine if a comment contains a triple slash reference and optionally /// return its kind and value. -fn parse_ts_reference(comment: &str) -> Option { +pub fn parse_ts_reference(comment: &str) -> Option { if !TRIPLE_SLASH_REFERENCE_RE.is_match(comment) { None } else if let Some(captures) = PATH_REFERENCE_RE.captures(comment) { @@ -207,7 +207,7 @@ fn parse_ts_reference(comment: &str) -> Option { /// Determine if a comment contains a `@deno-types` pragma and optionally return /// its value. -fn parse_deno_types(comment: &str) -> Option { +pub fn parse_deno_types(comment: &str) -> Option { if let Some(captures) = DENO_TYPES_RE.captures(comment) { if let Some(m) = captures.get(1) { Some(m.as_str().to_string()) @@ -230,8 +230,8 @@ fn get_version(source: &str, version: &str, config: &[u8]) -> String { /// A logical representation of a module within a graph. 
#[derive(Debug, Clone)] -struct Module { - dependencies: DependencyMap, +pub struct Module { + pub dependencies: DependencyMap, is_dirty: bool, is_parsed: bool, maybe_emit: Option, diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index aca2df99c0dbcb..42172a7718d6c2 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -914,7 +914,7 @@ fn ts_reload() { assert!(std::str::from_utf8(&output.stdout) .unwrap() .trim() - .contains("\"host.writeFile(\\\"deno://002_hello.js\\\")\"")); + .contains("host.writeFile(\"deno://002_hello.js\")")); } #[test] diff --git a/cli/tests/lsp/did_open_notification.json b/cli/tests/lsp/did_open_notification.json new file mode 100644 index 00000000000000..04f12a7b3a0ecf --- /dev/null +++ b/cli/tests/lsp/did_open_notification.json @@ -0,0 +1,12 @@ +{ + "jsonrpc": "2.0", + "method": "textDocument/didOpen", + "params": { + "textDocument": { + "uri": "file:///a/file.ts", + "languageId": "typescript", + "version": 1, + "text": "console.log(Deno.args);\n" + } + } +} diff --git a/cli/tests/lsp/exit_notification.json b/cli/tests/lsp/exit_notification.json new file mode 100644 index 00000000000000..799a0d1d53c6c8 --- /dev/null +++ b/cli/tests/lsp/exit_notification.json @@ -0,0 +1,5 @@ +{ + "jsonrpc": "2.0", + "method": "exit", + "params": null +} diff --git a/cli/tests/lsp/hover_request.json b/cli/tests/lsp/hover_request.json new file mode 100644 index 00000000000000..f12bd52df608a3 --- /dev/null +++ b/cli/tests/lsp/hover_request.json @@ -0,0 +1,14 @@ +{ + "jsonrpc": "2.0", + "id": 2, + "method": "textDocument/hover", + "params": { + "textDocument": { + "uri": "file:///a/file.ts" + }, + "position": { + "line": 0, + "character": 19 + } + } +} diff --git a/cli/tests/lsp/initialize_request.json b/cli/tests/lsp/initialize_request.json new file mode 100644 index 00000000000000..960420bfd376ec --- /dev/null +++ b/cli/tests/lsp/initialize_request.json @@ -0,0 +1,23 @@ +{ + "jsonrpc": "2.0", + "id": 1, + "method": "initialize", + "params": { + "processId": 0, + "clientInfo": { + "name": "test-harness", + "version": "1.0.0" + }, + "rootUri": null, + "capabilities": { + "textDocument": { + "synchronization": { + "dynamicRegistration": true, + "willSave": true, + "willSaveWaitUntil": true, + "didSave": true + } + } + } + } +} diff --git a/cli/tests/lsp/initialized_notification.json b/cli/tests/lsp/initialized_notification.json new file mode 100644 index 00000000000000..972f8abc8a36dd --- /dev/null +++ b/cli/tests/lsp/initialized_notification.json @@ -0,0 +1,5 @@ +{ + "jsonrpc": "2.0", + "method": "initialized", + "params": {} +} diff --git a/cli/tests/lsp/shutdown_request.json b/cli/tests/lsp/shutdown_request.json new file mode 100644 index 00000000000000..fd4d784607643c --- /dev/null +++ b/cli/tests/lsp/shutdown_request.json @@ -0,0 +1,6 @@ +{ + "jsonrpc": "2.0", + "id": 3, + "method": "shutdown", + "params": null +} diff --git a/cli/tests/lsp_tests.rs b/cli/tests/lsp_tests.rs new file mode 100644 index 00000000000000..7de655ac80fe05 --- /dev/null +++ b/cli/tests/lsp_tests.rs @@ -0,0 +1,88 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +///! +///! Integration test for the Deno Language Server (`deno lsp`) +///! 
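// Each fixture above is sent to the `deno lsp` child process over stdin using
// the LSP base protocol framing, which the test harness below writes by hand:
// a Content-Length header, a blank line, then the JSON-RPC payload, e.g.
// (the header value is the length of the fixture text):
//
//   Content-Length: <length>\r\n
//   \r\n
//   { "jsonrpc": "2.0", "id": 1, "method": "initialize", ... }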
+use std::fs;
+use std::io::Read;
+use std::io::Write;
+use std::process::Stdio;
+
+struct LspIntegrationTest {
+  pub fixtures: Vec<&'static str>,
+}
+
+impl LspIntegrationTest {
+  pub fn run(&self) -> (String, String) {
+    let root_path = test_util::root_path();
+    let deno_exe = test_util::deno_exe_path();
+    let tests_dir = root_path.join("cli/tests/lsp");
+    println!("tests_dir: {:?} deno_exe: {:?}", tests_dir, deno_exe);
+    let mut command = test_util::deno_cmd();
+    command
+      .arg("lsp")
+      .stdin(Stdio::piped())
+      .stdout(Stdio::piped())
+      .stderr(Stdio::piped());
+
+    let process = command.spawn().expect("failed to execute deno");
+
+    for fixture in &self.fixtures {
+      let mut stdin = process.stdin.as_ref().unwrap();
+      let fixture_path = tests_dir.join(fixture);
+      let content =
+        fs::read_to_string(&fixture_path).expect("could not read fixture");
+      let content_length = content.chars().count();
+      write!(
+        stdin,
+        "Content-Length: {}\r\n\r\n{}",
+        content_length, content
+      )
+      .unwrap();
+    }
+
+    let mut so = String::new();
+    process.stdout.unwrap().read_to_string(&mut so).unwrap();
+
+    let mut se = String::new();
+    process.stderr.unwrap().read_to_string(&mut se).unwrap();
+
+    (so, se)
+  }
+}
+
+#[test]
+fn test_lsp_startup_shutdown() {
+  let test = LspIntegrationTest {
+    fixtures: vec![
+      "initialize_request.json",
+      "initialized_notification.json",
+      "shutdown_request.json",
+      "exit_notification.json",
+    ],
+  };
+  let (response, out) = test.run();
+  assert!(response.contains("deno-language-server"));
+  assert!(out.contains("Connected to \"test-harness\" 1.0.0"));
+}
+
+#[test]
+fn test_lsp_hover() {
+  // A straightforward integration test: it starts up the LSP, opens a document
+  // which logs `Deno.args` to the console, and hovers over the `args` property
+  // to get the intellisense about it, making this a complete end-to-end test
+  // that includes sending information in and out of the TypeScript compiler.
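  // The hover fixture targets line 0, character 19 of
  // `console.log(Deno.args);`, i.e. the `args` property, so the reply is
  // expected to carry the quick info string asserted below. A trimmed sketch
  // of the kind of response involved (field layout illustrative only):
  //
  //   {
  //     "jsonrpc": "2.0",
  //     "id": 2,
  //     "result": {
  //       "contents": [
  //         { "language": "typescript", "value": "const Deno.args: string[]" }
  //       ],
  //       "range": { ... }
  //     }
  //   }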
+ let test = LspIntegrationTest { + fixtures: vec![ + "initialize_request.json", + "initialized_notification.json", + "did_open_notification.json", + "hover_request.json", + "shutdown_request.json", + "exit_notification.json", + ], + }; + let (response, out) = test.run(); + assert!(response.contains("const Deno.args: string[]")); + assert!(out.contains("Connected to \"test-harness\" 1.0.0")); +} diff --git a/cli/tests/type_directives_01.ts.out b/cli/tests/type_directives_01.ts.out index 8d285d3a87056e..77ed3ae264b430 100644 --- a/cli/tests/type_directives_01.ts.out +++ b/cli/tests/type_directives_01.ts.out @@ -1,3 +1,3 @@ [WILDCARD] -DEBUG TS - "host.getSourceFile(\"http://127.0.0.1:4545/xTypeScriptTypes.d.ts\", Latest)" +DEBUG TS - host.getSourceFile("http://127.0.0.1:4545/xTypeScriptTypes.d.ts", Latest) [WILDCARD] \ No newline at end of file diff --git a/cli/tests/type_directives_02.ts.out b/cli/tests/type_directives_02.ts.out index aea1d4fd083a67..7949dfab546b9e 100644 --- a/cli/tests/type_directives_02.ts.out +++ b/cli/tests/type_directives_02.ts.out @@ -1,3 +1,3 @@ [WILDCARD] -DEBUG TS - "host.getSourceFile(\"file:///[WILDCARD]cli/tests/subdir/type_reference.d.ts\", Latest)" +DEBUG TS - host.getSourceFile("file:///[WILDCARD]cli/tests/subdir/type_reference.d.ts", Latest) [WILDCARD] \ No newline at end of file diff --git a/cli/tools/lint.rs b/cli/tools/lint.rs index c40dcfd5488c40..dc9a51a8928b3e 100644 --- a/cli/tools/lint.rs +++ b/cli/tools/lint.rs @@ -122,7 +122,7 @@ pub fn print_rules_list(json: bool) { } } -fn create_linter(syntax: Syntax, rules: Vec>) -> Linter { +pub fn create_linter(syntax: Syntax, rules: Vec>) -> Linter { LinterBuilder::default() .ignore_file_directive("deno-lint-ignore-file") .ignore_diagnostic_directive("deno-lint-ignore") diff --git a/cli/tsc.rs b/cli/tsc.rs index 36668f6f7d5fa3..69373b2fa84159 100644 --- a/cli/tsc.rs +++ b/cli/tsc.rs @@ -284,12 +284,12 @@ fn load(state: &mut State, args: Value) -> Result { #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] -struct ResolveArgs { +pub struct ResolveArgs { /// The base specifier that the supplied specifier strings should be resolved /// relative to. - base: String, + pub base: String, /// A list of specifiers that should be resolved. - specifiers: Vec, + pub specifiers: Vec, } fn resolve(state: &mut State, args: Value) -> Result { diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index bb8458c93895dc..f379d6baeba90a 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -1,5 +1,7 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. +// @ts-check +/// // deno-lint-ignore-file no-undef // This module is the entry point for "compiler" isolate, ie. the one @@ -11,6 +13,7 @@ delete Object.prototype.__proto__; ((window) => { + /** @type {DenoCore} */ const core = window.Deno.core; let logDebug = false; @@ -25,7 +28,9 @@ delete Object.prototype.__proto__; function debug(...args) { if (logDebug) { - const stringifiedArgs = args.map((arg) => JSON.stringify(arg)).join(" "); + const stringifiedArgs = args.map((arg) => + typeof arg === "string" ? 
arg : JSON.stringify(arg) + ).join(" "); core.print(`DEBUG ${logSource} - ${stringifiedArgs}\n`); } } @@ -86,6 +91,7 @@ delete Object.prototype.__proto__; /** @param {ts.Diagnostic[]} diagnostics */ function fromTypeScriptDiagnostic(diagnostics) { return diagnostics.map(({ relatedInformation: ri, source, ...diag }) => { + /** @type {any} */ const value = fromRelatedInformation(diag); value.relatedInformation = ri ? ri.map(fromRelatedInformation) @@ -106,7 +112,7 @@ delete Object.prototype.__proto__; * Deno, as they provide misleading or incorrect information. */ const IGNORED_DIAGNOSTICS = [ // TS1208: All files must be modules when the '--isolatedModules' flag is - // provided. We can ignore because we guarantuee that all files are + // provided. We can ignore because we guarantee that all files are // modules. 1208, // TS1375: 'await' expressions are only allowed at the top level of a file @@ -148,10 +154,72 @@ delete Object.prototype.__proto__; target: ts.ScriptTarget.ESNext, }; + class ScriptSnapshot { + /** @type {string} */ + specifier; + /** @type {string} */ + version; + /** + * @param {string} specifier + * @param {string} version + */ + constructor(specifier, version) { + this.specifier = specifier; + this.version = version; + } + /** + * @param {number} start + * @param {number} end + * @returns {string} + */ + getText(start, end) { + const { specifier, version } = this; + debug( + `snapshot.getText(${start}, ${end}) specifier: ${specifier} version: ${version}`, + ); + return core.jsonOpSync("op_get_text", { specifier, version, start, end }); + } + /** + * @returns {number} + */ + getLength() { + const { specifier, version } = this; + debug(`snapshot.getLength() specifier: ${specifier} version: ${version}`); + return core.jsonOpSync("op_get_length", { specifier, version }); + } + /** + * @param {ScriptSnapshot} oldSnapshot + * @returns {ts.TextChangeRange | undefined} + */ + getChangeRange(oldSnapshot) { + const { specifier, version } = this; + const { version: oldVersion } = oldSnapshot; + const oldLength = oldSnapshot.getLength(); + debug( + `snapshot.getLength() specifier: ${specifier} oldVersion: ${oldVersion} version: ${version}`, + ); + return core.jsonOpSync( + "op_get_change_range", + { specifier, oldLength, oldVersion, version }, + ); + } + dispose() { + const { specifier, version } = this; + debug(`snapshot.dispose() specifier: ${specifier} version: ${version}`); + core.jsonOpSync("op_dispose", { specifier, version }); + } + } + + /** @type {ts.CompilerOptions} */ + let compilationSettings = {}; + + /** @type {ts.LanguageService} */ + let languageService; + /** An object literal of the incremental compiler host, which provides the * specific "bindings" to the Deno environment that tsc needs to work. 
* - * @type {ts.CompilerHost} */ + * @type {ts.CompilerHost & ts.LanguageServiceHost} */ const host = { fileExists(fileName) { debug(`host.fileExists("${fileName}")`); @@ -231,21 +299,73 @@ delete Object.prototype.__proto__; debug(`host.resolveModuleNames()`); debug(` base: ${base}`); debug(` specifiers: ${specifiers.join(", ")}`); - /** @type {Array<[string, ts.Extension]>} */ + /** @type {Array<[string, ts.Extension] | undefined>} */ const resolved = core.jsonOpSync("op_resolve", { specifiers, base, }); - const r = resolved.map(([resolvedFileName, extension]) => ({ - resolvedFileName, - extension, - isExternalLibraryImport: false, - })); - return r; + if (resolved) { + const result = resolved.map((item) => { + if (item) { + const [resolvedFileName, extension] = item; + return { + resolvedFileName, + extension, + isExternalLibraryImport: false, + }; + } + return undefined; + }); + result.length = specifiers.length; + return result; + } else { + return new Array(specifiers.length); + } }, createHash(data) { return core.jsonOpSync("op_create_hash", { data }).hash; }, + + // LanguageServiceHost + getCompilationSettings() { + debug("host.getCompilationSettings()"); + return compilationSettings; + }, + getScriptFileNames() { + debug("host.getScriptFileNames()"); + return core.jsonOpSync("op_script_names", undefined); + }, + getScriptVersion(specifier) { + debug(`host.getScriptVersion("${specifier}")`); + const sourceFile = sourceFileCache.get(specifier); + if (sourceFile) { + return sourceFile.version ?? "1"; + } + return core.jsonOpSync("op_script_version", { specifier }); + }, + getScriptSnapshot(specifier) { + debug(`host.getScriptSnapshot("${specifier}")`); + const sourceFile = sourceFileCache.get(specifier); + if (sourceFile) { + return { + getText(start, end) { + return sourceFile.text.substring(start, end); + }, + getLength() { + return sourceFile.text.length; + }, + getChangeRange() { + return undefined; + }, + }; + } + /** @type {string | undefined} */ + const version = core.jsonOpSync("op_script_version", { specifier }); + if (version != null) { + return new ScriptSnapshot(specifier, version); + } + return undefined; + }, }; /** @type {Array<[string, number]>} */ @@ -254,10 +374,13 @@ delete Object.prototype.__proto__; function performanceStart() { stats.length = 0; - statsStart = new Date(); + statsStart = Date.now(); ts.performance.enable(); } + /** + * @param {{ program: ts.Program | ts.EmitAndSemanticDiagnosticsBuilderProgram, fileCount?: number }} options + */ function performanceProgram({ program, fileCount }) { if (program) { if ("getProgram" in program) { @@ -286,7 +409,7 @@ delete Object.prototype.__proto__; } function performanceEnd() { - const duration = new Date() - statsStart; + const duration = Date.now() - statsStart; stats.push(["Compile time", duration]); return stats; } @@ -308,7 +431,7 @@ delete Object.prototype.__proto__; debug(config); const { options, errors: configFileParsingDiagnostics } = ts - .convertCompilerOptionsFromJson(config, "", "tsconfig.json"); + .convertCompilerOptionsFromJson(config, ""); // The `allowNonTsExtensions` is a "hidden" compiler option used in VSCode // which is not allowed to be passed in JSON, we need it to allow special // URLs which Deno supports. 
So we need to either ignore the diagnostic, or @@ -340,6 +463,106 @@ delete Object.prototype.__proto__; debug("<<< exec stop"); } + /** + * @param {number} id + * @param {any} data + */ + function respond(id, data = null) { + core.jsonOpSync("op_respond", { id, data }); + } + + /** + * @param {LanguageServerRequest} request + */ + function serverRequest({ id, ...request }) { + debug(`serverRequest()`, { id, ...request }); + switch (request.method) { + case "configure": { + const { options, errors } = ts + .convertCompilerOptionsFromJson(request.compilerOptions, ""); + Object.assign(options, { allowNonTsExtensions: true }); + if (errors.length) { + debug(ts.formatDiagnostics(errors, host)); + } + compilationSettings = options; + return respond(id, true); + } + case "getSemanticDiagnostics": { + const diagnostics = languageService.getSemanticDiagnostics( + request.specifier, + ).filter(({ code }) => !IGNORED_DIAGNOSTICS.includes(code)); + return respond(id, fromTypeScriptDiagnostic(diagnostics)); + } + case "getSuggestionDiagnostics": { + const diagnostics = languageService.getSuggestionDiagnostics( + request.specifier, + ).filter(({ code }) => !IGNORED_DIAGNOSTICS.includes(code)); + return respond(id, fromTypeScriptDiagnostic(diagnostics)); + } + case "getSyntacticDiagnostics": { + const diagnostics = languageService.getSyntacticDiagnostics( + request.specifier, + ).filter(({ code }) => !IGNORED_DIAGNOSTICS.includes(code)); + return respond(id, fromTypeScriptDiagnostic(diagnostics)); + } + case "getQuickInfo": { + return respond( + id, + languageService.getQuickInfoAtPosition( + request.specifier, + request.position, + ), + ); + } + case "getDocumentHighlights": { + return respond( + id, + languageService.getDocumentHighlights( + request.specifier, + request.position, + request.filesToSearch, + ), + ); + } + case "getReferences": { + return respond( + id, + languageService.getReferencesAtPosition( + request.specifier, + request.position, + ), + ); + } + case "getDefinition": { + return respond( + id, + languageService.getDefinitionAndBoundSpan( + request.specifier, + request.position, + ), + ); + } + default: + throw new TypeError( + // @ts-ignore exhausted case statement sets type to never + `Invalid request method for request: "${request.method}" (${id})`, + ); + } + } + + /** @param {{ debug: boolean; }} init */ + function serverInit({ debug: debugFlag }) { + if (hasStarted) { + throw new Error("The language server has already been initialized."); + } + hasStarted = true; + languageService = ts.createLanguageService(host); + core.ops(); + core.registerErrorClass("Error", Error); + setLogDebug(debugFlag, "TSLS"); + debug("serverInit()"); + } + let hasStarted = false; /** Startup the runtime environment, setting various flags. @@ -391,4 +614,9 @@ delete Object.prototype.__proto__; // checking TypeScript. globalThis.startup = startup; globalThis.exec = exec; + + // exposes the functions that are called when the compiler is used as a + // language service. + globalThis.serverInit = serverInit; + globalThis.serverRequest = serverRequest; })(this); diff --git a/cli/tsc/compiler.d.ts b/cli/tsc/compiler.d.ts new file mode 100644 index 00000000000000..1a899c291a3c64 --- /dev/null +++ b/cli/tsc/compiler.d.ts @@ -0,0 +1,103 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
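// For illustration, a `GetQuickInfoRequest` (declared below) as produced by
// the Rust side's `RequestMethod::to_value()` and dispatched by
// `serverRequest()` in `99_main_compiler.js`; the concrete values here are
// examples only:
//
//   const quickInfoRequest: GetQuickInfoRequest = {
//     id: 2,
//     method: "getQuickInfo",
//     specifier: "file:///a.ts",
//     position: 19,
//   };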
+
+// Contains types that can be used to validate and check `99_main_compiler.js`
+
+import * as _ts from "../dts/typescript";
+
+declare global {
+  // deno-lint-ignore no-namespace
+  namespace ts {
+    var libs: string[];
+    var libMap: Map<string, string>;
+
+    interface SourceFile {
+      version?: string;
+    }
+
+    interface Performance {
+      enable(): void;
+      getDuration(value: string): number;
+    }
+
+    var performance: Performance;
+  }
+
+  // deno-lint-ignore no-namespace
+  namespace ts {
+    export = _ts;
+  }
+
+  interface Object {
+    // deno-lint-ignore no-explicit-any
+    __proto__: any;
+  }
+
+  interface DenoCore {
+    // deno-lint-ignore no-explicit-any
+    jsonOpSync<T>(name: string, params: T): any;
+    ops(): void;
+    print(msg: string): void;
+    registerErrorClass(name: string, Ctor: typeof Error): void;
+  }
+
+  type LanguageServerRequest =
+    | ConfigureRequest
+    | GetSyntacticDiagnosticsRequest
+    | GetSemanticDiagnosticsRequest
+    | GetSuggestionDiagnosticsRequest
+    | GetQuickInfoRequest
+    | GetDocumentHighlightsRequest
+    | GetReferencesRequest
+    | GetDefinitionRequest;
+
+  interface BaseLanguageServerRequest {
+    id: number;
+    method: string;
+  }
+
+  interface ConfigureRequest extends BaseLanguageServerRequest {
+    method: "configure";
+    // deno-lint-ignore no-explicit-any
+    compilerOptions: Record<string, any>;
+  }
+
+  interface GetSyntacticDiagnosticsRequest extends BaseLanguageServerRequest {
+    method: "getSyntacticDiagnostics";
+    specifier: string;
+  }
+
+  interface GetSemanticDiagnosticsRequest extends BaseLanguageServerRequest {
+    method: "getSemanticDiagnostics";
+    specifier: string;
+  }
+
+  interface GetSuggestionDiagnosticsRequest extends BaseLanguageServerRequest {
+    method: "getSuggestionDiagnostics";
+    specifier: string;
+  }
+
+  interface GetQuickInfoRequest extends BaseLanguageServerRequest {
+    method: "getQuickInfo";
+    specifier: string;
+    position: number;
+  }
+
+  interface GetDocumentHighlightsRequest extends BaseLanguageServerRequest {
+    method: "getDocumentHighlights";
+    specifier: string;
+    position: number;
+    filesToSearch: string[];
+  }
+
+  interface GetReferencesRequest extends BaseLanguageServerRequest {
+    method: "getReferences";
+    specifier: string;
+    position: number;
+  }
+
+  interface GetDefinitionRequest extends BaseLanguageServerRequest {
+    method: "getDefinition";
+    specifier: string;
+    position: number;
+  }
+}
diff --git a/cli/tsc_config.rs b/cli/tsc_config.rs
index 773d2afb09dc29..16661c7680591f 100644
--- a/cli/tsc_config.rs
+++ b/cli/tsc_config.rs
@@ -52,7 +52,7 @@ impl fmt::Display for IgnoredCompilerOptions {
 /// A static slice of all the compiler options that should be ignored that
 /// either have no effect on the compilation or would cause the emit to not work
 /// in Deno.
-const IGNORED_COMPILER_OPTIONS: &[&str] = &[ +pub const IGNORED_COMPILER_OPTIONS: &[&str] = &[ "allowSyntheticDefaultImports", "allowUmdGlobalAccess", "baseUrl", @@ -83,7 +83,7 @@ const IGNORED_COMPILER_OPTIONS: &[&str] = &[ "useDefineForClassFields", ]; -const IGNORED_RUNTIME_COMPILER_OPTIONS: &[&str] = &[ +pub const IGNORED_RUNTIME_COMPILER_OPTIONS: &[&str] = &[ "assumeChangesOnlyAffectDirectDependencies", "build", "charset", From b77d6cb29e4437f4783368aaa3b1d5c972470ad0 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Mon, 7 Dec 2020 10:49:58 +0000 Subject: [PATCH 018/135] chore(std): Remove tsconfig_test.json (#8629) Ref #8050 --- cli/tests/integration_tests.rs | 3 --- docs/getting_started/typescript.md | 33 ++++++++++++++---------------- std/tsconfig_test.json | 5 ----- 3 files changed, 15 insertions(+), 26 deletions(-) delete mode 100644 std/tsconfig_test.json diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index 42172a7718d6c2..cb1bb27a3489b5 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -10,7 +10,6 @@ use test_util as util; #[test] fn std_tests() { let dir = TempDir::new().expect("tempdir fail"); - let std_config = util::root_path().join("std/tsconfig_test.json"); let status = util::deno_cmd() .env("DENO_DIR", dir.path()) .current_dir(util::root_path()) @@ -18,8 +17,6 @@ fn std_tests() { .arg("--unstable") .arg("--seed=86") // Some tests rely on specific random numbers. .arg("-A") - .arg("--config") - .arg(std_config.to_str().unwrap()) // .arg("-Ldebug") .arg("std/") .spawn() diff --git a/docs/getting_started/typescript.md b/docs/getting_started/typescript.md index 972d42713d82fc..6aa738c883a030 100644 --- a/docs/getting_started/typescript.md +++ b/docs/getting_started/typescript.md @@ -23,23 +23,20 @@ useful when type checking is provided by your editor and you want startup time to be as fast as possible (for example when restarting the program automatically with a file watcher). -Because `--no-check` does not do TypeScript type checking we can not -automatically remove type only imports and exports as this would require type -information. For this purpose TypeScript provides the -[`import type` and `export type` syntax](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-3-8.html#type-only-imports-and-exports). -To export a type in a different file use -`export type { AnInterface } from "./mod.ts";`. To import a type use -`import type { AnInterface } from "./mod.ts";`. You can check that you are using -`import type` and `export type` where necessary by setting the `isolatedModules` -TypeScript compiler option to `true`, and the `importsNotUsedAsValues` to -`error`. You can see an example `tsconfig.json` with this option -[in the standard library](https://github.com/denoland/deno/blob/$CLI_VERSION/std/tsconfig_test.json). -These settings will be enabled by default in the future. They are already the -default in Deno 1.4 or above when using `--unstable`. - -Because there is no type information when using `--no-check`, `const enum` is -not supported because it is type-directed. `--no-check` also does not support -the legacy `import =` and `export =` syntax. +To make the most of skipping type checks, `--no-check` transpiles each module in +isolation without using information from imported modules. This maximizes +potential for concurrency and incremental rebuilds. 
On the other hand, the +transpiler cannot know if `export { Foo } from "./foo.ts"` should be preserved +(in case `Foo` is a value) or removed (in case `Foo` is strictly a type). To +resolve such ambiguities, Deno enforces +[`isolatedModules`](https://www.typescriptlang.org/tsconfig#isolatedModules) on +all TS code. This means that `Foo` in the above example must be a value, and the +[`export type`](https://www.typescriptlang.org/docs/handbook/release-notes/typescript-3-8.html#type-only-imports-and-exports) +syntax must be used instead if `Foo` is a type. + +Another consequence of `isolatedModules` is that the type-directed `const enum` +is treated like `enum`. The legacy `import =` and `export =` syntaxes are also +not supported by `--no-check`. ### Using external type definitions @@ -90,7 +87,7 @@ definition which happens to be alongside that file, your JavaScript module named export const foo = "foo"; ``` -Deno will see this, and the compiler will use `foo.d.ts` when type checking the +Deno will see this, and the compiler will use `foo.d.ts` when type-checking the file, though `foo.js` will be loaded at runtime. The resolution of the value of the directive follows the same resolution logic as importing a module, meaning the file needs to have an extension and is relative to the current file. Remote diff --git a/std/tsconfig_test.json b/std/tsconfig_test.json deleted file mode 100644 index 8dee5fa2a4a133..00000000000000 --- a/std/tsconfig_test.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "compilerOptions": { - "isolatedModules": true - } -} From 43a35b005f9f4631dd97a9db0f41ad76eaed941e Mon Sep 17 00:00:00 2001 From: Steven Guerrero Date: Mon, 7 Dec 2020 08:27:25 -0500 Subject: [PATCH 019/135] perf: use minimal op with performance.now() (#8619) --- cli/ops/timers.rs | 42 +++++++++++++++++++++++++++------------- cli/rt/11_timers.js | 5 ++++- cli/rt/40_performance.js | 3 +-- 3 files changed, 34 insertions(+), 16 deletions(-) diff --git a/cli/ops/timers.rs b/cli/ops/timers.rs index 841cdf289d035b..8037fd69827668 100644 --- a/cli/ops/timers.rs +++ b/cli/ops/timers.rs @@ -8,7 +8,11 @@ //! only need to be able to start, cancel and await a single timer (or Delay, as Tokio //! calls it) for an entire Isolate. This is what is implemented here. +use super::dispatch_minimal::minimal_op; +use super::dispatch_minimal::MinimalOp; +use crate::metrics::metrics_op; use crate::permissions::Permissions; +use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::channel::oneshot; @@ -77,7 +81,7 @@ pub fn init(rt: &mut deno_core::JsRuntime) { super::reg_json_sync(rt, "op_global_timer_stop", op_global_timer_stop); super::reg_json_sync(rt, "op_global_timer_start", op_global_timer_start); super::reg_json_async(rt, "op_global_timer", op_global_timer); - super::reg_json_sync(rt, "op_now", op_now); + rt.register_op("op_now", metrics_op(minimal_op(op_now))); super::reg_json_sync(rt, "op_sleep_sync", op_sleep_sync); } @@ -138,26 +142,38 @@ async fn op_global_timer( // If the High precision flag is not set, the // nanoseconds are rounded on 2ms. 
fn op_now( - state: &mut OpState, - _args: Value, - _zero_copy: &mut [ZeroCopyBuf], -) -> Result { - let start_time = state.borrow::(); + state: Rc>, + // Arguments are discarded + _sync: bool, + _x: i32, + mut zero_copy: BufVec, +) -> MinimalOp { + match zero_copy.len() { + 0 => return MinimalOp::Sync(Err(type_error("no buffer specified"))), + 1 => {} + _ => { + return MinimalOp::Sync(Err(type_error("Invalid number of arguments"))) + } + } + + let op_state = state.borrow(); + let start_time = op_state.borrow::(); let seconds = start_time.elapsed().as_secs(); - let mut subsec_nanos = start_time.elapsed().subsec_nanos(); - let reduced_time_precision = 2_000_000; // 2ms in nanoseconds + let mut subsec_nanos = start_time.elapsed().subsec_nanos() as f64; + let reduced_time_precision = 2_000_000.0; // 2ms in nanoseconds // If the permission is not enabled // Round the nano result on 2 milliseconds // see: https://developer.mozilla.org/en-US/docs/Web/API/DOMHighResTimeStamp#Reduced_time_precision - if state.borrow::().check_hrtime().is_err() { + if op_state.borrow::().check_hrtime().is_err() { subsec_nanos -= subsec_nanos % reduced_time_precision; } - Ok(json!({ - "seconds": seconds, - "subsecNanos": subsec_nanos, - })) + let result = (seconds * 1_000) as f64 + (subsec_nanos / 1_000_000.0); + + (&mut zero_copy[0]).copy_from_slice(&result.to_be_bytes()); + + MinimalOp::Sync(Ok(0)) } #[derive(Deserialize)] diff --git a/cli/rt/11_timers.js b/cli/rt/11_timers.js index c762c59d83f74a..5a59844a3306b7 100644 --- a/cli/rt/11_timers.js +++ b/cli/rt/11_timers.js @@ -3,6 +3,7 @@ ((window) => { const assert = window.__bootstrap.util.assert; const core = window.Deno.core; + const { sendSync } = window.__bootstrap.dispatchMinimal; function opStopGlobalTimer() { core.jsonOpSync("op_global_timer_stop"); @@ -16,8 +17,10 @@ await core.jsonOpAsync("op_global_timer"); } + const nowBytes = new Uint8Array(8); function opNow() { - return core.jsonOpSync("op_now"); + sendSync("op_now", 0, nowBytes); + return new DataView(nowBytes.buffer).getFloat64(); } function sleepSync(millis = 0) { diff --git a/cli/rt/40_performance.js b/cli/rt/40_performance.js index 3d8be603146074..0a63dc704dfc9c 100644 --- a/cli/rt/40_performance.js +++ b/cli/rt/40_performance.js @@ -43,8 +43,7 @@ } function now() { - const res = opNow(); - return res.seconds * 1e3 + res.subsecNanos / 1e6; + return opNow(); } class PerformanceEntry { From b566d184fedcd0fae3de19a54adfa5ce09466cc1 Mon Sep 17 00:00:00 2001 From: Benjamin Gruenbaum Date: Mon, 7 Dec 2020 22:22:58 +0200 Subject: [PATCH 020/135] refactor(cli/rt): deduplicate code (#8649) --- cli/rt/01_web_util.js | 4 +-- cli/rt/99_main.js | 43 +++---------------------------- cli/tests/034_onload/main.ts | 3 +++ docs/runtime/program_lifecycle.md | 3 ++- 4 files changed, 10 insertions(+), 43 deletions(-) diff --git a/cli/rt/01_web_util.js b/cli/rt/01_web_util.js index 3076993ff33279..a9573a71db3be9 100644 --- a/cli/rt/01_web_util.js +++ b/cli/rt/01_web_util.js @@ -128,11 +128,11 @@ wrappedHandler.handler = handler; return wrappedHandler; } - function defineEventHandler(emitter, name) { + function defineEventHandler(emitter, name, defaultValue = undefined) { // HTML specification section 8.1.5.1 Object.defineProperty(emitter, `on${name}`, { get() { - return this[handlerSymbol]?.get(name)?.handler; + return this[handlerSymbol]?.get(name)?.handler ?? 
defaultValue; }, set(value) { if (!this[handlerSymbol]) { diff --git a/cli/rt/99_main.js b/cli/rt/99_main.js index f582994c7f11f2..40c9c636f53088 100644 --- a/cli/rt/99_main.js +++ b/cli/rt/99_main.js @@ -31,6 +31,7 @@ delete Object.prototype.__proto__; const denoNs = window.__bootstrap.denoNs; const denoNsUnstable = window.__bootstrap.denoNsUnstable; const errors = window.__bootstrap.errors.errors; + const { defineEventHandler } = window.__bootstrap.webUtil; let windowIsClosing = false; @@ -289,46 +290,8 @@ delete Object.prototype.__proto__; Object.setPrototypeOf(globalThis, Window.prototype); eventTarget.setEventTargetData(globalThis); - const handlerSymbol = Symbol("eventHandlers"); - - function makeWrappedHandler(handler) { - function wrappedHandler(...args) { - if (typeof wrappedHandler.handler !== "function") { - return; - } - return wrappedHandler.handler.call(this, ...args); - } - wrappedHandler.handler = handler; - return wrappedHandler; - } - // TODO(benjamingr) reuse when we can reuse code between web crates - // This function is very similar to `defineEventHandler` in `01_web_util.js` - // but it returns `null` instead of `undefined` is handler is not defined. - function defineEventHandler(emitter, name) { - // HTML specification section 8.1.5.1 - Object.defineProperty(emitter, `on${name}`, { - get() { - return this[handlerSymbol]?.get(name)?.handler ?? null; - }, - set(value) { - if (!this[handlerSymbol]) { - this[handlerSymbol] = new Map(); - } - let handlerWrapper = this[handlerSymbol]?.get(name); - if (handlerWrapper) { - handlerWrapper.handler = value; - } else { - handlerWrapper = makeWrappedHandler(value); - this.addEventListener(name, handlerWrapper); - } - this[handlerSymbol].set(name, handlerWrapper); - }, - configurable: true, - enumerable: true, - }); - } - defineEventHandler(window, "load"); - defineEventHandler(window, "unload"); + defineEventHandler(window, "load", null); + defineEventHandler(window, "unload", null); const { args, noColor, pid, ppid, unstableFlag } = runtimeStart(); diff --git a/cli/tests/034_onload/main.ts b/cli/tests/034_onload/main.ts index db6ca669a3019e..aca38869e1bfce 100644 --- a/cli/tests/034_onload/main.ts +++ b/cli/tests/034_onload/main.ts @@ -1,6 +1,9 @@ import { assert } from "../../../std/testing/asserts.ts"; import "./imported.ts"; +assert(window.hasOwnProperty("onload")); +assert(window.onload === null); + const eventHandler = (e: Event): void => { assert(!e.cancelable); console.log(`got ${e.type} event in event handler (main)`); diff --git a/docs/runtime/program_lifecycle.md b/docs/runtime/program_lifecycle.md index f0ebbd448702a1..72e21c4f4b9216 100644 --- a/docs/runtime/program_lifecycle.md +++ b/docs/runtime/program_lifecycle.md @@ -75,4 +75,5 @@ defined in `imported.ts`. In other words, you can register multiple `window.addEventListener` `"load"` or `"unload"` events, but only the last loaded `window.onload` or `window.onunload` -events will be executed. +event handlers will be executed. It is preferable to use `addEventListener` when +possible for this reason. 
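A minimal sketch of the behavior described above (assuming the standard `window` global of the Deno main runtime): every listener added with `addEventListener` fires, while each assignment to `window.onload` replaces the handler set by the previous assignment, so only the last one runs.

    window.addEventListener("load", () => {
      console.log("first load listener: runs");
    });
    window.addEventListener("load", () => {
      console.log("second load listener: also runs");
    });
    window.onload = () => {
      console.log("replaced by the next assignment: never runs");
    };
    window.onload = () => {
      console.log("last onload assignment: runs");
    };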
From 5eedcb6b8d471e487179ac66d7da9038279884df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 7 Dec 2020 23:27:43 +0100 Subject: [PATCH 021/135] chore(cli): unhide lsp command from CLI (#8647) --- cli/flags.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/cli/flags.rs b/cli/flags.rs index 2210d756560e85..be93015f56d563 100644 --- a/cli/flags.rs +++ b/cli/flags.rs @@ -1086,7 +1086,6 @@ Show documentation for runtime built-ins: fn language_server_subcommand<'a, 'b>() -> App<'a, 'b> { SubCommand::with_name("lsp") - .setting(AppSettings::Hidden) .about("Start the language server") .long_about( r#"Start the Deno language server which will take input From 02762824e62ad1473f9e94fa0db622cbee643251 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 8 Dec 2020 00:36:15 +0100 Subject: [PATCH 022/135] refactor(core): change script name for core.js (#8650) Co-authored-by: Nayeem Rahman --- core/runtime.rs | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/core/runtime.rs b/core/runtime.rs index c03ee9d6fcf9dc..ecac588ca9e80c 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -326,7 +326,9 @@ impl JsRuntime { fn shared_init(&mut self) { if self.needs_init { self.needs_init = false; - self.execute("core.js", include_str!("core.js")).unwrap(); + self + .execute("deno:core/core.js", include_str!("core.js")) + .unwrap(); } } @@ -2584,4 +2586,19 @@ main(); }; }) } + + #[test] + fn test_core_js_stack_frame() { + let mut runtime = JsRuntime::new(RuntimeOptions::default()); + // Call non-existent op so we get error from `core.js` + let error = runtime + .execute( + "core_js_stack_frame.js", + "Deno.core.dispatchByName('non_existent');", + ) + .unwrap_err(); + let error_string = error.to_string(); + // Test that the script specifier is a URL: `deno:`. 
+ assert!(error_string.contains("deno:core/core.js")); + } } From 4e025fd1640d192898792fc99e0d345279823899 Mon Sep 17 00:00:00 2001 From: Ryan Dahl Date: Mon, 7 Dec 2020 20:12:18 -0500 Subject: [PATCH 023/135] Upgrade ring to support arm64 (#8658) --- Cargo.lock | 8 ++++---- cli/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 224d0f322b73fe..06b3c05e281afd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -256,9 +256,9 @@ checksum = "5ba7d7f7b201dfcbc314b14f2176c92f8ba521dab538b40e426ffed25ed7cd80" [[package]] name = "cc" -version = "1.0.59" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66120af515773fb005778dc07c261bd201ec8ce50bd6e7144c927753fe013381" +checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48" [[package]] name = "cfg-if" @@ -2159,9 +2159,9 @@ checksum = "e005d658ad26eacc2b6c506dfde519f4e277e328d0eb3379ca61647d70a8f531" [[package]] name = "ring" -version = "0.16.15" +version = "0.16.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "952cd6b98c85bbc30efa1ba5783b8abf12fec8b3287ffa52605b9432313e34e4" +checksum = "024a1e66fea74c66c66624ee5622a7ff0e4b73a13b4f5c326ddb50c708944226" dependencies = [ "cc", "libc", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 95ffac7fed4ad0..60e0962d00e901 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -62,7 +62,7 @@ lsp-types = { version = "0.84.0", features = ["proposed"] } notify = "5.0.0-pre.3" percent-encoding = "2.1.0" regex = "1.3.9" -ring = "0.16.15" +ring = "0.16.19" rustyline = { version = "7.0.0", default-features = false } rustyline-derive = "0.4.0" semver-parser = "0.9.0" From 8bf3e0f4c6980c0f3ed8b618062657b017a0f609 Mon Sep 17 00:00:00 2001 From: Ryan Dahl Date: Mon, 7 Dec 2020 21:20:01 -0500 Subject: [PATCH 024/135] upgrade rusty_v8 to 0.14.0 (#8663) --- Cargo.lock | 4 ++-- core/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 06b3c05e281afd..d2439244763cf6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2196,9 +2196,9 @@ dependencies = [ [[package]] name = "rusty_v8" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abf06ea79ead18b3f81707b78e6d7813e83c772d0cb4b43caf3fa35a5df7eaab" +checksum = "6bdaa4aeeae3253c3b34486af66527d9982105d79ba57cf9b50b217e7b47a8b6" dependencies = [ "bitflags", "cargo_gn", diff --git a/core/Cargo.toml b/core/Cargo.toml index ed1f71c75d493e..416bbc82da6370 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -19,7 +19,7 @@ indexmap = "1.6.0" lazy_static = "1.4.0" libc = "0.2.77" log = "0.4.11" -rusty_v8 = "0.13.0" +rusty_v8 = "0.14.0" serde_json = { version = "1.0", features = ["preserve_order"] } serde = { version = "1.0", features = ["derive"] } smallvec = "1.4.2" From e94a18240e5b6312358f787c19dffd3006300a4b Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Tue, 8 Dec 2020 11:36:13 +0100 Subject: [PATCH 025/135] feat(lsp): basic support for textDocument/completion (#8651) --- cli/lsp/capabilities.rs | 19 +++- cli/lsp/handlers.rs | 32 ++++++ cli/lsp/mod.rs | 1 + cli/lsp/tsc.rs | 219 +++++++++++++++++++++++++++++++++++- cli/tsc/99_main_compiler.js | 10 ++ cli/tsc/compiler.d.ts | 10 +- 6 files changed, 287 insertions(+), 4 deletions(-) diff --git a/cli/lsp/capabilities.rs b/cli/lsp/capabilities.rs index cf8f150cac6e43..954baaf51b5f09 100644 --- a/cli/lsp/capabilities.rs +++ b/cli/lsp/capabilities.rs @@ -6,6 +6,7 @@ 
///! client. ///! use lsp_types::ClientCapabilities; +use lsp_types::CompletionOptions; use lsp_types::HoverProviderCapability; use lsp_types::OneOf; use lsp_types::SaveOptions; @@ -13,6 +14,7 @@ use lsp_types::ServerCapabilities; use lsp_types::TextDocumentSyncCapability; use lsp_types::TextDocumentSyncKind; use lsp_types::TextDocumentSyncOptions; +use lsp_types::WorkDoneProgressOptions; pub fn server_capabilities( _client_capabilities: &ClientCapabilities, @@ -28,7 +30,22 @@ pub fn server_capabilities( }, )), hover_provider: Some(HoverProviderCapability::Simple(true)), - completion_provider: None, + completion_provider: Some(CompletionOptions { + trigger_characters: Some(vec![ + ".".to_string(), + "\"".to_string(), + "'".to_string(), + "`".to_string(), + "/".to_string(), + "@".to_string(), + "<".to_string(), + "#".to_string(), + ]), + resolve_provider: None, + work_done_progress_options: WorkDoneProgressOptions { + work_done_progress: None, + }, + }), signature_help_provider: None, declaration_provider: None, definition_provider: Some(OneOf::Left(true)), diff --git a/cli/lsp/handlers.rs b/cli/lsp/handlers.rs index 6dd7321c795653..ccda69f7d5a058 100644 --- a/cli/lsp/handlers.rs +++ b/cli/lsp/handlers.rs @@ -12,6 +12,8 @@ use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::ModuleSpecifier; use dprint_plugin_typescript as dprint; +use lsp_types::CompletionParams; +use lsp_types::CompletionResponse; use lsp_types::DocumentFormattingParams; use lsp_types::DocumentHighlight; use lsp_types::DocumentHighlightParams; @@ -187,6 +189,36 @@ pub fn handle_hover( } } +pub fn handle_completion( + state: &mut ServerState, + params: CompletionParams, +) -> Result, AnyError> { + let specifier = + utils::normalize_url(params.text_document_position.text_document.uri); + let line_index = get_line_index(state, &specifier)?; + let server_state = state.snapshot(); + let maybe_completion_info: Option = + serde_json::from_value(tsc::request( + &mut state.ts_runtime, + &server_state, + tsc::RequestMethod::GetCompletions(( + specifier, + text::to_char_pos(&line_index, params.text_document_position.position), + tsc::UserPreferences { + // TODO(lucacasonato): enable this. see https://github.com/denoland/deno/pull/8651 + include_completions_with_insert_text: Some(false), + ..Default::default() + }, + )), + )?)?; + + if let Some(completions) = maybe_completion_info { + Ok(Some(completions.into_completion_response(&line_index))) + } else { + Ok(None) + } +} + pub fn handle_references( state: &mut ServerState, params: ReferenceParams, diff --git a/cli/lsp/mod.rs b/cli/lsp/mod.rs index c26c5d89e08e36..e3092d81520738 100644 --- a/cli/lsp/mod.rs +++ b/cli/lsp/mod.rs @@ -382,6 +382,7 @@ impl ServerState { handlers::handle_goto_definition, )? .on_sync::(handlers::handle_hover)? + .on_sync::(handlers::handle_completion)? .on_sync::(handlers::handle_references)? 
.on::(handlers::handle_formatting) .on::( diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 65f6ebbdb39957..6ed727d9fde38e 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -14,6 +14,7 @@ use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::json_op_sync; use deno_core::serde::Deserialize; +use deno_core::serde::Serialize; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; @@ -229,7 +230,7 @@ fn replace_links(text: &str) -> String { .to_string() } -#[derive(Debug, Deserialize)] +#[derive(Debug, Clone, Deserialize)] pub enum ScriptElementKind { #[serde(rename = "")] Unknown, @@ -301,7 +302,47 @@ pub enum ScriptElementKind { String, } -#[derive(Debug, Deserialize)] +impl From for lsp_types::CompletionItemKind { + fn from(kind: ScriptElementKind) -> Self { + use lsp_types::CompletionItemKind; + + match kind { + ScriptElementKind::PrimitiveType | ScriptElementKind::Keyword => { + CompletionItemKind::Keyword + } + ScriptElementKind::ConstElement => CompletionItemKind::Constant, + ScriptElementKind::LetElement + | ScriptElementKind::VariableElement + | ScriptElementKind::LocalVariableElement + | ScriptElementKind::Alias => CompletionItemKind::Variable, + ScriptElementKind::MemberVariableElement + | ScriptElementKind::MemberGetAccessorElement + | ScriptElementKind::MemberSetAccessorElement => { + CompletionItemKind::Field + } + ScriptElementKind::FunctionElement => CompletionItemKind::Function, + ScriptElementKind::MemberFunctionElement + | ScriptElementKind::ConstructSignatureElement + | ScriptElementKind::CallSignatureElement + | ScriptElementKind::IndexSignatureElement => CompletionItemKind::Method, + ScriptElementKind::EnumElement => CompletionItemKind::Enum, + ScriptElementKind::ModuleElement + | ScriptElementKind::ExternalModuleName => CompletionItemKind::Module, + ScriptElementKind::ClassElement | ScriptElementKind::TypeElement => { + CompletionItemKind::Class + } + ScriptElementKind::InterfaceElement => CompletionItemKind::Interface, + ScriptElementKind::Warning | ScriptElementKind::ScriptElement => { + CompletionItemKind::File + } + ScriptElementKind::Directory => CompletionItemKind::Folder, + ScriptElementKind::String => CompletionItemKind::Constant, + _ => CompletionItemKind::Property, + } + } +} + +#[derive(Debug, Clone, Deserialize)] #[serde(rename_all = "camelCase")] pub struct TextSpan { start: u32, @@ -519,6 +560,104 @@ impl ReferenceEntry { } } +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CompletionInfo { + entries: Vec, + is_member_completion: bool, +} + +impl CompletionInfo { + pub fn into_completion_response( + self, + line_index: &[u32], + ) -> lsp_types::CompletionResponse { + let items = self + .entries + .into_iter() + .map(|entry| entry.into_completion_item(line_index)) + .collect(); + lsp_types::CompletionResponse::Array(items) + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CompletionEntry { + kind: ScriptElementKind, + kind_modifiers: Option, + name: String, + sort_text: String, + insert_text: Option, + replacement_span: Option, + has_action: Option, + source: Option, + is_recommended: Option, +} + +impl CompletionEntry { + pub fn into_completion_item( + self, + line_index: &[u32], + ) -> lsp_types::CompletionItem { + let mut item = lsp_types::CompletionItem { + label: self.name, + kind: Some(self.kind.into()), + sort_text: Some(self.sort_text.clone()), + // TODO(lucacasonato): missing commit_characters + 
..Default::default() + }; + + if let Some(true) = self.is_recommended { + // Make sure isRecommended property always comes first + // https://github.com/Microsoft/vscode/issues/40325 + item.preselect = Some(true); + } else if self.source.is_some() { + // De-prioritze auto-imports + // https://github.com/Microsoft/vscode/issues/40311 + item.sort_text = Some("\u{ffff}".to_string() + &self.sort_text) + } + + match item.kind { + Some(lsp_types::CompletionItemKind::Function) + | Some(lsp_types::CompletionItemKind::Method) => { + item.insert_text_format = Some(lsp_types::InsertTextFormat::Snippet); + } + _ => {} + } + + let mut insert_text = self.insert_text; + let replacement_range: Option = + self.replacement_span.map(|span| span.to_range(line_index)); + + // TODO(lucacasonato): port other special cases from https://github.com/theia-ide/typescript-language-server/blob/fdf28313833cd6216d00eb4e04dc7f00f4c04f09/server/src/completion.ts#L49-L55 + + if let Some(kind_modifiers) = self.kind_modifiers { + if kind_modifiers.contains("\\optional\\") { + if insert_text.is_none() { + insert_text = Some(item.label.clone()); + } + if item.filter_text.is_none() { + item.filter_text = Some(item.label.clone()); + } + item.label += "?"; + } + } + + if let Some(insert_text) = insert_text { + if let Some(replacement_range) = replacement_range { + item.text_edit = Some(lsp_types::CompletionTextEdit::Edit( + lsp_types::TextEdit::new(replacement_range, insert_text), + )); + } else { + item.insert_text = Some(insert_text); + } + } + + item + } +} + #[derive(Debug, Clone, Deserialize)] struct Response { id: usize, @@ -815,6 +954,71 @@ pub fn start(debug: bool) -> Result { Ok(runtime) } +#[derive(Debug, Serialize)] +#[serde(rename_all = "kebab-case")] +#[allow(dead_code)] +pub enum QuotePreference { + Auto, + Double, + Single, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "kebab-case")] +#[allow(dead_code)] +pub enum ImportModuleSpecifierPreference { + Auto, + Relative, + NonRelative, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "kebab-case")] +#[allow(dead_code)] +pub enum ImportModuleSpecifierEnding { + Auto, + Minimal, + Index, + Js, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "kebab-case")] +#[allow(dead_code)] +pub enum IncludePackageJsonAutoImports { + Auto, + On, + Off, +} + +#[derive(Debug, Default, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct UserPreferences { + #[serde(skip_serializing_if = "Option::is_none")] + pub disable_suggestions: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub quote_preference: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub include_completions_for_module_exports: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub include_automatic_optional_chain_completions: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub include_completions_with_insert_text: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub import_module_specifier_preference: + Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub import_module_specifier_ending: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub allow_text_changes_in_new_files: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub provide_prefix_and_suffix_text_for_rename: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub include_package_json_auto_imports: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub provide_refactor_not_applicable_reason: Option, +} + /// 
Methods that are supported by the Language Service in the compiler isolate. pub enum RequestMethod { /// Configure the compilation settings for the server. @@ -833,6 +1037,8 @@ pub enum RequestMethod { GetReferences((ModuleSpecifier, u32)), /// Get declaration information for a specific position. GetDefinition((ModuleSpecifier, u32)), + /// Get completion information at a given position (IntelliSense). + GetCompletions((ModuleSpecifier, u32, UserPreferences)), } impl RequestMethod { @@ -887,6 +1093,15 @@ impl RequestMethod { "specifier": specifier, "position": position, }), + RequestMethod::GetCompletions((specifier, position, preferences)) => { + json!({ + "id": id, + "method": "getCompletions", + "specifier": specifier, + "position": position, + "preferences": preferences, + }) + } } } } diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index f379d6baeba90a..a78b85203ec328 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -514,6 +514,16 @@ delete Object.prototype.__proto__; ), ); } + case "getCompletions": { + return respond( + id, + languageService.getCompletionsAtPosition( + request.specifier, + request.position, + request.preferences, + ), + ); + } case "getDocumentHighlights": { return respond( id, diff --git a/cli/tsc/compiler.d.ts b/cli/tsc/compiler.d.ts index 1a899c291a3c64..a1f4e851cb3982 100644 --- a/cli/tsc/compiler.d.ts +++ b/cli/tsc/compiler.d.ts @@ -48,7 +48,8 @@ declare global { | GetQuickInfoRequest | GetDocumentHighlightsRequest | GetReferencesRequest - | GetDefinitionRequest; + | GetDefinitionRequest + | GetCompletionsRequest; interface BaseLanguageServerRequest { id: number; @@ -100,4 +101,11 @@ declare global { specifier: string; position: number; } + + interface GetCompletionsRequest extends BaseLanguageServerRequest { + method: "getCompletions"; + specifier: string; + position: number; + preferences: ts.UserPreferences; + } } From 656caa2d4fb7b967865a9b296388b312d31b6100 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 8 Dec 2020 13:54:19 +0100 Subject: [PATCH 026/135] chore: release crates (#8662) --- Cargo.lock | 8 ++++---- cli/Cargo.toml | 16 ++++++++-------- core/Cargo.toml | 2 +- op_crates/crypto/Cargo.toml | 4 ++-- op_crates/fetch/Cargo.toml | 4 ++-- op_crates/web/Cargo.toml | 4 ++-- 6 files changed, 19 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d2439244763cf6..14fa655f196cc2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -514,7 +514,7 @@ dependencies = [ [[package]] name = "deno_core" -version = "0.69.0" +version = "0.70.0" dependencies = [ "anyhow", "futures", @@ -532,7 +532,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.3.0" +version = "0.4.0" dependencies = [ "deno_core", "rand 0.7.3", @@ -556,7 +556,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.12.0" +version = "0.13.0" dependencies = [ "deno_core", "reqwest", @@ -583,7 +583,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.20.0" +version = "0.21.0" dependencies = [ "deno_core", "futures", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 60e0962d00e901..5666d981e93146 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -20,10 +20,10 @@ harness = false path = "./bench/main.rs" [build-dependencies] -deno_crypto = { path = "../op_crates/crypto", version = "0.3.0" } -deno_core = { path = "../core", version = "0.69.0" } -deno_web = { path = "../op_crates/web", version = "0.20.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.12.0" } 
+deno_crypto = { path = "../op_crates/crypto", version = "0.4.0" } +deno_core = { path = "../core", version = "0.70.0" } +deno_web = { path = "../op_crates/web", version = "0.21.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.13.0" } regex = "1.3.9" serde = { version = "1.0.116", features = ["derive"] } @@ -32,12 +32,12 @@ winres = "0.1.11" winapi = "0.3.9" [dependencies] -deno_core = { path = "../core", version = "0.69.0" } -deno_crypto = { path = "../op_crates/crypto", version = "0.3.0" } +deno_core = { path = "../core", version = "0.70.0" } +deno_crypto = { path = "../op_crates/crypto", version = "0.4.0" } deno_doc = "0.1.17" -deno_fetch = { path = "../op_crates/fetch", version = "0.12.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.13.0" } deno_lint = "0.2.12" -deno_web = { path = "../op_crates/web", version = "0.20.0" } +deno_web = { path = "../op_crates/web", version = "0.21.0" } atty = "0.2.14" base64 = "0.12.3" diff --git a/core/Cargo.toml b/core/Cargo.toml index 416bbc82da6370..367def1f2f5565 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -1,7 +1,7 @@ # Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. [package] name = "deno_core" -version = "0.69.0" +version = "0.70.0" edition = "2018" description = "A secure JavaScript/TypeScript runtime built with V8, Rust, and Tokio" authors = ["the Deno authors"] diff --git a/op_crates/crypto/Cargo.toml b/op_crates/crypto/Cargo.toml index 0ca0601a25c14c..64c057afbf8783 100644 --- a/op_crates/crypto/Cargo.toml +++ b/op_crates/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.3.0" +version = "0.4.0" edition = "2018" description = "Collection of WebCrypto APIs" authors = ["the Deno authors"] @@ -14,6 +14,6 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.69.0", path = "../../core" } +deno_core = { version = "0.70.0", path = "../../core" } rand = "0.7.3" diff --git a/op_crates/fetch/Cargo.toml b/op_crates/fetch/Cargo.toml index 0d829d82d16c49..49987cb4a8e4aa 100644 --- a/op_crates/fetch/Cargo.toml +++ b/op_crates/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.12.0" +version = "0.13.0" edition = "2018" description = "provides fetch Web API to deno_core" authors = ["the Deno authors"] @@ -14,6 +14,6 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.69.0", path = "../../core" } +deno_core = { version = "0.70.0", path = "../../core" } reqwest = { version = "0.10.8", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli"] } serde = { version = "1.0.116", features = ["derive"] } diff --git a/op_crates/web/Cargo.toml b/op_crates/web/Cargo.toml index 9e03cac7b889e7..560396a0cb4db1 100644 --- a/op_crates/web/Cargo.toml +++ b/op_crates/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.20.0" +version = "0.21.0" edition = "2018" description = "Collection of Web APIs" authors = ["the Deno authors"] @@ -14,7 +14,7 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.69.0", path = "../../core" } +deno_core = { version = "0.70.0", path = "../../core" } idna = "0.2.0" serde = { version = "1.0.116", features = ["derive"] } From df87bf1d6a43ca36b520b45913839fd1f16df595 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 8 Dec 2020 15:37:45 +0100 Subject: [PATCH 027/135] v1.6.0 --- Cargo.lock | 2 +- 
Releases.md | 50 ++++++++++++++++++++++++++++++++++++++++++++++++++ cli/Cargo.toml | 2 +- std/version.ts | 2 +- 4 files changed, 53 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 14fa655f196cc2..21bf5f14690409 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -450,7 +450,7 @@ dependencies = [ [[package]] name = "deno" -version = "1.5.4" +version = "1.6.0" dependencies = [ "atty", "base64 0.12.3", diff --git a/Releases.md b/Releases.md index 6b2684f4bf7a9b..40549d9d3cb653 100644 --- a/Releases.md +++ b/Releases.md @@ -6,6 +6,56 @@ https://github.com/denoland/deno/releases We also have one-line install commands at: https://github.com/denoland/deno_install +### 1.6.0 / 2020.12.08 + +- BREAKING: Make "isolatedModules" setting non-configurable (#8482) +- feat: Add mvp language server (#8515, #8651) +- feat: deno compile (#8539, #8563, #8581) +- feat: Update to TypeScript 4.1 (#7573) +- feat: EventTarget signal support (#8616) +- feat: Add canary support to upgrade subcommand (#8476) +- feat(unstable): Add cbreak option to Deno.setRaw (#8383) +- fix: "onload" event order (#8376) +- fix: Add file URL support for Deno.readLink (#8423) +- fix: Add hygiene pass to transpile pipeline (#8586) +- fix: Require allow-write permissions for unixpackets datagrams & unix socket + (#8511) +- fix: Highlight `async` and `of` in REPL (#8569) +- fix: Make output of deno info --json deterministic (#8483) +- fix: Panic in worker when closing at top level (#8510) +- fix: Support passing cli arguments under `deno eval` (#8547) +- fix: `redirect: "manual"` fetch should return `type: "default"` response + (#8353) +- fix: close() calls sometimes prints results in REPL (#8558) +- fix: watcher doesn't exit when module resolution fails (#8521) +- fix: Fix PermissionDenied error being caught in Websocket constructor (#8402) +- fix: Set User-Agent header in Websocket (#8502, #8470) +- perf: Use minimal op with performance.now() (#8619) +- core: Implement new ResourceTable (#8273) +- core: Add FsModuleLoader that supports loading from filesystem (#8523) +- upgrade rusty_v8 to 0.14.0 (#8663) +- upgrade: deno_doc, deno_lint, dprint, swc (#8552, #8575, #8588) + +Changes in std version 0.80.0: + +- BREAKING(std/bytes): Adjust APIs based on std-wg discussion (#8612) +- feat(std/encoding/csv): Add stringify functionality (#8408) +- feat(std/fs): Re-enable `followSymlinks` on `walk()` (#8479) +- feat(std/http): Add Cookie value validation (#8471) +- feat(std/node): Add "setImmediate" and "clearImmediate" to global scope + (#8566) +- feat(std/node): Port most of node errors (#7934) +- feat(std/node/stream): Add Duplex, Transform, Passthrough, pipeline, finished + and promises (#7940) +- feat(std/wasi): Add return on exit option (#8605) +- feat(std/wasi): Add support for initializing reactors (#8603) +- feat(std/ws): protocol & version support (#8505) +- fix(std/bufio): Remove '\r' at the end of Windows lines (#8447) +- fix(std/encoding): Rewrite toml parser not to use eval() (#8624) +- fix(std/encoding/csv): Correct readme formatting due to dprint issues (#8503) +- fix(std/http): Prevent path traversal (#8474) +- fix(std/node): Inline default objects to ensure correct prototype (#8513) + ### 1.5.4 / 2020.11.23 - feat(unstable): Add deno test --no-run (#8093) diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 5666d981e93146..125c422846fbd9 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno" -version = "1.5.4" +version = "1.6.0" license = "MIT" authors = ["the Deno authors"] 
edition = "2018" diff --git a/std/version.ts b/std/version.ts index 0be67f9fc43060..f9e88e58a04755 100644 --- a/std/version.ts +++ b/std/version.ts @@ -5,4 +5,4 @@ * the cli's API is stable. In the future when std becomes stable, likely we * will match versions with cli as we have in the past. */ -export const VERSION = "0.79.0"; +export const VERSION = "0.80.0"; From 9bff85836cf4052ef9059d62c0bdd164311a270b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 8 Dec 2020 16:33:50 +0100 Subject: [PATCH 028/135] fix: pull .d.ts files from js mod (#8671) --- cli/lsp/tsc.rs | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 6ed727d9fde38e..86c9a298083318 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -56,12 +56,8 @@ pub fn get_asset(asset: &str) -> Option<&'static str> { "/lib.webworker.iterable.d.ts" => inc!("lib.webworker.iterable.d.ts"), // These come from op crates // TODO(@kitsonk) these is even hackier than the rest of this... - "/lib.deno.web.d.ts" => { - Some(include_str!("../../op_crates/web/lib.deno_web.d.ts")) - } - "/lib.deno.fetch.d.ts" => { - Some(include_str!("../../op_crates/fetch/lib.deno_fetch.d.ts")) - } + "/lib.deno.web.d.ts" => Some(js::DENO_WEB_LIB), + "/lib.deno.fetch.d.ts" => Some(js::DENO_FETCH_LIB), // These are included in the snapshot for TypeScript, and could be retrieved // from there? "/lib.d.ts" => inc!("lib.d.ts"), From f15b3d84a562f07581fd350488b2c22dff0ed528 Mon Sep 17 00:00:00 2001 From: Ryan Dahl Date: Tue, 8 Dec 2020 20:29:00 -0500 Subject: [PATCH 029/135] Remove dead code: itest_ignore (#8668) --- cli/tests/integration_tests.rs | 45 +++++++++++----------------------- 1 file changed, 14 insertions(+), 31 deletions(-) diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index cb1bb27a3489b5..c81152ec6435c9 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -7,6 +7,20 @@ use std::process::Command; use tempfile::TempDir; use test_util as util; +macro_rules! itest( + ($name:ident {$( $key:ident: $value:expr,)*}) => { + #[test] + fn $name() { + (util::CheckOutputIntegrationTest { + $( + $key: $value, + )* + .. Default::default() + }).run() + } + } +); + #[test] fn std_tests() { let dir = TempDir::new().expect("tempdir fail"); @@ -2066,37 +2080,6 @@ fn deno_test_no_color() { assert!(out.contains("test result: FAILED. 1 passed; 1 failed; 1 ignored; 0 measured; 0 filtered out")); } -macro_rules! itest( - ($name:ident {$( $key:ident: $value:expr,)*}) => { - #[test] - fn $name() { - (util::CheckOutputIntegrationTest { - $( - $key: $value, - )* - .. Default::default() - }).run() - } - } -); - -// Unfortunately #[ignore] doesn't work with itest! -#[allow(unused)] -macro_rules! itest_ignore( - ($name:ident {$( $key:ident: $value:expr,)*}) => { - #[ignore] - #[test] - fn $name() { - (util::CheckOutputIntegrationTest { - $( - $key: $value, - )* - .. Default::default() - }).run() - } - } -); - itest!(_001_hello { args: "run --reload 001_hello.js", output: "001_hello.js.out", From b1379b7de3045000c1f4fd76a503b4e639946348 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 9 Dec 2020 15:55:05 +0100 Subject: [PATCH 030/135] test(core): type aliases in OpState (#8653) This commit adds a test case to core/gotham_state.rs that shows that type aliases can't be used reliably. Instead wrapper types should be used. 
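A minimal sketch of the difference (the names below are invented for illustration, and the snippet assumes the `GothamState` test-module context used by the new test): aliases of the same underlying type share one `TypeId`, so a second `put` overwrites the first, while a newtype wrapper occupies its own slot.

    #[test]
    fn alias_vs_wrapper() {
      type SecondsAlias = u64; // alias: same TypeId as plain u64
      struct SecondsWrapper(u64); // newtype: a distinct type with its own TypeId

      let mut state = GothamState::default();
      state.put::<SecondsAlias>(1);
      state.put(2u64); // same TypeId as the line above, so it overwrites it
      assert_eq!(state.take::<SecondsAlias>(), 2);

      state.put(SecondsWrapper(1)); // stored under its own TypeId
      assert_eq!(state.take::<SecondsWrapper>().0, 1);
    }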
--- core/gotham_state.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/core/gotham_state.rs b/core/gotham_state.rs index dfa3b3ea7cad51..e42664223893ef 100644 --- a/core/gotham_state.rs +++ b/core/gotham_state.rs @@ -94,6 +94,9 @@ mod tests { value: &'static str, } + type Alias1 = String; + type Alias2 = String; + #[test] fn put_borrow1() { let mut state = GothamState::default(); @@ -165,4 +168,14 @@ mod tests { assert!(state.try_borrow_mut::().is_none()); assert!(state.try_borrow::().is_none()); } + + #[test] + fn type_alias() { + let mut state = GothamState::default(); + state.put::("alias1".to_string()); + state.put::("alias2".to_string()); + assert_eq!(state.take::(), "alias2"); + assert!(state.try_take::().is_none()); + assert!(state.try_take::().is_none()); + } } From b200e6fc3e591f67646832adb9bbf129ee2b2761 Mon Sep 17 00:00:00 2001 From: Bert Belder Date: Thu, 3 Dec 2020 23:52:55 +0100 Subject: [PATCH 031/135] core: add plumbing for canceling ops when closing a resource (#8661) --- Cargo.lock | 7 +- core/Cargo.toml | 3 +- core/async_cancel.rs | 710 +++++++++++++++++++++++++++ core/async_cell.rs | 74 +-- core/examples/http_bench_bin_ops.rs | 91 ++-- core/examples/http_bench_json_ops.rs | 91 ++-- core/lib.rs | 8 + core/resources2.rs | 7 +- 8 files changed, 876 insertions(+), 115 deletions(-) create mode 100644 core/async_cancel.rs diff --git a/Cargo.lock b/Cargo.lock index 21bf5f14690409..3d885d3615f9ad 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -522,11 +522,12 @@ dependencies = [ "lazy_static", "libc", "log", + "pin-project 1.0.2", "rusty_v8", "serde", "serde_json", "smallvec", - "tokio 0.3.4", + "tokio 0.3.5", "url", ] @@ -2944,9 +2945,9 @@ dependencies = [ [[package]] name = "tokio" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dfe2523e6fa84ddf5e688151d4e5fddc51678de9752c6512a24714c23818d61" +checksum = "a12a3eb39ee2c231be64487f1fcbe726c8f2514876a55480a5ab8559fc374252" dependencies = [ "autocfg 1.0.1", "bytes 0.6.0", diff --git a/core/Cargo.toml b/core/Cargo.toml index 367def1f2f5565..21cc5f1f7b8618 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -24,6 +24,7 @@ serde_json = { version = "1.0", features = ["preserve_order"] } serde = { version = "1.0", features = ["derive"] } smallvec = "1.4.2" url = { version = "2.1.1", features = ["serde"] } +pin-project = "1.0.2" [[example]] name = "http_bench_bin_ops" @@ -35,4 +36,4 @@ path = "examples/http_bench_json_ops.rs" # These dependendencies are only used for the 'http_bench_*_ops' examples. [dev-dependencies] -tokio = { version = "0.3.4", features = ["full"] } +tokio = { version = "0.3.5", features = ["full"] } diff --git a/core/async_cancel.rs b/core/async_cancel.rs new file mode 100644 index 00000000000000..90cb0c41ff56de --- /dev/null +++ b/core/async_cancel.rs @@ -0,0 +1,710 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
+ +use crate::RcLike; +use futures::future::FusedFuture; +use futures::future::Future; +use futures::future::TryFuture; +use futures::task::Context; +use futures::task::Poll; +use pin_project::pin_project; +use std::any::type_name; +use std::error::Error; +use std::fmt; +use std::fmt::Display; +use std::fmt::Formatter; +use std::io; +use std::pin::Pin; +use std::rc::Rc; + +use self::internal as i; + +#[derive(Debug, Default)] +pub struct CancelHandle { + node: i::Node, +} + +impl CancelHandle { + pub fn new() -> Self { + Default::default() + } + + pub fn new_rc() -> Rc { + Rc::new(Self::new()) + } + + /// Cancel all cancelable futures that are bound to this handle. Note that + /// this method does not require a mutable reference to the `CancelHandle`. + pub fn cancel(&self) { + self.node.cancel(); + } + + pub fn is_canceled(&self) -> bool { + self.node.is_canceled() + } +} + +#[pin_project(project = CancelableProjection)] +#[derive(Debug)] +pub enum Cancelable { + Pending { + #[pin] + future: F, + #[pin] + registration: i::Registration, + }, + Terminated, +} + +impl Future for Cancelable { + type Output = Result; + + fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll { + let poll_result = match self.as_mut().project() { + CancelableProjection::Pending { + future, + registration, + } => Self::poll_pending(future, registration, cx), + CancelableProjection::Terminated => { + panic!("{}::poll() called after completion", type_name::()) + } + }; + // Fuse: if this Future is completed or canceled, make sure the inner + // `future` and `registration` fields are dropped in order to unlink it from + // its cancel handle. + if matches!(poll_result, Poll::Ready(_)) { + self.set(Cancelable::Terminated) + } + poll_result + } +} + +impl FusedFuture for Cancelable { + fn is_terminated(&self) -> bool { + matches!(self, Self::Terminated) + } +} + +#[pin_project(project = TryCancelableProjection)] +#[derive(Debug)] +pub struct TryCancelable { + #[pin] + inner: Cancelable, +} + +impl Future for TryCancelable +where + F: Future>, + Canceled: Into, +{ + type Output = F::Output; + + fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll { + let TryCancelableProjection { inner } = self.project(); + match inner.poll(cx) { + Poll::Pending => Poll::Pending, + Poll::Ready(Ok(result)) => Poll::Ready(result), + Poll::Ready(Err(err)) => Poll::Ready(Err(err.into())), + } + } +} + +impl FusedFuture for TryCancelable +where + F: Future>, + Canceled: Into, +{ + fn is_terminated(&self) -> bool { + self.inner.is_terminated() + } +} + +pub trait CancelFuture +where + Self: Future + Sized, +{ + fn or_cancel>( + self, + cancel_handle: H, + ) -> Cancelable { + Cancelable::new(self, cancel_handle.into()) + } +} + +impl CancelFuture for F where F: Future {} + +pub trait CancelTryFuture +where + Self: TryFuture + Sized, + Canceled: Into, +{ + fn try_or_cancel>( + self, + cancel_handle: H, + ) -> TryCancelable { + TryCancelable::new(self, cancel_handle.into()) + } +} + +impl CancelTryFuture for F +where + F: TryFuture, + Canceled: Into, +{ +} + +#[derive(Copy, Clone, Default, Debug, Eq, Hash, PartialEq)] +pub struct Canceled; + +impl Display for Canceled { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + write!(f, "operation canceled") + } +} + +impl Error for Canceled {} + +impl From for io::Error { + fn from(_: Canceled) -> Self { + io::Error::new(io::ErrorKind::Interrupted, Canceled) + } +} + +mod internal { + use super::CancelHandle; + use super::Cancelable; + use super::Canceled; + use super::TryCancelable; + 
use crate::RcRef; + use futures::future::Future; + use futures::task::Context; + use futures::task::Poll; + use futures::task::Waker; + use pin_project::pin_project; + use std::any::Any; + use std::cell::UnsafeCell; + use std::marker::PhantomPinned; + use std::mem::replace; + use std::pin::Pin; + use std::ptr::NonNull; + use std::rc::Rc; + use std::rc::Weak; + + impl Cancelable { + pub(super) fn new(future: F, cancel_handle: RcRef) -> Self { + let head_node = RcRef::map(cancel_handle, |r| &r.node); + let registration = Registration::WillRegister { head_node }; + Self::Pending { + future, + registration, + } + } + + pub(super) fn poll_pending( + future: Pin<&mut F>, + mut registration: Pin<&mut Registration>, + cx: &mut Context, + ) -> Poll> { + // If this future is being polled for the first time, perform an extra + // cancellation check _before_ polling the inner future. The reason to do + // this is that polling the inner future for the first time might start + // some activity that cannot actually be canceled (e.g. running a compute + // job in a thread pool), so we should try to never start it at all. + match &*registration { + Registration::WillRegister { head_node } if head_node.is_canceled() => { + return Poll::Ready(Err(Canceled)); + } + _ => {} + } + + match future.poll(cx) { + Poll::Ready(res) => return Poll::Ready(Ok(res)), + Poll::Pending => {} + } + + // Register this future with its `CancelHandle`, saving the `Waker` that + // can be used to make the runtime poll this future when it is canceled. + // When already registered, update the stored `Waker` if necessary. + let head_node = match &*registration { + Registration::WillRegister { .. } => { + match registration.as_mut().project_replace(Default::default()) { + RegistrationProjectionOwned::WillRegister { head_node } => { + Some(head_node) + } + _ => unreachable!(), + } + } + _ => None, + }; + let node = match registration.project() { + RegistrationProjection::Registered { node } => node, + _ => unreachable!(), + }; + node.register(cx.waker(), head_node)?; + + Poll::Pending + } + } + + impl TryCancelable { + pub(super) fn new(future: F, cancel_handle: RcRef) -> Self { + Self { + inner: Cancelable::new(future, cancel_handle), + } + } + } + + #[pin_project(project = RegistrationProjection, + project_replace = RegistrationProjectionOwned)] + #[derive(Debug)] + pub enum Registration { + WillRegister { + head_node: RcRef, + }, + Registered { + #[pin] + node: Node, + }, + } + + impl Default for Registration { + fn default() -> Self { + Self::Registered { + node: Default::default(), + } + } + } + + #[derive(Debug)] + pub struct Node { + inner: UnsafeCell, + _pin: PhantomPinned, + } + + impl Node { + /// If necessary, register a `Cancelable` node with a `CancelHandle`, and + /// save or update the `Waker` that can wake with this cancelable future. + pub fn register( + &self, + waker: &Waker, + head_rc: Option>, + ) -> Result<(), Canceled> { + match head_rc.as_ref().map(RcRef::split) { + Some((head, rc)) => { + // Register this `Cancelable` node with a `CancelHandle` head node. + assert_ne!(self, head); + let self_inner = unsafe { &mut *self.inner.get() }; + let head_inner = unsafe { &mut *head.inner.get() }; + self_inner.link(waker, head_inner, rc) + } + None => { + // This `Cancelable` has already been linked to a `CancelHandle` head + // node; just update our stored `Waker` if necessary. 
+ let inner = unsafe { &mut *self.inner.get() }; + inner.update_waker(waker) + } + } + } + + pub fn cancel(&self) { + let inner = unsafe { &mut *self.inner.get() }; + inner.cancel(); + } + + pub fn is_canceled(&self) -> bool { + let inner = unsafe { &mut *self.inner.get() }; + inner.is_canceled() + } + } + + impl Default for Node { + fn default() -> Self { + Self { + inner: UnsafeCell::new(NodeInner::Unlinked), + _pin: PhantomPinned, + } + } + } + + impl Drop for Node { + fn drop(&mut self) { + let inner = unsafe { &mut *self.inner.get() }; + inner.unlink(); + } + } + + impl PartialEq for Node { + fn eq(&self, other: &Self) -> bool { + self as *const _ == other as *const _ + } + } + + #[derive(Debug)] + enum NodeInner { + Unlinked, + Linked { + kind: NodeKind, + prev: NonNull, + next: NonNull, + }, + Canceled, + } + + impl NodeInner { + fn as_non_null(&mut self) -> NonNull { + NonNull::from(self) + } + + fn link( + &mut self, + waker: &Waker, + head: &mut Self, + rc_pin: &Rc, + ) -> Result<(), Canceled> { + // The future should not have been linked to a cancel handle before. + assert!(matches!(self, NodeInner::Unlinked)); + + match head { + NodeInner::Unlinked => { + *head = NodeInner::Linked { + kind: NodeKind::head(rc_pin), + prev: self.as_non_null(), + next: self.as_non_null(), + }; + *self = NodeInner::Linked { + kind: NodeKind::item(waker), + prev: head.as_non_null(), + next: head.as_non_null(), + }; + Ok(()) + } + NodeInner::Linked { + kind: NodeKind::Head { .. }, + prev: next_prev_nn, + .. + } => { + let prev = unsafe { &mut *next_prev_nn.as_ptr() }; + match prev { + NodeInner::Linked { + kind: NodeKind::Item { .. }, + next: prev_next_nn, + .. + } => { + *self = NodeInner::Linked { + kind: NodeKind::item(waker), + prev: replace(next_prev_nn, self.as_non_null()), + next: replace(prev_next_nn, self.as_non_null()), + }; + Ok(()) + } + _ => unreachable!(), + } + } + NodeInner::Canceled => Err(Canceled), + _ => unreachable!(), + } + } + + fn update_waker(&mut self, new_waker: &Waker) -> Result<(), Canceled> { + match self { + NodeInner::Unlinked => Ok(()), + NodeInner::Linked { + kind: NodeKind::Item { waker }, + .. + } => { + if !waker.will_wake(new_waker) { + *waker = new_waker.clone(); + } + Ok(()) + } + NodeInner::Canceled => Err(Canceled), + _ => unreachable!(), + } + } + + /// If this node is linked to other nodes, remove it from the chain. This + /// method is called (only) by the drop handler for `Node`. It is suitable + /// for both 'head' and 'item' nodes. + fn unlink(&mut self) { + if let NodeInner::Linked { + prev: mut prev_nn, + next: mut next_nn, + .. + } = replace(self, NodeInner::Unlinked) + { + if prev_nn == next_nn { + // There were only two nodes in this chain; after unlinking ourselves + // the other node is no longer linked. + let other = unsafe { prev_nn.as_mut() }; + *other = NodeInner::Unlinked; + } else { + // The chain had more than two nodes. + match unsafe { prev_nn.as_mut() } { + NodeInner::Linked { + next: prev_next_nn, .. + } => { + *prev_next_nn = next_nn; + } + _ => unreachable!(), + } + match unsafe { next_nn.as_mut() } { + NodeInner::Linked { + prev: next_prev_nn, .. + } => { + *next_prev_nn = prev_nn; + } + _ => unreachable!(), + } + } + } + } + + /// Mark this node and all linked nodes for cancellation. Note that `self` + /// must refer to a head (`CancelHandle`) node. + fn cancel(&mut self) { + let mut head_nn = NonNull::from(self); + let mut item_nn; + + // Mark the head node as canceled. 
+ match replace(unsafe { head_nn.as_mut() }, NodeInner::Canceled) { + NodeInner::Linked { + kind: NodeKind::Head { .. }, + next: next_nn, + .. + } => item_nn = next_nn, + NodeInner::Unlinked | NodeInner::Canceled => return, + _ => unreachable!(), + }; + + // Cancel all item nodes in the chain, waking each stored `Waker`. + while item_nn != head_nn { + match replace(unsafe { item_nn.as_mut() }, NodeInner::Canceled) { + NodeInner::Linked { + kind: NodeKind::Item { waker }, + next: next_nn, + .. + } => { + waker.wake(); + item_nn = next_nn; + } + _ => unreachable!(), + } + } + } + + /// Returns true if this node has been marked for cancellation. Note that + /// `self` must refer to a head (`CancelHandle`) node. + fn is_canceled(&self) -> bool { + match self { + NodeInner::Unlinked => false, + NodeInner::Linked { + kind: NodeKind::Head { .. }, + .. + } => false, + NodeInner::Canceled => true, + _ => unreachable!(), + } + } + } + + #[derive(Debug)] + enum NodeKind { + /// In a chain of linked nodes, the "head" node is owned by the + /// `CancelHandle`. A chain usually contains at most one head node; however + /// when a `CancelHandle` is dropped before the futures associated with it + /// are dropped, a chain may temporarily contain no head node at all. + Head { + /// The `weak_pin` field adds adds a weak reference to the `Rc` guarding + /// the heap allocation that contains the `CancelHandle`. Without this + /// extra weak reference, `Rc::get_mut()` might succeed and allow the + /// `CancelHandle` to be moved when it isn't safe to do so. + weak_pin: Weak, + }, + /// All item nodes in a chain are associated with a `Cancelable` head node. + Item { + /// If this future indeed does get canceled, the waker is needed to make + /// sure that the canceled future gets polled as soon as possible. 
+ waker: Waker, + }, + } + + impl NodeKind { + fn head(rc_pin: &Rc) -> Self { + let weak_pin = Rc::downgrade(rc_pin); + Self::Head { weak_pin } + } + + fn item(waker: &Waker) -> Self { + let waker = waker.clone(); + Self::Item { waker } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::error::AnyError; + use futures::future::pending; + use futures::future::poll_fn; + use futures::future::ready; + use futures::future::FutureExt; + use futures::future::TryFutureExt; + use futures::select; + use futures::task::noop_waker_ref; + use futures::task::Context; + use futures::task::Poll; + use std::convert::Infallible as Never; + use std::io; + use tokio::net::TcpStream; + use tokio::spawn; + + fn box_fused<'a, F: FusedFuture + 'a>( + future: F, + ) -> Pin + 'a>> { + Box::pin(future) + } + + async fn ready_in_n(name: &str, count: usize) -> &str { + let mut remaining = count as isize; + poll_fn(|_| { + assert!(remaining >= 0); + if remaining == 0 { + Poll::Ready(name) + } else { + remaining -= 1; + Poll::Pending + } + }) + .await + } + + #[test] + fn cancel_future() { + let cancel_now = CancelHandle::new_rc(); + let cancel_at_0 = CancelHandle::new_rc(); + let cancel_at_1 = CancelHandle::new_rc(); + let cancel_at_4 = CancelHandle::new_rc(); + let cancel_never = CancelHandle::new_rc(); + + cancel_now.cancel(); + + let mut futures = vec![ + box_fused(ready("A").or_cancel(&cancel_now)), + box_fused(ready("B").or_cancel(&cancel_at_0)), + box_fused(ready("C").or_cancel(&cancel_at_1)), + box_fused( + ready_in_n("D", 0) + .or_cancel(&cancel_never) + .try_or_cancel(&cancel_now), + ), + box_fused( + ready_in_n("E", 1) + .or_cancel(&cancel_at_1) + .try_or_cancel(&cancel_at_1), + ), + box_fused(ready_in_n("F", 2).or_cancel(&cancel_at_1)), + box_fused(ready_in_n("G", 3).or_cancel(&cancel_at_4)), + box_fused(ready_in_n("H", 4).or_cancel(&cancel_at_4)), + box_fused(ready_in_n("I", 5).or_cancel(&cancel_at_4)), + box_fused(ready_in_n("J", 5).map(Ok)), + box_fused(ready_in_n("K", 5).or_cancel(cancel_never)), + ]; + + let mut cx = Context::from_waker(noop_waker_ref()); + + for i in 0..=5 { + match i { + 0 => cancel_at_0.cancel(), + 1 => cancel_at_1.cancel(), + 4 => cancel_at_4.cancel(), + 2 | 3 | 5 => {} + _ => unreachable!(), + } + + let results = futures + .iter_mut() + .filter(|fut| !fut.is_terminated()) + .filter_map(|fut| match fut.poll_unpin(&mut cx) { + Poll::Pending => None, + Poll::Ready(res) => Some(res), + }) + .collect::>(); + + match i { + 0 => assert_eq!( + results, + [Err(Canceled), Err(Canceled), Ok("C"), Err(Canceled)] + ), + 1 => assert_eq!(results, [Ok("E"), Err(Canceled)]), + 2 => assert_eq!(results, []), + 3 => assert_eq!(results, [Ok("G")]), + 4 => assert_eq!(results, [Ok("H"), Err(Canceled)]), + 5 => assert_eq!(results, [Ok("J"), Ok("K")]), + _ => unreachable!(), + } + } + + assert_eq!(futures.into_iter().any(|fut| !fut.is_terminated()), false); + + let cancel_handles = [cancel_now, cancel_at_0, cancel_at_1, cancel_at_4]; + assert_eq!(cancel_handles.iter().any(|c| !c.is_canceled()), false); + } + + #[tokio::test] + async fn cancel_try_future() { + { + // Cancel a spawned task before it actually runs. 
+ let cancel_handle = Rc::new(CancelHandle::new()); + let future = spawn(async { panic!("the task should not be spawned") }) + .map_err(AnyError::from) + .try_or_cancel(&cancel_handle); + cancel_handle.cancel(); + let error = future.await.unwrap_err(); + assert!(error.downcast_ref::().is_some()); + assert_eq!(error.to_string().as_str(), "operation canceled"); + } + + { + // Cancel a network I/O future right after polling it. + let cancel_handle = Rc::new(CancelHandle::new()); + let result = loop { + select! { + r = TcpStream::connect("1.2.3.4:12345") + .try_or_cancel(&cancel_handle) => break r, + default => cancel_handle.cancel(), + }; + }; + let error = result.unwrap_err(); + assert_eq!(error.kind(), io::ErrorKind::Interrupted); + assert_eq!(error.to_string().as_str(), "operation canceled"); + } + } + + #[test] + fn cancel_handle_pinning() { + let mut cancel_handle = CancelHandle::new_rc(); + + // There is only one reference to `cancel_handle`, so `Rc::get_mut()` should + // succeed. + assert!(Rc::get_mut(&mut cancel_handle).is_some()); + + let mut future = pending::().or_cancel(&cancel_handle); + let future = unsafe { Pin::new_unchecked(&mut future) }; + + // There are two `Rc` references now, so this fails. + assert!(Rc::get_mut(&mut cancel_handle).is_none()); + + let mut cx = Context::from_waker(noop_waker_ref()); + assert!(future.poll(&mut cx).is_pending()); + + // Polling `future` has established a link between the future and + // `cancel_handle`, so both values should be pinned at this point. + assert!(Rc::get_mut(&mut cancel_handle).is_none()); + + cancel_handle.cancel(); + + // Canceling or dropping the associated future(s) unlinks them from the + // cancel handle, therefore `cancel_handle` can now safely be moved again. + assert!(Rc::get_mut(&mut cancel_handle).is_some()); + } +} diff --git a/core/async_cell.rs b/core/async_cell.rs index a140dceb19ffb8..bf62692ed85c17 100644 --- a/core/async_cell.rs +++ b/core/async_cell.rs @@ -126,6 +126,7 @@ impl RcRef> { /// let foo_rc: RcRef = RcRef::map(stuff_rc.clone(), |v| &v.foo); /// let bar_rc: RcRef = RcRef::map(stuff_rc, |v| &v.bar); /// ``` +#[derive(Debug)] pub struct RcRef { rc: Rc, value: *const T, @@ -136,7 +137,7 @@ impl RcRef { Self::from(Rc::new(value)) } - pub fn map, F: FnOnce(&S) -> &T>( + pub fn map, F: FnOnce(&S) -> &T>( source: R, map_fn: F, ) -> RcRef { @@ -144,6 +145,11 @@ impl RcRef { let value = map_fn(unsafe { &*value }); RcRef { rc, value } } + + pub(crate) fn split(rc_ref: &Self) -> (&T, &Rc) { + let &Self { ref rc, value } = rc_ref; + (unsafe { &*value }, rc) + } } impl Default for RcRef { @@ -152,6 +158,21 @@ impl Default for RcRef { } } +impl Clone for RcRef { + fn clone(&self) -> Self { + Self { + rc: self.rc.clone(), + value: self.value, + } + } +} + +impl From<&RcRef> for RcRef { + fn from(rc_ref: &RcRef) -> Self { + rc_ref.clone() + } +} + impl From> for RcRef { fn from(rc: Rc) -> Self { Self { @@ -161,12 +182,9 @@ impl From> for RcRef { } } -impl Clone for RcRef { - fn clone(&self) -> Self { - Self { - rc: self.rc.clone(), - value: self.value, - } +impl From<&Rc> for RcRef { + fn from(rc: &Rc) -> Self { + rc.clone().into() } } @@ -189,8 +207,18 @@ impl AsRef for RcRef { } } +/// The `RcLike` trait provides an abstraction over `std::rc::Rc` and `RcRef`, +/// so that applicable methods can operate on either type. 
+pub trait RcLike: AsRef + Into> {} + +impl RcLike for Rc {} +impl RcLike for RcRef {} +impl RcLike for &Rc {} +impl RcLike for &RcRef {} + mod internal { use super::AsyncRefCell; + use super::RcLike; use super::RcRef; use futures::future::Future; use futures::ready; @@ -204,32 +232,29 @@ mod internal { use std::ops::Deref; use std::ops::DerefMut; use std::pin::Pin; - use std::rc::Rc; impl AsyncRefCell { /// Borrow the cell's contents synchronouslym without creating an /// intermediate future. If the cell has already been borrowed and either /// the existing or the requested borrow is exclusive, this function returns - /// `None`. - pub(super) fn borrow_sync< - M: BorrowModeTrait, - R: RcLike>, - >( - cell: &R, + /// `None`. + pub fn borrow_sync>>( + cell: R, ) -> Option> { + let cell_ref = cell.as_ref(); // Don't allow synchronous borrows to cut in line; if there are any // enqueued waiters, return `None`, even if the current borrow is a shared // one and the requested borrow is too. - let waiters = unsafe { &mut *cell.waiters.as_ptr() }; + let waiters = unsafe { &mut *cell_ref.waiters.as_ptr() }; if waiters.is_empty() { // There are no enqueued waiters, but it is still possible that the cell // is currently borrowed. If there are no current borrows, or both the // existing and requested ones are shared, `try_add()` returns the // adjusted borrow count. let new_borrow_count = - cell.borrow_count.get().try_add(M::borrow_mode())?; - cell.borrow_count.set(new_borrow_count); - Some(AsyncBorrowImpl::::new(cell.clone().into())) + cell_ref.borrow_count.get().try_add(M::borrow_mode())?; + cell_ref.borrow_count.set(new_borrow_count); + Some(AsyncBorrowImpl::::new(cell.into())) } else { None } @@ -359,10 +384,10 @@ mod internal { } impl AsyncBorrowFutureImpl { - pub fn new>>(cell: &R) -> Self { + pub fn new>>(cell: R) -> Self { Self { - cell: Some(cell.clone().into()), - id: cell.create_waiter::(), + id: cell.as_ref().create_waiter::(), + cell: Some(cell.into()), _phantom: PhantomData, } } @@ -561,13 +586,6 @@ mod internal { self.waker.take() } } - - /// The `RcLike` trait provides an abstraction over `std::rc::Rc` and `RcRef`, - /// so that applicable methods can operate on either type. - pub trait RcLike: Clone + Deref + Into> {} - - impl RcLike for Rc {} - impl RcLike for RcRef {} } #[cfg(test)] diff --git a/core/examples/http_bench_bin_ops.rs b/core/examples/http_bench_bin_ops.rs index 9af74d98006c04..1d7a76c3d03fae 100644 --- a/core/examples/http_bench_bin_ops.rs +++ b/core/examples/http_bench_bin_ops.rs @@ -3,10 +3,10 @@ #[macro_use] extern crate log; -use deno_core::AsyncMutFuture; use deno_core::AsyncRefCell; -use deno_core::AsyncRefFuture; use deno_core::BufVec; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; use deno_core::JsRuntime; use deno_core::Op; use deno_core::OpState; @@ -46,51 +46,65 @@ impl log::Log for Logger { fn flush(&self) {} } -// Note: it isn't actually necessary to wrap the `tokio::net::TcpListener` in -// a cell, because it only supports one op (`accept`) which does not require -// a mutable reference to the listener. -struct TcpListener(AsyncRefCell); - -impl Resource for TcpListener {} +// Note: a `tokio::net::TcpListener` doesn't need to be wrapped in a cell, +// because it only supports one op (`accept`) which does not require a mutable +// reference to the listener. +struct TcpListener { + inner: tokio::net::TcpListener, + cancel: CancelHandle, +} impl TcpListener { - /// Returns a future that yields a shared borrow of the TCP listener. 
- fn borrow(self: Rc) -> AsyncRefFuture { - RcRef::map(self, |r| &r.0).borrow() + async fn accept(self: Rc) -> Result { + let cancel = RcRef::map(&self, |r| &r.cancel); + let stream = self.inner.accept().try_or_cancel(cancel).await?.0.into(); + Ok(stream) + } +} + +impl Resource for TcpListener { + fn close(self: Rc) { + self.cancel.cancel(); } } impl TryFrom for TcpListener { type Error = Error; - fn try_from(l: std::net::TcpListener) -> Result { - tokio::net::TcpListener::try_from(l) - .map(AsyncRefCell::new) - .map(Self) + fn try_from( + std_listener: std::net::TcpListener, + ) -> Result { + tokio::net::TcpListener::try_from(std_listener).map(|tokio_listener| Self { + inner: tokio_listener, + cancel: Default::default(), + }) } } struct TcpStream { rd: AsyncRefCell, wr: AsyncRefCell, + // When a `TcpStream` resource is closed, all pending 'read' ops are + // canceled, while 'write' ops are allowed to complete. Therefore only + // 'read' futures are attached to this cancel handle. + cancel: CancelHandle, } -impl Resource for TcpStream {} - impl TcpStream { - /// Returns a future that yields an exclusive borrow of the read end of the - /// tcp stream. - fn rd_borrow_mut( - self: Rc, - ) -> AsyncMutFuture { - RcRef::map(self, |r| &r.rd).borrow_mut() + async fn read(self: Rc, buf: &mut [u8]) -> Result { + let mut rd = RcRef::map(&self, |r| &r.rd).borrow_mut().await; + let cancel = RcRef::map(self, |r| &r.cancel); + rd.read(buf).try_or_cancel(cancel).await } - /// Returns a future that yields an exclusive borrow of the write end of the - /// tcp stream. - fn wr_borrow_mut( - self: Rc, - ) -> AsyncMutFuture { - RcRef::map(self, |r| &r.wr).borrow_mut() + async fn write(self: Rc, buf: &[u8]) -> Result { + let mut wr = RcRef::map(self, |r| &r.wr).borrow_mut().await; + wr.write(buf).await + } +} + +impl Resource for TcpStream { + fn close(self: Rc) { + self.cancel.cancel() } } @@ -100,6 +114,7 @@ impl From for TcpStream { Self { rd: rd.into(), wr: wr.into(), + cancel: Default::default(), } } } @@ -179,14 +194,12 @@ async fn op_accept( ) -> Result { debug!("accept rid={}", rid); - let listener_rc = state + let listener = state .borrow() .resource_table_2 .get::(rid) .ok_or_else(bad_resource_id)?; - let listener_ref = listener_rc.borrow().await; - - let stream: TcpStream = listener_ref.accept().await?.0.into(); + let stream = listener.accept().await?; let rid = state.borrow_mut().resource_table_2.add(stream); Ok(rid) } @@ -199,14 +212,12 @@ async fn op_read( assert_eq!(bufs.len(), 1, "Invalid number of arguments"); debug!("read rid={}", rid); - let stream_rc = state + let stream = state .borrow() .resource_table_2 .get::(rid) .ok_or_else(bad_resource_id)?; - let mut rd_stream_mut = stream_rc.rd_borrow_mut().await; - - rd_stream_mut.read(&mut bufs[0]).await + stream.read(&mut bufs[0]).await } async fn op_write( @@ -217,14 +228,12 @@ async fn op_write( assert_eq!(bufs.len(), 1, "Invalid number of arguments"); debug!("write rid={}", rid); - let stream_rc = state + let stream = state .borrow() .resource_table_2 .get::(rid) .ok_or_else(bad_resource_id)?; - let mut wr_stream_mut = stream_rc.wr_borrow_mut().await; - - wr_stream_mut.write(&bufs[0]).await + stream.write(&bufs[0]).await } fn register_op_bin_sync( diff --git a/core/examples/http_bench_json_ops.rs b/core/examples/http_bench_json_ops.rs index 77f5b9dbe67110..c4fcd636367fbb 100644 --- a/core/examples/http_bench_json_ops.rs +++ b/core/examples/http_bench_json_ops.rs @@ -5,10 +5,10 @@ extern crate log; use deno_core::error::bad_resource_id; use 
deno_core::error::AnyError; -use deno_core::AsyncMutFuture; use deno_core::AsyncRefCell; -use deno_core::AsyncRefFuture; use deno_core::BufVec; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; use deno_core::JsRuntime; use deno_core::OpState; use deno_core::RcRef; @@ -41,51 +41,65 @@ impl log::Log for Logger { fn flush(&self) {} } -// Note: it isn't actually necessary to wrap the `tokio::net::TcpListener` in -// a cell, because it only supports one op (`accept`) which does not require -// a mutable reference to the listener. -struct TcpListener(AsyncRefCell); - -impl Resource for TcpListener {} +// Note: a `tokio::net::TcpListener` doesn't need to be wrapped in a cell, +// because it only supports one op (`accept`) which does not require a mutable +// reference to the listener. +struct TcpListener { + inner: tokio::net::TcpListener, + cancel: CancelHandle, +} impl TcpListener { - /// Returns a future that yields a shared borrow of the TCP listener. - fn borrow(self: Rc) -> AsyncRefFuture { - RcRef::map(self, |r| &r.0).borrow() + async fn accept(self: Rc) -> Result { + let cancel = RcRef::map(&self, |r| &r.cancel); + let stream = self.inner.accept().try_or_cancel(cancel).await?.0.into(); + Ok(stream) + } +} + +impl Resource for TcpListener { + fn close(self: Rc) { + self.cancel.cancel(); } } impl TryFrom for TcpListener { type Error = Error; - fn try_from(l: std::net::TcpListener) -> Result { - tokio::net::TcpListener::try_from(l) - .map(AsyncRefCell::new) - .map(Self) + fn try_from( + std_listener: std::net::TcpListener, + ) -> Result { + tokio::net::TcpListener::try_from(std_listener).map(|tokio_listener| Self { + inner: tokio_listener, + cancel: Default::default(), + }) } } struct TcpStream { rd: AsyncRefCell, wr: AsyncRefCell, + // When a `TcpStream` resource is closed, all pending 'read' ops are + // canceled, while 'write' ops are allowed to complete. Therefore only + // 'read' futures are attached to this cancel handle. + cancel: CancelHandle, } -impl Resource for TcpStream {} - impl TcpStream { - /// Returns a future that yields an exclusive borrow of the read end of the - /// tcp stream. - fn rd_borrow_mut( - self: Rc, - ) -> AsyncMutFuture { - RcRef::map(self, |r| &r.rd).borrow_mut() + async fn read(self: Rc, buf: &mut [u8]) -> Result { + let mut rd = RcRef::map(&self, |r| &r.rd).borrow_mut().await; + let cancel = RcRef::map(self, |r| &r.cancel); + rd.read(buf).try_or_cancel(cancel).await } - /// Returns a future that yields an exclusive borrow of the write end of the - /// tcp stream. 
- fn wr_borrow_mut( - self: Rc, - ) -> AsyncMutFuture { - RcRef::map(self, |r| &r.wr).borrow_mut() + async fn write(self: Rc, buf: &[u8]) -> Result { + let mut wr = RcRef::map(self, |r| &r.wr).borrow_mut().await; + wr.write(buf).await + } +} + +impl Resource for TcpStream { + fn close(self: Rc) { + self.cancel.cancel() } } @@ -95,6 +109,7 @@ impl From for TcpStream { Self { rd: rd.into(), wr: wr.into(), + cancel: Default::default(), } } } @@ -157,14 +172,12 @@ async fn op_accept( .unwrap(); debug!("accept rid={}", rid); - let listener_rc = state + let listener = state .borrow() .resource_table_2 .get::(rid) .ok_or_else(bad_resource_id)?; - let listener_ref = listener_rc.borrow().await; - - let stream: TcpStream = listener_ref.accept().await?.0.into(); + let stream = listener.accept().await?; let rid = state.borrow_mut().resource_table_2.add(stream); Ok(serde_json::json!({ "rid": rid })) } @@ -184,14 +197,12 @@ async fn op_read( .unwrap(); debug!("read rid={}", rid); - let stream_rc = state + let stream = state .borrow() .resource_table_2 .get::(rid) .ok_or_else(bad_resource_id)?; - let mut rd_stream_mut = stream_rc.rd_borrow_mut().await; - - let nread = rd_stream_mut.read(&mut bufs[0]).await?; + let nread = stream.read(&mut bufs[0]).await?; Ok(serde_json::json!({ "nread": nread })) } @@ -210,14 +221,12 @@ async fn op_write( .unwrap(); debug!("write rid={}", rid); - let stream_rc = state + let stream = state .borrow() .resource_table_2 .get::(rid) .ok_or_else(bad_resource_id)?; - let mut wr_stream_mut = stream_rc.wr_borrow_mut().await; - - let nwritten = wr_stream_mut.write(&bufs[0]).await?; + let nwritten = stream.write(&bufs[0]).await?; Ok(serde_json::json!({ "nwritten": nwritten })) } diff --git a/core/lib.rs b/core/lib.rs index 372cd558ec0073..20ee5a3d5dd103 100644 --- a/core/lib.rs +++ b/core/lib.rs @@ -5,6 +5,7 @@ extern crate lazy_static; #[macro_use] extern crate log; +mod async_cancel; mod async_cell; mod bindings; pub mod error; @@ -28,11 +29,18 @@ pub use serde; pub use serde_json; pub use url; +pub use crate::async_cancel::CancelFuture; +pub use crate::async_cancel::CancelHandle; +pub use crate::async_cancel::CancelTryFuture; +pub use crate::async_cancel::Cancelable; +pub use crate::async_cancel::Canceled; +pub use crate::async_cancel::TryCancelable; pub use crate::async_cell::AsyncMut; pub use crate::async_cell::AsyncMutFuture; pub use crate::async_cell::AsyncRef; pub use crate::async_cell::AsyncRefCell; pub use crate::async_cell::AsyncRefFuture; +pub use crate::async_cell::RcLike; pub use crate::async_cell::RcRef; pub use crate::flags::v8_set_flags; pub use crate::module_specifier::ModuleResolutionError; diff --git a/core/resources2.rs b/core/resources2.rs index 62cb3f05652223..92548a55652144 100644 --- a/core/resources2.rs +++ b/core/resources2.rs @@ -24,6 +24,11 @@ pub trait Resource: Any + 'static { fn name(&self) -> Cow { type_name::().into() } + + /// Resources may implement the `close()` trait method if they need to do + /// resource specific clean-ups, such as cancelling pending futures, after a + /// resource has been removed from the resource table. + fn close(self: Rc) {} } impl dyn Resource { @@ -117,7 +122,7 @@ impl ResourceTable { /// cause the resource to be dropped. However, since resources are reference /// counted, therefore pending ops are not automatically cancelled. 
pub fn close(&mut self, rid: ResourceId) -> Option<()> { - self.index.remove(&rid).map(|_| ()) + self.index.remove(&rid).map(|resource| resource.close()) } /// Returns an iterator that yields a `(id, name)` pair for every resource From d492fb0eac8296513f003cf32edf80ec99bf8f2b Mon Sep 17 00:00:00 2001 From: Jae-Heon Ji <32578710+jaeheonji@users.noreply.github.com> Date: Thu, 10 Dec 2020 00:48:06 +0900 Subject: [PATCH 032/135] fix(op_crates/fetch): support non-ascii response headers value (#8600) --- cli/tests/unit/fetch_test.ts | 14 ++++++++++++++ op_crates/fetch/lib.rs | 15 ++++++++++++++- test_util/src/lib.rs | 13 ++++++++++++- 3 files changed, 40 insertions(+), 2 deletions(-) diff --git a/cli/tests/unit/fetch_test.ts b/cli/tests/unit/fetch_test.ts index 99b7531a3efa1e..6a5cff164f6cde 100644 --- a/cli/tests/unit/fetch_test.ts +++ b/cli/tests/unit/fetch_test.ts @@ -685,6 +685,20 @@ unitTest( }, ); +unitTest( + { + perms: { net: true }, + }, + async function fetchWithNonAsciiRedirection(): Promise { + const response = await fetch("http://localhost:4545/non_ascii_redirect", { + redirect: "manual", + }); + assertEquals(response.status, 301); + assertEquals(response.headers.get("location"), "/redirect®"); + await response.text(); + }, +); + unitTest( { perms: { net: true }, diff --git a/op_crates/fetch/lib.rs b/op_crates/fetch/lib.rs index 49d951d8f411a7..8a4c1ee16915b0 100644 --- a/op_crates/fetch/lib.rs +++ b/op_crates/fetch/lib.rs @@ -156,7 +156,20 @@ where let status = res.status(); let mut res_headers = Vec::new(); for (key, val) in res.headers().iter() { - res_headers.push((key.to_string(), val.to_str().unwrap().to_owned())); + let key_string = key.to_string(); + + if val.as_bytes().is_ascii() { + res_headers.push((key_string, val.to_str().unwrap().to_owned())) + } else { + res_headers.push(( + key_string, + val + .as_bytes() + .iter() + .map(|&c| c as char) + .collect::(), + )); + } } let rid = state diff --git a/test_util/src/lib.rs b/test_util/src/lib.rs index a4558149135e24..8a47eb139cbbae 100644 --- a/test_util/src/lib.rs +++ b/test_util/src/lib.rs @@ -290,6 +290,16 @@ pub async fn run_all_servers() { *res.status_mut() = StatusCode::FOUND; Box::new(res) }); + let non_ascii_redirect = + warp::path("non_ascii_redirect").map(|| -> Box { + let mut res = Response::new(Body::empty()); + *res.status_mut() = StatusCode::MOVED_PERMANENTLY; + res.headers_mut().insert( + "location", + HeaderValue::from_bytes(b"/redirect\xae").unwrap(), + ); + Box::new(res) + }); let etag_script = warp::path!("etag_script.ts") .and(warp::header::optional::("if-none-match")) @@ -444,7 +454,8 @@ pub async fn run_all_servers() { .or(echo_server) .or(echo_multipart_file) .or(multipart_form_data) - .or(bad_redirect); + .or(bad_redirect) + .or(non_ascii_redirect); let http_fut = warp::serve(content_type_handler.clone()).bind(([127, 0, 0, 1], PORT)); From b6dd850f71c029a3d82eb10c2528380b0ed4bc71 Mon Sep 17 00:00:00 2001 From: Ryan Dahl Date: Wed, 9 Dec 2020 11:02:07 -0500 Subject: [PATCH 033/135] build: fix doctests, run cargo publish --dry-run (#8689) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Bartek Iwańczuk --- .github/workflows/ci.yml | 10 +++++++++- core/async_cell.rs | 2 +- core/resources2.rs | 4 ++-- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 09b4356f9df2fe..be287cecdde53f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,6 +25,8 @@ jobs: kind: 
bench - os: ${{ github.repository == 'denoland/deno' && 'ubuntu-latest-xl' || 'ubuntu-18.04' }} kind: lint + - os: ${{ github.repository == 'denoland/deno' && 'ubuntu-latest-xl' || 'ubuntu-18.04' }} + kind: publish-dry-run # Always run master branch builds to completion. This allows the cache to # stay mostly up-to-date in situations where a single job fails due to @@ -217,7 +219,9 @@ jobs: - name: Test debug if: matrix.kind == 'test_debug' - run: cargo test --locked --all-targets + run: | + cargo test --locked --doc + cargo test --locked --all-targets - name: Run Benchmarks if: matrix.kind == 'bench' @@ -264,6 +268,10 @@ jobs: target/release/lib.deno.d.ts draft: true + - name: Publish Dry Run + if: matrix.kind == 'publish-dry-run' + run: cd cli && cargo publish --dry-run + - name: Publish if: | startsWith(matrix.os, 'ubuntu') && diff --git a/core/async_cell.rs b/core/async_cell.rs index bf62692ed85c17..d11b839325579f 100644 --- a/core/async_cell.rs +++ b/core/async_cell.rs @@ -109,7 +109,7 @@ impl RcRef> { /// /// ```rust /// # use std::rc::Rc; -/// # use deno_core::async_cell::RcRef; +/// # use deno_core::RcRef; /// /// struct Stuff { /// foo: u32, diff --git a/core/resources2.rs b/core/resources2.rs index 92548a55652144..989ea83280e756 100644 --- a/core/resources2.rs +++ b/core/resources2.rs @@ -133,8 +133,8 @@ impl ResourceTable { /// # Example /// /// ``` - /// # use deno_core::resources2::ResourceTable; - /// # let resource_table = ResourceTable::default(); + /// # use deno_core::ResourceTable2; + /// # let resource_table = ResourceTable2::default(); /// let resource_names = resource_table.names().collect::>(); /// ``` pub fn names(&self) -> impl Iterator)> { From 95a6698caca228e8a3ec920a59e1904a09c78eaf Mon Sep 17 00:00:00 2001 From: Kitson Kelly Date: Thu, 10 Dec 2020 06:50:47 +1100 Subject: [PATCH 034/135] feat(lsp): support import maps (#8683) --- cli/lsp/analysis.rs | 13 ++++-- cli/lsp/config.rs | 2 + cli/lsp/mod.rs | 66 +++++++++++++++++++++++++-- cli/lsp/sources.rs | 10 +++- cli/lsp/state.rs | 109 ++++++++++++++++++++++++++++++++++++++++++-- cli/lsp/tsc.rs | 27 +++++++---- 6 files changed, 202 insertions(+), 25 deletions(-) diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 370b41c45f9a89..95e21ed9a3f7fa 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -13,9 +13,10 @@ use deno_core::ModuleSpecifier; use deno_lint::rules; use lsp_types::Position; use lsp_types::Range; -use std::cell::RefCell; use std::collections::HashMap; use std::rc::Rc; +use std::sync::Arc; +use std::sync::RwLock; /// Category of self-generated diagnostic messages (those not coming from) /// TypeScript. 
@@ -113,11 +114,13 @@ pub enum ResolvedImport { pub fn resolve_import( specifier: &str, referrer: &ModuleSpecifier, - maybe_import_map: Option>>, + maybe_import_map: Option>>, ) -> ResolvedImport { let maybe_mapped = if let Some(import_map) = maybe_import_map { - if let Ok(maybe_specifier) = - import_map.borrow().resolve(specifier, referrer.as_str()) + if let Ok(maybe_specifier) = import_map + .read() + .unwrap() + .resolve(specifier, referrer.as_str()) { maybe_specifier } else { @@ -159,7 +162,7 @@ pub fn analyze_dependencies( specifier: &ModuleSpecifier, source: &str, media_type: &MediaType, - maybe_import_map: Option>>, + maybe_import_map: Option>>, ) -> Option<(HashMap, Option)> { let specifier_str = specifier.to_string(); let source_map = Rc::new(swc_common::SourceMap::default()); diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index ebc145708920e5..fc3f030c9c4f06 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -4,6 +4,7 @@ use deno_core::error::AnyError; use deno_core::serde::Deserialize; use deno_core::serde_json; use deno_core::serde_json::Value; +use deno_core::url::Url; #[derive(Debug, Clone, Default)] pub struct ClientCapabilities { @@ -23,6 +24,7 @@ pub struct WorkspaceSettings { #[derive(Debug, Clone, Default)] pub struct Config { pub client_capabilities: ClientCapabilities, + pub root_uri: Option, pub settings: WorkspaceSettings, } diff --git a/cli/lsp/mod.rs b/cli/lsp/mod.rs index e3092d81520738..784f3503dd5b8b 100644 --- a/cli/lsp/mod.rs +++ b/cli/lsp/mod.rs @@ -18,6 +18,7 @@ use config::Config; use diagnostics::DiagnosticSource; use dispatch::NotificationDispatcher; use dispatch::RequestDispatcher; +use state::update_import_map; use state::DocumentData; use state::Event; use state::ServerState; @@ -87,13 +88,13 @@ pub fn start() -> Result<(), AnyError> { } let mut config = Config::default(); + config.root_uri = initialize_params.root_uri.clone(); if let Some(value) = initialize_params.initialization_options { config.update(value)?; } config.update_capabilities(&initialize_params.capabilities); let mut server_state = state::ServerState::new(connection.sender, config); - let state = server_state.snapshot(); // TODO(@kitsonk) need to make this configurable, respect unstable let ts_config = TsConfig::new(json!({ @@ -106,6 +107,7 @@ pub fn start() -> Result<(), AnyError> { "strict": true, "target": "esnext", })); + let state = server_state.snapshot(); tsc::request( &mut server_state.ts_runtime, &state, @@ -259,7 +261,7 @@ impl ServerState { specifier.clone(), params.text_document.version, ¶ms.text_document.text, - None, + state.maybe_import_map.clone(), ), ) .is_some() @@ -281,7 +283,11 @@ impl ServerState { let mut content = file_cache.get_contents(file_id)?; apply_content_changes(&mut content, params.content_changes); let doc_data = state.doc_data.get_mut(&specifier).unwrap(); - doc_data.update(params.text_document.version, &content, None); + doc_data.update( + params.text_document.version, + &content, + state.maybe_import_map.clone(), + ); file_cache.set_contents(specifier, Some(content.into_bytes())); Ok(()) @@ -326,6 +332,15 @@ impl ServerState { if let Err(err) = state.config.update(config.clone()) { error!("failed to update settings: {}", err); } + if let Err(err) = update_import_map(state) { + state + .send_notification::( + lsp_types::ShowMessageParams { + typ: lsp_types::MessageType::Warning, + message: err.to_string(), + }, + ); + } } } (None, None) => { @@ -337,6 +352,15 @@ impl ServerState { Ok(()) })? 
+ .on::(|state, params| { + // if the current import map has changed, we need to reload it + if let Some(import_map_uri) = &state.maybe_import_map_uri { + if params.changes.iter().any(|fe| import_map_uri == &fe.uri) { + update_import_map(state)?; + } + } + Ok(()) + })? .finish(); Ok(()) @@ -395,8 +419,40 @@ impl ServerState { /// Start consuming events from the provided receiver channel. pub fn run(mut self, inbox: Receiver) -> Result<(), AnyError> { - // currently we don't need to do any other loading or tasks, so as soon as - // we run we are "ready" + // Check to see if we need to setup the import map + if let Err(err) = update_import_map(&mut self) { + self.send_notification::( + lsp_types::ShowMessageParams { + typ: lsp_types::MessageType::Warning, + message: err.to_string(), + }, + ); + } + + // we are going to watch all the JSON files in the workspace, and the + // notification handler will pick up any of the changes of those files we + // are interested in. + let watch_registration_options = + lsp_types::DidChangeWatchedFilesRegistrationOptions { + watchers: vec![lsp_types::FileSystemWatcher { + glob_pattern: "**/*.json".to_string(), + kind: Some(lsp_types::WatchKind::Change), + }], + }; + let registration = lsp_types::Registration { + id: "workspace/didChangeWatchedFiles".to_string(), + method: "workspace/didChangeWatchedFiles".to_string(), + register_options: Some( + serde_json::to_value(watch_registration_options).unwrap(), + ), + }; + self.send_request::( + lsp_types::RegistrationParams { + registrations: vec![registration], + }, + |_, _| (), + ); + self.transition(Status::Ready); while let Some(event) = self.next_event(&inbox) { diff --git a/cli/lsp/sources.rs b/cli/lsp/sources.rs index 4f80044a29b0d7..c6a15461ffa9f0 100644 --- a/cli/lsp/sources.rs +++ b/cli/lsp/sources.rs @@ -7,6 +7,7 @@ use crate::file_fetcher::get_source_from_bytes; use crate::file_fetcher::map_content_type; use crate::http_cache; use crate::http_cache::HttpCache; +use crate::import_map::ImportMap; use crate::media_type::MediaType; use crate::text_encoding; @@ -16,6 +17,8 @@ use std::collections::HashMap; use std::fs; use std::path::Path; use std::path::PathBuf; +use std::sync::Arc; +use std::sync::RwLock; use std::time::SystemTime; #[derive(Debug, Clone, Default)] @@ -30,6 +33,7 @@ struct Metadata { #[derive(Debug, Clone, Default)] pub struct Sources { http_cache: HttpCache, + maybe_import_map: Option>>, metadata: HashMap, redirects: HashMap, remotes: HashMap, @@ -124,7 +128,11 @@ impl Sources { if let Ok(source) = get_source_from_bytes(bytes, maybe_charset) { let mut maybe_types = if let Some(types) = headers.get("x-typescript-types") { - Some(analysis::resolve_import(types, &specifier, None)) + Some(analysis::resolve_import( + types, + &specifier, + self.maybe_import_map.clone(), + )) } else { None }; diff --git a/cli/lsp/state.rs b/cli/lsp/state.rs index 18a1e4023dfe9c..579a749f6bc4d2 100644 --- a/cli/lsp/state.rs +++ b/cli/lsp/state.rs @@ -18,6 +18,9 @@ use crossbeam_channel::select; use crossbeam_channel::unbounded; use crossbeam_channel::Receiver; use crossbeam_channel::Sender; +use deno_core::error::anyhow; +use deno_core::error::AnyError; +use deno_core::url::Url; use deno_core::JsRuntime; use deno_core::ModuleSpecifier; use lsp_server::Message; @@ -25,11 +28,10 @@ use lsp_server::Notification; use lsp_server::Request; use lsp_server::RequestId; use lsp_server::Response; -use std::cell::RefCell; use std::collections::HashMap; use std::env; use std::fmt; -use std::rc::Rc; +use std::fs; use 
std::sync::Arc; use std::sync::RwLock; use std::time::Instant; @@ -37,6 +39,45 @@ use std::time::Instant; type ReqHandler = fn(&mut ServerState, Response); type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>; +pub fn update_import_map(state: &mut ServerState) -> Result<(), AnyError> { + if let Some(import_map_str) = &state.config.settings.import_map { + let import_map_url = if let Ok(url) = Url::from_file_path(import_map_str) { + Ok(url) + } else if let Some(root_uri) = &state.config.root_uri { + let root_path = root_uri + .to_file_path() + .map_err(|_| anyhow!("Bad root_uri: {}", root_uri))?; + let import_map_path = root_path.join(import_map_str); + Url::from_file_path(import_map_path).map_err(|_| { + anyhow!("Bad file path for import map: {:?}", import_map_str) + }) + } else { + Err(anyhow!( + "The path to the import map (\"{}\") is not resolvable.", + import_map_str + )) + }?; + let import_map_path = import_map_url + .to_file_path() + .map_err(|_| anyhow!("Bad file path."))?; + let import_map_json = + fs::read_to_string(import_map_path).map_err(|err| { + anyhow!( + "Failed to load the import map at: {}. [{}]", + import_map_url, + err + ) + })?; + let import_map = + ImportMap::from_json(&import_map_url.to_string(), &import_map_json)?; + state.maybe_import_map_uri = Some(import_map_url); + state.maybe_import_map = Some(Arc::new(RwLock::new(import_map))); + } else { + state.maybe_import_map = None; + } + Ok(()) +} + pub enum Event { Message(Message), Task(Task), @@ -107,7 +148,7 @@ impl DocumentData { specifier: ModuleSpecifier, version: i32, source: &str, - maybe_import_map: Option>>, + maybe_import_map: Option>>, ) -> Self { let dependencies = if let Some((dependencies, _)) = analysis::analyze_dependencies( @@ -131,7 +172,7 @@ impl DocumentData { &mut self, version: i32, source: &str, - maybe_import_map: Option>>, + maybe_import_map: Option>>, ) { self.dependencies = if let Some((dependencies, _)) = analysis::analyze_dependencies( @@ -163,6 +204,8 @@ pub struct ServerState { pub diagnostics: DiagnosticCollection, pub doc_data: HashMap, pub file_cache: Arc>, + pub maybe_import_map: Option>>, + pub maybe_import_map_uri: Option, req_queue: ReqQueue, sender: Sender, pub sources: Arc>, @@ -189,8 +232,10 @@ impl ServerState { Self { config, diagnostics: Default::default(), - doc_data: HashMap::new(), + doc_data: Default::default(), file_cache: Arc::new(RwLock::new(Default::default())), + maybe_import_map: None, + maybe_import_map_uri: None, req_queue: Default::default(), sender, sources: Arc::new(RwLock::new(sources)), @@ -290,3 +335,57 @@ impl ServerState { self.status = new_status; } } + +#[cfg(test)] +mod tests { + use super::*; + use deno_core::serde_json::json; + use deno_core::serde_json::Value; + use lsp_server::Connection; + use tempfile::TempDir; + + #[test] + fn test_update_import_map() { + let temp_dir = TempDir::new().expect("could not create temp dir"); + let import_map_path = temp_dir.path().join("import_map.json"); + let import_map_str = &import_map_path.to_string_lossy(); + fs::write( + import_map_path.clone(), + r#"{ + "imports": { + "denoland/": "https://deno.land/x/" + } + }"#, + ) + .expect("could not write file"); + let mut config = Config::default(); + config + .update(json!({ + "enable": false, + "config": Value::Null, + "lint": false, + "importMap": import_map_str, + "unstable": true, + })) + .expect("could not update config"); + let (connection, _) = Connection::memory(); + let mut state = ServerState::new(connection.sender, config); + let result = 
update_import_map(&mut state); + assert!(result.is_ok()); + assert!(state.maybe_import_map.is_some()); + let expected = + Url::from_file_path(import_map_path).expect("could not parse url"); + assert_eq!(state.maybe_import_map_uri, Some(expected)); + let import_map = state.maybe_import_map.unwrap(); + let import_map = import_map.read().unwrap(); + assert_eq!( + import_map + .resolve("denoland/mod.ts", "https://example.com/index.js") + .expect("bad response"), + Some( + ModuleSpecifier::resolve_url("https://deno.land/x/mod.ts") + .expect("could not create URL") + ) + ); + } +} diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 86c9a298083318..649dd1bb5a97b7 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -844,17 +844,26 @@ fn resolve(state: &mut State, args: Value) -> Result { }; if let ResolvedImport::Resolved(resolved_specifier) = resolved_import { - let media_type = if let Some(media_type) = - sources.get_media_type(&resolved_specifier) + if state + .server_state + .doc_data + .contains_key(&resolved_specifier) + || sources.contains(&resolved_specifier) { - media_type + let media_type = if let Some(media_type) = + sources.get_media_type(&resolved_specifier) + { + media_type + } else { + MediaType::from(&resolved_specifier) + }; + resolved.push(Some(( + resolved_specifier.to_string(), + media_type.as_ts_extension(), + ))); } else { - MediaType::from(&resolved_specifier) - }; - resolved.push(Some(( - resolved_specifier.to_string(), - media_type.as_ts_extension(), - ))); + resolved.push(None); + } } else { resolved.push(None); } From e58147b62cb1b0497d4cdb46b53e4331b8918dd4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 9 Dec 2020 21:17:51 +0100 Subject: [PATCH 035/135] build: remove cargo publish --dry-run (#8694) --- .github/workflows/ci.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index be287cecdde53f..1387ae7efe66bc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,8 +25,6 @@ jobs: kind: bench - os: ${{ github.repository == 'denoland/deno' && 'ubuntu-latest-xl' || 'ubuntu-18.04' }} kind: lint - - os: ${{ github.repository == 'denoland/deno' && 'ubuntu-latest-xl' || 'ubuntu-18.04' }} - kind: publish-dry-run # Always run master branch builds to completion. 
This allows the cache to # stay mostly up-to-date in situations where a single job fails due to @@ -268,10 +266,6 @@ jobs: target/release/lib.deno.d.ts draft: true - - name: Publish Dry Run - if: matrix.kind == 'publish-dry-run' - run: cd cli && cargo publish --dry-run - - name: Publish if: | startsWith(matrix.os, 'ubuntu') && From de65312b7fca46ce5fe4d8e61eb5d17f599df4e1 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Wed, 9 Dec 2020 23:09:10 +0100 Subject: [PATCH 036/135] chore: update swc_ecmascript to 0.15.0 (#8688) --- Cargo.lock | 48 ++++++++++++++++++++++++------------------------ cli/Cargo.toml | 12 ++++++------ cli/ast.rs | 8 ++++++-- 3 files changed, 36 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3d885d3615f9ad..852c4828920473 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -60,9 +60,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.34" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf8dcb5b4bbaa28653b647d8c77bd4ed40183b48882e130c1f1ffb73de069fd7" +checksum = "2c0df63cb2955042487fad3aefd2c6e3ae7389ac5dc1beb28921de0b69f779d4" [[package]] name = "anymap" @@ -541,9 +541,9 @@ dependencies = [ [[package]] name = "deno_doc" -version = "0.1.17" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d9f85238450c18dcd5fef41926800fe7d80fc83fd4a6d5416549527cca044b2" +checksum = "1a347f4b721c3f4a3459f3510826b7c54855a8309857e57a63ec10b87c5dcbe9" dependencies = [ "futures", "lazy_static", @@ -566,9 +566,9 @@ dependencies = [ [[package]] name = "deno_lint" -version = "0.2.12" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc89fe2e4200cb29a572ff8a35e6ff3779ee8dfcc2b094de4ac180e631743f67" +checksum = "5945fdac793b4f374e2368e5009131cb4c2beca017911ac3ac0de1a87c85bbdb" dependencies = [ "anyhow", "derive_more", @@ -661,9 +661,9 @@ dependencies = [ [[package]] name = "dprint-plugin-typescript" -version = "0.35.0" +version = "0.35.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15a5a28bd6eca62b901835ad9add8cb0be18f345cb27ea46192dc8b57e3ffee8" +checksum = "cc6f3888ce8b87a27670cbe4669dd3ae769c0a95a8b50600b76afac3e022da43" dependencies = [ "dprint-core", "serde", @@ -2304,18 +2304,18 @@ checksum = "b46e1121e8180c12ff69a742aabc4f310542b6ccb69f1691689ac17fdf8618aa" [[package]] name = "serde" -version = "1.0.117" +version = "1.0.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b88fa983de7720629c9387e9f517353ed404164b1e482c970a90c1a4aaf7dc1a" +checksum = "06c64263859d87aa2eb554587e2d23183398d617427327cf2b3d0ed8c69e4800" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.117" +version = "1.0.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbd1ae72adb44aab48f325a02444a5fc079349a8d804c1fc922aed3f7454c74e" +checksum = "c84d3526699cd55261af4b941e4e725444df67aa4f9e6a3564f18030d12672df" dependencies = [ "proc-macro2 1.0.24", "quote 1.0.7", @@ -2324,9 +2324,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.59" +version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcac07dbffa1c65e7f816ab9eba78eb142c6d44410f4eeba1e26e4f5dfa56b95" +checksum = "1500e84d27fe482ed1dc791a56eddc2f230046a040fa908c08bda1d9fb615779" dependencies = [ "indexmap", "itoa", @@ -2524,9 +2524,9 @@ dependencies = [ [[package]] name = "swc_bundler" -version = "0.17.5" 
+version = "0.17.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88b07ac843906ddcc5f2e17ad47f80c14c7c276146e6c4e7355530ae116caf07" +checksum = "7b09f66f03e88c271bdd4d1f7c92b86ba2521597e1f0f2245c9f0cba3e857127" dependencies = [ "anyhow", "crc", @@ -2550,9 +2550,9 @@ dependencies = [ [[package]] name = "swc_common" -version = "0.10.6" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04c341ef776c6a3dde1cab58b30a8e0ce12f4c7553879105b554260882f43ddd" +checksum = "96d63837c3d3d226ec338338a8fc32c6c8aabefd0c4d32e6b0bcd1ed991c6963" dependencies = [ "ast_node", "cfg-if 0.1.10", @@ -2614,9 +2614,9 @@ dependencies = [ [[package]] name = "swc_ecma_dep_graph" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06d740540964d8ac8d2d2c603223f49b31a1ae9803d2cdcff0796c210085ecc0" +checksum = "195f056055fd028a3824f5b74011252cead88a1915213ab574f50c6a7be33a1c" dependencies = [ "swc_atoms", "swc_common", @@ -2736,9 +2736,9 @@ dependencies = [ [[package]] name = "swc_ecmascript" -version = "0.14.4" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b4658cfd64f6931c38d283d3ee65e2f3f964e07c711782eab2a8c71f35d2ba1" +checksum = "3110a752791f7c5f0cce84b6d0a088c79e08571cab6365c509fc5b244d66242e" dependencies = [ "swc_ecma_ast", "swc_ecma_codegen", @@ -2833,9 +2833,9 @@ dependencies = [ [[package]] name = "termcolor" -version = "1.1.0" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb6bfa289a4d7c5766392812c0a1f4c1ba45afa1ad47803c11e1f407d846d75f" +checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" dependencies = [ "winapi-util", ] diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 125c422846fbd9..7bc6221ddacc7a 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -34,9 +34,9 @@ winapi = "0.3.9" [dependencies] deno_core = { path = "../core", version = "0.70.0" } deno_crypto = { path = "../op_crates/crypto", version = "0.4.0" } -deno_doc = "0.1.17" +deno_doc = "0.1.18" deno_fetch = { path = "../op_crates/fetch", version = "0.13.0" } -deno_lint = "0.2.12" +deno_lint = "0.2.13" deno_web = { path = "../op_crates/web", version = "0.21.0" } atty = "0.2.14" @@ -47,7 +47,7 @@ clap = "2.33.3" crossbeam-channel = "0.5.0" dissimilar = "1.0.2" dlopen = "0.1.8" -dprint-plugin-typescript = "0.35.0" +dprint-plugin-typescript = "0.35.1" encoding_rs = "0.8.24" env_logger = "0.7.1" filetime = "0.2.12" @@ -69,9 +69,9 @@ semver-parser = "0.9.0" serde = { version = "1.0.116", features = ["derive"] } shell-escape = "0.1.5" sourcemap = "6.0.1" -swc_bundler = "0.17.5" -swc_common = { version = "0.10.6", features = ["sourcemap"] } -swc_ecmascript = { version = "0.14.4", features = ["codegen", "dep_graph", "parser", "react", "transforms", "visit"] } +swc_bundler = "0.17.6" +swc_common = { version = "0.10.7", features = ["sourcemap"] } +swc_ecmascript = { version = "0.15.0", features = ["codegen", "dep_graph", "parser", "react", "transforms", "visit"] } sys-info = "0.7.0" tempfile = "3.1.0" termcolor = "1.1.0" diff --git a/cli/ast.rs b/cli/ast.rs index ef64683106770a..255155e7b36a52 100644 --- a/cli/ast.rs +++ b/cli/ast.rs @@ -610,7 +610,9 @@ mod tests { leading_comments: Vec::new(), col: 0, line: 1, - specifier: "./test.ts".into() + specifier: "./test.ts".into(), + specifier_col: 21, + specifier_line: 1, }, DependencyDescriptor { kind: DependencyKind::Import, @@ -618,7 +620,9 @@ 
mod tests { leading_comments: Vec::new(), col: 22, line: 2, - specifier: "./foo.ts".into() + specifier: "./foo.ts".into(), + specifier_col: 29, + specifier_line: 2, } ] ); From 1a72c9ba23208f7236e5784011bc15640e50fe0b Mon Sep 17 00:00:00 2001 From: Kitson Kelly Date: Thu, 10 Dec 2020 11:12:46 +1100 Subject: [PATCH 037/135] fix(lsp): only resolve sources with supported schemas (#8696) Fixes #8695 --- cli/file_fetcher.rs | 2 +- cli/lsp/sources.rs | 17 ++++++++++++++++- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index 5b2f6f74cb2330..a17a8fc2479b6c 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -26,7 +26,7 @@ use std::pin::Pin; use std::sync::Arc; use std::sync::Mutex; -const SUPPORTED_SCHEMES: [&str; 3] = ["http", "https", "file"]; +pub const SUPPORTED_SCHEMES: [&str; 3] = ["http", "https", "file"]; /// A structure representing a source file. #[derive(Debug, Clone, Eq, PartialEq)] diff --git a/cli/lsp/sources.rs b/cli/lsp/sources.rs index c6a15461ffa9f0..09b0a4cc8bac74 100644 --- a/cli/lsp/sources.rs +++ b/cli/lsp/sources.rs @@ -5,6 +5,7 @@ use super::text; use crate::file_fetcher::get_source_from_bytes; use crate::file_fetcher::map_content_type; +use crate::file_fetcher::SUPPORTED_SCHEMES; use crate::http_cache; use crate::http_cache::HttpCache; use crate::import_map::ImportMap; @@ -279,7 +280,12 @@ impl Sources { &mut self, specifier: &ModuleSpecifier, ) -> Option { - if specifier.as_url().scheme() == "file" { + let scheme = specifier.as_url().scheme(); + if !SUPPORTED_SCHEMES.contains(&scheme) { + return None; + } + + if scheme == "file" { if let Ok(path) = specifier.as_url().to_file_path() { if path.is_file() { return Some(specifier.clone()); @@ -377,4 +383,13 @@ mod tests { let actual = actual.unwrap(); assert_eq!(actual, 28); } + + #[test] + fn test_sources_resolve_specifier_non_supported_schema() { + let (mut sources, _) = setup(); + let specifier = ModuleSpecifier::resolve_url("foo://a/b/c.ts") + .expect("could not create specifier"); + let actual = sources.resolve_specifier(&specifier); + assert!(actual.is_none()); + } } From 7cc7f1719b0c3773fadb8b3fefc45a756fc405f6 Mon Sep 17 00:00:00 2001 From: tomholford <16504501+tomholford@users.noreply.github.com> Date: Wed, 9 Dec 2020 19:22:09 -0800 Subject: [PATCH 038/135] docs: fix naming in `std/io` usage example (#8700) Co-authored-by: tomholford --- std/io/README.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/std/io/README.md b/std/io/README.md index 46274d3214d5a1..4f5d7454fba9e1 100644 --- a/std/io/README.md +++ b/std/io/README.md @@ -126,26 +126,26 @@ base0123456789 ## Streams -### fromStreamReader +### readerFromStreamReader Creates a `Reader` from a `ReadableStreamDefaultReader`. ```ts -import { fromStreamReader } from "https://deno.land/std@$STD_VERSION/io/mod.ts"; +import { readerFromStreamReader } from "https://deno.land/std@$STD_VERSION/io/mod.ts"; const res = await fetch("https://deno.land"); const file = await Deno.open("./deno.land.html", { create: true, write: true }); -const reader = fromStreamReader(res.body!.getReader()); +const reader = readerFromStreamReader(res.body!.getReader()); await Deno.copy(reader, file); file.close(); ``` -### fromStreamWriter +### writerFromStreamWriter Creates a `Writer` from a `WritableStreamDefaultWriter`. 
```ts -import { fromStreamWriter } from "https://deno.land/std@$STD_VERSION/io/mod.ts"; +import { writerFromStreamWriter } from "https://deno.land/std@$STD_VERSION/io/mod.ts"; const file = await Deno.open("./deno.land.html", { read: true }); const writableStream = new WritableStream({ @@ -153,7 +153,7 @@ const writableStream = new WritableStream({ console.log(chunk); }, }); -const writer = fromStreamWriter(writableStream.getWriter()); +const writer = writerFromStreamWriter(writableStream.getWriter()); await Deno.copy(file, writer); file.close(); ``` From 65c6a0306eb49144301934f5ebddcaf5eb1e7480 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Thu, 10 Dec 2020 20:31:15 +0800 Subject: [PATCH 039/135] test(std/wasi): add a case for open directory/../.. (#8708) --- std/wasi/testdata | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/std/wasi/testdata b/std/wasi/testdata index 4c7517f6cc5aa3..afe1b368d22901 160000 --- a/std/wasi/testdata +++ b/std/wasi/testdata @@ -1 +1 @@ -Subproject commit 4c7517f6cc5aa3bd7cf405be7dfb8ec1cac6d2de +Subproject commit afe1b368d229016794be458b27be39f052e74942 From c5ccbf3699ebe2d9548a89d6e7ef70636fae0cd4 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Thu, 10 Dec 2020 20:36:07 +0800 Subject: [PATCH 040/135] test(std/wasi): run test runner with --no-check (#8710) --- std/wasi/snapshot_preview1_test.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/std/wasi/snapshot_preview1_test.ts b/std/wasi/snapshot_preview1_test.ts index 01b91bc3e48f70..f102f083d8560e 100644 --- a/std/wasi/snapshot_preview1_test.ts +++ b/std/wasi/snapshot_preview1_test.ts @@ -90,6 +90,7 @@ for (const pathname of tests) { "--quiet", "--unstable", "--allow-all", + "--no-check", path.resolve(rootdir, "snapshot_preview1_test_runner.ts"), prelude, path.resolve(rootdir, pathname), From 77b2bc3bc1c986d8fce90a144bdfdb66a7537efe Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Thu, 10 Dec 2020 20:37:09 +0800 Subject: [PATCH 041/135] fix(std/wasi): disallow multiple starts (#8712) --- std/wasi/snapshot_preview1.ts | 15 ++++++++++++++ std/wasi/snapshot_preview1_test.ts | 33 ++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/std/wasi/snapshot_preview1.ts b/std/wasi/snapshot_preview1.ts index 225458c66bc02d..7160bbee98488c 100644 --- a/std/wasi/snapshot_preview1.ts +++ b/std/wasi/snapshot_preview1.ts @@ -294,6 +294,7 @@ export default class Context { fds: FileDescriptor[]; exports: Record; + #started: boolean; constructor(options: ContextOptions) { this.args = options.args ?? []; @@ -1571,6 +1572,8 @@ export default class Context { return ERRNO_NOSYS; }), }; + + this.#started = false; } /** @@ -1585,6 +1588,12 @@ export default class Context { * thrown. */ start(instance: WebAssembly.Instance) { + if (this.#started) { + throw new Error("WebAssembly.Instance has already started"); + } + + this.#started = true; + const { _start, _initialize, memory } = instance.exports; if (!(memory instanceof WebAssembly.Memory)) { @@ -1618,6 +1627,12 @@ export default class Context { * thrown. 
*/ initialize(instance: WebAssembly.Instance) { + if (this.#started) { + throw new Error("WebAssembly.Instance has already started"); + } + + this.#started = true; + const { _start, _initialize, memory } = instance.exports; if (!(memory instanceof WebAssembly.Memory)) { diff --git a/std/wasi/snapshot_preview1_test.ts b/std/wasi/snapshot_preview1_test.ts index f102f083d8560e..55982545fd962d 100644 --- a/std/wasi/snapshot_preview1_test.ts +++ b/std/wasi/snapshot_preview1_test.ts @@ -198,6 +198,23 @@ Deno.test("context_start", function () { assert(err instanceof ExitStatus); assertEquals(err.code, 0); } + + assertThrows( + () => { + const context = new Context({}); + context.start({ + exports: { + memory: new WebAssembly.Memory({ initial: 1 }), + _start() {}, + }, + }); + context.start({ + exports: {}, + }); + }, + Error, + "WebAssembly.Instance has already started", + ); }); Deno.test("context_initialize", function () { @@ -240,4 +257,20 @@ Deno.test("context_initialize", function () { TypeError, "export _initialize must be a function", ); + assertThrows( + () => { + const context = new Context({}); + context.initialize({ + exports: { + memory: new WebAssembly.Memory({ initial: 1 }), + _initialize() {}, + }, + }); + context.initialize({ + exports: {}, + }); + }, + Error, + "WebAssembly.Instance has already started", + ); }); From b7faa27704458b4bbb0b43b15bcb16b13e7c3c4f Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Thu, 10 Dec 2020 20:38:31 +0800 Subject: [PATCH 042/135] docs(std/wasi): add a basic description of Context (#8711) --- std/wasi/snapshot_preview1.ts | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/std/wasi/snapshot_preview1.ts b/std/wasi/snapshot_preview1.ts index 7160bbee98488c..2ec51da8813f95 100644 --- a/std/wasi/snapshot_preview1.ts +++ b/std/wasi/snapshot_preview1.ts @@ -285,6 +285,14 @@ export interface ContextOptions { exitOnReturn?: boolean; } +/** + * The Context class provides the environment required to run WebAssembly + * modules compiled to run with the WebAssembly System Interface. + * + * Each context represents a distinct sandboxed environment and must have its + * command-line arguments, environment variables, and pre-opened directory + * structure configured explicitly. + */ export default class Context { args: string[]; env: { [key: string]: string | undefined }; From f91fa16661fa10fd029e6cf26008faee95233143 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Thu, 10 Dec 2020 14:45:41 +0100 Subject: [PATCH 043/135] refactor(core): stack trace mapping (#8660) This commit adds "Deno.core.createPrepareStackTrace". This function was moved from "cli/rt/40_error_stack.js" to unify handling of stack frames in core (before this PR there was implicit dependency on logic in "core/error.rs::JsError"). Unfortunately formatting logic must still be duplicated in "cli/error.js::PrettyJsError" to provide coloring, but currently there's no solution to this problem. "createPrepareStackTrace" can accept a single argument; a function that takes a location and provides source mapped location back. 
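As a rough illustration (not part of the patch itself), an embedder would wire
this up roughly as in the sketch below. The callback name `mapLocation` and its
body are hypothetical stand-ins for a real source map lookup, but the
`{ fileName, lineNumber, columnNumber }` location shape matches what
`opApplySourceMap` already accepts and returns in the code being moved:

    // Illustrative sketch only; `mapLocation` stands in for a real source map
    // lookup. Returning the input unchanged leaves the frame as-is.
    function mapLocation({ fileName, lineNumber, columnNumber }) {
      return { fileName, lineNumber, columnNumber };
    }

    // With a mapping function, every call site is remapped before formatting.
    Error.prepareStackTrace = Deno.core.createPrepareStackTrace(mapLocation);

    // With no argument, frames are formatted without any source mapping.
    Error.prepareStackTrace = Deno.core.createPrepareStackTrace();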
--- cli/rt/40_error_stack.js | 226 -------------------------- cli/rt/99_main.js | 9 +- cli/tests/unit/error_stack_test.ts | 99 ------------ core/error.js | 247 +++++++++++++++++++++++++++++ core/runtime.rs | 3 + 5 files changed, 258 insertions(+), 326 deletions(-) create mode 100644 core/error.js diff --git a/cli/rt/40_error_stack.js b/cli/rt/40_error_stack.js index 834503e34aa64a..da2ee51f391a9b 100644 --- a/cli/rt/40_error_stack.js +++ b/cli/rt/40_error_stack.js @@ -1,11 +1,7 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. ((window) => { - // Some of the code here is adapted directly from V8 and licensed under a BSD - // style license available here: https://github.com/v8/v8/blob/24886f2d1c565287d33d71e4109a53bf0b54b75c/LICENSE.v8 const core = window.Deno.core; - const assert = window.__bootstrap.util.assert; - const internals = window.__bootstrap.internals; function opFormatDiagnostics(diagnostics) { return core.jsonOpSync("op_format_diagnostic", diagnostics); @@ -20,229 +16,7 @@ }; } - function patchCallSite(callSite, location) { - return { - getThis() { - return callSite.getThis(); - }, - getTypeName() { - return callSite.getTypeName(); - }, - getFunction() { - return callSite.getFunction(); - }, - getFunctionName() { - return callSite.getFunctionName(); - }, - getMethodName() { - return callSite.getMethodName(); - }, - getFileName() { - return location.fileName; - }, - getLineNumber() { - return location.lineNumber; - }, - getColumnNumber() { - return location.columnNumber; - }, - getEvalOrigin() { - return callSite.getEvalOrigin(); - }, - isToplevel() { - return callSite.isToplevel(); - }, - isEval() { - return callSite.isEval(); - }, - isNative() { - return callSite.isNative(); - }, - isConstructor() { - return callSite.isConstructor(); - }, - isAsync() { - return callSite.isAsync(); - }, - isPromiseAll() { - return callSite.isPromiseAll(); - }, - getPromiseIndex() { - return callSite.getPromiseIndex(); - }, - }; - } - - // Keep in sync with `cli/fmt_errors.rs`. - function formatLocation(callSite) { - if (callSite.isNative()) { - return "native"; - } - - let result = ""; - - const fileName = callSite.getFileName(); - - if (fileName) { - result += fileName; - } else { - if (callSite.isEval()) { - const evalOrigin = callSite.getEvalOrigin(); - assert(evalOrigin != null); - result += `${evalOrigin}, `; - } - result += ""; - } - - const lineNumber = callSite.getLineNumber(); - if (lineNumber != null) { - result += `:${lineNumber}`; - - const columnNumber = callSite.getColumnNumber(); - if (columnNumber != null) { - result += `:${columnNumber}`; - } - } - - return result; - } - - // Keep in sync with `cli/fmt_errors.rs`. 
- function formatCallSite(callSite) { - let result = ""; - const functionName = callSite.getFunctionName(); - - const isTopLevel = callSite.isToplevel(); - const isAsync = callSite.isAsync(); - const isPromiseAll = callSite.isPromiseAll(); - const isConstructor = callSite.isConstructor(); - const isMethodCall = !(isTopLevel || isConstructor); - - if (isAsync) { - result += "async "; - } - if (isPromiseAll) { - result += `Promise.all (index ${callSite.getPromiseIndex()})`; - return result; - } - if (isMethodCall) { - const typeName = callSite.getTypeName(); - const methodName = callSite.getMethodName(); - - if (functionName) { - if (typeName) { - if (!functionName.startsWith(typeName)) { - result += `${typeName}.`; - } - } - result += functionName; - if (methodName) { - if (!functionName.endsWith(methodName)) { - result += ` [as ${methodName}]`; - } - } - } else { - if (typeName) { - result += `${typeName}.`; - } - if (methodName) { - result += methodName; - } else { - result += ""; - } - } - } else if (isConstructor) { - result += "new "; - if (functionName) { - result += functionName; - } else { - result += ""; - } - } else if (functionName) { - result += functionName; - } else { - result += formatLocation(callSite); - return result; - } - - result += ` (${formatLocation(callSite)})`; - return result; - } - - function evaluateCallSite(callSite) { - return { - this: callSite.getThis(), - typeName: callSite.getTypeName(), - function: callSite.getFunction(), - functionName: callSite.getFunctionName(), - methodName: callSite.getMethodName(), - fileName: callSite.getFileName(), - lineNumber: callSite.getLineNumber(), - columnNumber: callSite.getColumnNumber(), - evalOrigin: callSite.getEvalOrigin(), - isToplevel: callSite.isToplevel(), - isEval: callSite.isEval(), - isNative: callSite.isNative(), - isConstructor: callSite.isConstructor(), - isAsync: callSite.isAsync(), - isPromiseAll: callSite.isPromiseAll(), - promiseIndex: callSite.getPromiseIndex(), - }; - } - - function prepareStackTrace( - error, - callSites, - ) { - const mappedCallSites = callSites.map( - (callSite) => { - const fileName = callSite.getFileName(); - const lineNumber = callSite.getLineNumber(); - const columnNumber = callSite.getColumnNumber(); - if (fileName && lineNumber != null && columnNumber != null) { - return patchCallSite( - callSite, - opApplySourceMap({ - fileName, - lineNumber, - columnNumber, - }), - ); - } - return callSite; - }, - ); - Object.defineProperties(error, { - __callSiteEvals: { value: [], configurable: true }, - }); - const formattedCallSites = []; - for (const callSite of mappedCallSites) { - error.__callSiteEvals.push(Object.freeze(evaluateCallSite(callSite))); - formattedCallSites.push(formatCallSite(callSite)); - } - Object.freeze(error.__callSiteEvals); - const message = error.message !== undefined ? error.message : ""; - const name = error.name !== undefined ? 
error.name : "Error"; - let messageLine; - if (name != "" && message != "") { - messageLine = `${name}: ${message}`; - } else if ((name || message) != "") { - messageLine = name || message; - } else { - messageLine = ""; - } - return messageLine + - formattedCallSites.map((s) => `\n at ${s}`).join(""); - } - - function setPrepareStackTrace(ErrorConstructor) { - ErrorConstructor.prepareStackTrace = prepareStackTrace; - } - - internals.exposeForTest("setPrepareStackTrace", setPrepareStackTrace); - window.__bootstrap.errorStack = { - setPrepareStackTrace, opApplySourceMap, opFormatDiagnostics, }; diff --git a/cli/rt/99_main.js b/cli/rt/99_main.js index 40c9c636f53088..2aa140990e4449 100644 --- a/cli/rt/99_main.js +++ b/cli/rt/99_main.js @@ -164,9 +164,16 @@ delete Object.prototype.__proto__; // TODO(bartlomieju): a very crude way to disable // source mapping of errors. This condition is true // only for compiled standalone binaries. + let prepareStackTrace; if (s.applySourceMaps) { - errorStack.setPrepareStackTrace(Error); + prepareStackTrace = core.createPrepareStackTrace( + errorStack.opApplySourceMap, + ); + } else { + prepareStackTrace = core.createPrepareStackTrace(); } + Error.prepareStackTrace = prepareStackTrace; + return s; } diff --git a/cli/tests/unit/error_stack_test.ts b/cli/tests/unit/error_stack_test.ts index a5fe13796beb24..ad5f2e093caddf 100644 --- a/cli/tests/unit/error_stack_test.ts +++ b/cli/tests/unit/error_stack_test.ts @@ -1,87 +1,6 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. import { assert, assertEquals, assertMatch, unitTest } from "./test_util.ts"; -// @ts-expect-error TypeScript (as of 3.7) does not support indexing namespaces by symbol -const { setPrepareStackTrace } = Deno[Deno.internal]; - -interface CallSite { - getThis(): unknown; - getTypeName(): string | null; - // deno-lint-ignore ban-types - getFunction(): Function | null; - getFunctionName(): string | null; - getMethodName(): string | null; - getFileName(): string | null; - getLineNumber(): number | null; - getColumnNumber(): number | null; - getEvalOrigin(): string | null; - isToplevel(): boolean | null; - isEval(): boolean; - isNative(): boolean; - isConstructor(): boolean; - isAsync(): boolean; - isPromiseAll(): boolean; - getPromiseIndex(): number | null; -} - -function getMockCallSite( - fileName: string, - lineNumber: number | null, - columnNumber: number | null, -): CallSite { - return { - getThis(): unknown { - return undefined; - }, - getTypeName(): string { - return ""; - }, - // deno-lint-ignore ban-types - getFunction(): Function { - return (): void => {}; - }, - getFunctionName(): string { - return ""; - }, - getMethodName(): string { - return ""; - }, - getFileName(): string { - return fileName; - }, - getLineNumber(): number | null { - return lineNumber; - }, - getColumnNumber(): number | null { - return columnNumber; - }, - getEvalOrigin(): null { - return null; - }, - isToplevel(): false { - return false; - }, - isEval(): false { - return false; - }, - isNative(): false { - return false; - }, - isConstructor(): false { - return false; - }, - isAsync(): false { - return false; - }, - isPromiseAll(): false { - return false; - }, - getPromiseIndex(): null { - return null; - }, - }; -} - unitTest(function errorStackMessageLine(): void { const e1 = new Error(); e1.name = "Foo"; @@ -122,24 +41,6 @@ unitTest(function errorStackMessageLine(): void { assertMatch(e6.stack!, /^null: null\n/); }); -// FIXME(bartlomieju): no longer works after migrating -// to 
JavaScript runtime code -unitTest({ ignore: true }, function prepareStackTrace(): void { - // deno-lint-ignore no-explicit-any - const MockError = {} as any; - setPrepareStackTrace(MockError); - assert(typeof MockError.prepareStackTrace === "function"); - const prepareStackTrace: ( - error: Error, - structuredStackTrace: CallSite[], - ) => string = MockError.prepareStackTrace; - const result = prepareStackTrace(new Error("foo"), [ - getMockCallSite("CLI_SNAPSHOT.js", 23, 0), - ]); - assert(result.startsWith("Error: foo\n")); - assert(result.includes(".ts:"), "should remap to something in 'js/'"); -}); - unitTest(function captureStackTrace(): void { function foo(): void { const error = new Error(); diff --git a/core/error.js b/core/error.js new file mode 100644 index 00000000000000..708b1fd4a71498 --- /dev/null +++ b/core/error.js @@ -0,0 +1,247 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +((window) => { + // Some of the code here is adapted directly from V8 and licensed under a BSD + // style license available here: https://github.com/v8/v8/blob/24886f2d1c565287d33d71e4109a53bf0b54b75c/LICENSE.v8 + function patchCallSite(callSite, location) { + return { + getThis() { + return callSite.getThis(); + }, + getTypeName() { + return callSite.getTypeName(); + }, + getFunction() { + return callSite.getFunction(); + }, + getFunctionName() { + return callSite.getFunctionName(); + }, + getMethodName() { + return callSite.getMethodName(); + }, + getFileName() { + return location.fileName; + }, + getLineNumber() { + return location.lineNumber; + }, + getColumnNumber() { + return location.columnNumber; + }, + getEvalOrigin() { + return callSite.getEvalOrigin(); + }, + isToplevel() { + return callSite.isToplevel(); + }, + isEval() { + return callSite.isEval(); + }, + isNative() { + return callSite.isNative(); + }, + isConstructor() { + return callSite.isConstructor(); + }, + isAsync() { + return callSite.isAsync(); + }, + isPromiseAll() { + return callSite.isPromiseAll(); + }, + getPromiseIndex() { + return callSite.getPromiseIndex(); + }, + }; + } + + // Keep in sync with `cli/fmt_errors.rs`. + function formatLocation(callSite) { + if (callSite.isNative()) { + return "native"; + } + + let result = ""; + + const fileName = callSite.getFileName(); + + if (fileName) { + result += fileName; + } else { + if (callSite.isEval()) { + const evalOrigin = callSite.getEvalOrigin(); + if (evalOrigin == null) { + throw new Error("assert evalOrigin"); + } + result += `${evalOrigin}, `; + } + result += ""; + } + + const lineNumber = callSite.getLineNumber(); + if (lineNumber != null) { + result += `:${lineNumber}`; + + const columnNumber = callSite.getColumnNumber(); + if (columnNumber != null) { + result += `:${columnNumber}`; + } + } + + return result; + } + + // Keep in sync with `cli/fmt_errors.rs`. 
+ function formatCallSite(callSite) { + let result = ""; + const functionName = callSite.getFunctionName(); + + const isTopLevel = callSite.isToplevel(); + const isAsync = callSite.isAsync(); + const isPromiseAll = callSite.isPromiseAll(); + const isConstructor = callSite.isConstructor(); + const isMethodCall = !(isTopLevel || isConstructor); + + if (isAsync) { + result += "async "; + } + if (isPromiseAll) { + result += `Promise.all (index ${callSite.getPromiseIndex()})`; + return result; + } + if (isMethodCall) { + const typeName = callSite.getTypeName(); + const methodName = callSite.getMethodName(); + + if (functionName) { + if (typeName) { + if (!functionName.startsWith(typeName)) { + result += `${typeName}.`; + } + } + result += functionName; + if (methodName) { + if (!functionName.endsWith(methodName)) { + result += ` [as ${methodName}]`; + } + } + } else { + if (typeName) { + result += `${typeName}.`; + } + if (methodName) { + result += methodName; + } else { + result += ""; + } + } + } else if (isConstructor) { + result += "new "; + if (functionName) { + result += functionName; + } else { + result += ""; + } + } else if (functionName) { + result += functionName; + } else { + result += formatLocation(callSite); + return result; + } + + result += ` (${formatLocation(callSite)})`; + return result; + } + + function evaluateCallSite(callSite) { + return { + this: callSite.getThis(), + typeName: callSite.getTypeName(), + function: callSite.getFunction(), + functionName: callSite.getFunctionName(), + methodName: callSite.getMethodName(), + fileName: callSite.getFileName(), + lineNumber: callSite.getLineNumber(), + columnNumber: callSite.getColumnNumber(), + evalOrigin: callSite.getEvalOrigin(), + isToplevel: callSite.isToplevel(), + isEval: callSite.isEval(), + isNative: callSite.isNative(), + isConstructor: callSite.isConstructor(), + isAsync: callSite.isAsync(), + isPromiseAll: callSite.isPromiseAll(), + promiseIndex: callSite.getPromiseIndex(), + }; + } + + /** + * Returns a function that can be used as `Error.prepareStackTrace`. + * + * This function accepts an optional argument, a function that performs + * source mapping. It is not required to pass this argument, but + * in such case only JavaScript sources will have proper position in + * stack frames. + * @param {( + * fileName: string, + * lineNumber: number, + * columnNumber: number + * ) => { + * fileName: string, + * lineNumber: number, + * columnNumber: number + * }} sourceMappingFn + */ + function createPrepareStackTrace(sourceMappingFn) { + return function prepareStackTrace( + error, + callSites, + ) { + const mappedCallSites = callSites.map( + (callSite) => { + const fileName = callSite.getFileName(); + const lineNumber = callSite.getLineNumber(); + const columnNumber = callSite.getColumnNumber(); + if ( + sourceMappingFn && fileName && lineNumber != null && + columnNumber != null + ) { + return patchCallSite( + callSite, + sourceMappingFn({ + fileName, + lineNumber, + columnNumber, + }), + ); + } + return callSite; + }, + ); + Object.defineProperties(error, { + __callSiteEvals: { value: [], configurable: true }, + }); + const formattedCallSites = []; + for (const callSite of mappedCallSites) { + error.__callSiteEvals.push(evaluateCallSite(callSite)); + formattedCallSites.push(formatCallSite(callSite)); + } + const message = error.message !== undefined ? error.message : ""; + const name = error.name !== undefined ? 
error.name : "Error"; + let messageLine; + if (name != "" && message != "") { + messageLine = `${name}: ${message}`; + } else if ((name || message) != "") { + messageLine = name || message; + } else { + messageLine = ""; + } + return messageLine + + formattedCallSites.map((s) => `\n at ${s}`).join(""); + }; + } + + Object.assign(window.Deno.core, { + createPrepareStackTrace, + }); +})(this); diff --git a/core/runtime.rs b/core/runtime.rs index ecac588ca9e80c..0f09926f879191 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -329,6 +329,9 @@ impl JsRuntime { self .execute("deno:core/core.js", include_str!("core.js")) .unwrap(); + self + .execute("deno:core/error.js", include_str!("error.js")) + .unwrap(); } } From 5f05e1783e9f08d1be4b71f2099601458da78dcd Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Thu, 10 Dec 2020 23:13:57 +0800 Subject: [PATCH 044/135] docs(std/wasi): document ContextOptions (#8715) --- std/wasi/snapshot_preview1.ts | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/std/wasi/snapshot_preview1.ts b/std/wasi/snapshot_preview1.ts index 2ec51da8813f95..bc63e82dd04cb9 100644 --- a/std/wasi/snapshot_preview1.ts +++ b/std/wasi/snapshot_preview1.ts @@ -279,9 +279,32 @@ export class ExitStatus { } export interface ContextOptions { + /** + * An array of strings that the WebAssembly instance will see as command-line + * arguments. + * + * The first argument is the virtual path to the command itself. + */ args?: string[]; + + /** + * An object of string keys mapped to string values that the WebAssembly module will see as its environment. + */ env?: { [key: string]: string | undefined }; + + /** + * An object of string keys mapped to string values that the WebAssembly module will see as it's filesystem. + * + * The string keys of are treated as directories within the sandboxed + * filesystem, the values are the real paths to those directories on the host + * machine. + * + */ preopens?: { [key: string]: string }; + + /** + * Determines if calls to exit from within the WebAssembly module will terminate the proess or return. + */ exitOnReturn?: boolean; } From fd9b0202c1bb0e83183fd0a53d8b1303612a5e31 Mon Sep 17 00:00:00 2001 From: Andrew Mitchell <32021055+mitch292@users.noreply.github.com> Date: Thu, 10 Dec 2020 13:49:50 -0500 Subject: [PATCH 045/135] fix(doc): Resolves dead link on script installer man page (#8705) --- docs/tools/script_installer.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/tools/script_installer.md b/docs/tools/script_installer.md index a5f37659cb49f9..54ced70f635191 100644 --- a/docs/tools/script_installer.md +++ b/docs/tools/script_installer.md @@ -62,7 +62,7 @@ deno install --allow-net --allow-read https://deno.land/std@$STD_VERSION/http/fi The above command creates an executable called `file_server` that runs with network and read permissions and binds to port 8080. -For good practice, use the [`import.meta.main`](../examples/testing_if_main.md) +For good practice, use the [`import.meta.main`](../examples/module_metadata.md) idiom to specify the entry point in an executable script. 
Example: From b8bc24d167f2e19482d9dc6d367876c361abf4ea Mon Sep 17 00:00:00 2001 From: Anh Hong Date: Fri, 11 Dec 2020 02:45:45 +0700 Subject: [PATCH 046/135] chore: fixed various misspellings and other typos (#8691) --- Releases.md | 14 +++++++------- cli/dts/README.md | 2 +- cli/tests/DenoWinRunner.cs | 12 ++++++------ cli/tests/echo_server.ts | 2 +- cli/tests/tls/README.md | 4 ++-- core/Cargo.toml | 2 +- docs/contributing/release_schedule.md | 2 +- docs/contributing/style_guide.md | 4 ++-- docs/examples/import_export.md | 4 ++-- docs/runtime/workers.md | 2 +- docs/tools/linter.md | 2 +- op_crates/fetch/11_streams.js | 2 +- op_crates/fetch/20_headers.js | 8 ++++---- op_crates/web/03_global_interfaces.js | 2 +- std/datetime/mod.ts | 10 +++++----- std/datetime/test.ts | 4 ++-- std/encoding/README.md | 18 +++++++++--------- std/encoding/_yaml/example/inout.ts | 2 +- std/encoding/_yaml/loader/loader.ts | 2 +- std/encoding/_yaml/type/float.ts | 2 +- std/encoding/ascii85.ts | 6 +++--- std/encoding/ascii85_test.ts | 12 ++++++------ std/encoding/binary.ts | 4 ++-- std/encoding/csv_stringify.ts | 2 +- std/encoding/hex.ts | 6 +++--- std/encoding/toml.ts | 2 +- std/node/_stream/readable.ts | 2 +- 27 files changed, 67 insertions(+), 67 deletions(-) diff --git a/Releases.md b/Releases.md index 40549d9d3cb653..da6720e2f9ac4a 100644 --- a/Releases.md +++ b/Releases.md @@ -408,7 +408,7 @@ Changes in std version 0.69.0: - fix: Don't expose globalThis.__bootstrap (#7344) - fix: Handle bad redirects more gracefully (#7342) - fix: Handling of + character in URLSearchParams (#7314) -- fix: Regex for TS refereces and deno-types (#7333) +- fix: Regex for TS references and deno-types (#7333) - fix: Set maximum size of thread pool to 31 (#7290) - fix: Support missing features in --no-check (#7289) - fix: Use millisecond precision for Deno.futime and Deno.utime (#7299) @@ -629,7 +629,7 @@ Changes in std version 0.61.0: - fix(std/http): Properly return port 80 in \_parseAddrFromStr (#6635) - fix(std/mime): Boundary random hex values (#6646) - fix(std/node): Add encoding argument to Buffer.byteLength (#6639) -- fix(std/tesing/asserts): AssertEquals/NotEquals should use milliseconds in +- fix(std/testing/asserts): AssertEquals/NotEquals should use milliseconds in Date (#6644) - fix(std/wasi): Return errno::success from fd_tell (#6636) @@ -1128,7 +1128,7 @@ Read more about this release at https://deno.land/v1 ### v0.41.0 / 2020.04.16 - BREAKING: Improve readdir() and FileInfo interfaces (#4763) -- BREAKING: Remove depracated APIs for mkdir and mkdirSync (#4615) +- BREAKING: Remove deprecated APIs for mkdir and mkdirSync (#4615) - BREAKING: Make fetch API more web compatible (#4687) - BREAKING: Remove std/testing/format.ts (#4749) - BREAKING: Migrate std/types to deno.land/x/types/ (#4713, #4771) @@ -1282,7 +1282,7 @@ Read more about this release at https://deno.land/v1 - feat: Add Deno.umask (#4290) - feat: Add global --quiet flag (#4135) - feat: Improvements to std/flags. 
(#4279) -- feat: Make internel error frames dimmer (#4201) +- feat: Make internal error frames dimmer (#4201) - feat: Support async function and EventListenerObject as listeners (#4240) - feat: add actual error class to fail message (#4305) - feat: seek should return cursor position (#4211) @@ -1310,7 +1310,7 @@ Read more about this release at https://deno.land/v1 - refactor: Rename Option -> Options (#4226) - refactor: cleanup compiler runtimes (#4230) - refactor: preliminary cleanup of Deno.runTests() (#4237) -- refactor: reduce unnecesarry output in cli/js tests (#4182) +- refactor: reduce unnecessary output in cli/js tests (#4182) - refactor: reorganize cli/js (#4317, #4316, #4310, #4250, #4302, #4283, #4264) - refactor: rewrite testPerm into unitTest (#4231) - refactor: uncomment tests broken tests, use skip (#4311) @@ -1605,7 +1605,7 @@ Read more about this release at https://deno.land/v1 - feat: Add ResourceTable in core (#3150) - feat: Re-enable standard stream support for fetch bodies (#3192) - feat: Add CustomInspect for Headers (#3130) -- fix: Cherry-pick depot_tools 6a1d778 to fix macOS Cataliona issues (#3175) +- fix: Cherry-pick depot_tools 6a1d778 to fix macOS Catalina issues (#3175) - fix: Remove runtime panics in op dispatch (#3176, #3202, #3131) - fix: BufReader.readString to actually return Deno.EOF at end (#3191) - perf: faster TextDecoder (#3180, #3204) @@ -1986,7 +1986,7 @@ In deno: In deno_std - Clean up HTTP async iterator code (denoland/deno_std#411) -- fix: add exnext lib to tsconfig.json (denoland/deno_std#416) +- fix: add esnext lib to tsconfig.json (denoland/deno_std#416) - feat(fs): add copy/copySync (denoland/deno_std#278) - feat: add Tar and Untar classes (denoland/deno_std#388) - ws: make acceptable() more robust (denoland/deno_std#404) diff --git a/cli/dts/README.md b/cli/dts/README.md index 006282a90ff3f7..9d07188f7d480e 100644 --- a/cli/dts/README.md +++ b/cli/dts/README.md @@ -4,7 +4,7 @@ The files in this directory are mostly from the TypeScript repository. We currently (unfortunately) have a rather manual process for upgrading TypeScript. It works like this currently: -1. Checkout typescript repo in a seperate directory. +1. Checkout typescript repo in a separate directory. 2. Copy typescript.js into Deno repo. 3. Copy d.ts files into dts directory. diff --git a/cli/tests/DenoWinRunner.cs b/cli/tests/DenoWinRunner.cs index 7879d146dd9495..2f9e9f89fe355d 100644 --- a/cli/tests/DenoWinRunner.cs +++ b/cli/tests/DenoWinRunner.cs @@ -31,7 +31,7 @@ public class DenoWinRunner /// /// Path to the Deno.exe file. Can be absolute or relative /// Path to the script file Deno should run. 
- /// The contrainsts to apply to the Deno process + /// The constraints to apply to the Deno process /// How long to wait for the Deno process to exit /// The deno.exe exit code, or an exit code provided by the test runner public static int RunDenoScript(string pathToDenoExe, string pathToTestScript, DenoConstraints constraints, uint timeoutMilliseconds = 1000) @@ -66,7 +66,7 @@ public static int RunDenoScript(string pathToDenoExe, string pathToTestScript, D { SetStdHandle(STD_INPUT_HANDLE, (IntPtr)null); } - + if (constraints.HasFlag(DenoConstraints.NoStdout)) { SetStdHandle(STD_OUTPUT_HANDLE, (IntPtr)null); @@ -76,13 +76,13 @@ public static int RunDenoScript(string pathToDenoExe, string pathToTestScript, D { SetStdHandle(STD_ERROR_HANDLE, (IntPtr)null); } - + Process process = new Process { StartInfo = startInfo }; process.Start(); - Task stdErrTask = startInfo.RedirectStandardError ? + Task stdErrTask = startInfo.RedirectStandardError ? process.StandardError.ReadToEndAsync() : Task.FromResult(null); - Task stdOutTask = startInfo.RedirectStandardOutput ? + Task stdOutTask = startInfo.RedirectStandardOutput ? process.StandardOutput.ReadToEndAsync() : Task.FromResult(null); if (!process.WaitForExit((int)timeoutMilliseconds)) @@ -124,4 +124,4 @@ public static int RunDenoScript(string pathToDenoExe, string pathToTestScript, D return -1; } } -} \ No newline at end of file +} diff --git a/cli/tests/echo_server.ts b/cli/tests/echo_server.ts index fc8afa9812a41b..fcb157fdb28665 100644 --- a/cli/tests/echo_server.ts +++ b/cli/tests/echo_server.ts @@ -4,7 +4,7 @@ const listener = Deno.listen({ hostname, port: Number(port) }); console.log("listening on", addr); listener.accept().then( async (conn): Promise => { - console.log("recieved bytes:", await Deno.copy(conn, conn)); + console.log("received bytes:", await Deno.copy(conn, conn)); conn.close(); listener.close(); }, diff --git a/cli/tests/tls/README.md b/cli/tests/tls/README.md index 34de47deadf812..19bbaec35d27d5 100644 --- a/cli/tests/tls/README.md +++ b/cli/tests/tls/README.md @@ -43,5 +43,5 @@ For testing purposes we need following files: - `RootCA.crt` - `RootCA.key` - `RootCA.pem` -- `locahost.crt` -- `locahost.key` +- `localhost.crt` +- `localhost.key` diff --git a/core/Cargo.toml b/core/Cargo.toml index 21cc5f1f7b8618..c8f35aab1912dc 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -34,6 +34,6 @@ path = "examples/http_bench_bin_ops.rs" name = "http_bench_json_ops" path = "examples/http_bench_json_ops.rs" -# These dependendencies are only used for the 'http_bench_*_ops' examples. +# These dependencies are only used for the 'http_bench_*_ops' examples. [dev-dependencies] tokio = { version = "0.3.5", features = ["full"] } diff --git a/docs/contributing/release_schedule.md b/docs/contributing/release_schedule.md index e48bc2ae9a6f4a..8aabac9c8a215e 100644 --- a/docs/contributing/release_schedule.md +++ b/docs/contributing/release_schedule.md @@ -22,7 +22,7 @@ canary release by running: deno upgrade --canary ``` -To update to a specifc canary, pass the commit has in the `--version` option: +To update to a specific canary, pass the commit has in the `--version` option: ``` deno upgrade --canary --version=973af61d8bb03c1709f61e456581d58386ed4952 diff --git a/docs/contributing/style_guide.md b/docs/contributing/style_guide.md index 3f190ecf3bc897..d834ea91a82a60 100644 --- a/docs/contributing/style_guide.md +++ b/docs/contributing/style_guide.md @@ -283,7 +283,7 @@ comment. If it needs further comments it is not a good example. 
Currently, the building process uses `dlint` to validate linting problems in the code. If the task requires code that is non-conformant to linter use -`deno-lint-ignore ` directive to supress the warning. +`deno-lint-ignore ` directive to suppress the warning. ```typescript // deno-lint-ignore no-explicit-any @@ -349,7 +349,7 @@ export function foo(): string { programs can rely on. We want to guarantee to users that this code does not include potentially unreviewed third party code. -#### Document and maintain browser compatiblity. +#### Document and maintain browser compatibility. If a module is browser compatible, include the following in the JSDoc at the top of the module: diff --git a/docs/examples/import_export.md b/docs/examples/import_export.md index e87980bc76c755..1fa194d0c1e0bd 100644 --- a/docs/examples/import_export.md +++ b/docs/examples/import_export.md @@ -63,7 +63,7 @@ from a locally stored arithmetic module. The same functionality can be created by importing `add` and `multiply` methods from a remote module too. In this case the Ramda module is referenced, including the version number. Also -note a JavaScript module is imported directly into a TypeSript module, Deno has +note a JavaScript module is imported directly into a TypeScript module, Deno has no problem handling this. **Command:** `deno run ./remote.ts` @@ -115,6 +115,6 @@ export function multiply(a: number, b: number): number { ``` All functions, classes, constants and variables which need to be accessible -inside external modules must be exported. Either by prepending them with the +inside external modules must be exported. Either by pretending them with the `export` keyword or including them in an export statement at the bottom of the file. diff --git a/docs/runtime/workers.md b/docs/runtime/workers.md index 019df720121a71..5dcf059155bea8 100644 --- a/docs/runtime/workers.md +++ b/docs/runtime/workers.md @@ -11,7 +11,7 @@ the `type: "module"` option when creating a new worker. Relative module specifiers are [not supported](https://github.com/denoland/deno/issues/5216) at the moment. You -can instead use the `URL` contructor and `import.meta.url` to easily create a +can instead use the `URL` constructor and `import.meta.url` to easily create a specifier for some nearby script. ```ts diff --git a/docs/tools/linter.md b/docs/tools/linter.md index 709e0d644cdd13..4e432034b4b611 100644 --- a/docs/tools/linter.md +++ b/docs/tools/linter.md @@ -106,7 +106,7 @@ function foo(): any { } ``` -Ignore directive must be placed before first stament or declaration: +Ignore directive must be placed before first statement or declaration: ```ts // Copyright 2020 the Deno authors. All rights reserved. MIT license. 
diff --git a/op_crates/fetch/11_streams.js b/op_crates/fetch/11_streams.js index c3fa6cb6f0d31f..69787d24a7a235 100644 --- a/op_crates/fetch/11_streams.js +++ b/op_crates/fetch/11_streams.js @@ -2482,7 +2482,7 @@ try { transformStreamDefaultControllerEnqueue( controller, - // it defaults to no tranformation, so I is assumed to be O + // it defaults to no transformation, so I is assumed to be O chunk, ); } catch (e) { diff --git a/op_crates/fetch/20_headers.js b/op_crates/fetch/20_headers.js index c2ae72864cf52b..4202bea41dba8f 100644 --- a/op_crates/fetch/20_headers.js +++ b/op_crates/fetch/20_headers.js @@ -237,10 +237,10 @@ set(name, value) { requiredArguments("Headers.set", arguments.length, 2); - const [newname, newvalue] = normalizeParams(name, value); - validateName(newname); - validateValue(newvalue); - dataSet(this[headersData], newname, newvalue); + const [newName, newValue] = normalizeParams(name, value); + validateName(newName); + validateValue(newValue); + dataSet(this[headersData], newName, newValue); } get [Symbol.toStringTag]() { diff --git a/op_crates/web/03_global_interfaces.js b/op_crates/web/03_global_interfaces.js index 7ade64cc1a6484..21a44eb8e3b298 100644 --- a/op_crates/web/03_global_interfaces.js +++ b/op_crates/web/03_global_interfaces.js @@ -1,7 +1,7 @@ ((window) => { const { EventTarget } = window; - const illegalConstructorKey = Symbol("illegalConstuctorKey"); + const illegalConstructorKey = Symbol("illegalConstructorKey"); class Window extends EventTarget { constructor(key = null) { diff --git a/std/datetime/mod.ts b/std/datetime/mod.ts index 2afddbe6f7d324..72f8cc6815c4f0 100644 --- a/std/datetime/mod.ts +++ b/std/datetime/mod.ts @@ -84,13 +84,13 @@ export function weekOfYear(date: Date): number { } /** - * Parse a date to return a IMF formated string date + * Parse a date to return a IMF formatted string date * RFC: https://tools.ietf.org/html/rfc7231#section-7.1.1.1 * IMF is the time format to use when generating times in HTTP * headers. The time being formatted must be in UTC for Format to * generate the correct format. * @param date Date to parse - * @return IMF date formated string + * @return IMF date formatted string */ export function toIMF(date: Date): string { function dtPad(v: string, lPad = 2): string { @@ -233,16 +233,16 @@ function calculateMonthsDifference(bigger: number, smaller: number): number { const smallerDate = new Date(smaller); const yearsDiff = biggerDate.getFullYear() - smallerDate.getFullYear(); const monthsDiff = biggerDate.getMonth() - smallerDate.getMonth(); - const calendarDiffrences = Math.abs(yearsDiff * 12 + monthsDiff); + const calendarDifferences = Math.abs(yearsDiff * 12 + monthsDiff); const compareResult = biggerDate > smallerDate ? 1 : -1; biggerDate.setMonth( - biggerDate.getMonth() - compareResult * calendarDiffrences, + biggerDate.getMonth() - compareResult * calendarDifferences, ); const isLastMonthNotFull = biggerDate > smallerDate ? 1 : -1 === -compareResult ? 1 : 0; - const months = compareResult * (calendarDiffrences - isLastMonthNotFull); + const months = compareResult * (calendarDifferences - isLastMonthNotFull); return months === 0 ? 
0 : months; } diff --git a/std/datetime/test.ts b/std/datetime/test.ts index 3e42365ca8f52b..0ff70603df7c7d 100644 --- a/std/datetime/test.ts +++ b/std/datetime/test.ts @@ -357,8 +357,8 @@ Deno.test({ name: "[std/datetime] difference", fn(): void { const denoInit = new Date("2018/5/14"); - const denoRelaseV1 = new Date("2020/5/13"); - let difference = datetime.difference(denoRelaseV1, denoInit, { + const denoReleaseV1 = new Date("2020/5/13"); + let difference = datetime.difference(denoReleaseV1, denoInit, { units: ["days", "months", "years"], }); assertEquals(difference.days, 730); diff --git a/std/encoding/README.md b/std/encoding/README.md index edfbeacc5dea9b..3c60c6fcd2f1d9 100644 --- a/std/encoding/README.md +++ b/std/encoding/README.md @@ -187,7 +187,7 @@ function is as follows: | :--: | :--------: | :--------: | | Deno | rust | typescript | -- **`options`** are options for the delimiter-seprated output. +- **`options`** are options for the delimiter-separated output. - **`headers?: boolean`**: Whether or not to include the row of headers. Default: `true` @@ -538,18 +538,18 @@ console.log(encode(binaryData)); // => LpTqp ``` -### Specifying a standard and delimeter +### Specifying a standard and delimiter By default all functions are using the most popular Adobe version of ascii85 and -not adding any delimeter. However, there are three more standards supported - -btoa (different delimeter and additional compression of 4 bytes equal to 32), +not adding any delimiter. However, there are three more standards supported - +btoa (different delimiter and additional compression of 4 bytes equal to 32), [Z85](https://rfc.zeromq.org/spec/32/) and [RFC 1924](https://tools.ietf.org/html/rfc1924). It's possible to use a different encoding by specifying it in `options` object as a second parameter. -Similarly, it's possible to make `encode` add a delimeter (`<~` and `~>` for -Adobe, `xbtoa Begin` and `xbtoa End` with newlines between the delimeters and -encoded data for btoa. Checksums for btoa are not supported. Delimeters are not +Similarly, it's possible to make `encode` add a delimiter (`<~` and `~>` for +Adobe, `xbtoa Begin` and `xbtoa End` with newlines between the delimiters and +encoded data for btoa. Checksums for btoa are not supported. Delimiters are not supported by other encodings.) 
encoding examples: @@ -562,9 +562,9 @@ import { const binaryData = new Uint8Array([136, 180, 79, 24]); console.log(encode(binaryData)); // => LpTqp -console.log(encode(binaryData, { standard: "Adobe", delimeter: true })); +console.log(encode(binaryData, { standard: "Adobe", delimiter: true })); // => <~LpTqp~> -console.log(encode(binaryData, { standard: "btoa", delimeter: true })); +console.log(encode(binaryData, { standard: "btoa", delimiter: true })); /* => xbtoa Begin LpTqp xbtoa End */ diff --git a/std/encoding/_yaml/example/inout.ts b/std/encoding/_yaml/example/inout.ts index b0b47e3fe82562..2f7ca6d6b455af 100644 --- a/std/encoding/_yaml/example/inout.ts +++ b/std/encoding/_yaml/example/inout.ts @@ -23,5 +23,5 @@ const string = stringify(test); if (Deno.inspect(test) === Deno.inspect(parse(string))) { console.log("In-Out as expected."); } else { - console.log("Someting went wrong."); + console.log("Something went wrong."); } diff --git a/std/encoding/_yaml/loader/loader.ts b/std/encoding/_yaml/loader/loader.ts index 16da3ba4abd214..b4657c7457d903 100644 --- a/std/encoding/_yaml/loader/loader.ts +++ b/std/encoding/_yaml/loader/loader.ts @@ -942,7 +942,7 @@ function readBlockScalar(state: LoaderState, nodeIndent: number): boolean { } } - // Break this `while` cycle and go to the funciton's epilogue. + // Break this `while` cycle and go to the function's epilogue. break; } diff --git a/std/encoding/_yaml/type/float.ts b/std/encoding/_yaml/type/float.ts index 93635ba251669d..832934fe23f44d 100644 --- a/std/encoding/_yaml/type/float.ts +++ b/std/encoding/_yaml/type/float.ts @@ -103,7 +103,7 @@ function representYamlFloat(object: Any, style?: StyleVariant): Any { const res = object.toString(10); // JS stringifier can build scientific format without dots: 5e-100, - // while YAML requres dot: 5.e-100. Fix it with simple hack + // while YAML requires dot: 5.e-100. Fix it with simple hack return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace("e", ".e") : res; } diff --git a/std/encoding/ascii85.ts b/std/encoding/ascii85.ts index 6a303ebda0a6a9..cb2a3ec7661661 100644 --- a/std/encoding/ascii85.ts +++ b/std/encoding/ascii85.ts @@ -4,8 +4,8 @@ export type Ascii85Standard = "Adobe" | "btoa" | "RFC 1924" | "Z85"; /** * encoding/decoding options - * @property standard - characterset and delimeter (if supported and used). Defaults to Adobe - * @property delimeter - whether to use a delimeter (if supported) - "<~" and "~>" by default + * @property standard - characterset and delimiter (if supported and used). Defaults to Adobe + * @property delimiter - whether to use a delimiter (if supported) - "<~" and "~>" by default */ export interface Ascii85Options { standard?: Ascii85Standard; @@ -20,7 +20,7 @@ const Z85 = * @param uint8 input to encode * @param [options] encoding options * @param [options.standard=Adobe] encoding standard (Adobe, btoa, RFC 1924 or Z85) - * @param [options.delimeter] whether to use a delimeter, if supported by encoding standard + * @param [options.delimiter] whether to use a delimiter, if supported by encoding standard */ export function encode(uint8: Uint8Array, options?: Ascii85Options): string { const standard = options?.standard ?? 
"Adobe"; diff --git a/std/encoding/ascii85_test.ts b/std/encoding/ascii85_test.ts index cea939faf1fd70..b6d77a8ffb19c7 100644 --- a/std/encoding/ascii85_test.ts +++ b/std/encoding/ascii85_test.ts @@ -3,7 +3,7 @@ import { assertEquals } from "../testing/asserts.ts"; import { Ascii85Standard, decode, encode } from "./ascii85.ts"; type TestCases = Partial<{ [index in Ascii85Standard]: string[][] }>; const utf8encoder = new TextEncoder(); -const testCasesNoDelimeter: TestCases = { +const testCasesNoDelimiter: TestCases = { Adobe: [ ["test", "FCfN8"], ["ascii85", "@<5pmBfIs"], @@ -76,7 +76,7 @@ const testCasesNoDelimeter: TestCases = { [" ", "arR^H"], ], }; -const testCasesDelimeter: TestCases = { +const testCasesDelimiter: TestCases = { Adobe: [ ["test", "<~FCfN8~>"], ["ascii85", "<~@<5pmBfIs~>"], @@ -116,7 +116,7 @@ const testCasesDelimeter: TestCases = { ], }; -for (const [standard, tests] of Object.entries(testCasesNoDelimeter)) { +for (const [standard, tests] of Object.entries(testCasesNoDelimiter)) { if (tests === undefined) continue; Deno.test({ name: `[encoding/ascii85] encode ${standard}`, @@ -144,10 +144,10 @@ for (const [standard, tests] of Object.entries(testCasesNoDelimeter)) { }, }); } -for (const [standard, tests] of Object.entries(testCasesDelimeter)) { +for (const [standard, tests] of Object.entries(testCasesDelimiter)) { if (tests === undefined) continue; Deno.test({ - name: `[encoding/ascii85] encode ${standard} with delimeter`, + name: `[encoding/ascii85] encode ${standard} with delimiter`, fn(): void { for (const [bin, b85] of tests) { assertEquals( @@ -162,7 +162,7 @@ for (const [standard, tests] of Object.entries(testCasesDelimeter)) { }); Deno.test({ - name: `[encoding/ascii85] decode ${standard} with delimeter`, + name: `[encoding/ascii85] decode ${standard} with delimiter`, fn(): void { for (const [bin, b85] of tests) { assertEquals( diff --git a/std/encoding/binary.ts b/std/encoding/binary.ts index f4918e7500c2cd..f66fb61aaebc76 100644 --- a/std/encoding/binary.ts +++ b/std/encoding/binary.ts @@ -193,7 +193,7 @@ export function putVarbig( return sizeof(o.dataType); } -/** Decodes a number from `r`, comsuming `sizeof(o.dataType)` bytes. If less than `sizeof(o.dataType)` bytes were read, throws `Deno.errors.unexpectedEof`. +/** Decodes a number from `r`, consuming `sizeof(o.dataType)` bytes. If less than `sizeof(o.dataType)` bytes were read, throws `Deno.errors.unexpectedEof`. * * `o.dataType` defaults to `"int32"`. */ export async function readVarnum( @@ -205,7 +205,7 @@ export async function readVarnum( return varnum(scratch, o) as number; } -/** Decodes a bigint from `r`, comsuming `sizeof(o.dataType)` bytes. If less than `sizeof(o.dataType)` bytes were read, throws `Deno.errors.unexpectedEof`. +/** Decodes a bigint from `r`, consuming `sizeof(o.dataType)` bytes. If less than `sizeof(o.dataType)` bytes were read, throws `Deno.errors.unexpectedEof`. * * `o.dataType` defaults to `"int64"`. */ export async function readVarbig( diff --git a/std/encoding/csv_stringify.ts b/std/encoding/csv_stringify.ts index 4c5f8c8165baae..c968820ddf1c06 100644 --- a/std/encoding/csv_stringify.ts +++ b/std/encoding/csv_stringify.ts @@ -17,7 +17,7 @@ function getEscapedString(value: unknown, sep: string): string { if (typeof value === "object") str = JSON.stringify(value); else str = String(value); - // Is regex.test more performant here? If so, how to dynamically create? + // Is regex.test more performance here? If so, how to dynamically create? 
// https://stackoverflow.com/questions/3561493/ if (str.includes(sep) || str.includes(NEWLINE) || str.includes(QUOTE)) { return `${QUOTE}${str.replaceAll(QUOTE, `${QUOTE}${QUOTE}`)}${QUOTE}`; diff --git a/std/encoding/hex.ts b/std/encoding/hex.ts index 20d42f65799797..4ff3f072467ca3 100644 --- a/std/encoding/hex.ts +++ b/std/encoding/hex.ts @@ -5,7 +5,7 @@ // license that can be found in the LICENSE file. // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -const hextable = new TextEncoder().encode("0123456789abcdef"); +const hexTable = new TextEncoder().encode("0123456789abcdef"); /** * ErrInvalidByte takes an invalid byte and returns an Error. @@ -52,8 +52,8 @@ export function encode(src: Uint8Array): Uint8Array { const dst = new Uint8Array(encodedLen(src.length)); for (let i = 0; i < dst.length; i++) { const v = src[i]; - dst[i * 2] = hextable[v >> 4]; - dst[i * 2 + 1] = hextable[v & 0x0f]; + dst[i * 2] = hexTable[v >> 4]; + dst[i * 2 + 1] = hexTable[v & 0x0f]; } return dst; } diff --git a/std/encoding/toml.ts b/std/encoding/toml.ts index a397a7bd91a041..34a428b90e1e7e 100644 --- a/std/encoding/toml.ts +++ b/std/encoding/toml.ts @@ -679,7 +679,7 @@ class Dumper { const min = dtPad(value.getUTCMinutes().toString()); const s = dtPad(value.getUTCSeconds().toString()); const ms = dtPad(value.getUTCMilliseconds().toString(), 3); - // formated date + // formatted date const fData = `${value.getUTCFullYear()}-${m}-${d}T${h}:${min}:${s}.${ms}`; return `${this._declaration(keys)}${fData}`; } diff --git a/std/node/_stream/readable.ts b/std/node/_stream/readable.ts index c8ed29953d7a03..54e0d8ecdf863d 100644 --- a/std/node/_stream/readable.ts +++ b/std/node/_stream/readable.ts @@ -258,7 +258,7 @@ class Readable extends Stream { //TODO(Soremwar) //Part of doEnd condition - //In node, output/inout are a duplex Stream + //In node, output/input are a duplex Stream // && // dest !== stdout && // dest !== stderr From a54ede099d58e8783c6511ff351aefba160b39d9 Mon Sep 17 00:00:00 2001 From: Valentin Anger Date: Thu, 10 Dec 2020 22:34:54 +0100 Subject: [PATCH 047/135] docs(tools): add documentation for the deno lsp command (#8676) Co-authored-by: Yuki Tanaka --- .../getting_started/setup_your_environment.md | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/docs/getting_started/setup_your_environment.md b/docs/getting_started/setup_your_environment.md index 0e31cd914e0619..51f29ab9a3a864 100644 --- a/docs/getting_started/setup_your_environment.md +++ b/docs/getting_started/setup_your_environment.md @@ -144,6 +144,50 @@ project (`npm init -y` as necessary), then add the following block to your } ``` +#### LSP clients + +Deno has builtin support for the +[Language server protocol](https://langserver.org). + +If your editor supports the LSP, you can use Deno as a language server for +TypeScript and JavaScript. + +The editor can start the server with `deno lsp`. + +##### Example for Kakoune + +After installing the [`kak-lsp`](https://github.com/kak-lsp/kak-lsp) LSP client +you can add the Deno language server by adding the following to your +`kak-lsp.toml` + +```toml +[language.deno] +filetypes = ["typescript", "javascript"] +roots = [".git"] +command = "deno" +args = ["lsp"] +``` + If you don't see your favorite IDE on this list, maybe you can develop an extension. Our [community Discord group](https://discord.gg/deno) can give you some pointers on where to get started. 
+ +##### Example for Vim/Neovim + +After installing the [`vim-lsp`](https://github.com/prabirshrestha/vim-lsp) LSP +client you can add the Deno language server by adding the following to your +`vimrc`/`init.vim`: + +```vim +if executable("deno") + augroup LspTypeScript + autocmd! + autocmd User lsp_setup call lsp#register_server({ + \ "name": "deno lsp", + \ "cmd": {server_info -> ["deno", "lsp"]}, + \ "root_uri": {server_info->lsp#utils#path_to_uri(lsp#utils#find_nearest_parent_file_directory(lsp#utils#get_buffer_path(), "tsconfig.json"))}, + \ "whitelist": ["typescript", "typescript.tsx"], + \ }) + augroup END +endif +``` From 71d468bec0ad121d4fafb2cf1d45cecd071e3327 Mon Sep 17 00:00:00 2001 From: crowlKats <13135287+crowlKats@users.noreply.github.com> Date: Fri, 11 Dec 2020 13:18:30 +0100 Subject: [PATCH 048/135] fix(cli): zsh completions (#8718) --- cli/flags.rs | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/cli/flags.rs b/cli/flags.rs index be93015f56d563..ae5ef15be5de7f 100644 --- a/cli/flags.rs +++ b/cli/flags.rs @@ -1213,7 +1213,11 @@ fn permission_args<'a, 'b>(app: App<'a, 'b>) -> App<'a, 'b> { fn run_subcommand<'a, 'b>() -> App<'a, 'b> { runtime_args(SubCommand::with_name("run"), true) - .arg(watch_arg()) + .arg( + watch_arg() + .conflicts_with("inspect") + .conflicts_with("inspect-brk") + ) .setting(AppSettings::TrailingVarArg) .arg( script_arg() @@ -1487,8 +1491,6 @@ fn watch_arg<'a, 'b>() -> Arg<'a, 'b> { Arg::with_name("watch") .requires("unstable") .long("watch") - .conflicts_with("inspect") - .conflicts_with("inspect-brk") .help("Watch for file changes and restart process automatically") .long_help( "Watch for file changes and restart process automatically. @@ -2824,7 +2826,7 @@ mod tests { #[test] fn completions() { - let r = flags_from_vec_safe(svec!["deno", "completions", "bash"]).unwrap(); + let r = flags_from_vec_safe(svec!["deno", "completions", "zsh"]).unwrap(); match r.subcommand { DenoSubcommand::Completions { buf } => assert!(!buf.is_empty()), From 9414dee9e56a9f42c07ada4f8e1be864a1a1b936 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1=C5=A1=20Znamen=C3=A1=C4=8Dek?= Date: Fri, 11 Dec 2020 18:43:01 +0100 Subject: [PATCH 049/135] docs: Fix readme typo (#8721) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2a722c56d38013..035b59eaf53776 100644 --- a/README.md +++ b/README.md @@ -83,7 +83,7 @@ More in-depth info can be found in the runtime We appreciate your help! -To contribute, please read the our +To contribute, please read our [guidelines](https://github.com/denoland/deno/blob/master/docs/contributing/style_guide.md). [Build Status - Cirrus]: https://github.com/denoland/deno/workflows/ci/badge.svg?branch=master&event=push From 65e72b68acf57da8462b8e7b057e7adb9393b698 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Fri, 11 Dec 2020 18:49:26 +0100 Subject: [PATCH 050/135] refactor(cli): decouple ops from ProgramState and Flags (#8659) This commit does major refactor of "Worker" and "WebWorker", in order to decouple them from "ProgramState" and "Flags". The main points of interest are "create_main_worker()" and "create_web_worker_callback()" functions which are responsible for creating "Worker" and "WebWorker" in CLI context. As a result it is now possible to factor out common "runtime" functionality into a separate crate. 
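To make the new flow concrete, here is a minimal sketch (not part of the patch) of how a CLI subcommand is expected to build and drive a worker after this refactor. It mirrors the `run_command` hunk below and assumes the CLI-internal items (`Flags`, `ProgramState`, `Permissions`, `create_main_worker`) plus `flags` and `main_module` are in scope:

```rust
// Illustrative sketch only; mirrors the refactored `run_command` in cli/main.rs.
async fn run_sketch(
  flags: Flags,
  main_module: ModuleSpecifier,
) -> Result<(), AnyError> {
  // Worker no longer receives ProgramState/Flags directly; the op registration,
  // module loader and inspector wiring now happen inside create_main_worker().
  let program_state = ProgramState::new(flags.clone())?;
  let permissions = Permissions::from_flags(&flags);
  let mut worker =
    create_main_worker(&program_state, main_module.clone(), permissions);
  worker.execute_module(&main_module).await?;
  worker.execute("window.dispatchEvent(new Event('load'))")?;
  worker.run_event_loop().await?;
  Ok(())
}
```

The web worker path is analogous: `create_web_worker_callback()` packages the same wiring into a callback that the worker host op invokes when spawning a new worker thread.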
--- cli/flags.rs | 2 +- cli/inspector.rs | 2 +- cli/main.rs | 154 +++++++++++++++++++++++++-- cli/ops/errors.rs | 6 +- cli/ops/mod.rs | 37 ++++--- cli/ops/runtime.rs | 39 +------ cli/ops/runtime_compiler.rs | 4 +- cli/ops/websocket.rs | 21 ++-- cli/ops/worker_host.rs | 38 +++++-- cli/program_state.rs | 10 -- cli/rt/99_main.js | 48 +++++---- cli/standalone.rs | 30 ++++-- cli/tests/integration_tests.rs | 5 - cli/version.rs | 4 - cli/web_worker.rs | 151 +++++++++++++++++---------- cli/worker.rs | 184 ++++++++++++++++----------------- core/lib.rs | 1 + core/modules.rs | 2 +- core/runtime.rs | 6 +- 19 files changed, 464 insertions(+), 280 deletions(-) diff --git a/cli/flags.rs b/cli/flags.rs index ae5ef15be5de7f..bafd8fe91d1b71 100644 --- a/cli/flags.rs +++ b/cli/flags.rs @@ -228,7 +228,7 @@ lazy_static! { crate::version::deno(), env!("PROFILE"), env!("TARGET"), - crate::version::v8(), + deno_core::v8_version(), crate::version::TYPESCRIPT ); } diff --git a/cli/inspector.rs b/cli/inspector.rs index a8be68c8d9c9e7..fa1bfa2d0499de 100644 --- a/cli/inspector.rs +++ b/cli/inspector.rs @@ -203,7 +203,7 @@ async fn server( warp::reply::json(&json!({ "Browser": format!("Deno/{}", crate::version::deno()), "Protocol-Version": "1.3", - "V8-Version": crate::version::v8(), + "V8-Version": deno_core::v8_version(), })) }); diff --git a/cli/main.rs b/cli/main.rs index 2e40df66bd75aa..8f8879ac9646cd 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -55,15 +55,22 @@ use crate::file_fetcher::FileFetcher; use crate::file_watcher::ModuleResolutionResult; use crate::flags::DenoSubcommand; use crate::flags::Flags; +use crate::fmt_errors::PrettyJsError; use crate::import_map::ImportMap; use crate::media_type::MediaType; +use crate::module_loader::CliModuleLoader; +use crate::ops::worker_host::CreateWebWorkerCb; use crate::permissions::Permissions; use crate::program_state::exit_unstable; use crate::program_state::ProgramState; +use crate::source_maps::apply_source_map; use crate::specifier_handler::FetchHandler; use crate::standalone::create_standalone_binary; use crate::tools::installer::infer_name_from_url; +use crate::web_worker::WebWorker; +use crate::web_worker::WebWorkerOptions; use crate::worker::MainWorker; +use crate::worker::WorkerOptions; use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures::future::FutureExt; @@ -86,6 +93,134 @@ use std::pin::Pin; use std::rc::Rc; use std::sync::Arc; +fn create_web_worker_callback( + program_state: Arc, +) -> Arc { + Arc::new(move |args| { + let global_state_ = program_state.clone(); + let js_error_create_fn = Rc::new(move |core_js_error| { + let source_mapped_error = + apply_source_map(&core_js_error, global_state_.clone()); + PrettyJsError::create(source_mapped_error) + }); + + let attach_inspector = program_state.maybe_inspector_server.is_some() + || program_state.flags.coverage; + let maybe_inspector_server = program_state.maybe_inspector_server.clone(); + + let module_loader = CliModuleLoader::new_for_worker(program_state.clone()); + let create_web_worker_cb = + create_web_worker_callback(program_state.clone()); + + let options = WebWorkerOptions { + args: program_state.flags.argv.clone(), + apply_source_maps: true, + debug_flag: program_state + .flags + .log_level + .map_or(false, |l| l == log::Level::Debug), + unstable: program_state.flags.unstable, + ca_filepath: program_state.flags.ca_file.clone(), + seed: program_state.flags.seed, + module_loader, + create_web_worker_cb, + js_error_create_fn: Some(js_error_create_fn), + 
use_deno_namespace: args.use_deno_namespace, + attach_inspector, + maybe_inspector_server, + }; + + let mut worker = WebWorker::from_options( + args.name, + args.permissions, + args.main_module, + args.worker_id, + &options, + ); + + // This block registers additional ops and state that + // are only available in the CLI + { + let js_runtime = &mut worker.js_runtime; + js_runtime + .op_state() + .borrow_mut() + .put::>(program_state.clone()); + // Applies source maps - works in conjuction with `js_error_create_fn` + // above + ops::errors::init(js_runtime); + if args.use_deno_namespace { + ops::runtime_compiler::init(js_runtime); + } + } + worker.bootstrap(&options); + + worker + }) +} + +pub fn create_main_worker( + program_state: &Arc, + main_module: ModuleSpecifier, + permissions: Permissions, +) -> MainWorker { + let module_loader = CliModuleLoader::new(program_state.clone()); + + let global_state_ = program_state.clone(); + + let js_error_create_fn = Rc::new(move |core_js_error| { + let source_mapped_error = + apply_source_map(&core_js_error, global_state_.clone()); + PrettyJsError::create(source_mapped_error) + }); + + let attach_inspector = program_state.maybe_inspector_server.is_some() + || program_state.flags.repl + || program_state.flags.coverage; + let maybe_inspector_server = program_state.maybe_inspector_server.clone(); + let should_break_on_first_statement = + program_state.flags.inspect_brk.is_some(); + + let create_web_worker_cb = create_web_worker_callback(program_state.clone()); + + let options = WorkerOptions { + apply_source_maps: true, + args: program_state.flags.argv.clone(), + debug_flag: program_state + .flags + .log_level + .map_or(false, |l| l == log::Level::Debug), + unstable: program_state.flags.unstable, + ca_filepath: program_state.flags.ca_file.clone(), + seed: program_state.flags.seed, + js_error_create_fn: Some(js_error_create_fn), + create_web_worker_cb, + attach_inspector, + maybe_inspector_server, + should_break_on_first_statement, + module_loader, + }; + + let mut worker = MainWorker::from_options(main_module, permissions, &options); + + // This block registers additional ops and state that + // are only available in the CLI + { + let js_runtime = &mut worker.js_runtime; + js_runtime + .op_state() + .borrow_mut() + .put::>(program_state.clone()); + // Applies source maps - works in conjuction with `js_error_create_fn` + // above + ops::errors::init(js_runtime); + ops::runtime_compiler::init(js_runtime); + } + worker.bootstrap(&options); + + worker +} + fn write_to_stdout_ignore_sigpipe(bytes: &[u8]) -> Result<(), std::io::Error> { use std::io::ErrorKind; @@ -253,7 +388,7 @@ async fn install_command( let program_state = ProgramState::new(preload_flags)?; let main_module = ModuleSpecifier::resolve_url_or_path(&module_url)?; let mut worker = - MainWorker::new(&program_state, main_module.clone(), permissions); + create_main_worker(&program_state, main_module.clone(), permissions); // First, fetch and compile the module; this step ensures that the module exists. worker.preload_module(&main_module).await?; tools::installer::install(flags, &module_url, args, name, root, force) @@ -321,7 +456,7 @@ async fn eval_command( let permissions = Permissions::from_flags(&flags); let program_state = ProgramState::new(flags)?; let mut worker = - MainWorker::new(&program_state, main_module.clone(), permissions); + create_main_worker(&program_state, main_module.clone(), permissions); let main_module_url = main_module.as_url().to_owned(); // Create a dummy source file. 
let source_code = if print { @@ -664,7 +799,7 @@ async fn run_repl(flags: Flags) -> Result<(), AnyError> { let permissions = Permissions::from_flags(&flags); let program_state = ProgramState::new(flags)?; let mut worker = - MainWorker::new(&program_state, main_module.clone(), permissions); + create_main_worker(&program_state, main_module.clone(), permissions); worker.run_event_loop().await?; tools::repl::run(&program_state, worker).await @@ -675,8 +810,11 @@ async fn run_from_stdin(flags: Flags) -> Result<(), AnyError> { let permissions = Permissions::from_flags(&flags); let main_module = ModuleSpecifier::resolve_url_or_path("./$deno$stdin.ts").unwrap(); - let mut worker = - MainWorker::new(&program_state.clone(), main_module.clone(), permissions); + let mut worker = create_main_worker( + &program_state.clone(), + main_module.clone(), + permissions, + ); let mut source = Vec::new(); std::io::stdin().read_to_end(&mut source)?; @@ -755,7 +893,7 @@ async fn run_with_watch(flags: Flags, script: String) -> Result<(), AnyError> { let main_module = main_module.clone(); let program_state = ProgramState::new(flags)?; let mut worker = - MainWorker::new(&program_state, main_module.clone(), permissions); + create_main_worker(&program_state, main_module.clone(), permissions); debug!("main_module {}", main_module); worker.execute_module(&main_module).await?; worker.execute("window.dispatchEvent(new Event('load'))")?; @@ -788,7 +926,7 @@ async fn run_command(flags: Flags, script: String) -> Result<(), AnyError> { let program_state = ProgramState::new(flags.clone())?; let permissions = Permissions::from_flags(&flags); let mut worker = - MainWorker::new(&program_state, main_module.clone(), permissions); + create_main_worker(&program_state, main_module.clone(), permissions); debug!("main_module {}", main_module); worker.execute_module(&main_module).await?; worker.execute("window.dispatchEvent(new Event('load'))")?; @@ -857,7 +995,7 @@ async fn test_command( } let mut worker = - MainWorker::new(&program_state, main_module.clone(), permissions); + create_main_worker(&program_state, main_module.clone(), permissions); let mut maybe_coverage_collector = if flags.coverage { let session = worker.create_inspector_session(); diff --git a/cli/ops/errors.rs b/cli/ops/errors.rs index dbb72139dd0d7b..d9893b0ef8eda8 100644 --- a/cli/ops/errors.rs +++ b/cli/ops/errors.rs @@ -1,6 +1,7 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
use crate::diagnostics::Diagnostics; +use crate::program_state::ProgramState; use crate::source_maps::get_orig_position; use crate::source_maps::CachedMaps; use deno_core::error::AnyError; @@ -11,6 +12,7 @@ use deno_core::OpState; use deno_core::ZeroCopyBuf; use serde::Deserialize; use std::collections::HashMap; +use std::sync::Arc; pub fn init(rt: &mut deno_core::JsRuntime) { super::reg_json_sync(rt, "op_apply_source_map", op_apply_source_map); @@ -33,13 +35,15 @@ fn op_apply_source_map( let args: ApplySourceMap = serde_json::from_value(args)?; let mut mappings_map: CachedMaps = HashMap::new(); + let program_state = state.borrow::>().clone(); + let (orig_file_name, orig_line_number, orig_column_number) = get_orig_position( args.file_name, args.line_number.into(), args.column_number.into(), &mut mappings_map, - super::program_state(state), + program_state, ); Ok(json!({ diff --git a/cli/ops/mod.rs b/cli/ops/mod.rs index b450f8989edfab..56c0f1ad594986 100644 --- a/cli/ops/mod.rs +++ b/cli/ops/mod.rs @@ -27,7 +27,6 @@ pub mod websocket; pub mod worker_host; use crate::metrics::metrics_op; -use crate::program_state::ProgramState; use deno_core::error::AnyError; use deno_core::json_op_async; use deno_core::json_op_sync; @@ -39,7 +38,6 @@ use deno_core::ZeroCopyBuf; use std::cell::RefCell; use std::future::Future; use std::rc::Rc; -use std::sync::Arc; pub fn reg_json_async(rt: &mut JsRuntime, name: &'static str, op_fn: F) where @@ -57,24 +55,33 @@ where rt.register_op(name, metrics_op(json_op_sync(op_fn))); } +pub struct UnstableChecker { + pub unstable: bool, +} + +impl UnstableChecker { + /// Quits the process if the --unstable flag was not provided. + /// + /// This is intentionally a non-recoverable check so that people cannot probe + /// for unstable APIs from stable programs. + // NOTE(bartlomieju): keep in sync with `cli/program_state.rs` + pub fn check_unstable(&self, api_name: &str) { + if !self.unstable { + eprintln!( + "Unstable API '{}'. The --unstable flag must be provided.", + api_name + ); + std::process::exit(70); + } + } +} /// Helper for checking unstable features. Used for sync ops. pub fn check_unstable(state: &OpState, api_name: &str) { - state.borrow::>().check_unstable(api_name) + state.borrow::().check_unstable(api_name) } /// Helper for checking unstable features. Used for async ops. pub fn check_unstable2(state: &Rc>, api_name: &str) { let state = state.borrow(); - state.borrow::>().check_unstable(api_name) -} - -/// Helper for extracting the commonly used state. Used for sync ops. -pub fn program_state(state: &OpState) -> Arc { - state.borrow::>().clone() -} - -/// Helper for extracting the commonly used state. Used for async ops. -pub fn global_state2(state: &Rc>) -> Arc { - let state = state.borrow(); - state.borrow::>().clone() + state.borrow::().check_unstable(api_name) } diff --git a/cli/ops/runtime.rs b/cli/ops/runtime.rs index 38b23f3b303ce0..cb3b53d537da43 100644 --- a/cli/ops/runtime.rs +++ b/cli/ops/runtime.rs @@ -1,9 +1,7 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
-use crate::colors; use crate::metrics::Metrics; use crate::permissions::Permissions; -use crate::version; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::serde_json::json; @@ -11,50 +9,17 @@ use deno_core::serde_json::Value; use deno_core::ModuleSpecifier; use deno_core::OpState; use deno_core::ZeroCopyBuf; -use std::env; -type ApplySourceMaps = bool; - -pub fn init( - rt: &mut deno_core::JsRuntime, - main_module: ModuleSpecifier, - apply_source_maps: bool, -) { +pub fn init(rt: &mut deno_core::JsRuntime, main_module: ModuleSpecifier) { { let op_state = rt.op_state(); let mut state = op_state.borrow_mut(); state.put::(main_module); - state.put::(apply_source_maps); } - super::reg_json_sync(rt, "op_start", op_start); super::reg_json_sync(rt, "op_main_module", op_main_module); super::reg_json_sync(rt, "op_metrics", op_metrics); } -fn op_start( - state: &mut OpState, - _args: Value, - _zero_copy: &mut [ZeroCopyBuf], -) -> Result { - let apply_source_maps = *state.borrow::(); - let gs = &super::program_state(state); - - Ok(json!({ - "args": gs.flags.argv.clone(), - "applySourceMaps": apply_source_maps, - "debugFlag": gs.flags.log_level.map_or(false, |l| l == log::Level::Debug), - "denoVersion": version::deno(), - "noColor": !colors::use_color(), - "pid": std::process::id(), - "ppid": ppid(), - "target": env!("TARGET"), - "tsVersion": version::TYPESCRIPT, - "unstableFlag": gs.flags.unstable, - "v8Version": version::v8(), - "versionFlag": gs.flags.version, - })) -} - fn op_main_module( state: &mut OpState, _args: Value, @@ -93,7 +58,7 @@ fn op_metrics( })) } -fn ppid() -> Value { +pub fn ppid() -> Value { #[cfg(windows)] { // Adopted from rustup: diff --git a/cli/ops/runtime_compiler.rs b/cli/ops/runtime_compiler.rs index f47f2fdb387020..03ba88c76876b3 100644 --- a/cli/ops/runtime_compiler.rs +++ b/cli/ops/runtime_compiler.rs @@ -7,10 +7,12 @@ use crate::module_graph::BundleType; use crate::module_graph::EmitOptions; use crate::module_graph::GraphBuilder; use crate::permissions::Permissions; +use crate::program_state::ProgramState; use crate::specifier_handler::FetchHandler; use crate::specifier_handler::MemoryHandler; use crate::specifier_handler::SpecifierHandler; use crate::tsc_config; +use std::sync::Arc; use deno_core::error::AnyError; use deno_core::error::Context; @@ -51,7 +53,7 @@ async fn op_compile( } else { super::check_unstable2(&state, "Deno.compile"); } - let program_state = super::global_state2(&state); + let program_state = state.borrow().borrow::>().clone(); let runtime_permissions = { let state = state.borrow(); state.borrow::().clone() diff --git a/cli/ops/websocket.rs b/cli/ops/websocket.rs index c04c3b476b064e..de6357c87ca1f5 100644 --- a/cli/ops/websocket.rs +++ b/cli/ops/websocket.rs @@ -33,7 +33,17 @@ use tokio_tungstenite::tungstenite::{ use tokio_tungstenite::{client_async, WebSocketStream}; use webpki::DNSNameRef; -pub fn init(rt: &mut deno_core::JsRuntime) { +#[derive(Clone)] +struct WsCaFile(String); + +pub fn init(rt: &mut deno_core::JsRuntime, maybe_ca_file: Option<&str>) { + { + let op_state = rt.op_state(); + let mut state = op_state.borrow_mut(); + if let Some(ca_file) = maybe_ca_file { + state.put::(WsCaFile(ca_file.to_string())); + } + } super::reg_json_sync(rt, "op_ws_check_permission", op_ws_check_permission); super::reg_json_async(rt, "op_ws_create", op_ws_create); super::reg_json_async(rt, "op_ws_send", op_ws_send); @@ -92,10 +102,7 @@ pub async fn op_ws_create( ); } - let ca_file = { - let program_state = 
super::global_state2(&state); - program_state.flags.ca_file.clone() - }; + let maybe_ca_file = state.borrow().try_borrow::().cloned(); let uri: Uri = args.url.parse()?; let mut request = Request::builder().method(Method::GET).uri(&uri); @@ -128,8 +135,8 @@ pub async fn op_ws_create( .root_store .add_server_trust_anchors(&webpki_roots::TLS_SERVER_ROOTS); - if let Some(path) = ca_file { - let key_file = File::open(path)?; + if let Some(ws_ca_file) = maybe_ca_file { + let key_file = File::open(ws_ca_file.0)?; let reader = &mut BufReader::new(key_file); config.root_store.add_pem_file(reader).unwrap(); } diff --git a/cli/ops/worker_host.rs b/cli/ops/worker_host.rs index 6a2d799682e384..871e4b9fe42311 100644 --- a/cli/ops/worker_host.rs +++ b/cli/ops/worker_host.rs @@ -21,8 +21,27 @@ use std::cell::RefCell; use std::collections::HashMap; use std::convert::From; use std::rc::Rc; +use std::sync::Arc; use std::thread::JoinHandle; +pub struct CreateWebWorkerArgs { + pub name: String, + pub worker_id: u32, + pub permissions: Permissions, + pub main_module: ModuleSpecifier, + pub use_deno_namespace: bool, +} + +pub type CreateWebWorkerCb = + dyn Fn(CreateWebWorkerArgs) -> WebWorker + Sync + Send; + +/// A holder for callback that is used to create a new +/// WebWorker. It's a struct instead of a type alias +/// because `GothamState` used in `OpState` overrides +/// value if type alises have the same underlying type +#[derive(Clone)] +pub struct CreateWebWorkerCbHolder(Arc); + #[derive(Deserialize)] struct HostUnhandledErrorArgs { message: String, @@ -31,12 +50,16 @@ struct HostUnhandledErrorArgs { pub fn init( rt: &mut deno_core::JsRuntime, sender: Option>, + create_web_worker_cb: Arc, ) { { let op_state = rt.op_state(); let mut state = op_state.borrow_mut(); state.put::(WorkersTable::default()); state.put::(WorkerId::default()); + + let create_module_loader = CreateWebWorkerCbHolder(create_web_worker_cb); + state.put::(create_module_loader); } super::reg_json_sync(rt, "op_create_worker", op_create_worker); super::reg_json_sync( @@ -102,11 +125,12 @@ fn op_create_worker( } let permissions = state.borrow::().clone(); let worker_id = state.take::(); + let create_module_loader = state.take::(); + state.put::(create_module_loader.clone()); state.put::(worker_id + 1); let module_specifier = ModuleSpecifier::resolve_url(&specifier)?; let worker_name = args_name.unwrap_or_else(|| "".to_string()); - let program_state = super::program_state(state); let (handle_sender, handle_receiver) = std::sync::mpsc::sync_channel::>(1); @@ -121,14 +145,14 @@ fn op_create_worker( // - JS worker is useless - meaning it throws an exception and can't do anything else, // all action done upon it should be noops // - newly spawned thread exits - let worker = WebWorker::new( - worker_name, + + let worker = (create_module_loader.0)(CreateWebWorkerArgs { + name: worker_name, + worker_id, permissions, - module_specifier.clone(), - program_state, + main_module: module_specifier.clone(), use_deno_namespace, - worker_id, - ); + }); // Send thread safe handle to newly created worker to host thread handle_sender.send(Ok(worker.thread_safe_handle())).unwrap(); diff --git a/cli/program_state.rs b/cli/program_state.rs index 023e2604d16f7c..41b7c51fe0c93f 100644 --- a/cli/program_state.rs +++ b/cli/program_state.rs @@ -258,16 +258,6 @@ impl ProgramState { } } - /// Quits the process if the --unstable flag was not provided. 
- /// - /// This is intentionally a non-recoverable check so that people cannot probe - /// for unstable APIs from stable programs. - pub fn check_unstable(&self, api_name: &str) { - if !self.flags.unstable { - exit_unstable(api_name); - } - } - #[cfg(test)] pub fn mock( argv: Vec, diff --git a/cli/rt/99_main.js b/cli/rt/99_main.js index 2aa140990e4449..f38d51936abd60 100644 --- a/cli/rt/99_main.js +++ b/cli/rt/99_main.js @@ -132,40 +132,31 @@ delete Object.prototype.__proto__; core.jsonOpSync("op_worker_close"); } - function opStart() { - return core.jsonOpSync("op_start"); - } - function opMainModule() { return core.jsonOpSync("op_main_module"); } - // TODO(bartlomieju): temporary solution, must be fixed when moving - // dispatches to separate crates - function initOps() { + function runtimeStart(runtimeOptions, source) { const opsMap = core.ops(); for (const [name, opId] of Object.entries(opsMap)) { if (name === "op_write" || name === "op_read") { core.setAsyncHandler(opId, dispatchMinimal.asyncMsgFromRust); } } - core.setMacrotaskCallback(timers.handleTimerMacrotask); - } - function runtimeStart(source) { - initOps(); - // First we send an empty `Start` message to let the privileged side know we - // are ready. The response should be a `StartRes` message containing the CLI - // args and other info. - const s = opStart(); - version.setVersions(s.denoVersion, s.v8Version, s.tsVersion); - build.setBuildInfo(s.target); - util.setLogDebug(s.debugFlag, source); + core.setMacrotaskCallback(timers.handleTimerMacrotask); + version.setVersions( + runtimeOptions.denoVersion, + runtimeOptions.v8Version, + runtimeOptions.tsVersion, + ); + build.setBuildInfo(runtimeOptions.target); + util.setLogDebug(runtimeOptions.debugFlag, source); // TODO(bartlomieju): a very crude way to disable // source mapping of errors. This condition is true // only for compiled standalone binaries. let prepareStackTrace; - if (s.applySourceMaps) { + if (runtimeOptions.applySourceMaps) { prepareStackTrace = core.createPrepareStackTrace( errorStack.opApplySourceMap, ); @@ -173,8 +164,6 @@ delete Object.prototype.__proto__; prepareStackTrace = core.createPrepareStackTrace(); } Error.prepareStackTrace = prepareStackTrace; - - return s; } function registerErrors() { @@ -283,7 +272,7 @@ delete Object.prototype.__proto__; let hasBootstrapped = false; - function bootstrapMainRuntime() { + function bootstrapMainRuntime(runtimeOptions) { if (hasBootstrapped) { throw new Error("Worker runtime already bootstrapped"); } @@ -300,7 +289,8 @@ delete Object.prototype.__proto__; defineEventHandler(window, "load", null); defineEventHandler(window, "unload", null); - const { args, noColor, pid, ppid, unstableFlag } = runtimeStart(); + runtimeStart(runtimeOptions); + const { args, noColor, pid, ppid, unstableFlag } = runtimeOptions; registerErrors(); @@ -335,7 +325,12 @@ delete Object.prototype.__proto__; util.log("args", args); } - function bootstrapWorkerRuntime(name, useDenoNamespace, internalName) { + function bootstrapWorkerRuntime( + runtimeOptions, + name, + useDenoNamespace, + internalName, + ) { if (hasBootstrapped) { throw new Error("Worker runtime already bootstrapped"); } @@ -349,9 +344,12 @@ delete Object.prototype.__proto__; Object.defineProperties(globalThis, { name: util.readOnly(name) }); Object.setPrototypeOf(globalThis, DedicatedWorkerGlobalScope.prototype); eventTarget.setEventTargetData(globalThis); - const { unstableFlag, pid, noColor, args } = runtimeStart( + + runtimeStart( + runtimeOptions, internalName ?? 
name, ); + const { unstableFlag, pid, noColor, args } = runtimeOptions; registerErrors(); diff --git a/cli/standalone.rs b/cli/standalone.rs index df7106c970233b..d7ffd0fd2e456a 100644 --- a/cli/standalone.rs +++ b/cli/standalone.rs @@ -1,9 +1,9 @@ use crate::colors; use crate::flags::Flags; use crate::permissions::Permissions; -use crate::program_state::ProgramState; use crate::tokio_util; use crate::worker::MainWorker; +use crate::worker::WorkerOptions; use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::futures::FutureExt; @@ -21,6 +21,7 @@ use std::io::Write; use std::path::PathBuf; use std::pin::Pin; use std::rc::Rc; +use std::sync::Arc; const MAGIC_TRAILER: &[u8; 8] = b"d3n0l4nd"; @@ -109,16 +110,29 @@ async fn run(source_code: String, args: Vec) -> Result<(), AnyError> { // TODO(lucacasonato): remove once you can specify this correctly through embedded metadata flags.unstable = true; let main_module = ModuleSpecifier::resolve_url(SPECIFIER)?; - let program_state = ProgramState::new(flags.clone())?; let permissions = Permissions::allow_all(); let module_loader = Rc::new(EmbeddedModuleLoader(source_code)); - let mut worker = MainWorker::from_options( - &program_state, - main_module.clone(), - permissions, + let create_web_worker_cb = Arc::new(|_| { + todo!("Worker are currently not supported in standalone binaries"); + }); + + let options = WorkerOptions { + apply_source_maps: false, + args: flags.argv.clone(), + debug_flag: false, + unstable: true, + ca_filepath: None, + seed: None, + js_error_create_fn: None, + create_web_worker_cb, + attach_inspector: false, + maybe_inspector_server: None, + should_break_on_first_statement: false, module_loader, - None, - ); + }; + let mut worker = + MainWorker::from_options(main_module.clone(), permissions, &options); + worker.bootstrap(&options); worker.execute_module(&main_module).await?; worker.execute("window.dispatchEvent(new Event('load'))")?; worker.run_event_loop().await?; diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index c81152ec6435c9..eeffb009701db0 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -2646,11 +2646,6 @@ itest!(fmt_stdin_check_not_formatted { output_str: Some("Not formatted stdin\n"), }); -itest!(circular1 { - args: "run --reload circular1.js", - output: "circular1.js.out", -}); - itest!(config { args: "run --reload --config config.tsconfig.json config.ts", exit_code: 1, diff --git a/cli/version.rs b/cli/version.rs index 694ffc44ba6059..63253cec82b716 100644 --- a/cli/version.rs +++ b/cli/version.rs @@ -13,7 +13,3 @@ pub fn deno() -> String { pub fn is_canary() -> bool { option_env!("DENO_CANARY").is_some() } - -pub fn v8() -> &'static str { - deno_core::v8_version() -} diff --git a/cli/web_worker.rs b/cli/web_worker.rs index 44806d520c4aca..9320c22d81f8e8 100644 --- a/cli/web_worker.rs +++ b/cli/web_worker.rs @@ -1,28 +1,31 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
use crate::colors; -use crate::fmt_errors::PrettyJsError; use crate::inspector::DenoInspector; +use crate::inspector::InspectorServer; use crate::js; use crate::metrics::Metrics; -use crate::module_loader::CliModuleLoader; use crate::ops; use crate::permissions::Permissions; -use crate::program_state::ProgramState; -use crate::source_maps::apply_source_map; use crate::tokio_util::create_basic_runtime; +use crate::version; use deno_core::error::AnyError; use deno_core::futures::channel::mpsc; use deno_core::futures::future::poll_fn; use deno_core::futures::future::FutureExt; use deno_core::futures::stream::StreamExt; use deno_core::futures::task::AtomicWaker; +use deno_core::serde_json; +use deno_core::serde_json::json; use deno_core::url::Url; use deno_core::v8; +use deno_core::JsErrorCreateFn; use deno_core::JsRuntime; +use deno_core::ModuleLoader; use deno_core::ModuleSpecifier; use deno_core::RuntimeOptions; use std::env; +use std::rc::Rc; use std::sync::atomic::AtomicBool; use std::sync::atomic::Ordering; use std::sync::Arc; @@ -115,6 +118,7 @@ fn create_channels( /// Each `WebWorker` is either a child of `MainWorker` or other /// `WebWorker`. pub struct WebWorker { + id: u32, inspector: Option>, // Following fields are pub because they are accessed // when creating a new WebWorker instance. @@ -125,46 +129,48 @@ pub struct WebWorker { event_loop_idle: bool, terminate_rx: mpsc::Receiver<()>, handle: WebWorkerHandle, - pub has_deno_namespace: bool, + pub use_deno_namespace: bool, +} + +pub struct WebWorkerOptions { + pub args: Vec, + pub debug_flag: bool, + pub unstable: bool, + pub ca_filepath: Option, + pub seed: Option, + pub module_loader: Rc, + pub create_web_worker_cb: Arc, + pub js_error_create_fn: Option>, + pub use_deno_namespace: bool, + pub attach_inspector: bool, + pub maybe_inspector_server: Option>, + pub apply_source_maps: bool, } impl WebWorker { - pub fn new( + pub fn from_options( name: String, permissions: Permissions, main_module: ModuleSpecifier, - program_state: Arc, - has_deno_namespace: bool, worker_id: u32, + options: &WebWorkerOptions, ) -> Self { - let module_loader = CliModuleLoader::new_for_worker(program_state.clone()); - let global_state_ = program_state.clone(); - - let js_error_create_fn = Box::new(move |core_js_error| { - let source_mapped_error = - apply_source_map(&core_js_error, global_state_.clone()); - PrettyJsError::create(source_mapped_error) - }); - let mut js_runtime = JsRuntime::new(RuntimeOptions { - module_loader: Some(module_loader), + module_loader: Some(options.module_loader.clone()), startup_snapshot: Some(js::deno_isolate_init()), - js_error_create_fn: Some(js_error_create_fn), + js_error_create_fn: options.js_error_create_fn.clone(), get_error_class_fn: Some(&crate::errors::get_error_class_name), ..Default::default() }); - let inspector = - if let Some(inspector_server) = &program_state.maybe_inspector_server { - Some(DenoInspector::new( - &mut js_runtime, - Some(inspector_server.clone()), - )) - } else if program_state.flags.coverage || program_state.flags.repl { - Some(DenoInspector::new(&mut js_runtime, None)) - } else { - None - }; + let inspector = if options.attach_inspector { + Some(DenoInspector::new( + &mut js_runtime, + options.maybe_inspector_server.clone(), + )) + } else { + None + }; let (terminate_tx, terminate_rx) = mpsc::channel::<()>(1); let isolate_handle = js_runtime.v8_isolate().thread_safe_handle(); @@ -172,15 +178,16 @@ impl WebWorker { create_channels(isolate_handle, terminate_tx); let mut worker = Self { + id: 
worker_id, inspector, internal_channels, js_runtime, - name: name.clone(), + name, waker: AtomicWaker::new(), event_loop_idle: false, terminate_rx, handle, - has_deno_namespace, + use_deno_namespace: options.use_deno_namespace, }; { @@ -192,15 +199,21 @@ impl WebWorker { let op_state = js_runtime.op_state(); let mut op_state = op_state.borrow_mut(); op_state.put::(Default::default()); - op_state.put::>(program_state.clone()); op_state.put::(permissions); + op_state.put::(ops::UnstableChecker { + unstable: options.unstable, + }); } ops::web_worker::init(js_runtime, sender.clone(), handle); - ops::runtime::init(js_runtime, main_module, true); - ops::fetch::init(js_runtime, program_state.flags.ca_file.as_deref()); + ops::runtime::init(js_runtime, main_module); + ops::fetch::init(js_runtime, options.ca_filepath.as_deref()); ops::timers::init(js_runtime); - ops::worker_host::init(js_runtime, Some(sender)); + ops::worker_host::init( + js_runtime, + Some(sender), + options.create_web_worker_cb.clone(), + ); ops::reg_json_sync(js_runtime, "op_close", deno_core::op_close); ops::reg_json_sync(js_runtime, "op_resources", deno_core::op_resources); ops::reg_json_sync( @@ -208,11 +221,10 @@ impl WebWorker { "op_domain_to_ascii", deno_web::op_domain_to_ascii, ); - ops::errors::init(js_runtime); ops::io::init(js_runtime); - ops::websocket::init(js_runtime); + ops::websocket::init(js_runtime, options.ca_filepath.as_deref()); - if has_deno_namespace { + if options.use_deno_namespace { ops::fs_events::init(js_runtime); ops::fs::init(js_runtime); ops::net::init(js_runtime); @@ -220,8 +232,7 @@ impl WebWorker { ops::permissions::init(js_runtime); ops::plugin::init(js_runtime); ops::process::init(js_runtime); - ops::crypto::init(js_runtime, program_state.flags.seed); - ops::runtime_compiler::init(js_runtime); + ops::crypto::init(js_runtime, options.seed); ops::signal::init(js_runtime); ops::tls::init(js_runtime); ops::tty::init(js_runtime); @@ -239,19 +250,38 @@ impl WebWorker { op_state.resource_table.add("stderr", Box::new(stream)); } } + + worker } + } + + pub fn bootstrap(&mut self, options: &WebWorkerOptions) { + let runtime_options = json!({ + "args": options.args, + "applySourceMaps": options.apply_source_maps, + "debugFlag": options.debug_flag, + "denoVersion": version::deno(), + "noColor": !colors::use_color(), + "pid": std::process::id(), + "ppid": ops::runtime::ppid(), + "target": env!("TARGET"), + "tsVersion": version::TYPESCRIPT, + "unstableFlag": options.unstable, + "v8Version": deno_core::v8_version(), + }); + + let runtime_options_str = + serde_json::to_string_pretty(&runtime_options).unwrap(); // Instead of using name for log we use `worker-${id}` because // WebWorkers can have empty string as name. let script = format!( - "bootstrap.workerRuntime(\"{}\", {}, \"worker-{}\")", - name, worker.has_deno_namespace, worker_id + "bootstrap.workerRuntime({}, \"{}\", {}, \"worker-{}\")", + runtime_options_str, self.name, options.use_deno_namespace, self.id ); - worker + self .execute(&script) .expect("Failed to execute worker bootstrap script"); - - worker } /// Same as execute2() but the filename defaults to "$CWD/__anonymous__". 
@@ -421,22 +451,39 @@ pub fn run_web_worker( #[cfg(test)] mod tests { use super::*; - use crate::program_state::ProgramState; use crate::tokio_util; use deno_core::serde_json::json; fn create_test_web_worker() -> WebWorker { let main_module = ModuleSpecifier::resolve_url_or_path("./hello.js").unwrap(); - let program_state = ProgramState::mock(vec!["deno".to_string()], None); - WebWorker::new( + let module_loader = Rc::new(deno_core::NoopModuleLoader); + let create_web_worker_cb = Arc::new(|_| unreachable!()); + + let options = WebWorkerOptions { + args: vec![], + apply_source_maps: false, + debug_flag: false, + unstable: false, + ca_filepath: None, + seed: None, + module_loader, + create_web_worker_cb, + js_error_create_fn: None, + use_deno_namespace: false, + attach_inspector: false, + maybe_inspector_server: None, + }; + + let mut worker = WebWorker::from_options( "TEST".to_string(), Permissions::allow_all(), main_module, - program_state, - false, 1, - ) + &options, + ); + worker.bootstrap(&options); + worker } #[tokio::test] diff --git a/cli/worker.rs b/cli/worker.rs index dda26291a41cde..b119c3b0dd6e3e 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -1,18 +1,19 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -use crate::fmt_errors::PrettyJsError; +use crate::colors; use crate::inspector::DenoInspector; +use crate::inspector::InspectorServer; use crate::inspector::InspectorSession; use crate::js; use crate::metrics::Metrics; -use crate::module_loader::CliModuleLoader; use crate::ops; use crate::permissions::Permissions; -use crate::program_state::ProgramState; -use crate::source_maps::apply_source_map; +use crate::version; use deno_core::error::AnyError; use deno_core::futures::future::poll_fn; use deno_core::futures::future::FutureExt; +use deno_core::serde_json; +use deno_core::serde_json::json; use deno_core::url::Url; use deno_core::JsErrorCreateFn; use deno_core::JsRuntime; @@ -35,68 +36,51 @@ use std::task::Poll; /// are descendants of this worker. 
pub struct MainWorker { inspector: Option>, - js_runtime: JsRuntime, + pub js_runtime: JsRuntime, should_break_on_first_statement: bool, } -impl MainWorker { - pub fn new( - program_state: &Arc, - main_module: ModuleSpecifier, - permissions: Permissions, - ) -> Self { - let module_loader = CliModuleLoader::new(program_state.clone()); - - let global_state_ = program_state.clone(); - - let js_error_create_fn = Box::new(move |core_js_error| { - let source_mapped_error = - apply_source_map(&core_js_error, global_state_.clone()); - PrettyJsError::create(source_mapped_error) - }); - - Self::from_options( - program_state, - main_module, - permissions, - module_loader, - Some(js_error_create_fn), - ) - } +pub struct WorkerOptions { + pub apply_source_maps: bool, + pub args: Vec, + pub debug_flag: bool, + pub unstable: bool, + pub ca_filepath: Option, + pub seed: Option, + pub module_loader: Rc, + // Callback that will be invoked when creating new instance + // of WebWorker + pub create_web_worker_cb: Arc, + pub js_error_create_fn: Option>, + pub attach_inspector: bool, + pub maybe_inspector_server: Option>, + pub should_break_on_first_statement: bool, +} +impl MainWorker { pub fn from_options( - program_state: &Arc, main_module: ModuleSpecifier, permissions: Permissions, - module_loader: Rc, - js_error_create_fn: Option>, + options: &WorkerOptions, ) -> Self { - // TODO(bartlomieju): this is hacky way to not apply source - // maps in JS - let apply_source_maps = js_error_create_fn.is_some(); - let mut js_runtime = JsRuntime::new(RuntimeOptions { - module_loader: Some(module_loader), + module_loader: Some(options.module_loader.clone()), startup_snapshot: Some(js::deno_isolate_init()), - js_error_create_fn, + js_error_create_fn: options.js_error_create_fn.clone(), get_error_class_fn: Some(&crate::errors::get_error_class_name), ..Default::default() }); - let inspector = - if let Some(inspector_server) = &program_state.maybe_inspector_server { - Some(DenoInspector::new( - &mut js_runtime, - Some(inspector_server.clone()), - )) - } else if program_state.flags.coverage || program_state.flags.repl { - Some(DenoInspector::new(&mut js_runtime, None)) - } else { - None - }; - + let inspector = if options.attach_inspector { + Some(DenoInspector::new( + &mut js_runtime, + options.maybe_inspector_server.clone(), + )) + } else { + None + }; let should_break_on_first_statement = - inspector.is_some() && program_state.flags.inspect_brk.is_some(); + inspector.is_some() && options.should_break_on_first_statement; let mut worker = Self { inspector, @@ -111,15 +95,21 @@ impl MainWorker { let op_state = js_runtime.op_state(); let mut op_state = op_state.borrow_mut(); op_state.put::(Default::default()); - op_state.put::>(program_state.clone()); op_state.put::(permissions); + op_state.put::(ops::UnstableChecker { + unstable: options.unstable, + }); } - ops::runtime::init(js_runtime, main_module, apply_source_maps); - ops::fetch::init(js_runtime, program_state.flags.ca_file.as_deref()); + ops::runtime::init(js_runtime, main_module); + ops::fetch::init(js_runtime, options.ca_filepath.as_deref()); ops::timers::init(js_runtime); - ops::worker_host::init(js_runtime, None); - ops::crypto::init(js_runtime, program_state.flags.seed); + ops::worker_host::init( + js_runtime, + None, + options.create_web_worker_cb.clone(), + ); + ops::crypto::init(js_runtime, options.seed); ops::reg_json_sync(js_runtime, "op_close", deno_core::op_close); ops::reg_json_sync(js_runtime, "op_resources", deno_core::op_resources); ops::reg_json_sync( 
@@ -127,7 +117,6 @@ impl MainWorker { "op_domain_to_ascii", deno_web::op_domain_to_ascii, ); - ops::errors::init(js_runtime); ops::fs_events::init(js_runtime); ops::fs::init(js_runtime); ops::io::init(js_runtime); @@ -136,11 +125,10 @@ impl MainWorker { ops::permissions::init(js_runtime); ops::plugin::init(js_runtime); ops::process::init(js_runtime); - ops::runtime_compiler::init(js_runtime); ops::signal::init(js_runtime); ops::tls::init(js_runtime); ops::tty::init(js_runtime); - ops::websocket::init(js_runtime); + ops::websocket::init(js_runtime, options.ca_filepath.as_deref()); } { let op_state = js_runtime.op_state(); @@ -157,10 +145,32 @@ impl MainWorker { t.add("stderr", Box::new(stream)); } } + worker - .execute("bootstrap.mainRuntime()") + } + + pub fn bootstrap(&mut self, options: &WorkerOptions) { + let runtime_options = json!({ + "args": options.args, + "applySourceMaps": options.apply_source_maps, + "debugFlag": options.debug_flag, + "denoVersion": version::deno(), + "noColor": !colors::use_color(), + "pid": std::process::id(), + "ppid": ops::runtime::ppid(), + "target": env!("TARGET"), + "tsVersion": version::TYPESCRIPT, + "unstableFlag": options.unstable, + "v8Version": deno_core::v8_version(), + }); + + let script = format!( + "bootstrap.mainRuntime({})", + serde_json::to_string_pretty(&runtime_options).unwrap() + ); + self + .execute(&script) .expect("Failed to execute bootstrap script"); - worker } /// Same as execute2() but the filename defaults to "$CWD/__anonymous__". @@ -231,23 +241,28 @@ impl Drop for MainWorker { #[cfg(test)] mod tests { use super::*; - use crate::flags::DenoSubcommand; - use crate::flags::Flags; - use crate::program_state::ProgramState; fn create_test_worker() -> MainWorker { let main_module = ModuleSpecifier::resolve_url_or_path("./hello.js").unwrap(); - let flags = Flags { - subcommand: DenoSubcommand::Run { - script: main_module.to_string(), - }, - ..Default::default() + let permissions = Permissions::default(); + + let options = WorkerOptions { + apply_source_maps: false, + args: vec![], + debug_flag: false, + unstable: false, + ca_filepath: None, + seed: None, + js_error_create_fn: None, + create_web_worker_cb: Arc::new(|_| unreachable!()), + attach_inspector: false, + maybe_inspector_server: None, + should_break_on_first_statement: false, + module_loader: Rc::new(deno_core::FsModuleLoader), }; - let permissions = Permissions::from_flags(&flags); - let program_state = - ProgramState::mock(vec!["deno".to_string()], Some(flags)); - MainWorker::new(&program_state, main_module, permissions) + + MainWorker::from_options(main_module, permissions, &options) } #[tokio::test] @@ -273,26 +288,7 @@ mod tests { let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) .parent() .unwrap() - .join("tests/circular1.ts"); - let module_specifier = - ModuleSpecifier::resolve_url_or_path(&p.to_string_lossy()).unwrap(); - let mut worker = create_test_worker(); - let result = worker.execute_module(&module_specifier).await; - if let Err(err) = result { - eprintln!("execute_mod err {:?}", err); - } - if let Err(e) = worker.run_event_loop().await { - panic!("Future got unexpected error: {:?}", e); - } - } - - #[tokio::test] - async fn execute_006_url_imports() { - let _http_server_guard = test_util::http_server(); - let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .parent() - .unwrap() - .join("cli/tests/006_url_imports.ts"); + .join("tests/circular1.js"); let module_specifier = ModuleSpecifier::resolve_url_or_path(&p.to_string_lossy()).unwrap(); let 
mut worker = create_test_worker(); @@ -323,7 +319,7 @@ mod tests { let p = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) .parent() .unwrap() - .join("cli/tests/002_hello.ts"); + .join("cli/tests/001_hello.js"); let module_specifier = ModuleSpecifier::resolve_url_or_path(&p.to_string_lossy()).unwrap(); let result = worker.execute_module(&module_specifier).await; diff --git a/core/lib.rs b/core/lib.rs index 20ee5a3d5dd103..5846ad99d47703 100644 --- a/core/lib.rs +++ b/core/lib.rs @@ -51,6 +51,7 @@ pub use crate::modules::ModuleLoadId; pub use crate::modules::ModuleLoader; pub use crate::modules::ModuleSource; pub use crate::modules::ModuleSourceFuture; +pub use crate::modules::NoopModuleLoader; pub use crate::modules::RecursiveModuleLoad; pub use crate::normalize_path::normalize_path; pub use crate::ops::json_op_async; diff --git a/core/modules.rs b/core/modules.rs index ffae3a4767bcdb..6f330f55949b54 100644 --- a/core/modules.rs +++ b/core/modules.rs @@ -105,7 +105,7 @@ pub trait ModuleLoader { /// Placeholder structure used when creating /// a runtime that doesn't support module loading. -pub(crate) struct NoopModuleLoader; +pub struct NoopModuleLoader; impl ModuleLoader for NoopModuleLoader { fn resolve( diff --git a/core/runtime.rs b/core/runtime.rs index 0f09926f879191..24bdf4dc29ae83 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -107,7 +107,7 @@ pub(crate) struct JsRuntimeState { HashMap, v8::Global>, pending_dyn_mod_evaluate: HashMap, pending_mod_evaluate: Option, - pub(crate) js_error_create_fn: Box, + pub(crate) js_error_create_fn: Rc, pub(crate) shared: SharedQueue, pub(crate) pending_ops: FuturesUnordered, pub(crate) pending_unref_ops: FuturesUnordered, @@ -168,7 +168,7 @@ pub struct RuntimeOptions { /// Allows a callback to be set whenever a V8 exception is made. This allows /// the caller to wrap the JsError into an error. By default this callback /// is set to `JsError::create()`. - pub js_error_create_fn: Option>, + pub js_error_create_fn: Option>, /// Allows to map error type to a string "class" used to represent /// error in JavaScript. @@ -257,7 +257,7 @@ impl JsRuntime { let js_error_create_fn = options .js_error_create_fn - .unwrap_or_else(|| Box::new(JsError::create)); + .unwrap_or_else(|| Rc::new(JsError::create)); let mut op_state = OpState::default(); if let Some(get_error_class_fn) = options.get_error_class_fn { From 39c86df4e587bf45e446bd944bff4a91b8a63be3 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Sat, 12 Dec 2020 01:52:10 +0800 Subject: [PATCH 051/135] docs(tools/repl): remove rogue reference to issue (#8720) The keybindings are from upstream rustyline, I was a bit too blind copying action descriptions and a reference to a issue stuck in to the description of the ctrl-v binding. This commit removes the offending reference. --- docs/tools/repl.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/tools/repl.md b/docs/tools/repl.md index a0143b80c7316b..ac512ec93a8c81 100644 --- a/docs/tools/repl.md +++ b/docs/tools/repl.md @@ -24,7 +24,7 @@ up program state in the global context. 
| Ctrl-R | Reverse Search history (Ctrl-S forward, Ctrl-G cancel) | | Ctrl-T | Transpose previous character with current character | | Ctrl-U | Delete from start of line to cursor | -| Ctrl-V | Insert any special character without performing its associated action (#65) | +| Ctrl-V | Insert any special character without performing its associated action | | Ctrl-W | Delete word leading up to cursor (using white space as a word boundary) | | Ctrl-X Ctrl-U | Undo | | Ctrl-Y | Paste from Yank buffer | From 31935c6b8d8f763a5cf809b940427024da01a71c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sat, 12 Dec 2020 00:36:18 +0100 Subject: [PATCH 052/135] refactor(cli): more options on Worker (#8724) --- cli/file_fetcher.rs | 3 +- cli/http_util.rs | 90 +++++++++++++++++++++++++++----------------- cli/inspector.rs | 16 ++++---- cli/main.rs | 8 ++++ cli/ops/fetch.rs | 4 +- cli/ops/websocket.rs | 13 +++++-- cli/program_state.rs | 6 ++- cli/standalone.rs | 5 +++ cli/web_worker.rs | 25 +++++++++--- cli/worker.rs | 26 ++++++++++--- 10 files changed, 137 insertions(+), 59 deletions(-) diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index a17a8fc2479b6c..7b730e455ff714 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -4,6 +4,7 @@ use crate::colors; use crate::http_cache::HttpCache; use crate::http_util::create_http_client; use crate::http_util::fetch_once; +use crate::http_util::get_user_agent; use crate::http_util::FetchOnceResult; use crate::media_type::MediaType; use crate::permissions::Permissions; @@ -289,7 +290,7 @@ impl FileFetcher { cache: FileCache::default(), cache_setting, http_cache, - http_client: create_http_client(maybe_ca_file)?, + http_client: create_http_client(get_user_agent(), maybe_ca_file)?, }) } diff --git a/cli/http_util.rs b/cli/http_util.rs index c32e0dedaee8dc..4bd59e32b08684 100644 --- a/cli/http_util.rs +++ b/cli/http_util.rs @@ -27,14 +27,18 @@ use std::task::Context; use std::task::Poll; use tokio::io::AsyncRead; +pub fn get_user_agent() -> String { + format!("Deno/{}", version::deno()) +} + /// Create new instance of async reqwest::Client. This client supports /// proxies and doesn't follow redirects. -pub fn create_http_client(ca_file: Option<&str>) -> Result { +pub fn create_http_client( + user_agent: String, + ca_file: Option<&str>, +) -> Result { let mut headers = HeaderMap::new(); - headers.insert( - USER_AGENT, - format!("Deno/{}", version::deno()).parse().unwrap(), - ); + headers.insert(USER_AGENT, user_agent.parse().unwrap()); let mut builder = Client::builder() .redirect(Policy::none()) .default_headers(headers) @@ -230,13 +234,17 @@ impl AsyncRead for HttpBody { mod tests { use super::*; + fn create_test_client(ca_file: Option<&str>) -> Client { + create_http_client("test_client".to_string(), ca_file).unwrap() + } + #[tokio::test] async fn test_fetch_string() { let _http_server_guard = test_util::http_server(); // Relies on external http server. 
See target/debug/test_server let url = Url::parse("http://127.0.0.1:4545/cli/tests/fixture.json").unwrap(); - let client = create_http_client(None).unwrap(); + let client = create_test_client(None); let result = fetch_once(client, &url, None).await; if let Ok(FetchOnceResult::Code(body, headers)) = result { assert!(!body.is_empty()); @@ -256,7 +264,7 @@ mod tests { "http://127.0.0.1:4545/cli/tests/053_import_compression/gziped", ) .unwrap(); - let client = create_http_client(None).unwrap(); + let client = create_test_client(None); let result = fetch_once(client, &url, None).await; if let Ok(FetchOnceResult::Code(body, headers)) = result { assert_eq!(String::from_utf8(body).unwrap(), "console.log('gzip')"); @@ -275,7 +283,7 @@ mod tests { async fn test_fetch_with_etag() { let _http_server_guard = test_util::http_server(); let url = Url::parse("http://127.0.0.1:4545/etag_script.ts").unwrap(); - let client = create_http_client(None).unwrap(); + let client = create_test_client(None); let result = fetch_once(client.clone(), &url, None).await; if let Ok(FetchOnceResult::Code(body, headers)) = result { assert!(!body.is_empty()); @@ -302,7 +310,7 @@ mod tests { "http://127.0.0.1:4545/cli/tests/053_import_compression/brotli", ) .unwrap(); - let client = create_http_client(None).unwrap(); + let client = create_test_client(None); let result = fetch_once(client, &url, None).await; if let Ok(FetchOnceResult::Code(body, headers)) = result { assert!(!body.is_empty()); @@ -327,7 +335,7 @@ mod tests { // Dns resolver substitutes `127.0.0.1` with `localhost` let target_url = Url::parse("http://localhost:4545/cli/tests/fixture.json").unwrap(); - let client = create_http_client(None).unwrap(); + let client = create_test_client(None); let result = fetch_once(client, &url, None).await; if let Ok(FetchOnceResult::Redirect(url, _)) = result { assert_eq!(url, target_url); @@ -381,12 +389,15 @@ mod tests { let url = Url::parse("https://localhost:5545/cli/tests/fixture.json").unwrap(); - let client = create_http_client(Some( - test_util::root_path() - .join("std/http/testdata/tls/RootCA.pem") - .to_str() - .unwrap(), - )) + let client = create_http_client( + get_user_agent(), + Some( + test_util::root_path() + .join("std/http/testdata/tls/RootCA.pem") + .to_str() + .unwrap(), + ), + ) .unwrap(); let result = fetch_once(client, &url, None).await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -407,12 +418,15 @@ mod tests { "https://localhost:5545/cli/tests/053_import_compression/gziped", ) .unwrap(); - let client = create_http_client(Some( - test_util::root_path() - .join("std/http/testdata/tls/RootCA.pem") - .to_str() - .unwrap(), - )) + let client = create_http_client( + get_user_agent(), + Some( + test_util::root_path() + .join("std/http/testdata/tls/RootCA.pem") + .to_str() + .unwrap(), + ), + ) .unwrap(); let result = fetch_once(client, &url, None).await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -432,12 +446,15 @@ mod tests { async fn test_fetch_with_cafile_with_etag() { let _http_server_guard = test_util::http_server(); let url = Url::parse("https://localhost:5545/etag_script.ts").unwrap(); - let client = create_http_client(Some( - test_util::root_path() - .join("std/http/testdata/tls/RootCA.pem") - .to_str() - .unwrap(), - )) + let client = create_http_client( + get_user_agent(), + Some( + test_util::root_path() + .join("std/http/testdata/tls/RootCA.pem") + .to_str() + .unwrap(), + ), + ) .unwrap(); let result = fetch_once(client.clone(), &url, None).await; if let 
Ok(FetchOnceResult::Code(body, headers)) = result { @@ -466,12 +483,15 @@ mod tests { "https://localhost:5545/cli/tests/053_import_compression/brotli", ) .unwrap(); - let client = create_http_client(Some( - test_util::root_path() - .join("std/http/testdata/tls/RootCA.pem") - .to_str() - .unwrap(), - )) + let client = create_http_client( + get_user_agent(), + Some( + test_util::root_path() + .join("std/http/testdata/tls/RootCA.pem") + .to_str() + .unwrap(), + ), + ) .unwrap(); let result = fetch_once(client, &url, None).await; if let Ok(FetchOnceResult::Code(body, headers)) = result { @@ -493,7 +513,7 @@ mod tests { let _g = test_util::http_server(); let url_str = "http://127.0.0.1:4545/bad_redirect"; let url = Url::parse(url_str).unwrap(); - let client = create_http_client(None).unwrap(); + let client = create_test_client(None); let result = fetch_once(client, &url, None).await; assert!(result.is_err()); let err = result.unwrap_err(); diff --git a/cli/inspector.rs b/cli/inspector.rs index fa1bfa2d0499de..89fd5bf576ddf9 100644 --- a/cli/inspector.rs +++ b/cli/inspector.rs @@ -52,7 +52,7 @@ pub struct InspectorServer { } impl InspectorServer { - pub fn new(host: SocketAddr) -> Self { + pub fn new(host: SocketAddr, name: String) -> Self { let (register_inspector_tx, register_inspector_rx) = mpsc::unbounded::(); @@ -63,6 +63,7 @@ impl InspectorServer { host, register_inspector_rx, shutdown_server_rx, + name, )) }); @@ -145,6 +146,7 @@ async fn server( host: SocketAddr, register_inspector_rx: UnboundedReceiver, shutdown_server_rx: oneshot::Receiver<()>, + name: String, ) { // TODO: put the `inspector_map` in an `Rc>` instead. This is // currently not possible because warp requires all filters to implement @@ -199,13 +201,13 @@ async fn server( ) }); - let json_version_route = warp::path!("json" / "version").map(|| { - warp::reply::json(&json!({ - "Browser": format!("Deno/{}", crate::version::deno()), - "Protocol-Version": "1.3", - "V8-Version": deno_core::v8_version(), - })) + let json_version_response = json!({ + "Browser": name, + "Protocol-Version": "1.3", + "V8-Version": deno_core::v8_version(), }); + let json_version_route = warp::path!("json" / "version") + .map(move || warp::reply::json(&json_version_response)); let inspector_map_ = inspector_map.clone(); let json_list_route = warp::path("json").map(move || { diff --git a/cli/main.rs b/cli/main.rs index 8f8879ac9646cd..38deec5bb8a02a 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -121,6 +121,7 @@ fn create_web_worker_callback( .map_or(false, |l| l == log::Level::Debug), unstable: program_state.flags.unstable, ca_filepath: program_state.flags.ca_file.clone(), + user_agent: http_util::get_user_agent(), seed: program_state.flags.seed, module_loader, create_web_worker_cb, @@ -128,6 +129,9 @@ fn create_web_worker_callback( use_deno_namespace: args.use_deno_namespace, attach_inspector, maybe_inspector_server, + runtime_version: version::deno(), + ts_version: version::TYPESCRIPT.to_string(), + no_color: !colors::use_color(), }; let mut worker = WebWorker::from_options( @@ -192,6 +196,7 @@ pub fn create_main_worker( .map_or(false, |l| l == log::Level::Debug), unstable: program_state.flags.unstable, ca_filepath: program_state.flags.ca_file.clone(), + user_agent: http_util::get_user_agent(), seed: program_state.flags.seed, js_error_create_fn: Some(js_error_create_fn), create_web_worker_cb, @@ -199,6 +204,9 @@ pub fn create_main_worker( maybe_inspector_server, should_break_on_first_statement, module_loader, + runtime_version: version::deno(), + 
ts_version: version::TYPESCRIPT.to_string(), + no_color: !colors::use_color(), }; let mut worker = MainWorker::from_options(main_module, permissions, &options); diff --git a/cli/ops/fetch.rs b/cli/ops/fetch.rs index 8c1a2b39c58904..18e9e9c9f242e7 100644 --- a/cli/ops/fetch.rs +++ b/cli/ops/fetch.rs @@ -1,4 +1,5 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. +use crate::http_util; use crate::permissions::Permissions; use deno_fetch::reqwest; @@ -7,7 +8,8 @@ pub fn init(rt: &mut deno_core::JsRuntime, maybe_ca_file: Option<&str>) { let op_state = rt.op_state(); let mut state = op_state.borrow_mut(); state.put::({ - crate::http_util::create_http_client(maybe_ca_file).unwrap() + http_util::create_http_client(http_util::get_user_agent(), maybe_ca_file) + .unwrap() }); } super::reg_json_async(rt, "op_fetch", deno_fetch::op_fetch::); diff --git a/cli/ops/websocket.rs b/cli/ops/websocket.rs index de6357c87ca1f5..a8c591a3322c27 100644 --- a/cli/ops/websocket.rs +++ b/cli/ops/websocket.rs @@ -35,14 +35,21 @@ use webpki::DNSNameRef; #[derive(Clone)] struct WsCaFile(String); +#[derive(Clone)] +struct WsUserAgent(String); -pub fn init(rt: &mut deno_core::JsRuntime, maybe_ca_file: Option<&str>) { +pub fn init( + rt: &mut deno_core::JsRuntime, + maybe_ca_file: Option<&str>, + user_agent: String, +) { { let op_state = rt.op_state(); let mut state = op_state.borrow_mut(); if let Some(ca_file) = maybe_ca_file { state.put::(WsCaFile(ca_file.to_string())); } + state.put::(WsUserAgent(user_agent)); } super::reg_json_sync(rt, "op_ws_check_permission", op_ws_check_permission); super::reg_json_async(rt, "op_ws_create", op_ws_create); @@ -103,11 +110,11 @@ pub async fn op_ws_create( } let maybe_ca_file = state.borrow().try_borrow::().cloned(); + let user_agent = state.borrow().borrow::().0.clone(); let uri: Uri = args.url.parse()?; let mut request = Request::builder().method(Method::GET).uri(&uri); - request = - request.header("User-Agent", format!("Deno/{}", crate::version::deno())); + request = request.header("User-Agent", user_agent); if !args.protocols.is_empty() { request = request.header("Sec-WebSocket-Protocol", args.protocols); diff --git a/cli/program_state.rs b/cli/program_state.rs index 41b7c51fe0c93f..7b86b7de566b4b 100644 --- a/cli/program_state.rs +++ b/cli/program_state.rs @@ -5,6 +5,7 @@ use crate::file_fetcher::CacheSetting; use crate::file_fetcher::FileFetcher; use crate::flags; use crate::http_cache; +use crate::http_util; use crate::import_map::ImportMap; use crate::inspector::InspectorServer; use crate::lockfile::Lockfile; @@ -99,7 +100,10 @@ impl ProgramState { let maybe_inspect_host = flags.inspect.or(flags.inspect_brk); let maybe_inspector_server = match maybe_inspect_host { - Some(host) => Some(Arc::new(InspectorServer::new(host))), + Some(host) => Some(Arc::new(InspectorServer::new( + host, + http_util::get_user_agent(), + ))), None => None, }; diff --git a/cli/standalone.rs b/cli/standalone.rs index d7ffd0fd2e456a..067fec36a13079 100644 --- a/cli/standalone.rs +++ b/cli/standalone.rs @@ -2,6 +2,7 @@ use crate::colors; use crate::flags::Flags; use crate::permissions::Permissions; use crate::tokio_util; +use crate::version; use crate::worker::MainWorker; use crate::worker::WorkerOptions; use deno_core::error::type_error; @@ -120,6 +121,7 @@ async fn run(source_code: String, args: Vec) -> Result<(), AnyError> { apply_source_maps: false, args: flags.argv.clone(), debug_flag: false, + user_agent: crate::http_util::get_user_agent(), unstable: true, ca_filepath: 
None, seed: None, @@ -129,6 +131,9 @@ async fn run(source_code: String, args: Vec) -> Result<(), AnyError> { maybe_inspector_server: None, should_break_on_first_statement: false, module_loader, + runtime_version: version::deno(), + ts_version: version::TYPESCRIPT.to_string(), + no_color: !colors::use_color(), }; let mut worker = MainWorker::from_options(main_module.clone(), permissions, &options); diff --git a/cli/web_worker.rs b/cli/web_worker.rs index 9320c22d81f8e8..18d391580c09a8 100644 --- a/cli/web_worker.rs +++ b/cli/web_worker.rs @@ -8,7 +8,6 @@ use crate::metrics::Metrics; use crate::ops; use crate::permissions::Permissions; use crate::tokio_util::create_basic_runtime; -use crate::version; use deno_core::error::AnyError; use deno_core::futures::channel::mpsc; use deno_core::futures::future::poll_fn; @@ -133,10 +132,12 @@ pub struct WebWorker { } pub struct WebWorkerOptions { + /// Sets `Deno.args` in JS runtime. pub args: Vec, pub debug_flag: bool, pub unstable: bool, pub ca_filepath: Option, + pub user_agent: String, pub seed: Option, pub module_loader: Rc, pub create_web_worker_cb: Arc, @@ -145,6 +146,12 @@ pub struct WebWorkerOptions { pub attach_inspector: bool, pub maybe_inspector_server: Option>, pub apply_source_maps: bool, + /// Sets `Deno.version.deno` in JS runtime. + pub runtime_version: String, + /// Sets `Deno.version.typescript` in JS runtime. + pub ts_version: String, + /// Sets `Deno.noColor` in JS runtime. + pub no_color: bool, } impl WebWorker { @@ -222,7 +229,11 @@ impl WebWorker { deno_web::op_domain_to_ascii, ); ops::io::init(js_runtime); - ops::websocket::init(js_runtime, options.ca_filepath.as_deref()); + ops::websocket::init( + js_runtime, + options.ca_filepath.as_deref(), + options.user_agent.clone(), + ); if options.use_deno_namespace { ops::fs_events::init(js_runtime); @@ -260,12 +271,12 @@ impl WebWorker { "args": options.args, "applySourceMaps": options.apply_source_maps, "debugFlag": options.debug_flag, - "denoVersion": version::deno(), - "noColor": !colors::use_color(), + "denoVersion": options.runtime_version, + "noColor": options.no_color, "pid": std::process::id(), "ppid": ops::runtime::ppid(), "target": env!("TARGET"), - "tsVersion": version::TYPESCRIPT, + "tsVersion": options.ts_version, "unstableFlag": options.unstable, "v8Version": deno_core::v8_version(), }); @@ -466,6 +477,7 @@ mod tests { debug_flag: false, unstable: false, ca_filepath: None, + user_agent: "x".to_string(), seed: None, module_loader, create_web_worker_cb, @@ -473,6 +485,9 @@ mod tests { use_deno_namespace: false, attach_inspector: false, maybe_inspector_server: None, + runtime_version: "x".to_string(), + ts_version: "x".to_string(), + no_color: true, }; let mut worker = WebWorker::from_options( diff --git a/cli/worker.rs b/cli/worker.rs index b119c3b0dd6e3e..d1238df419e05f 100644 --- a/cli/worker.rs +++ b/cli/worker.rs @@ -1,6 +1,5 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -use crate::colors; use crate::inspector::DenoInspector; use crate::inspector::InspectorServer; use crate::inspector::InspectorSession; @@ -8,7 +7,6 @@ use crate::js; use crate::metrics::Metrics; use crate::ops; use crate::permissions::Permissions; -use crate::version; use deno_core::error::AnyError; use deno_core::futures::future::poll_fn; use deno_core::futures::future::FutureExt; @@ -42,10 +40,12 @@ pub struct MainWorker { pub struct WorkerOptions { pub apply_source_maps: bool, + /// Sets `Deno.args` in JS runtime. 
pub args: Vec, pub debug_flag: bool, pub unstable: bool, pub ca_filepath: Option, + pub user_agent: String, pub seed: Option, pub module_loader: Rc, // Callback that will be invoked when creating new instance @@ -55,6 +55,12 @@ pub struct WorkerOptions { pub attach_inspector: bool, pub maybe_inspector_server: Option>, pub should_break_on_first_statement: bool, + /// Sets `Deno.version.deno` in JS runtime. + pub runtime_version: String, + /// Sets `Deno.version.typescript` in JS runtime. + pub ts_version: String, + /// Sets `Deno.noColor` in JS runtime. + pub no_color: bool, } impl MainWorker { @@ -128,7 +134,11 @@ impl MainWorker { ops::signal::init(js_runtime); ops::tls::init(js_runtime); ops::tty::init(js_runtime); - ops::websocket::init(js_runtime, options.ca_filepath.as_deref()); + ops::websocket::init( + js_runtime, + options.ca_filepath.as_deref(), + options.user_agent.clone(), + ); } { let op_state = js_runtime.op_state(); @@ -154,12 +164,12 @@ impl MainWorker { "args": options.args, "applySourceMaps": options.apply_source_maps, "debugFlag": options.debug_flag, - "denoVersion": version::deno(), - "noColor": !colors::use_color(), + "denoVersion": options.runtime_version, + "noColor": options.no_color, "pid": std::process::id(), "ppid": ops::runtime::ppid(), "target": env!("TARGET"), - "tsVersion": version::TYPESCRIPT, + "tsVersion": options.ts_version, "unstableFlag": options.unstable, "v8Version": deno_core::v8_version(), }); @@ -249,6 +259,7 @@ mod tests { let options = WorkerOptions { apply_source_maps: false, + user_agent: "x".to_string(), args: vec![], debug_flag: false, unstable: false, @@ -260,6 +271,9 @@ mod tests { maybe_inspector_server: None, should_break_on_first_statement: false, module_loader: Rc::new(deno_core::FsModuleLoader), + runtime_version: "x".to_string(), + ts_version: "x".to_string(), + no_color: true, }; MainWorker::from_options(main_module, permissions, &options) From c05615d6707a6270113a514888e828e536ea2c35 Mon Sep 17 00:00:00 2001 From: Andrew Mitchell <32021055+mitch292@users.noreply.github.com> Date: Sat, 12 Dec 2020 07:15:50 -0500 Subject: [PATCH 053/135] docs: Remove a deprecated function from docstring for Deno.permissions (#8729) --- cli/dts/lib.deno.unstable.d.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cli/dts/lib.deno.unstable.d.ts b/cli/dts/lib.deno.unstable.d.ts index d8e226699c32c2..f9ef1fb2fcb88c 100644 --- a/cli/dts/lib.deno.unstable.d.ts +++ b/cli/dts/lib.deno.unstable.d.ts @@ -1143,7 +1143,7 @@ declare namespace Deno { * ```ts * const status = await Deno.permissions.request({ name: "env" }); * if (status.state === "granted") { - * console.log(Deno.dir("home")); + * console.log("'env' permission is granted."); * } else { * console.log("'env' permission is denied."); * } From 93cd9ab0b83e0df644315e7303ffb6280307e723 Mon Sep 17 00:00:00 2001 From: Tatsuya Yamamoto Date: Sat, 12 Dec 2020 21:16:52 +0900 Subject: [PATCH 054/135] docs: fix doc to remove annotation about removed function `Deno.dir` (#8732) --- cli/dts/lib.deno.ns.d.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cli/dts/lib.deno.ns.d.ts b/cli/dts/lib.deno.ns.d.ts index e42fb855d42c89..12d804f9220461 100644 --- a/cli/dts/lib.deno.ns.d.ts +++ b/cli/dts/lib.deno.ns.d.ts @@ -1079,7 +1079,7 @@ declare namespace Deno { export function makeTempDir(options?: MakeTempOptions): Promise; /** Synchronously creates a new temporary file in the default directory for - * temporary files (see also `Deno.dir("temp")`), unless `dir` is specified. 
+ * temporary files, unless `dir` is specified. * Other optional options include prefixing and suffixing the directory name * with `prefix` and `suffix` respectively. * @@ -1098,7 +1098,7 @@ declare namespace Deno { export function makeTempFileSync(options?: MakeTempOptions): string; /** Creates a new temporary file in the default directory for temporary - * files (see also `Deno.dir("temp")`), unless `dir` is specified. Other + * files, unless `dir` is specified. Other * optional options include prefixing and suffixing the directory name with * `prefix` and `suffix` respectively. * From 89c14f79a45868c8f94eab97d794f8a27096a154 Mon Sep 17 00:00:00 2001 From: xiaoxintang Date: Sat, 12 Dec 2020 21:21:48 +0800 Subject: [PATCH 055/135] fix(std/datetime): partsToDate (#8553) --- std/datetime/formatter.ts | 25 ++++++++++++++++++++++++- std/datetime/mod.ts | 3 ++- std/datetime/test.ts | 4 ++++ 3 files changed, 30 insertions(+), 2 deletions(-) diff --git a/std/datetime/formatter.ts b/std/datetime/formatter.ts index 8b9c9d9b04665f..ea16dbe0d8edf7 100644 --- a/std/datetime/formatter.ts +++ b/std/datetime/formatter.ts @@ -511,6 +511,28 @@ export class DateTimeFormatter { return parts; } + /** sort & filter dateTimeFormatPart */ + sortDateTimeFormatPart(parts: DateTimeFormatPart[]): DateTimeFormatPart[] { + let result: DateTimeFormatPart[] = []; + const typeArray = [ + "year", + "month", + "day", + "hour", + "minute", + "second", + "fractionalSecond", + ]; + for (const type of typeArray) { + const current = parts.findIndex((el) => el.type === type); + if (current !== -1) { + result = result.concat(parts.splice(current, 1)); + } + } + result = result.concat(parts); + return result; + } + partsToDate(parts: DateTimeFormatPart[]): Date { const date = new Date(); const utc = parts.find( @@ -566,6 +588,7 @@ export class DateTimeFormatter { parse(string: string): Date { const parts = this.parseToParts(string); - return this.partsToDate(parts); + const sortParts = this.sortDateTimeFormatPart(parts); + return this.partsToDate(sortParts); } } diff --git a/std/datetime/mod.ts b/std/datetime/mod.ts index 72f8cc6815c4f0..1f117350cbec4b 100644 --- a/std/datetime/mod.ts +++ b/std/datetime/mod.ts @@ -28,7 +28,8 @@ enum Day { export function parse(dateString: string, formatString: string): Date { const formatter = new DateTimeFormatter(formatString); const parts = formatter.parseToParts(dateString); - return formatter.partsToDate(parts); + const sortParts = formatter.sortDateTimeFormatPart(parts); + return formatter.partsToDate(sortParts); } /** diff --git a/std/datetime/test.ts b/std/datetime/test.ts index 0ff70603df7c7d..1b80edd560e451 100644 --- a/std/datetime/test.ts +++ b/std/datetime/test.ts @@ -53,6 +53,10 @@ Deno.test({ datetime.parse("03-01-2019", "dd-MM-yyyy"), new Date(2019, 0, 3), ); + assertEquals( + datetime.parse("31-10-2019", "dd-MM-yyyy"), + new Date(2019, 9, 31), + ); assertEquals( datetime.parse("2019-01-03", "yyyy-MM-dd"), new Date(2019, 0, 3), From f4cf1b453846a28a306316d197c3c15a41594b33 Mon Sep 17 00:00:00 2001 From: Yusuke Tanaka Date: Sat, 12 Dec 2020 22:32:46 +0900 Subject: [PATCH 056/135] chore(test): increase file watcher timeout duration on macOS (#8733) --- cli/tests/integration_tests.rs | 93 ++++++++++++++++++++++++++-------- 1 file changed, 72 insertions(+), 21 deletions(-) diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index eeffb009701db0..f6c3ec632b87e8 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -528,8 +528,16 @@ 
fn skip_restarting_line( } #[test] -#[ignore] fn fmt_watch_test() { + const TIMEOUT: std::time::Duration = + // Increase timeout duration to address flakiness on CI as much as possible + // See https://github.com/denoland/deno/issues/8571 + std::time::Duration::from_secs(if cfg!(target_os = "macos") { + 5 + } else { + 1 + }); + let t = TempDir::new().expect("tempdir fail"); let fixed = util::root_path().join("cli/tests/badly_formatted_fixed.js"); let badly_formatted_original = @@ -553,7 +561,7 @@ fn fmt_watch_test() { std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); // TODO(lucacasonato): remove this timeout. It seems to be needed on Linux. - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(skip_restarting_line(stderr_lines).contains("badly_formatted.js")); @@ -564,7 +572,7 @@ fn fmt_watch_test() { // Change content of the file again to be badly formatted std::fs::copy(&badly_formatted_original, &badly_formatted) .expect("Failed to copy file"); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); // Check if file has been automatically formatted by watcher let expected = std::fs::read_to_string(fixed).unwrap(); @@ -1276,8 +1284,16 @@ fn bundle_import_map_no_check() { } #[test] -#[ignore] fn bundle_js_watch() { + const TIMEOUT: std::time::Duration = + // Increase timeout duration to address flakiness on CI as much as possible + // See https://github.com/denoland/deno/issues/8571 + std::time::Duration::from_secs(if cfg!(target_os = "macos") { + 5 + } else { + 1 + }); + use std::path::PathBuf; // Test strategy extends this of test bundle_js by adding watcher let t = TempDir::new().expect("tempdir fail"); @@ -1303,7 +1319,7 @@ fn bundle_js_watch() { let mut stderr_lines = std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(stderr_lines.next().unwrap().contains("file_to_watch.js")); assert!(stderr_lines.next().unwrap().contains("mod6.bundle.js")); let file = PathBuf::from(&bundle); @@ -1312,7 +1328,7 @@ fn bundle_js_watch() { std::fs::write(&file_to_watch, "console.log('Hello world2');") .expect("error writing file"); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(stderr_lines .next() .unwrap() @@ -1326,7 +1342,7 @@ fn bundle_js_watch() { // Confirm that the watcher keeps on working even if the file is updated and has invalid syntax std::fs::write(&file_to_watch, "syntax error ^^") .expect("error writing file"); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(stderr_lines .next() .unwrap() @@ -1346,8 +1362,16 @@ fn bundle_js_watch() { /// Confirm that the watcher continues to work even if module resolution fails at the *first* attempt #[test] -#[ignore] fn bundle_watch_not_exit() { + const TIMEOUT: std::time::Duration = + // Increase timeout duration to address flakiness on CI as much as possible + // See https://github.com/denoland/deno/issues/8571 + std::time::Duration::from_secs(if cfg!(target_os = "macos") { + 5 + } else { + 1 + }); + let t = TempDir::new().expect("tempdir fail"); let file_to_watch = t.path().join("file_to_watch.js"); std::fs::write(&file_to_watch, "syntax error ^^") @@ -1371,7 +1395,7 @@ fn bundle_watch_not_exit() { let mut stderr_lines = std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); 
assert!(stderr_lines.next().unwrap().contains("file_to_watch.js")); assert!(stderr_lines.next().unwrap().contains("error:")); assert!(stderr_lines.next().unwrap().contains("Bundle failed!")); @@ -1381,7 +1405,7 @@ fn bundle_watch_not_exit() { // Make sure the watcher actually restarts and works fine with the proper syntax std::fs::write(&file_to_watch, "console.log(42);") .expect("error writing file"); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(stderr_lines .next() .unwrap() @@ -1446,8 +1470,16 @@ fn wait_for_process_finished( } #[test] -#[ignore] fn run_watch() { + const TIMEOUT: std::time::Duration = + // Increase timeout duration to address flakiness on CI as much as possible + // See https://github.com/denoland/deno/issues/8571 + std::time::Duration::from_secs(if cfg!(target_os = "macos") { + 5 + } else { + 1 + }); + let t = TempDir::new().expect("tempdir fail"); let file_to_watch = t.path().join("file_to_watch.js"); std::fs::write(&file_to_watch, "console.log('Hello world');") @@ -1476,13 +1508,13 @@ fn run_watch() { wait_for_process_finished("Process", &mut stderr_lines); // TODO(lucacasonato): remove this timeout. It seems to be needed on Linux. - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); // Change content of the file std::fs::write(&file_to_watch, "console.log('Hello world2');") .expect("error writing file"); // Events from the file watcher is "debounced", so we need to wait for the next execution to start - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stdout_lines.next().unwrap().contains("Hello world2")); @@ -1497,7 +1529,7 @@ fn run_watch() { "import { foo } from './another_file.js'; console.log(foo);", ) .expect("error writing file"); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stdout_lines.next().unwrap().contains('0')); wait_for_process_finished("Process", &mut stderr_lines); @@ -1505,7 +1537,7 @@ fn run_watch() { // Confirm that restarting occurs when a new file is updated std::fs::write(&another_file, "export const foo = 42;") .expect("error writing file"); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stdout_lines.next().unwrap().contains("42")); wait_for_process_finished("Process", &mut stderr_lines); @@ -1513,7 +1545,7 @@ fn run_watch() { // Confirm that the watcher keeps on working even if the file is updated and has invalid syntax std::fs::write(&file_to_watch, "syntax error ^^") .expect("error writing file"); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stderr_lines.next().unwrap().contains("error:")); wait_for_process_finished("Process", &mut stderr_lines); @@ -1524,7 +1556,7 @@ fn run_watch() { "import { foo } from './another_file.js'; console.log(foo);", ) .expect("error writing file"); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stdout_lines.next().unwrap().contains("42")); wait_for_process_finished("Process", &mut stderr_lines); @@ -1538,8 +1570,16 @@ fn run_watch() { /// Confirm that the watcher continues to work 
even if module resolution fails at the *first* attempt #[test] -#[ignore] fn run_watch_not_exit() { + const TIMEOUT: std::time::Duration = + // Increase timeout duration to address flakiness on CI as much as possible + // See https://github.com/denoland/deno/issues/8571 + std::time::Duration::from_secs(if cfg!(target_os = "macos") { + 5 + } else { + 1 + }); + let t = TempDir::new().expect("tempdir fail"); let file_to_watch = t.path().join("file_to_watch.js"); std::fs::write(&file_to_watch, "syntax error ^^") @@ -1564,14 +1604,14 @@ fn run_watch_not_exit() { let mut stderr_lines = std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(stderr_lines.next().unwrap().contains("error:")); assert!(stderr_lines.next().unwrap().contains("Process failed!")); // Make sure the watcher actually restarts and works fine with the proper syntax std::fs::write(&file_to_watch, "console.log(42);") .expect("error writing file"); - std::thread::sleep(std::time::Duration::from_secs(1)); + std::thread::sleep(TIMEOUT); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stdout_lines.next().unwrap().contains("42")); wait_for_process_finished("Process", &mut stderr_lines); @@ -1657,7 +1697,6 @@ fn repl_test_pty_unpaired_braces() { } #[test] -#[ignore] fn run_watch_with_importmap_and_relative_paths() { fn create_relative_tmp_file( directory: &TempDir, @@ -1673,6 +1712,16 @@ fn run_watch_with_importmap_and_relative_paths() { assert!(relative_path.is_relative()); relative_path } + + const TIMEOUT: std::time::Duration = + // Increase timeout duration to address flakiness on CI as much as possible + // See https://github.com/denoland/deno/issues/8571 + std::time::Duration::from_secs(if cfg!(target_os = "macos") { + 5 + } else { + 1 + }); + let temp_directory = TempDir::new_in(util::root_path()).expect("tempdir fail"); let file_to_watch = create_relative_tmp_file( @@ -1707,6 +1756,8 @@ fn run_watch_with_importmap_and_relative_paths() { let mut stderr_lines = std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); + std::thread::sleep(TIMEOUT); + assert!(stderr_lines.next().unwrap().contains("Process finished")); assert!(stdout_lines.next().unwrap().contains("Hello world")); From 623bc22ad0aa7446bc383dc93d7c8da3c4c2fe22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sat, 12 Dec 2020 16:54:00 +0100 Subject: [PATCH 057/135] Revert "chore(test): increase file watcher timeout duration on macOS (#8733)" (#8737) This reverts commit f4cf1b453846a28a306316d197c3c15a41594b33. 
--- cli/tests/integration_tests.rs | 93 ++++++++-------------------------- 1 file changed, 21 insertions(+), 72 deletions(-) diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index f6c3ec632b87e8..eeffb009701db0 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -528,16 +528,8 @@ fn skip_restarting_line( } #[test] +#[ignore] fn fmt_watch_test() { - const TIMEOUT: std::time::Duration = - // Increase timeout duration to address flakiness on CI as much as possible - // See https://github.com/denoland/deno/issues/8571 - std::time::Duration::from_secs(if cfg!(target_os = "macos") { - 5 - } else { - 1 - }); - let t = TempDir::new().expect("tempdir fail"); let fixed = util::root_path().join("cli/tests/badly_formatted_fixed.js"); let badly_formatted_original = @@ -561,7 +553,7 @@ fn fmt_watch_test() { std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); // TODO(lucacasonato): remove this timeout. It seems to be needed on Linux. - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(skip_restarting_line(stderr_lines).contains("badly_formatted.js")); @@ -572,7 +564,7 @@ fn fmt_watch_test() { // Change content of the file again to be badly formatted std::fs::copy(&badly_formatted_original, &badly_formatted) .expect("Failed to copy file"); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); // Check if file has been automatically formatted by watcher let expected = std::fs::read_to_string(fixed).unwrap(); @@ -1284,16 +1276,8 @@ fn bundle_import_map_no_check() { } #[test] +#[ignore] fn bundle_js_watch() { - const TIMEOUT: std::time::Duration = - // Increase timeout duration to address flakiness on CI as much as possible - // See https://github.com/denoland/deno/issues/8571 - std::time::Duration::from_secs(if cfg!(target_os = "macos") { - 5 - } else { - 1 - }); - use std::path::PathBuf; // Test strategy extends this of test bundle_js by adding watcher let t = TempDir::new().expect("tempdir fail"); @@ -1319,7 +1303,7 @@ fn bundle_js_watch() { let mut stderr_lines = std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines.next().unwrap().contains("file_to_watch.js")); assert!(stderr_lines.next().unwrap().contains("mod6.bundle.js")); let file = PathBuf::from(&bundle); @@ -1328,7 +1312,7 @@ fn bundle_js_watch() { std::fs::write(&file_to_watch, "console.log('Hello world2');") .expect("error writing file"); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines .next() .unwrap() @@ -1342,7 +1326,7 @@ fn bundle_js_watch() { // Confirm that the watcher keeps on working even if the file is updated and has invalid syntax std::fs::write(&file_to_watch, "syntax error ^^") .expect("error writing file"); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines .next() .unwrap() @@ -1362,16 +1346,8 @@ fn bundle_js_watch() { /// Confirm that the watcher continues to work even if module resolution fails at the *first* attempt #[test] +#[ignore] fn bundle_watch_not_exit() { - const TIMEOUT: std::time::Duration = - // Increase timeout duration to address flakiness on CI as much as possible - // See https://github.com/denoland/deno/issues/8571 - std::time::Duration::from_secs(if cfg!(target_os = "macos") { - 5 - } else { - 1 - }); - let t = 
TempDir::new().expect("tempdir fail"); let file_to_watch = t.path().join("file_to_watch.js"); std::fs::write(&file_to_watch, "syntax error ^^") @@ -1395,7 +1371,7 @@ fn bundle_watch_not_exit() { let mut stderr_lines = std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines.next().unwrap().contains("file_to_watch.js")); assert!(stderr_lines.next().unwrap().contains("error:")); assert!(stderr_lines.next().unwrap().contains("Bundle failed!")); @@ -1405,7 +1381,7 @@ fn bundle_watch_not_exit() { // Make sure the watcher actually restarts and works fine with the proper syntax std::fs::write(&file_to_watch, "console.log(42);") .expect("error writing file"); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines .next() .unwrap() @@ -1470,16 +1446,8 @@ fn wait_for_process_finished( } #[test] +#[ignore] fn run_watch() { - const TIMEOUT: std::time::Duration = - // Increase timeout duration to address flakiness on CI as much as possible - // See https://github.com/denoland/deno/issues/8571 - std::time::Duration::from_secs(if cfg!(target_os = "macos") { - 5 - } else { - 1 - }); - let t = TempDir::new().expect("tempdir fail"); let file_to_watch = t.path().join("file_to_watch.js"); std::fs::write(&file_to_watch, "console.log('Hello world');") @@ -1508,13 +1476,13 @@ fn run_watch() { wait_for_process_finished("Process", &mut stderr_lines); // TODO(lucacasonato): remove this timeout. It seems to be needed on Linux. - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); // Change content of the file std::fs::write(&file_to_watch, "console.log('Hello world2');") .expect("error writing file"); // Events from the file watcher is "debounced", so we need to wait for the next execution to start - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stdout_lines.next().unwrap().contains("Hello world2")); @@ -1529,7 +1497,7 @@ fn run_watch() { "import { foo } from './another_file.js'; console.log(foo);", ) .expect("error writing file"); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stdout_lines.next().unwrap().contains('0')); wait_for_process_finished("Process", &mut stderr_lines); @@ -1537,7 +1505,7 @@ fn run_watch() { // Confirm that restarting occurs when a new file is updated std::fs::write(&another_file, "export const foo = 42;") .expect("error writing file"); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stdout_lines.next().unwrap().contains("42")); wait_for_process_finished("Process", &mut stderr_lines); @@ -1545,7 +1513,7 @@ fn run_watch() { // Confirm that the watcher keeps on working even if the file is updated and has invalid syntax std::fs::write(&file_to_watch, "syntax error ^^") .expect("error writing file"); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stderr_lines.next().unwrap().contains("error:")); wait_for_process_finished("Process", &mut stderr_lines); @@ -1556,7 +1524,7 @@ fn run_watch() { "import { foo } from './another_file.js'; console.log(foo);", ) 
.expect("error writing file"); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stdout_lines.next().unwrap().contains("42")); wait_for_process_finished("Process", &mut stderr_lines); @@ -1570,16 +1538,8 @@ fn run_watch() { /// Confirm that the watcher continues to work even if module resolution fails at the *first* attempt #[test] +#[ignore] fn run_watch_not_exit() { - const TIMEOUT: std::time::Duration = - // Increase timeout duration to address flakiness on CI as much as possible - // See https://github.com/denoland/deno/issues/8571 - std::time::Duration::from_secs(if cfg!(target_os = "macos") { - 5 - } else { - 1 - }); - let t = TempDir::new().expect("tempdir fail"); let file_to_watch = t.path().join("file_to_watch.js"); std::fs::write(&file_to_watch, "syntax error ^^") @@ -1604,14 +1564,14 @@ fn run_watch_not_exit() { let mut stderr_lines = std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines.next().unwrap().contains("error:")); assert!(stderr_lines.next().unwrap().contains("Process failed!")); // Make sure the watcher actually restarts and works fine with the proper syntax std::fs::write(&file_to_watch, "console.log(42);") .expect("error writing file"); - std::thread::sleep(TIMEOUT); + std::thread::sleep(std::time::Duration::from_secs(1)); assert!(stderr_lines.next().unwrap().contains("Restarting")); assert!(stdout_lines.next().unwrap().contains("42")); wait_for_process_finished("Process", &mut stderr_lines); @@ -1697,6 +1657,7 @@ fn repl_test_pty_unpaired_braces() { } #[test] +#[ignore] fn run_watch_with_importmap_and_relative_paths() { fn create_relative_tmp_file( directory: &TempDir, @@ -1712,16 +1673,6 @@ fn run_watch_with_importmap_and_relative_paths() { assert!(relative_path.is_relative()); relative_path } - - const TIMEOUT: std::time::Duration = - // Increase timeout duration to address flakiness on CI as much as possible - // See https://github.com/denoland/deno/issues/8571 - std::time::Duration::from_secs(if cfg!(target_os = "macos") { - 5 - } else { - 1 - }); - let temp_directory = TempDir::new_in(util::root_path()).expect("tempdir fail"); let file_to_watch = create_relative_tmp_file( @@ -1756,8 +1707,6 @@ fn run_watch_with_importmap_and_relative_paths() { let mut stderr_lines = std::io::BufReader::new(stderr).lines().map(|r| r.unwrap()); - std::thread::sleep(TIMEOUT); - assert!(stderr_lines.next().unwrap().contains("Process finished")); assert!(stdout_lines.next().unwrap().contains("Hello world")); From 84ef9bd21fb48fb6b5fbc8dafc3de9f361bade3b Mon Sep 17 00:00:00 2001 From: Yosi Pramajaya Date: Sun, 13 Dec 2020 02:41:43 +0700 Subject: [PATCH 058/135] fix(cli/compile): error when the output path already exists (#8681) --- cli/standalone.rs | 19 +++++++ cli/tests/integration_tests.rs | 98 ++++++++++++++++++++++++++++++++++ 2 files changed, 117 insertions(+) diff --git a/cli/standalone.rs b/cli/standalone.rs index 067fec36a13079..6559242bdb79fc 100644 --- a/cli/standalone.rs +++ b/cli/standalone.rs @@ -5,6 +5,7 @@ use crate::tokio_util; use crate::version; use crate::worker::MainWorker; use crate::worker::WorkerOptions; +use deno_core::error::bail; use deno_core::error::type_error; use deno_core::error::AnyError; use deno_core::futures::FutureExt; @@ -169,6 +170,24 @@ pub async fn create_standalone_binary( } else { output }; + + if output.exists() { + // 
If the output is a directory, throw error + if output.is_dir() { + bail!("Could not compile: {:?} is a directory.", &output); + } + + // Make sure we don't overwrite any file not created by Deno compiler. + // Check for magic trailer in last 16 bytes + let mut output_file = File::open(&output)?; + output_file.seek(SeekFrom::End(-16))?; + let mut trailer = [0; 16]; + output_file.read_exact(&mut trailer)?; + let (magic_trailer, _) = trailer.split_at(8); + if magic_trailer != MAGIC_TRAILER { + bail!("Could not compile: cannot overwrite {:?}.", &output); + } + } tokio::fs::write(&output, final_bin).await?; #[cfg(unix)] { diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index eeffb009701db0..cb6dfad6d93df2 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -4650,3 +4650,101 @@ fn standalone_no_module_load() { assert!(util::strip_ansi_codes(&stderr_str) .contains("Self-contained binaries don't support module loading")); } + +#[test] +fn compile_with_directory_exists_error() { + let dir = TempDir::new().expect("tempdir fail"); + let exe = if cfg!(windows) { + dir.path().join("args.exe") + } else { + dir.path().join("args") + }; + std::fs::create_dir(&exe).expect("cannot create directory"); + let output = util::deno_cmd() + .current_dir(util::root_path()) + .arg("compile") + .arg("--unstable") + .arg("./cli/tests/028_args.ts") + .arg("--output") + .arg(&exe) + .stderr(std::process::Stdio::piped()) + .spawn() + .unwrap() + .wait_with_output() + .unwrap(); + assert!(!output.status.success()); + let expected_stderr = + format!("Could not compile: {:?} is a directory.\n", &exe); + let stderr = String::from_utf8(output.stderr).unwrap(); + assert!(stderr.contains(&expected_stderr)); +} + +#[test] +fn compile_with_conflict_file_exists_error() { + let dir = TempDir::new().expect("tempdir fail"); + let exe = if cfg!(windows) { + dir.path().join("args.exe") + } else { + dir.path().join("args") + }; + std::fs::write(&exe, b"SHOULD NOT BE OVERWRITTEN") + .expect("cannot create file"); + let output = util::deno_cmd() + .current_dir(util::root_path()) + .arg("compile") + .arg("--unstable") + .arg("./cli/tests/028_args.ts") + .arg("--output") + .arg(&exe) + .stderr(std::process::Stdio::piped()) + .spawn() + .unwrap() + .wait_with_output() + .unwrap(); + assert!(!output.status.success()); + let expected_stderr = + format!("Could not compile: cannot overwrite {:?}.\n", &exe); + let stderr = String::from_utf8(output.stderr).unwrap(); + assert!(stderr.contains(&expected_stderr)); + assert!(std::fs::read(&exe) + .expect("cannot read file") + .eq(b"SHOULD NOT BE OVERWRITTEN")); +} + +#[test] +fn compile_and_overwrite_file() { + let dir = TempDir::new().expect("tempdir fail"); + let exe = if cfg!(windows) { + dir.path().join("args.exe") + } else { + dir.path().join("args") + }; + let output = util::deno_cmd() + .current_dir(util::root_path()) + .arg("compile") + .arg("--unstable") + .arg("./cli/tests/028_args.ts") + .arg("--output") + .arg(&exe) + .stderr(std::process::Stdio::piped()) + .spawn() + .unwrap() + .wait_with_output() + .unwrap(); + assert!(output.status.success()); + assert!(&exe.exists()); + + let recompile_output = util::deno_cmd() + .current_dir(util::root_path()) + .arg("compile") + .arg("--unstable") + .arg("./cli/tests/028_args.ts") + .arg("--output") + .arg(&exe) + .stderr(std::process::Stdio::piped()) + .spawn() + .unwrap() + .wait_with_output() + .unwrap(); + assert!(recompile_output.status.success()); +} From 
2e74f164b6dcf0ecbf8dd38fba9fae550d784bd0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sun, 13 Dec 2020 19:45:53 +0100 Subject: [PATCH 059/135] refactor: deno_runtime crate (#8640) This commit moves Deno JS runtime, ops, permissions and inspector implementation to new "deno_runtime" crate located in "runtime/" directory. Details in "runtime/README.md". Co-authored-by: Ryan Dahl --- Cargo.lock | 48 +++- Cargo.toml | 1 + cli/Cargo.toml | 11 +- cli/build.rs | 17 +- cli/errors.rs | 195 +--------------- cli/file_fetcher.rs | 4 +- cli/file_watcher.rs | 13 +- cli/http_util.rs | 20 +- cli/js.rs | 27 --- cli/main.rs | 53 +++-- cli/module_loader.rs | 2 +- cli/ops/mod.rs | 56 +---- cli/ops/runtime_compiler.rs | 8 +- cli/program_state.rs | 4 +- cli/signal.rs | 62 ------ cli/specifier_handler.rs | 2 +- cli/standalone.rs | 7 +- cli/tools/coverage.rs | 2 +- cli/tools/repl.rs | 4 +- cli/tools/upgrade.rs | 4 +- core/error.rs | 5 - runtime/Cargo.toml | 74 +++++++ runtime/README.md | 44 ++++ runtime/build.rs | 81 +++++++ runtime/colors.rs | 130 +++++++++++ runtime/errors.rs | 209 ++++++++++++++++++ runtime/examples/hello_runtime.js | 2 + runtime/examples/hello_runtime.rs | 55 +++++ runtime/fs_util.rs | 80 +++++++ runtime/http_util.rs | 46 ++++ {cli => runtime}/inspector.rs | 0 runtime/js.rs | 31 +++ runtime/lib.rs | 26 +++ {cli => runtime}/metrics.rs | 0 {cli => runtime}/ops/crypto.rs | 0 {cli => runtime}/ops/dispatch_minimal.rs | 0 {cli => runtime}/ops/fetch.rs | 9 +- {cli => runtime}/ops/fs.rs | 0 {cli => runtime}/ops/fs_events.rs | 0 {cli => runtime}/ops/io.rs | 0 runtime/ops/mod.rs | 89 ++++++++ {cli => runtime}/ops/net.rs | 0 {cli => runtime}/ops/net_unix.rs | 0 {cli => runtime}/ops/os.rs | 0 {cli => runtime}/ops/permissions.rs | 0 {cli => runtime}/ops/plugin.rs | 0 {cli => runtime}/ops/process.rs | 56 ++++- {cli => runtime}/ops/runtime.rs | 0 {cli => runtime}/ops/signal.rs | 0 {cli => runtime}/ops/timers.rs | 0 {cli => runtime}/ops/tls.rs | 0 {cli => runtime}/ops/tty.rs | 14 +- {cli => runtime}/ops/web_worker.rs | 0 {cli => runtime}/ops/websocket.rs | 0 {cli => runtime}/ops/worker_host.rs | 0 {cli => runtime}/permissions.rs | 43 ++-- {cli => runtime}/resolve_addr.rs | 0 {cli => runtime}/rt/00_bootstrap_namespace.js | 0 {cli => runtime}/rt/01_build.js | 0 {cli => runtime}/rt/01_colors.js | 0 {cli => runtime}/rt/01_errors.js | 0 {cli => runtime}/rt/01_internals.js | 0 {cli => runtime}/rt/01_version.js | 0 {cli => runtime}/rt/01_web_util.js | 0 {cli => runtime}/rt/02_console.js | 0 {cli => runtime}/rt/06_util.js | 0 {cli => runtime}/rt/10_dispatch_minimal.js | 0 {cli => runtime}/rt/11_timers.js | 0 {cli => runtime}/rt/11_workers.js | 0 {cli => runtime}/rt/12_io.js | 0 {cli => runtime}/rt/13_buffer.js | 0 {cli => runtime}/rt/27_websocket.js | 0 {cli => runtime}/rt/30_files.js | 0 {cli => runtime}/rt/30_fs.js | 0 {cli => runtime}/rt/30_metrics.js | 0 {cli => runtime}/rt/30_net.js | 0 {cli => runtime}/rt/30_os.js | 0 {cli => runtime}/rt/40_compiler_api.js | 0 {cli => runtime}/rt/40_diagnostics.js | 0 {cli => runtime}/rt/40_error_stack.js | 0 {cli => runtime}/rt/40_fs_events.js | 0 {cli => runtime}/rt/40_net_unstable.js | 0 {cli => runtime}/rt/40_performance.js | 0 {cli => runtime}/rt/40_permissions.js | 0 {cli => runtime}/rt/40_plugins.js | 0 {cli => runtime}/rt/40_process.js | 0 {cli => runtime}/rt/40_read_file.js | 0 {cli => runtime}/rt/40_signals.js | 0 {cli => runtime}/rt/40_testing.js | 0 {cli => runtime}/rt/40_tls.js | 0 {cli => runtime}/rt/40_tty.js | 0 {cli => 
runtime}/rt/40_write_file.js | 0 {cli => runtime}/rt/41_prompt.js | 0 {cli => runtime}/rt/90_deno_ns.js | 0 {cli => runtime}/rt/99_main.js | 0 {cli => runtime}/rt/README.md | 0 runtime/tokio_util.rs | 25 +++ {cli => runtime}/web_worker.rs | 15 +- {cli => runtime}/worker.rs | 11 +- 99 files changed, 1127 insertions(+), 458 deletions(-) delete mode 100644 cli/signal.rs create mode 100644 runtime/Cargo.toml create mode 100644 runtime/README.md create mode 100644 runtime/build.rs create mode 100644 runtime/colors.rs create mode 100644 runtime/errors.rs create mode 100644 runtime/examples/hello_runtime.js create mode 100644 runtime/examples/hello_runtime.rs create mode 100644 runtime/fs_util.rs create mode 100644 runtime/http_util.rs rename {cli => runtime}/inspector.rs (100%) create mode 100644 runtime/js.rs create mode 100644 runtime/lib.rs rename {cli => runtime}/metrics.rs (100%) rename {cli => runtime}/ops/crypto.rs (100%) rename {cli => runtime}/ops/dispatch_minimal.rs (100%) rename {cli => runtime}/ops/fetch.rs (76%) rename {cli => runtime}/ops/fs.rs (100%) rename {cli => runtime}/ops/fs_events.rs (100%) rename {cli => runtime}/ops/io.rs (100%) create mode 100644 runtime/ops/mod.rs rename {cli => runtime}/ops/net.rs (100%) rename {cli => runtime}/ops/net_unix.rs (100%) rename {cli => runtime}/ops/os.rs (100%) rename {cli => runtime}/ops/permissions.rs (100%) rename {cli => runtime}/ops/plugin.rs (100%) rename {cli => runtime}/ops/process.rs (80%) rename {cli => runtime}/ops/runtime.rs (100%) rename {cli => runtime}/ops/signal.rs (100%) rename {cli => runtime}/ops/timers.rs (100%) rename {cli => runtime}/ops/tls.rs (100%) rename {cli => runtime}/ops/tty.rs (96%) rename {cli => runtime}/ops/web_worker.rs (100%) rename {cli => runtime}/ops/websocket.rs (100%) rename {cli => runtime}/ops/worker_host.rs (100%) rename {cli => runtime}/permissions.rs (96%) rename {cli => runtime}/resolve_addr.rs (100%) rename {cli => runtime}/rt/00_bootstrap_namespace.js (100%) rename {cli => runtime}/rt/01_build.js (100%) rename {cli => runtime}/rt/01_colors.js (100%) rename {cli => runtime}/rt/01_errors.js (100%) rename {cli => runtime}/rt/01_internals.js (100%) rename {cli => runtime}/rt/01_version.js (100%) rename {cli => runtime}/rt/01_web_util.js (100%) rename {cli => runtime}/rt/02_console.js (100%) rename {cli => runtime}/rt/06_util.js (100%) rename {cli => runtime}/rt/10_dispatch_minimal.js (100%) rename {cli => runtime}/rt/11_timers.js (100%) rename {cli => runtime}/rt/11_workers.js (100%) rename {cli => runtime}/rt/12_io.js (100%) rename {cli => runtime}/rt/13_buffer.js (100%) rename {cli => runtime}/rt/27_websocket.js (100%) rename {cli => runtime}/rt/30_files.js (100%) rename {cli => runtime}/rt/30_fs.js (100%) rename {cli => runtime}/rt/30_metrics.js (100%) rename {cli => runtime}/rt/30_net.js (100%) rename {cli => runtime}/rt/30_os.js (100%) rename {cli => runtime}/rt/40_compiler_api.js (100%) rename {cli => runtime}/rt/40_diagnostics.js (100%) rename {cli => runtime}/rt/40_error_stack.js (100%) rename {cli => runtime}/rt/40_fs_events.js (100%) rename {cli => runtime}/rt/40_net_unstable.js (100%) rename {cli => runtime}/rt/40_performance.js (100%) rename {cli => runtime}/rt/40_permissions.js (100%) rename {cli => runtime}/rt/40_plugins.js (100%) rename {cli => runtime}/rt/40_process.js (100%) rename {cli => runtime}/rt/40_read_file.js (100%) rename {cli => runtime}/rt/40_signals.js (100%) rename {cli => runtime}/rt/40_testing.js (100%) rename {cli => runtime}/rt/40_tls.js (100%) rename {cli => 
runtime}/rt/40_tty.js (100%) rename {cli => runtime}/rt/40_write_file.js (100%) rename {cli => runtime}/rt/41_prompt.js (100%) rename {cli => runtime}/rt/90_deno_ns.js (100%) rename {cli => runtime}/rt/99_main.js (100%) rename {cli => runtime}/rt/README.md (100%) create mode 100644 runtime/tokio_util.rs rename {cli => runtime}/web_worker.rs (98%) rename {cli => runtime}/worker.rs (97%) diff --git a/Cargo.lock b/Cargo.lock index 852c4828920473..1ed126df572d8c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -460,13 +460,12 @@ dependencies = [ "clap", "crossbeam-channel 0.5.0", "deno_core", - "deno_crypto", "deno_doc", "deno_fetch", "deno_lint", + "deno_runtime", "deno_web", "dissimilar", - "dlopen", "dprint-plugin-typescript", "encoding_rs", "env_logger", @@ -496,7 +495,6 @@ dependencies = [ "swc_bundler", "swc_common", "swc_ecmascript", - "sys-info", "tempfile", "termcolor", "test_util", @@ -506,8 +504,6 @@ dependencies = [ "uuid", "walkdir", "warp", - "webpki", - "webpki-roots", "winapi 0.3.9", "winres", ] @@ -582,6 +578,48 @@ dependencies = [ "swc_ecmascript", ] +[[package]] +name = "deno_runtime" +version = "0.1.0" +dependencies = [ + "atty", + "deno_core", + "deno_crypto", + "deno_fetch", + "deno_web", + "dlopen", + "encoding_rs", + "env_logger", + "filetime", + "fwdansi", + "http", + "indexmap", + "lazy_static", + "libc", + "log", + "nix", + "notify", + "percent-encoding", + "regex", + "ring", + "rustyline", + "rustyline-derive", + "serde", + "shell-escape", + "sys-info", + "termcolor", + "test_util", + "tokio 0.2.22", + "tokio-rustls", + "tokio-tungstenite", + "uuid", + "warp", + "webpki", + "webpki-roots", + "winapi 0.3.9", + "winres", +] + [[package]] name = "deno_web" version = "0.21.0" diff --git a/Cargo.toml b/Cargo.toml index 3281be1af6a658..b7b8adae579258 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,6 +4,7 @@ members = [ "cli", "core", + "runtime", "test_plugin", "test_util", "op_crates/fetch", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 7bc6221ddacc7a..7ad0cc5ca10fa5 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -20,10 +20,9 @@ harness = false path = "./bench/main.rs" [build-dependencies] -deno_crypto = { path = "../op_crates/crypto", version = "0.4.0" } deno_core = { path = "../core", version = "0.70.0" } -deno_web = { path = "../op_crates/web", version = "0.21.0" } deno_fetch = { path = "../op_crates/fetch", version = "0.13.0" } +deno_web = { path = "../op_crates/web", version = "0.21.0" } regex = "1.3.9" serde = { version = "1.0.116", features = ["derive"] } @@ -33,11 +32,9 @@ winapi = "0.3.9" [dependencies] deno_core = { path = "../core", version = "0.70.0" } -deno_crypto = { path = "../op_crates/crypto", version = "0.4.0" } deno_doc = "0.1.18" -deno_fetch = { path = "../op_crates/fetch", version = "0.13.0" } deno_lint = "0.2.13" -deno_web = { path = "../op_crates/web", version = "0.21.0" } +deno_runtime = { path = "../runtime", version = "0.1.0" } atty = "0.2.14" base64 = "0.12.3" @@ -46,7 +43,6 @@ byteorder = "1.3.4" clap = "2.33.3" crossbeam-channel = "0.5.0" dissimilar = "1.0.2" -dlopen = "0.1.8" dprint-plugin-typescript = "0.35.1" encoding_rs = "0.8.24" env_logger = "0.7.1" @@ -72,7 +68,6 @@ sourcemap = "6.0.1" swc_bundler = "0.17.6" swc_common = { version = "0.10.7", features = ["sourcemap"] } swc_ecmascript = { version = "0.15.0", features = ["codegen", "dep_graph", "parser", "react", "transforms", "visit"] } -sys-info = "0.7.0" tempfile = "3.1.0" termcolor = "1.1.0" tokio = { version = "0.2.22", features = ["full"] } @@ -82,8 +77,6 @@ tokio-tungstenite = 
"0.11.0" uuid = { version = "0.8.1", features = ["v4"] } walkdir = "2.3.1" warp = { version = "0.2.5", features = ["tls"] } -webpki = "0.21.3" -webpki-roots = "=0.19.0" # Pinned to v0.19.0 to match 'reqwest'. [target.'cfg(windows)'.dependencies] winapi = { version = "0.3.9", features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] } diff --git a/cli/build.rs b/cli/build.rs index b0088a9d83ad6c..4be71bb9e182d0 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -13,14 +13,13 @@ use std::env; use std::path::Path; use std::path::PathBuf; +// TODO(bartlomieju): this module contains a lot of duplicated +// logic with `runtime/build.rs`, factor out to `deno_core`. fn create_snapshot( mut js_runtime: JsRuntime, snapshot_path: &Path, files: Vec, ) { - deno_web::init(&mut js_runtime); - deno_fetch::init(&mut js_runtime); - deno_crypto::init(&mut js_runtime); // TODO(nayeemrmn): https://github.com/rust-lang/cargo/issues/3946 to get the // workspace root. let display_root = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap(); @@ -43,14 +42,6 @@ fn create_snapshot( println!("Snapshot written to: {} ", snapshot_path.display()); } -fn create_runtime_snapshot(snapshot_path: &Path, files: Vec) { - let js_runtime = JsRuntime::new(RuntimeOptions { - will_snapshot: true, - ..Default::default() - }); - create_snapshot(js_runtime, snapshot_path, files); -} - #[derive(Debug, Deserialize)] struct LoadArgs { /// The fully qualified specifier that should be loaded. @@ -265,12 +256,8 @@ fn main() { let o = PathBuf::from(env::var_os("OUT_DIR").unwrap()); // Main snapshot - let runtime_snapshot_path = o.join("CLI_SNAPSHOT.bin"); let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin"); - let js_files = get_js_files("rt"); - create_runtime_snapshot(&runtime_snapshot_path, js_files); - let js_files = get_js_files("tsc"); create_compiler_snapshot(&compiler_snapshot_path, js_files, &c); diff --git a/cli/errors.rs b/cli/errors.rs index 869cdee2c77239..29fd428c86abd9 100644 --- a/cli/errors.rs +++ b/cli/errors.rs @@ -12,218 +12,25 @@ use crate::ast::DiagnosticBuffer; use crate::import_map::ImportMapError; use deno_core::error::AnyError; -use deno_core::serde_json; -use deno_core::url; -use deno_core::ModuleResolutionError; -use deno_fetch::reqwest; -use rustyline::error::ReadlineError; -use std::env; -use std::error::Error; -use std::io; - -fn get_dlopen_error_class(error: &dlopen::Error) -> &'static str { - use dlopen::Error::*; - match error { - NullCharacter(_) => "InvalidData", - OpeningLibraryError(ref e) => get_io_error_class(e), - SymbolGettingError(ref e) => get_io_error_class(e), - AddrNotMatchingDll(ref e) => get_io_error_class(e), - NullSymbol => "NotFound", - } -} - -fn get_env_var_error_class(error: &env::VarError) -> &'static str { - use env::VarError::*; - match error { - NotPresent => "NotFound", - NotUnicode(..) 
=> "InvalidData", - } -} fn get_import_map_error_class(_: &ImportMapError) -> &'static str { "URIError" } -fn get_io_error_class(error: &io::Error) -> &'static str { - use io::ErrorKind::*; - match error.kind() { - NotFound => "NotFound", - PermissionDenied => "PermissionDenied", - ConnectionRefused => "ConnectionRefused", - ConnectionReset => "ConnectionReset", - ConnectionAborted => "ConnectionAborted", - NotConnected => "NotConnected", - AddrInUse => "AddrInUse", - AddrNotAvailable => "AddrNotAvailable", - BrokenPipe => "BrokenPipe", - AlreadyExists => "AlreadyExists", - InvalidInput => "TypeError", - InvalidData => "InvalidData", - TimedOut => "TimedOut", - Interrupted => "Interrupted", - WriteZero => "WriteZero", - UnexpectedEof => "UnexpectedEof", - Other => "Error", - WouldBlock => unreachable!(), - // Non-exhaustive enum - might add new variants - // in the future - _ => unreachable!(), - } -} - -fn get_module_resolution_error_class( - _: &ModuleResolutionError, -) -> &'static str { - "URIError" -} - -fn get_notify_error_class(error: ¬ify::Error) -> &'static str { - use notify::ErrorKind::*; - match error.kind { - Generic(_) => "Error", - Io(ref e) => get_io_error_class(e), - PathNotFound => "NotFound", - WatchNotFound => "NotFound", - InvalidConfig(_) => "InvalidData", - } -} - -fn get_readline_error_class(error: &ReadlineError) -> &'static str { - use ReadlineError::*; - match error { - Io(err) => get_io_error_class(err), - Eof => "UnexpectedEof", - Interrupted => "Interrupted", - #[cfg(unix)] - Errno(err) => get_nix_error_class(err), - _ => unimplemented!(), - } -} - -fn get_regex_error_class(error: ®ex::Error) -> &'static str { - use regex::Error::*; - match error { - Syntax(_) => "SyntaxError", - CompiledTooBig(_) => "RangeError", - _ => "Error", - } -} - -fn get_request_error_class(error: &reqwest::Error) -> &'static str { - error - .source() - .and_then(|inner_err| { - (inner_err - .downcast_ref::() - .map(get_io_error_class)) - .or_else(|| { - inner_err - .downcast_ref::() - .map(get_serde_json_error_class) - }) - .or_else(|| { - inner_err - .downcast_ref::() - .map(get_url_parse_error_class) - }) - }) - .unwrap_or("Http") -} - -fn get_serde_json_error_class( - error: &serde_json::error::Error, -) -> &'static str { - use deno_core::serde_json::error::*; - match error.classify() { - Category::Io => error - .source() - .and_then(|e| e.downcast_ref::()) - .map(get_io_error_class) - .unwrap(), - Category::Syntax => "SyntaxError", - Category::Data => "InvalidData", - Category::Eof => "UnexpectedEof", - } -} - fn get_diagnostic_class(_: &DiagnosticBuffer) -> &'static str { "SyntaxError" } -fn get_url_parse_error_class(_error: &url::ParseError) -> &'static str { - "URIError" -} - -#[cfg(unix)] -fn get_nix_error_class(error: &nix::Error) -> &'static str { - use nix::errno::Errno::*; - match error { - nix::Error::Sys(ECHILD) => "NotFound", - nix::Error::Sys(EINVAL) => "TypeError", - nix::Error::Sys(ENOENT) => "NotFound", - nix::Error::Sys(ENOTTY) => "BadResource", - nix::Error::Sys(EPERM) => "PermissionDenied", - nix::Error::Sys(ESRCH) => "NotFound", - nix::Error::Sys(UnknownErrno) => "Error", - nix::Error::Sys(_) => "Error", - nix::Error::InvalidPath => "TypeError", - nix::Error::InvalidUtf8 => "InvalidData", - nix::Error::UnsupportedOperation => unreachable!(), - } -} - pub(crate) fn get_error_class_name(e: &AnyError) -> &'static str { - deno_core::error::get_custom_error_class(e) - .or_else(|| { - e.downcast_ref::() - .map(get_dlopen_error_class) - }) - .or_else(|| { - 
e.downcast_ref::() - .map(get_env_var_error_class) - }) + deno_runtime::errors::get_error_class_name(e) .or_else(|| { e.downcast_ref::() .map(get_import_map_error_class) }) - .or_else(|| e.downcast_ref::().map(get_io_error_class)) - .or_else(|| { - e.downcast_ref::() - .map(get_module_resolution_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_notify_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_readline_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_request_error_class) - }) - .or_else(|| e.downcast_ref::().map(get_regex_error_class)) - .or_else(|| { - e.downcast_ref::() - .map(get_serde_json_error_class) - }) .or_else(|| { e.downcast_ref::() .map(get_diagnostic_class) }) - .or_else(|| { - e.downcast_ref::() - .map(get_url_parse_error_class) - }) - .or_else(|| { - #[cfg(unix)] - let maybe_get_nix_error_class = - || e.downcast_ref::().map(get_nix_error_class); - #[cfg(not(unix))] - let maybe_get_nix_error_class = || Option::<&'static str>::None; - (maybe_get_nix_error_class)() - }) .unwrap_or_else(|| { panic!("Error '{}' contains boxed error of unknown type", e); }) diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index 7b730e455ff714..86c0ac9663d0ee 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -7,8 +7,8 @@ use crate::http_util::fetch_once; use crate::http_util::get_user_agent; use crate::http_util::FetchOnceResult; use crate::media_type::MediaType; -use crate::permissions::Permissions; use crate::text_encoding; +use deno_runtime::permissions::Permissions; use deno_core::error::custom_error; use deno_core::error::generic_error; @@ -17,7 +17,7 @@ use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::future::FutureExt; use deno_core::ModuleSpecifier; -use deno_fetch::reqwest; +use deno_runtime::deno_fetch::reqwest; use std::collections::HashMap; use std::fs; use std::future::Future; diff --git a/cli/file_watcher.rs b/cli/file_watcher.rs index 4aa93c581f547d..ef7aae60300bdf 100644 --- a/cli/file_watcher.rs +++ b/cli/file_watcher.rs @@ -240,16 +240,17 @@ fn new_watcher( ) -> Result { let event_detected = Arc::clone(&debounce.event_detected); - let mut watcher: RecommendedWatcher = Watcher::new_immediate( - move |res: Result| { + let mut watcher: RecommendedWatcher = + Watcher::new_immediate(move |res: Result| { if let Ok(event) = res { - if matches!(event.kind, EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_)) - { + if matches!( + event.kind, + EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) + ) { event_detected.store(true, Ordering::Relaxed); } } - }, - )?; + })?; watcher.configure(Config::PreciseEvents(true)).unwrap(); diff --git a/cli/http_util.rs b/cli/http_util.rs index 4bd59e32b08684..97e3453ec4a3a4 100644 --- a/cli/http_util.rs +++ b/cli/http_util.rs @@ -6,16 +6,16 @@ use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures; use deno_core::url::Url; -use deno_fetch::reqwest; -use deno_fetch::reqwest::header::HeaderMap; -use deno_fetch::reqwest::header::HeaderValue; -use deno_fetch::reqwest::header::IF_NONE_MATCH; -use deno_fetch::reqwest::header::LOCATION; -use deno_fetch::reqwest::header::USER_AGENT; -use deno_fetch::reqwest::redirect::Policy; -use deno_fetch::reqwest::Client; -use deno_fetch::reqwest::Response; -use deno_fetch::reqwest::StatusCode; +use deno_runtime::deno_fetch::reqwest; +use deno_runtime::deno_fetch::reqwest::header::HeaderMap; +use deno_runtime::deno_fetch::reqwest::header::HeaderValue; +use 
deno_runtime::deno_fetch::reqwest::header::IF_NONE_MATCH; +use deno_runtime::deno_fetch::reqwest::header::LOCATION; +use deno_runtime::deno_fetch::reqwest::header::USER_AGENT; +use deno_runtime::deno_fetch::reqwest::redirect::Policy; +use deno_runtime::deno_fetch::reqwest::Client; +use deno_runtime::deno_fetch::reqwest::Response; +use deno_runtime::deno_fetch::reqwest::StatusCode; use std::cmp::min; use std::collections::HashMap; use std::fs::File; diff --git a/cli/js.rs b/cli/js.rs index 3d2a17f36d7842..7cfa961a167e03 100644 --- a/cli/js.rs +++ b/cli/js.rs @@ -4,8 +4,6 @@ use deno_core::Snapshot; pub const TS_VERSION: &str = env!("TS_VERSION"); -pub static CLI_SNAPSHOT: &[u8] = - include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin")); pub static COMPILER_SNAPSHOT: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/COMPILER_SNAPSHOT.bin")); pub static DENO_NS_LIB: &str = include_str!("dts/lib.deno.ns.d.ts"); @@ -16,37 +14,12 @@ pub static SHARED_GLOBALS_LIB: &str = pub static WINDOW_LIB: &str = include_str!("dts/lib.deno.window.d.ts"); pub static UNSTABLE_NS_LIB: &str = include_str!("dts/lib.deno.unstable.d.ts"); -pub fn deno_isolate_init() -> Snapshot { - debug!("Deno isolate init with snapshots."); - let data = CLI_SNAPSHOT; - Snapshot::Static(data) -} - pub fn compiler_isolate_init() -> Snapshot { debug!("Deno compiler isolate init with snapshots."); let data = COMPILER_SNAPSHOT; Snapshot::Static(data) } -#[test] -fn cli_snapshot() { - let mut js_runtime = deno_core::JsRuntime::new(deno_core::RuntimeOptions { - startup_snapshot: Some(deno_isolate_init()), - ..Default::default() - }); - js_runtime - .execute( - "", - r#" - if (!(bootstrap.mainRuntime && bootstrap.workerRuntime)) { - throw Error("bad"); - } - console.log("we have console.log!!!"); - "#, - ) - .unwrap(); -} - #[test] fn compiler_snapshot() { let mut js_runtime = deno_core::JsRuntime::new(deno_core::RuntimeOptions { diff --git a/cli/main.rs b/cli/main.rs index 38deec5bb8a02a..b6b6b295b8b245 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -25,19 +25,14 @@ mod http_cache; mod http_util; mod import_map; mod info; -mod inspector; mod js; mod lockfile; mod lsp; mod media_type; -mod metrics; mod module_graph; mod module_loader; mod ops; -mod permissions; mod program_state; -mod resolve_addr; -mod signal; mod source_maps; mod specifier_handler; mod standalone; @@ -47,8 +42,6 @@ mod tools; mod tsc; mod tsc_config; mod version; -mod web_worker; -mod worker; use crate::file_fetcher::File; use crate::file_fetcher::FileFetcher; @@ -59,18 +52,12 @@ use crate::fmt_errors::PrettyJsError; use crate::import_map::ImportMap; use crate::media_type::MediaType; use crate::module_loader::CliModuleLoader; -use crate::ops::worker_host::CreateWebWorkerCb; -use crate::permissions::Permissions; use crate::program_state::exit_unstable; use crate::program_state::ProgramState; use crate::source_maps::apply_source_map; use crate::specifier_handler::FetchHandler; use crate::standalone::create_standalone_binary; use crate::tools::installer::infer_name_from_url; -use crate::web_worker::WebWorker; -use crate::web_worker::WebWorkerOptions; -use crate::worker::MainWorker; -use crate::worker::WorkerOptions; use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures::future::FutureExt; @@ -81,6 +68,13 @@ use deno_core::v8_set_flags; use deno_core::ModuleSpecifier; use deno_doc as doc; use deno_doc::parser::DocFileLoader; +use deno_runtime::ops::worker_host::CreateWebWorkerCb; +use deno_runtime::permissions::Permissions; +use 
deno_runtime::permissions::PermissionsOptions; +use deno_runtime::web_worker::WebWorker; +use deno_runtime::web_worker::WebWorkerOptions; +use deno_runtime::worker::MainWorker; +use deno_runtime::worker::WorkerOptions; use log::Level; use log::LevelFilter; use std::cell::RefCell; @@ -93,6 +87,23 @@ use std::pin::Pin; use std::rc::Rc; use std::sync::Arc; +impl From for PermissionsOptions { + fn from(flags: Flags) -> Self { + Self { + allow_env: flags.allow_env, + allow_hrtime: flags.allow_hrtime, + allow_net: flags.allow_net, + allow_plugin: flags.allow_plugin, + allow_read: flags.allow_read, + allow_run: flags.allow_run, + allow_write: flags.allow_write, + net_allowlist: flags.net_allowlist, + read_allowlist: flags.read_allowlist, + write_allowlist: flags.write_allowlist, + } + } +} + fn create_web_worker_callback( program_state: Arc, ) -> Arc { @@ -132,6 +143,7 @@ fn create_web_worker_callback( runtime_version: version::deno(), ts_version: version::TYPESCRIPT.to_string(), no_color: !colors::use_color(), + get_error_class_fn: Some(&crate::errors::get_error_class_name), }; let mut worker = WebWorker::from_options( @@ -207,6 +219,7 @@ pub fn create_main_worker( runtime_version: version::deno(), ts_version: version::TYPESCRIPT.to_string(), no_color: !colors::use_color(), + get_error_class_fn: Some(&crate::errors::get_error_class_name), }; let mut worker = MainWorker::from_options(main_module, permissions, &options); @@ -392,7 +405,7 @@ async fn install_command( let mut preload_flags = flags.clone(); preload_flags.inspect = None; preload_flags.inspect_brk = None; - let permissions = Permissions::from_flags(&preload_flags); + let permissions = Permissions::from_options(&preload_flags.clone().into()); let program_state = ProgramState::new(preload_flags)?; let main_module = ModuleSpecifier::resolve_url_or_path(&module_url)?; let mut worker = @@ -461,7 +474,7 @@ async fn eval_command( // Force TypeScript compile. 
let main_module = ModuleSpecifier::resolve_url_or_path("./$deno$eval.ts").unwrap(); - let permissions = Permissions::from_flags(&flags); + let permissions = Permissions::from_options(&flags.clone().into()); let program_state = ProgramState::new(flags)?; let mut worker = create_main_worker(&program_state, main_module.clone(), permissions); @@ -804,7 +817,7 @@ async fn format_command( async fn run_repl(flags: Flags) -> Result<(), AnyError> { let main_module = ModuleSpecifier::resolve_url_or_path("./$deno$repl.ts").unwrap(); - let permissions = Permissions::from_flags(&flags); + let permissions = Permissions::from_options(&flags.clone().into()); let program_state = ProgramState::new(flags)?; let mut worker = create_main_worker(&program_state, main_module.clone(), permissions); @@ -815,7 +828,7 @@ async fn run_repl(flags: Flags) -> Result<(), AnyError> { async fn run_from_stdin(flags: Flags) -> Result<(), AnyError> { let program_state = ProgramState::new(flags.clone())?; - let permissions = Permissions::from_flags(&flags); + let permissions = Permissions::from_options(&flags.clone().into()); let main_module = ModuleSpecifier::resolve_url_or_path("./$deno$stdin.ts").unwrap(); let mut worker = create_main_worker( @@ -896,7 +909,7 @@ async fn run_with_watch(flags: Flags, script: String) -> Result<(), AnyError> { let operation = |main_module: ModuleSpecifier| { let flags = flags.clone(); - let permissions = Permissions::from_flags(&flags); + let permissions = Permissions::from_options(&flags.clone().into()); async move { let main_module = main_module.clone(); let program_state = ProgramState::new(flags)?; @@ -932,7 +945,7 @@ async fn run_command(flags: Flags, script: String) -> Result<(), AnyError> { let main_module = ModuleSpecifier::resolve_url_or_path(&script)?; let program_state = ProgramState::new(flags.clone())?; - let permissions = Permissions::from_flags(&flags); + let permissions = Permissions::from_options(&flags.clone().into()); let mut worker = create_main_worker(&program_state, main_module.clone(), permissions); debug!("main_module {}", main_module); @@ -953,7 +966,7 @@ async fn test_command( filter: Option, ) -> Result<(), AnyError> { let program_state = ProgramState::new(flags.clone())?; - let permissions = Permissions::from_flags(&flags); + let permissions = Permissions::from_options(&flags.clone().into()); let cwd = std::env::current_dir().expect("No current directory"); let include = include.unwrap_or_else(|| vec![".".to_string()]); let test_modules = diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 9dda2c24a5ab0b..da75b8510a5802 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -2,7 +2,6 @@ use crate::import_map::ImportMap; use crate::module_graph::TypeLib; -use crate::permissions::Permissions; use crate::program_state::ProgramState; use deno_core::error::AnyError; use deno_core::futures::future::FutureExt; @@ -11,6 +10,7 @@ use deno_core::ModuleLoadId; use deno_core::ModuleLoader; use deno_core::ModuleSpecifier; use deno_core::OpState; +use deno_runtime::permissions::Permissions; use std::cell::RefCell; use std::pin::Pin; use std::rc::Rc; diff --git a/cli/ops/mod.rs b/cli/ops/mod.rs index 56c0f1ad594986..24eca3e77054e9 100644 --- a/cli/ops/mod.rs +++ b/cli/ops/mod.rs @@ -1,32 +1,8 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
-mod dispatch_minimal; -pub use dispatch_minimal::MinimalOp; - -pub mod crypto; pub mod errors; -pub mod fetch; -pub mod fs; -pub mod fs_events; -pub mod io; -pub mod net; -#[cfg(unix)] -mod net_unix; -pub mod os; -pub mod permissions; -pub mod plugin; -pub mod process; -pub mod runtime; pub mod runtime_compiler; -pub mod signal; -pub mod timers; -pub mod tls; -pub mod tty; -pub mod web_worker; -pub mod websocket; -pub mod worker_host; -use crate::metrics::metrics_op; use deno_core::error::AnyError; use deno_core::json_op_async; use deno_core::json_op_sync; @@ -35,6 +11,7 @@ use deno_core::BufVec; use deno_core::JsRuntime; use deno_core::OpState; use deno_core::ZeroCopyBuf; +use deno_runtime::metrics::metrics_op; use std::cell::RefCell; use std::future::Future; use std::rc::Rc; @@ -54,34 +31,3 @@ where { rt.register_op(name, metrics_op(json_op_sync(op_fn))); } - -pub struct UnstableChecker { - pub unstable: bool, -} - -impl UnstableChecker { - /// Quits the process if the --unstable flag was not provided. - /// - /// This is intentionally a non-recoverable check so that people cannot probe - /// for unstable APIs from stable programs. - // NOTE(bartlomieju): keep in sync with `cli/program_state.rs` - pub fn check_unstable(&self, api_name: &str) { - if !self.unstable { - eprintln!( - "Unstable API '{}'. The --unstable flag must be provided.", - api_name - ); - std::process::exit(70); - } - } -} -/// Helper for checking unstable features. Used for sync ops. -pub fn check_unstable(state: &OpState, api_name: &str) { - state.borrow::().check_unstable(api_name) -} - -/// Helper for checking unstable features. Used for async ops. -pub fn check_unstable2(state: &Rc>, api_name: &str) { - let state = state.borrow(); - state.borrow::().check_unstable(api_name) -} diff --git a/cli/ops/runtime_compiler.rs b/cli/ops/runtime_compiler.rs index 03ba88c76876b3..ec9806e60c7175 100644 --- a/cli/ops/runtime_compiler.rs +++ b/cli/ops/runtime_compiler.rs @@ -6,12 +6,12 @@ use crate::media_type::MediaType; use crate::module_graph::BundleType; use crate::module_graph::EmitOptions; use crate::module_graph::GraphBuilder; -use crate::permissions::Permissions; use crate::program_state::ProgramState; use crate::specifier_handler::FetchHandler; use crate::specifier_handler::MemoryHandler; use crate::specifier_handler::SpecifierHandler; use crate::tsc_config; +use deno_runtime::permissions::Permissions; use std::sync::Arc; use deno_core::error::AnyError; @@ -49,9 +49,9 @@ async fn op_compile( ) -> Result { let args: CompileArgs = serde_json::from_value(args)?; if args.bundle { - super::check_unstable2(&state, "Deno.bundle"); + deno_runtime::ops::check_unstable2(&state, "Deno.bundle"); } else { - super::check_unstable2(&state, "Deno.compile"); + deno_runtime::ops::check_unstable2(&state, "Deno.compile"); } let program_state = state.borrow().borrow::>().clone(); let runtime_permissions = { @@ -113,7 +113,7 @@ async fn op_transpile( args: Value, _data: BufVec, ) -> Result { - super::check_unstable2(&state, "Deno.transpileOnly"); + deno_runtime::ops::check_unstable2(&state, "Deno.transpileOnly"); let args: TranspileArgs = serde_json::from_value(args)?; let mut compiler_options = tsc_config::TsConfig::new(json!({ diff --git a/cli/program_state.rs b/cli/program_state.rs index 7b86b7de566b4b..223d043bae17ee 100644 --- a/cli/program_state.rs +++ b/cli/program_state.rs @@ -7,16 +7,16 @@ use crate::flags; use crate::http_cache; use crate::http_util; use crate::import_map::ImportMap; -use crate::inspector::InspectorServer; use 
crate::lockfile::Lockfile; use crate::media_type::MediaType; use crate::module_graph::CheckOptions; use crate::module_graph::GraphBuilder; use crate::module_graph::TranspileOptions; use crate::module_graph::TypeLib; -use crate::permissions::Permissions; use crate::source_maps::SourceMapGetter; use crate::specifier_handler::FetchHandler; +use deno_runtime::inspector::InspectorServer; +use deno_runtime::permissions::Permissions; use deno_core::error::generic_error; use deno_core::error::AnyError; diff --git a/cli/signal.rs b/cli/signal.rs deleted file mode 100644 index b597714f48898c..00000000000000 --- a/cli/signal.rs +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. - -use deno_core::error::AnyError; - -#[cfg(not(unix))] -use deno_core::error::last_os_error; -#[cfg(not(unix))] -use deno_core::error::type_error; - -#[cfg(not(unix))] -const SIGINT: i32 = 2; -#[cfg(not(unix))] -const SIGKILL: i32 = 9; -#[cfg(not(unix))] -const SIGTERM: i32 = 15; - -#[cfg(not(unix))] -use winapi::{ - shared::minwindef::DWORD, - um::{ - handleapi::CloseHandle, - processthreadsapi::{OpenProcess, TerminateProcess}, - winnt::PROCESS_TERMINATE, - }, -}; - -#[cfg(unix)] -pub fn kill(pid: i32, signo: i32) -> Result<(), AnyError> { - use nix::sys::signal::{kill as unix_kill, Signal}; - use nix::unistd::Pid; - use std::convert::TryFrom; - let sig = Signal::try_from(signo)?; - unix_kill(Pid::from_raw(pid), Option::Some(sig)).map_err(AnyError::from) -} - -#[cfg(not(unix))] -pub fn kill(pid: i32, signal: i32) -> Result<(), AnyError> { - match signal { - SIGINT | SIGKILL | SIGTERM => { - if pid <= 0 { - return Err(type_error("unsupported pid")); - } - unsafe { - let handle = OpenProcess(PROCESS_TERMINATE, 0, pid as DWORD); - if handle.is_null() { - return Err(last_os_error()); - } - if TerminateProcess(handle, 1) == 0 { - CloseHandle(handle); - return Err(last_os_error()); - } - if CloseHandle(handle) == 0 { - return Err(last_os_error()); - } - } - } - _ => { - return Err(type_error("unsupported signal")); - } - } - Ok(()) -} diff --git a/cli/specifier_handler.rs b/cli/specifier_handler.rs index 08331674016963..02a1196d3a8e79 100644 --- a/cli/specifier_handler.rs +++ b/cli/specifier_handler.rs @@ -5,8 +5,8 @@ use crate::deno_dir::DenoDir; use crate::disk_cache::DiskCache; use crate::file_fetcher::FileFetcher; use crate::media_type::MediaType; -use crate::permissions::Permissions; use crate::program_state::ProgramState; +use deno_runtime::permissions::Permissions; use deno_core::error::custom_error; use deno_core::error::AnyError; diff --git a/cli/standalone.rs b/cli/standalone.rs index 6559242bdb79fc..fea42fc969b1c0 100644 --- a/cli/standalone.rs +++ b/cli/standalone.rs @@ -1,10 +1,7 @@ use crate::colors; use crate::flags::Flags; -use crate::permissions::Permissions; use crate::tokio_util; use crate::version; -use crate::worker::MainWorker; -use crate::worker::WorkerOptions; use deno_core::error::bail; use deno_core::error::type_error; use deno_core::error::AnyError; @@ -12,6 +9,9 @@ use deno_core::futures::FutureExt; use deno_core::ModuleLoader; use deno_core::ModuleSpecifier; use deno_core::OpState; +use deno_runtime::permissions::Permissions; +use deno_runtime::worker::MainWorker; +use deno_runtime::worker::WorkerOptions; use std::cell::RefCell; use std::convert::TryInto; use std::env::current_exe; @@ -135,6 +135,7 @@ async fn run(source_code: String, args: Vec) -> Result<(), AnyError> { runtime_version: version::deno(), ts_version: 
version::TYPESCRIPT.to_string(), no_color: !colors::use_color(), + get_error_class_fn: Some(&crate::errors::get_error_class_name), }; let mut worker = MainWorker::from_options(main_module.clone(), permissions, &options); diff --git a/cli/tools/coverage.rs b/cli/tools/coverage.rs index 726ce9749ad64a..229cb8020f4b15 100644 --- a/cli/tools/coverage.rs +++ b/cli/tools/coverage.rs @@ -1,11 +1,11 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. use crate::colors; -use crate::inspector::InspectorSession; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::url::Url; +use deno_runtime::inspector::InspectorSession; use serde::Deserialize; pub struct CoverageCollector { diff --git a/cli/tools/repl.rs b/cli/tools/repl.rs index 9b4d94a236b2c5..61d7809c3bc76a 100644 --- a/cli/tools/repl.rs +++ b/cli/tools/repl.rs @@ -3,13 +3,13 @@ use crate::ast; use crate::ast::TokenOrComment; use crate::colors; -use crate::inspector::InspectorSession; use crate::media_type::MediaType; use crate::program_state::ProgramState; -use crate::worker::MainWorker; use deno_core::error::AnyError; use deno_core::serde_json::json; use deno_core::serde_json::Value; +use deno_runtime::inspector::InspectorSession; +use deno_runtime::worker::MainWorker; use rustyline::completion::Completer; use rustyline::error::ReadlineError; use rustyline::highlight::Highlighter; diff --git a/cli/tools/upgrade.rs b/cli/tools/upgrade.rs index b76850ddec3dcb..da26b3159f56ab 100644 --- a/cli/tools/upgrade.rs +++ b/cli/tools/upgrade.rs @@ -3,8 +3,8 @@ //! This module provides feature to upgrade deno executable use crate::AnyError; -use deno_fetch::reqwest; -use deno_fetch::reqwest::Client; +use deno_runtime::deno_fetch::reqwest; +use deno_runtime::deno_fetch::reqwest::Client; use semver_parser::version::parse as semver_parse; use std::fs; use std::path::Path; diff --git a/core/error.rs b/core/error.rs index 1d31df5edb11fd..8e4138889a9679 100644 --- a/core/error.rs +++ b/core/error.rs @@ -12,7 +12,6 @@ use std::fmt; use std::fmt::Debug; use std::fmt::Display; use std::fmt::Formatter; -use std::io; /// A generic wrapper that can encapsulate any concrete error type. pub type AnyError = anyhow::Error; @@ -41,10 +40,6 @@ pub fn uri_error(message: impl Into>) -> AnyError { custom_error("URIError", message) } -pub fn last_os_error() -> AnyError { - io::Error::last_os_error().into() -} - pub fn bad_resource(message: impl Into>) -> AnyError { custom_error("BadResource", message) } diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml new file mode 100644 index 00000000000000..fa097064e5f784 --- /dev/null +++ b/runtime/Cargo.toml @@ -0,0 +1,74 @@ +# Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
+ +[package] +name = "deno_runtime" +version = "0.1.0" +license = "MIT" +authors = ["the Deno authors"] +edition = "2018" +description = "Provides the deno runtime library" +repository = "https://github.com/denoland/deno" + +[lib] +name = "deno_runtime" +path = "lib.rs" + +[[example]] +name = "hello_runtime" +path = "examples/hello_runtime.rs" + +[build-dependencies] +deno_crypto = { path = "../op_crates/crypto", version = "0.4.0" } +deno_core = { path = "../core", version = "0.70.0" } +deno_web = { path = "../op_crates/web", version = "0.21.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.13.0" } + +[target.'cfg(windows)'.build-dependencies] +winres = "0.1.11" +winapi = "0.3.9" + +[dependencies] +deno_core = { path = "../core", version = "0.70.0" } +deno_crypto = { path = "../op_crates/crypto", version = "0.4.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.13.0" } +deno_web = { path = "../op_crates/web", version = "0.21.0" } + +atty = "0.2.14" +dlopen = "0.1.8" +encoding_rs = "0.8.24" +env_logger = "0.7.1" +filetime = "0.2.12" +http = "0.2.1" +indexmap = "1.6.0" +lazy_static = "1.4.0" +libc = "0.2.77" +log = "0.4.11" +notify = "5.0.0-pre.3" +percent-encoding = "2.1.0" +regex = "1.3.9" +ring = "0.16.19" +rustyline = { version = "7.0.0", default-features = false } +rustyline-derive = "0.4.0" +serde = { version = "1.0.116", features = ["derive"] } +shell-escape = "0.1.5" +sys-info = "0.7.0" +termcolor = "1.1.0" +tokio = { version = "0.2.22", features = ["full"] } +tokio-rustls = "0.14.1" +# Keep in-sync with warp. +tokio-tungstenite = "0.11.0" +uuid = { version = "0.8.1", features = ["v4"] } +warp = { version = "0.2.5", features = ["tls"] } +webpki = "0.21.3" +webpki-roots = "=0.19.0" # Pinned to v0.19.0 to match 'reqwest'. + +[target.'cfg(windows)'.dependencies] +winapi = { version = "0.3.9", features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] } +fwdansi = "1.1.0" + +[target.'cfg(unix)'.dependencies] +nix = "0.19.0" + +[dev-dependencies] +# Used in benchmark +test_util = { path = "../test_util" } diff --git a/runtime/README.md b/runtime/README.md new file mode 100644 index 00000000000000..1056ac5b6318af --- /dev/null +++ b/runtime/README.md @@ -0,0 +1,44 @@ +# `deno_runtime` crate + +[![crates](https://img.shields.io/crates/v/deno_runtime.svg)](https://crates.io/crates/deno_runtime) +[![docs](https://docs.rs/deno_runtime/badge.svg)](https://docs.rs/deno_runtime) + +This is a slim version of the Deno CLI which removes typescript integration and +various tooling (like lint and doc). Basically only JavaScript execution with +Deno's operating system bindings (ops). + +## Stability + +This crate is built using battle-tested modules that were originally in `deno` +crate, however the API of this crate is subject to rapid and breaking changes. + +## `MainWorker` + +The main API of this crate is `MainWorker`. `MainWorker` is a structure +encapsulating `deno_core::JsRuntime` with a set of ops used to implement `Deno` +namespace. + +When creating a `MainWorker` implementors must call `MainWorker::bootstrap` to +prepare JS runtime for use. 
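(A rough sketch of that flow, for illustration only: this is not the `examples/hello_runtime.rs` added by this patch. The `WorkerOptions` construction is left to the caller because it has many fields, and the helper name `run_script`, the `allow_all()` permission choice, and the exact call sequence are assumptions based on how the CLI calls this API elsewhere in the patch.)

```rust
// Hypothetical embedding sketch; `options` must be a fully populated WorkerOptions.
use deno_core::error::AnyError;
use deno_core::ModuleSpecifier;
use deno_runtime::permissions::Permissions;
use deno_runtime::worker::MainWorker;
use deno_runtime::worker::WorkerOptions;

async fn run_script(path: &str, options: WorkerOptions) -> Result<(), AnyError> {
  // Resolve the entry point and (for brevity) grant all permissions.
  let main_module = ModuleSpecifier::resolve_url_or_path(path)?;
  let permissions = Permissions::allow_all();

  let mut worker =
    MainWorker::from_options(main_module.clone(), permissions, &options);
  // Per the note above, bootstrap must be called before executing any code.
  worker.bootstrap(&options);

  // Load and evaluate the main module, then drive the event loop to completion.
  worker.execute_module(&main_module).await?;
  worker.run_event_loop().await?;
  Ok(())
}
```

The returned future would be driven by the `tokio_util` helper added in this patch, or by any compatible Tokio 0.2 runtime.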
+
+`MainWorker` is highly configurable and allows customizing many of the
+runtime's properties:
+
+- module loading implementation
+- error formatting
+- support for source maps
+- support for V8 inspector and Chrome Devtools debugger
+- HTTP client user agent, CA certificate
+- random number generator seed
+
+## `Worker` Web API
+
+`deno_runtime` comes with support for the `Worker` Web API. The `Worker` API is
+implemented using the `WebWorker` structure.
+
+When creating a new instance of `MainWorker`, implementors must provide a
+callback function that is used when creating a new instance of `Worker`.
+
+All `WebWorker` instances are descendants of `MainWorker`, which is responsible
+for setting up communication with the child worker. Each `WebWorker` spawns a new OS
+thread that is dedicated solely to that worker.
diff --git a/runtime/build.rs b/runtime/build.rs
new file mode 100644
index 00000000000000..7c74c9793bcd84
--- /dev/null
+++ b/runtime/build.rs
@@ -0,0 +1,81 @@
+// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
+
+use deno_core::JsRuntime;
+use deno_core::RuntimeOptions;
+use std::env;
+use std::path::Path;
+use std::path::PathBuf;
+
+// TODO(bartlomieju): this module contains a lot of duplicated
+// logic with `cli/build.rs`, factor out to `deno_core`.
+fn create_snapshot(
+  mut js_runtime: JsRuntime,
+  snapshot_path: &Path,
+  files: Vec<PathBuf>,
+) {
+  deno_web::init(&mut js_runtime);
+  deno_fetch::init(&mut js_runtime);
+  deno_crypto::init(&mut js_runtime);
+  // TODO(nayeemrmn): https://github.com/rust-lang/cargo/issues/3946 to get the
+  // workspace root.
+  let display_root = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap();
+  for file in files {
+    println!("cargo:rerun-if-changed={}", file.display());
+    let display_path = file.strip_prefix(display_root).unwrap();
+    let display_path_str = display_path.display().to_string();
+    js_runtime
+      .execute(
+        &("deno:".to_string() + &display_path_str.replace('\\', "/")),
+        &std::fs::read_to_string(&file).unwrap(),
+      )
+      .unwrap();
+  }
+
+  let snapshot = js_runtime.snapshot();
+  let snapshot_slice: &[u8] = &*snapshot;
+  println!("Snapshot size: {}", snapshot_slice.len());
+  std::fs::write(&snapshot_path, snapshot_slice).unwrap();
+  println!("Snapshot written to: {} ", snapshot_path.display());
+}
+
+fn create_runtime_snapshot(snapshot_path: &Path, files: Vec<PathBuf>) {
+  let js_runtime = JsRuntime::new(RuntimeOptions {
+    will_snapshot: true,
+    ..Default::default()
+  });
+  create_snapshot(js_runtime, snapshot_path, files);
+}
+
+fn main() {
+  // Don't build V8 if "cargo doc" is being run. This is to support docs.rs.
+ if env::var_os("RUSTDOCFLAGS").is_some() { + return; + } + + // To debug snapshot issues uncomment: + // op_fetch_asset::trace_serializer(); + + println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap()); + println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap()); + let o = PathBuf::from(env::var_os("OUT_DIR").unwrap()); + + // Main snapshot + let runtime_snapshot_path = o.join("CLI_SNAPSHOT.bin"); + + let js_files = get_js_files("rt"); + create_runtime_snapshot(&runtime_snapshot_path, js_files); +} + +fn get_js_files(d: &str) -> Vec { + let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); + let mut js_files = std::fs::read_dir(d) + .unwrap() + .map(|dir_entry| { + let file = dir_entry.unwrap(); + manifest_dir.join(file.path()) + }) + .filter(|path| path.extension().unwrap_or_default() == "js") + .collect::>(); + js_files.sort(); + js_files +} diff --git a/runtime/colors.rs b/runtime/colors.rs new file mode 100644 index 00000000000000..93f252716e6953 --- /dev/null +++ b/runtime/colors.rs @@ -0,0 +1,130 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +use regex::Regex; +use std::env; +use std::fmt; +use std::io::Write; +use termcolor::Color::{Ansi256, Black, Blue, Cyan, Green, Red, White, Yellow}; +use termcolor::{Ansi, ColorSpec, WriteColor}; + +#[cfg(windows)] +use termcolor::{BufferWriter, ColorChoice}; + +lazy_static! { + // STRIP_ANSI_RE and strip_ansi_codes are lifted from the "console" crate. + // Copyright 2017 Armin Ronacher . MIT License. + static ref STRIP_ANSI_RE: Regex = Regex::new( + r"[\x1b\x9b][\[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-PRZcf-nqry=><]" + ).unwrap(); + static ref NO_COLOR: bool = { + env::var_os("NO_COLOR").is_some() + }; +} + +/// Helper function to strip ansi codes. 
+#[cfg(test)] +pub fn strip_ansi_codes(s: &str) -> std::borrow::Cow { + STRIP_ANSI_RE.replace_all(s, "") +} + +pub fn use_color() -> bool { + !(*NO_COLOR) +} + +#[cfg(windows)] +pub fn enable_ansi() { + BufferWriter::stdout(ColorChoice::AlwaysAnsi); +} + +fn style(s: &str, colorspec: ColorSpec) -> impl fmt::Display { + if !use_color() { + return String::from(s); + } + let mut v = Vec::new(); + let mut ansi_writer = Ansi::new(&mut v); + ansi_writer.set_color(&colorspec).unwrap(); + ansi_writer.write_all(s.as_bytes()).unwrap(); + ansi_writer.reset().unwrap(); + String::from_utf8_lossy(&v).into_owned() +} + +pub fn red_bold(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_fg(Some(Red)).set_bold(true); + style(&s, style_spec) +} + +pub fn green_bold(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_fg(Some(Green)).set_bold(true); + style(&s, style_spec) +} + +pub fn italic_bold(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_bold(true).set_italic(true); + style(&s, style_spec) +} + +pub fn white_on_red(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_bg(Some(Red)).set_fg(Some(White)); + style(&s, style_spec) +} + +pub fn black_on_green(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_bg(Some(Green)).set_fg(Some(Black)); + style(&s, style_spec) +} + +pub fn yellow(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_fg(Some(Yellow)); + style(&s, style_spec) +} + +pub fn cyan(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_fg(Some(Cyan)); + style(&s, style_spec) +} + +pub fn red(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_fg(Some(Red)); + style(&s, style_spec) +} + +pub fn green(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_fg(Some(Green)); + style(&s, style_spec) +} + +pub fn bold(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_bold(true); + style(&s, style_spec) +} + +pub fn gray(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_fg(Some(Ansi256(8))); + style(&s, style_spec) +} + +pub fn italic_bold_gray(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec + .set_fg(Some(Ansi256(8))) + .set_bold(true) + .set_italic(true); + style(&s, style_spec) +} + +pub fn intense_blue(s: &str) -> impl fmt::Display { + let mut style_spec = ColorSpec::new(); + style_spec.set_fg(Some(Blue)).set_intense(true); + style(&s, style_spec) +} diff --git a/runtime/errors.rs b/runtime/errors.rs new file mode 100644 index 00000000000000..f8f71a8594b722 --- /dev/null +++ b/runtime/errors.rs @@ -0,0 +1,209 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +//! There are many types of errors in Deno: +//! - AnyError: a generic wrapper that can encapsulate any type of error. +//! - JsError: a container for the error message and stack trace for exceptions +//! thrown in JavaScript code. We use this to pretty-print stack traces. +//! - Diagnostic: these are errors that originate in TypeScript's compiler. +//! They're similar to JsError, in that they have line numbers. But +//! Diagnostics are compile-time type errors, whereas JsErrors are runtime +//! exceptions. 
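// A usage sketch (mirroring runtime/examples/hello_runtime.rs later in this
// patch): embedders resolve an AnyError to a JS error class name through this
// module and fall back to "Error" when nothing matches.
//
// fn get_error_class_name(e: &AnyError) -> &'static str {
//   deno_runtime::errors::get_error_class_name(e).unwrap_or("Error")
// }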
+ +use deno_core::error::AnyError; +use deno_core::serde_json; +use deno_core::url; +use deno_core::ModuleResolutionError; +use deno_fetch::reqwest; +use rustyline::error::ReadlineError; +use std::env; +use std::error::Error; +use std::io; + +fn get_dlopen_error_class(error: &dlopen::Error) -> &'static str { + use dlopen::Error::*; + match error { + NullCharacter(_) => "InvalidData", + OpeningLibraryError(ref e) => get_io_error_class(e), + SymbolGettingError(ref e) => get_io_error_class(e), + AddrNotMatchingDll(ref e) => get_io_error_class(e), + NullSymbol => "NotFound", + } +} + +fn get_env_var_error_class(error: &env::VarError) -> &'static str { + use env::VarError::*; + match error { + NotPresent => "NotFound", + NotUnicode(..) => "InvalidData", + } +} + +fn get_io_error_class(error: &io::Error) -> &'static str { + use io::ErrorKind::*; + match error.kind() { + NotFound => "NotFound", + PermissionDenied => "PermissionDenied", + ConnectionRefused => "ConnectionRefused", + ConnectionReset => "ConnectionReset", + ConnectionAborted => "ConnectionAborted", + NotConnected => "NotConnected", + AddrInUse => "AddrInUse", + AddrNotAvailable => "AddrNotAvailable", + BrokenPipe => "BrokenPipe", + AlreadyExists => "AlreadyExists", + InvalidInput => "TypeError", + InvalidData => "InvalidData", + TimedOut => "TimedOut", + Interrupted => "Interrupted", + WriteZero => "WriteZero", + UnexpectedEof => "UnexpectedEof", + Other => "Error", + WouldBlock => unreachable!(), + // Non-exhaustive enum - might add new variants + // in the future + _ => unreachable!(), + } +} + +fn get_module_resolution_error_class( + _: &ModuleResolutionError, +) -> &'static str { + "URIError" +} + +fn get_notify_error_class(error: ¬ify::Error) -> &'static str { + use notify::ErrorKind::*; + match error.kind { + Generic(_) => "Error", + Io(ref e) => get_io_error_class(e), + PathNotFound => "NotFound", + WatchNotFound => "NotFound", + InvalidConfig(_) => "InvalidData", + } +} + +fn get_readline_error_class(error: &ReadlineError) -> &'static str { + use ReadlineError::*; + match error { + Io(err) => get_io_error_class(err), + Eof => "UnexpectedEof", + Interrupted => "Interrupted", + #[cfg(unix)] + Errno(err) => get_nix_error_class(err), + _ => unimplemented!(), + } +} + +fn get_regex_error_class(error: ®ex::Error) -> &'static str { + use regex::Error::*; + match error { + Syntax(_) => "SyntaxError", + CompiledTooBig(_) => "RangeError", + _ => "Error", + } +} + +fn get_request_error_class(error: &reqwest::Error) -> &'static str { + error + .source() + .and_then(|inner_err| { + (inner_err + .downcast_ref::() + .map(get_io_error_class)) + .or_else(|| { + inner_err + .downcast_ref::() + .map(get_serde_json_error_class) + }) + .or_else(|| { + inner_err + .downcast_ref::() + .map(get_url_parse_error_class) + }) + }) + .unwrap_or("Http") +} + +fn get_serde_json_error_class( + error: &serde_json::error::Error, +) -> &'static str { + use deno_core::serde_json::error::*; + match error.classify() { + Category::Io => error + .source() + .and_then(|e| e.downcast_ref::()) + .map(get_io_error_class) + .unwrap(), + Category::Syntax => "SyntaxError", + Category::Data => "InvalidData", + Category::Eof => "UnexpectedEof", + } +} + +fn get_url_parse_error_class(_error: &url::ParseError) -> &'static str { + "URIError" +} + +#[cfg(unix)] +fn get_nix_error_class(error: &nix::Error) -> &'static str { + use nix::errno::Errno::*; + match error { + nix::Error::Sys(ECHILD) => "NotFound", + nix::Error::Sys(EINVAL) => "TypeError", + nix::Error::Sys(ENOENT) => 
"NotFound", + nix::Error::Sys(ENOTTY) => "BadResource", + nix::Error::Sys(EPERM) => "PermissionDenied", + nix::Error::Sys(ESRCH) => "NotFound", + nix::Error::Sys(UnknownErrno) => "Error", + nix::Error::Sys(_) => "Error", + nix::Error::InvalidPath => "TypeError", + nix::Error::InvalidUtf8 => "InvalidData", + nix::Error::UnsupportedOperation => unreachable!(), + } +} + +pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { + deno_core::error::get_custom_error_class(e) + .or_else(|| { + e.downcast_ref::() + .map(get_dlopen_error_class) + }) + .or_else(|| { + e.downcast_ref::() + .map(get_env_var_error_class) + }) + .or_else(|| e.downcast_ref::().map(get_io_error_class)) + .or_else(|| { + e.downcast_ref::() + .map(get_module_resolution_error_class) + }) + .or_else(|| { + e.downcast_ref::() + .map(get_notify_error_class) + }) + .or_else(|| { + e.downcast_ref::() + .map(get_readline_error_class) + }) + .or_else(|| { + e.downcast_ref::() + .map(get_request_error_class) + }) + .or_else(|| e.downcast_ref::().map(get_regex_error_class)) + .or_else(|| { + e.downcast_ref::() + .map(get_serde_json_error_class) + }) + .or_else(|| { + e.downcast_ref::() + .map(get_url_parse_error_class) + }) + .or_else(|| { + #[cfg(unix)] + let maybe_get_nix_error_class = + || e.downcast_ref::().map(get_nix_error_class); + #[cfg(not(unix))] + let maybe_get_nix_error_class = || Option::<&'static str>::None; + (maybe_get_nix_error_class)() + }) +} diff --git a/runtime/examples/hello_runtime.js b/runtime/examples/hello_runtime.js new file mode 100644 index 00000000000000..46609c7a0a5514 --- /dev/null +++ b/runtime/examples/hello_runtime.js @@ -0,0 +1,2 @@ +console.log("Hello world!"); +console.log(Deno); diff --git a/runtime/examples/hello_runtime.rs b/runtime/examples/hello_runtime.rs new file mode 100644 index 00000000000000..dbe539281d9f7a --- /dev/null +++ b/runtime/examples/hello_runtime.rs @@ -0,0 +1,55 @@ +// Copyright 2020 the Deno authors. All rights reserved. MIT license. 
+ +use deno_core::error::AnyError; +use deno_core::FsModuleLoader; +use deno_core::ModuleSpecifier; +use deno_runtime::permissions::Permissions; +use deno_runtime::worker::MainWorker; +use deno_runtime::worker::WorkerOptions; +use std::path::Path; +use std::rc::Rc; +use std::sync::Arc; + +fn get_error_class_name(e: &AnyError) -> &'static str { + deno_runtime::errors::get_error_class_name(e).unwrap_or("Error") +} + +#[tokio::main] +async fn main() -> Result<(), AnyError> { + let module_loader = Rc::new(FsModuleLoader); + let create_web_worker_cb = Arc::new(|_| { + todo!("Web workers are not supported in the example"); + }); + + let options = WorkerOptions { + apply_source_maps: false, + args: vec![], + debug_flag: false, + unstable: false, + ca_filepath: None, + user_agent: "hello_runtime".to_string(), + seed: None, + js_error_create_fn: None, + create_web_worker_cb, + attach_inspector: false, + maybe_inspector_server: None, + should_break_on_first_statement: false, + module_loader, + runtime_version: "x".to_string(), + ts_version: "x".to_string(), + no_color: false, + get_error_class_fn: Some(&get_error_class_name), + }; + + let js_path = + Path::new(env!("CARGO_MANIFEST_DIR")).join("examples/hello_runtime.js"); + let main_module = ModuleSpecifier::resolve_path(&js_path.to_string_lossy())?; + let permissions = Permissions::allow_all(); + + let mut worker = + MainWorker::from_options(main_module.clone(), permissions, &options); + worker.bootstrap(&options); + worker.execute_module(&main_module).await?; + worker.run_event_loop().await?; + Ok(()) +} diff --git a/runtime/fs_util.rs b/runtime/fs_util.rs new file mode 100644 index 00000000000000..028538d4fb81b6 --- /dev/null +++ b/runtime/fs_util.rs @@ -0,0 +1,80 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +use deno_core::error::AnyError; +pub use deno_core::normalize_path; +use std::env::current_dir; +use std::io::Error; +use std::path::{Path, PathBuf}; + +/// Similar to `std::fs::canonicalize()` but strips UNC prefixes on Windows. +pub fn canonicalize_path(path: &Path) -> Result { + let mut canonicalized_path = path.canonicalize()?; + if cfg!(windows) { + canonicalized_path = PathBuf::from( + canonicalized_path + .display() + .to_string() + .trim_start_matches("\\\\?\\"), + ); + } + Ok(canonicalized_path) +} + +pub fn resolve_from_cwd(path: &Path) -> Result { + let resolved_path = if path.is_absolute() { + path.to_owned() + } else { + let cwd = current_dir().unwrap(); + cwd.join(path) + }; + + Ok(normalize_path(&resolved_path)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn resolve_from_cwd_child() { + let cwd = current_dir().unwrap(); + assert_eq!(resolve_from_cwd(Path::new("a")).unwrap(), cwd.join("a")); + } + + #[test] + fn resolve_from_cwd_dot() { + let cwd = current_dir().unwrap(); + assert_eq!(resolve_from_cwd(Path::new(".")).unwrap(), cwd); + } + + #[test] + fn resolve_from_cwd_parent() { + let cwd = current_dir().unwrap(); + assert_eq!(resolve_from_cwd(Path::new("a/..")).unwrap(), cwd); + } + + #[test] + fn test_normalize_path() { + assert_eq!(normalize_path(Path::new("a/../b")), PathBuf::from("b")); + assert_eq!(normalize_path(Path::new("a/./b/")), PathBuf::from("a/b/")); + assert_eq!( + normalize_path(Path::new("a/./b/../c")), + PathBuf::from("a/c") + ); + + if cfg!(windows) { + assert_eq!( + normalize_path(Path::new("C:\\a\\.\\b\\..\\c")), + PathBuf::from("C:\\a\\c") + ); + } + } + + // TODO: Get a good expected value here for Windows. 
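// A hypothetical Windows counterpart of the test below (sketch only; the
// expected value is an assumption, which is what the TODO above is about):
//
// #[cfg(windows)]
// #[test]
// fn resolve_from_cwd_absolute_windows() {
//   let expected = Path::new("C:\\a");
//   assert_eq!(resolve_from_cwd(expected).unwrap(), expected);
// }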
+ #[cfg(not(windows))] + #[test] + fn resolve_from_cwd_absolute() { + let expected = Path::new("/a"); + assert_eq!(resolve_from_cwd(expected).unwrap(), expected); + } +} diff --git a/runtime/http_util.rs b/runtime/http_util.rs new file mode 100644 index 00000000000000..67703c214a0320 --- /dev/null +++ b/runtime/http_util.rs @@ -0,0 +1,46 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +use deno_core::error::generic_error; +use deno_core::error::AnyError; +use deno_fetch::reqwest; +use deno_fetch::reqwest::header::HeaderMap; +use deno_fetch::reqwest::header::USER_AGENT; +use deno_fetch::reqwest::redirect::Policy; +use deno_fetch::reqwest::Client; +use std::fs::File; +use std::io::Read; + +/// Create new instance of async reqwest::Client. This client supports +/// proxies and doesn't follow redirects. +pub fn create_http_client( + user_agent: String, + ca_file: Option<&str>, +) -> Result { + let mut headers = HeaderMap::new(); + headers.insert(USER_AGENT, user_agent.parse().unwrap()); + let mut builder = Client::builder() + .redirect(Policy::none()) + .default_headers(headers) + .use_rustls_tls(); + + if let Some(ca_file) = ca_file { + let mut buf = Vec::new(); + File::open(ca_file)?.read_to_end(&mut buf)?; + let cert = reqwest::Certificate::from_pem(&buf)?; + builder = builder.add_root_certificate(cert); + } + + builder + .build() + .map_err(|_| generic_error("Unable to build http client")) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn create_test_client() { + create_http_client("test_client".to_string(), None).unwrap(); + } +} diff --git a/cli/inspector.rs b/runtime/inspector.rs similarity index 100% rename from cli/inspector.rs rename to runtime/inspector.rs diff --git a/runtime/js.rs b/runtime/js.rs new file mode 100644 index 00000000000000..efbc958c7775d7 --- /dev/null +++ b/runtime/js.rs @@ -0,0 +1,31 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +use deno_core::Snapshot; + +pub static CLI_SNAPSHOT: &[u8] = + include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin")); + +pub fn deno_isolate_init() -> Snapshot { + debug!("Deno isolate init with snapshots."); + let data = CLI_SNAPSHOT; + Snapshot::Static(data) +} + +#[test] +fn cli_snapshot() { + let mut js_runtime = deno_core::JsRuntime::new(deno_core::RuntimeOptions { + startup_snapshot: Some(deno_isolate_init()), + ..Default::default() + }); + js_runtime + .execute( + "", + r#" + if (!(bootstrap.mainRuntime && bootstrap.workerRuntime)) { + throw Error("bad"); + } + console.log("we have console.log!!!"); + "#, + ) + .unwrap(); +} diff --git a/runtime/lib.rs b/runtime/lib.rs new file mode 100644 index 00000000000000..6745f3ec81a16f --- /dev/null +++ b/runtime/lib.rs @@ -0,0 +1,26 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
+ +#![deny(warnings)] + +#[macro_use] +extern crate lazy_static; +#[macro_use] +extern crate log; + +pub use deno_crypto; +pub use deno_fetch; +pub use deno_web; + +pub mod colors; +pub mod errors; +pub mod fs_util; +pub mod http_util; +pub mod inspector; +pub mod js; +pub mod metrics; +pub mod ops; +pub mod permissions; +pub mod resolve_addr; +pub mod tokio_util; +pub mod web_worker; +pub mod worker; diff --git a/cli/metrics.rs b/runtime/metrics.rs similarity index 100% rename from cli/metrics.rs rename to runtime/metrics.rs diff --git a/cli/ops/crypto.rs b/runtime/ops/crypto.rs similarity index 100% rename from cli/ops/crypto.rs rename to runtime/ops/crypto.rs diff --git a/cli/ops/dispatch_minimal.rs b/runtime/ops/dispatch_minimal.rs similarity index 100% rename from cli/ops/dispatch_minimal.rs rename to runtime/ops/dispatch_minimal.rs diff --git a/cli/ops/fetch.rs b/runtime/ops/fetch.rs similarity index 76% rename from cli/ops/fetch.rs rename to runtime/ops/fetch.rs index 18e9e9c9f242e7..0ef99f73d5a176 100644 --- a/cli/ops/fetch.rs +++ b/runtime/ops/fetch.rs @@ -3,13 +3,16 @@ use crate::http_util; use crate::permissions::Permissions; use deno_fetch::reqwest; -pub fn init(rt: &mut deno_core::JsRuntime, maybe_ca_file: Option<&str>) { +pub fn init( + rt: &mut deno_core::JsRuntime, + user_agent: String, + maybe_ca_file: Option<&str>, +) { { let op_state = rt.op_state(); let mut state = op_state.borrow_mut(); state.put::({ - http_util::create_http_client(http_util::get_user_agent(), maybe_ca_file) - .unwrap() + http_util::create_http_client(user_agent, maybe_ca_file).unwrap() }); } super::reg_json_async(rt, "op_fetch", deno_fetch::op_fetch::); diff --git a/cli/ops/fs.rs b/runtime/ops/fs.rs similarity index 100% rename from cli/ops/fs.rs rename to runtime/ops/fs.rs diff --git a/cli/ops/fs_events.rs b/runtime/ops/fs_events.rs similarity index 100% rename from cli/ops/fs_events.rs rename to runtime/ops/fs_events.rs diff --git a/cli/ops/io.rs b/runtime/ops/io.rs similarity index 100% rename from cli/ops/io.rs rename to runtime/ops/io.rs diff --git a/runtime/ops/mod.rs b/runtime/ops/mod.rs new file mode 100644 index 00000000000000..a271226572ef83 --- /dev/null +++ b/runtime/ops/mod.rs @@ -0,0 +1,89 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
+ +mod dispatch_minimal; +pub use dispatch_minimal::MinimalOp; + +pub mod crypto; +pub mod fetch; +pub mod fs; +pub mod fs_events; +pub mod io; +pub mod net; +#[cfg(unix)] +mod net_unix; +pub mod os; +pub mod permissions; +pub mod plugin; +pub mod process; +pub mod runtime; +pub mod signal; +pub mod timers; +pub mod tls; +pub mod tty; +pub mod web_worker; +pub mod websocket; +pub mod worker_host; + +use crate::metrics::metrics_op; +use deno_core::error::AnyError; +use deno_core::json_op_async; +use deno_core::json_op_sync; +use deno_core::serde_json::Value; +use deno_core::BufVec; +use deno_core::JsRuntime; +use deno_core::OpState; +use deno_core::ZeroCopyBuf; +use std::cell::RefCell; +use std::future::Future; +use std::rc::Rc; + +pub fn reg_json_async(rt: &mut JsRuntime, name: &'static str, op_fn: F) +where + F: Fn(Rc>, Value, BufVec) -> R + 'static, + R: Future> + 'static, +{ + rt.register_op(name, metrics_op(json_op_async(op_fn))); +} + +pub fn reg_json_sync(rt: &mut JsRuntime, name: &'static str, op_fn: F) +where + F: Fn(&mut OpState, Value, &mut [ZeroCopyBuf]) -> Result + + 'static, +{ + rt.register_op(name, metrics_op(json_op_sync(op_fn))); +} + +/// `UnstableChecker` is a struct so it can be placed inside `GothamState`; +/// using type alias for a bool could work, but there's a high chance +/// that there might be another type alias pointing to a bool, which +/// would override previously used alias. +pub struct UnstableChecker { + pub unstable: bool, +} + +impl UnstableChecker { + /// Quits the process if the --unstable flag was not provided. + /// + /// This is intentionally a non-recoverable check so that people cannot probe + /// for unstable APIs from stable programs. + // NOTE(bartlomieju): keep in sync with `cli/program_state.rs` + pub fn check_unstable(&self, api_name: &str) { + if !self.unstable { + eprintln!( + "Unstable API '{}'. The --unstable flag must be provided.", + api_name + ); + std::process::exit(70); + } + } +} +/// Helper for checking unstable features. Used for sync ops. +pub fn check_unstable(state: &OpState, api_name: &str) { + state.borrow::().check_unstable(api_name) +} + +/// Helper for checking unstable features. Used for async ops. 
+pub fn check_unstable2(state: &Rc>, api_name: &str) { + let state = state.borrow(); + state.borrow::().check_unstable(api_name) +} diff --git a/cli/ops/net.rs b/runtime/ops/net.rs similarity index 100% rename from cli/ops/net.rs rename to runtime/ops/net.rs diff --git a/cli/ops/net_unix.rs b/runtime/ops/net_unix.rs similarity index 100% rename from cli/ops/net_unix.rs rename to runtime/ops/net_unix.rs diff --git a/cli/ops/os.rs b/runtime/ops/os.rs similarity index 100% rename from cli/ops/os.rs rename to runtime/ops/os.rs diff --git a/cli/ops/permissions.rs b/runtime/ops/permissions.rs similarity index 100% rename from cli/ops/permissions.rs rename to runtime/ops/permissions.rs diff --git a/cli/ops/plugin.rs b/runtime/ops/plugin.rs similarity index 100% rename from cli/ops/plugin.rs rename to runtime/ops/plugin.rs diff --git a/cli/ops/process.rs b/runtime/ops/process.rs similarity index 80% rename from cli/ops/process.rs rename to runtime/ops/process.rs index 60a6d5095e93da..67b3d076106db3 100644 --- a/cli/ops/process.rs +++ b/runtime/ops/process.rs @@ -2,7 +2,6 @@ use super::io::{std_file_resource, StreamResource, StreamResourceHolder}; use crate::permissions::Permissions; -use crate::signal::kill; use deno_core::error::bad_resource_id; use deno_core::error::type_error; use deno_core::error::AnyError; @@ -216,6 +215,61 @@ async fn op_run_status( })) } +#[cfg(not(unix))] +const SIGINT: i32 = 2; +#[cfg(not(unix))] +const SIGKILL: i32 = 9; +#[cfg(not(unix))] +const SIGTERM: i32 = 15; + +#[cfg(not(unix))] +use winapi::{ + shared::minwindef::DWORD, + um::{ + handleapi::CloseHandle, + processthreadsapi::{OpenProcess, TerminateProcess}, + winnt::PROCESS_TERMINATE, + }, +}; + +#[cfg(unix)] +pub fn kill(pid: i32, signo: i32) -> Result<(), AnyError> { + use nix::sys::signal::{kill as unix_kill, Signal}; + use nix::unistd::Pid; + use std::convert::TryFrom; + let sig = Signal::try_from(signo)?; + unix_kill(Pid::from_raw(pid), Option::Some(sig)).map_err(AnyError::from) +} + +#[cfg(not(unix))] +pub fn kill(pid: i32, signal: i32) -> Result<(), AnyError> { + use std::io::Error; + match signal { + SIGINT | SIGKILL | SIGTERM => { + if pid <= 0 { + return Err(type_error("unsupported pid")); + } + unsafe { + let handle = OpenProcess(PROCESS_TERMINATE, 0, pid as DWORD); + if handle.is_null() { + return Err(Error::last_os_error().into()); + } + if TerminateProcess(handle, 1) == 0 { + CloseHandle(handle); + return Err(Error::last_os_error().into()); + } + if CloseHandle(handle) == 0 { + return Err(Error::last_os_error().into()); + } + } + } + _ => { + return Err(type_error("unsupported signal")); + } + } + Ok(()) +} + #[derive(Deserialize)] struct KillArgs { pid: i32, diff --git a/cli/ops/runtime.rs b/runtime/ops/runtime.rs similarity index 100% rename from cli/ops/runtime.rs rename to runtime/ops/runtime.rs diff --git a/cli/ops/signal.rs b/runtime/ops/signal.rs similarity index 100% rename from cli/ops/signal.rs rename to runtime/ops/signal.rs diff --git a/cli/ops/timers.rs b/runtime/ops/timers.rs similarity index 100% rename from cli/ops/timers.rs rename to runtime/ops/timers.rs diff --git a/cli/ops/tls.rs b/runtime/ops/tls.rs similarity index 100% rename from cli/ops/tls.rs rename to runtime/ops/tls.rs diff --git a/cli/ops/tty.rs b/runtime/ops/tty.rs similarity index 96% rename from cli/ops/tty.rs rename to runtime/ops/tty.rs index be1d7d3e4d312c..ad66bcf1a5ef16 100644 --- a/cli/ops/tty.rs +++ b/runtime/ops/tty.rs @@ -4,7 +4,6 @@ use super::io::std_file_resource; use super::io::StreamResource; use 
super::io::StreamResourceHolder; use deno_core::error::bad_resource_id; -use deno_core::error::last_os_error; use deno_core::error::not_supported; use deno_core::error::resource_unavailable; use deno_core::error::AnyError; @@ -15,6 +14,7 @@ use deno_core::OpState; use deno_core::ZeroCopyBuf; use serde::Deserialize; use serde::Serialize; +use std::io::Error; #[cfg(unix)] use nix::sys::termios; @@ -39,7 +39,7 @@ fn get_windows_handle( let handle = f.as_raw_handle(); if handle == handleapi::INVALID_HANDLE_VALUE { - return Err(last_os_error()); + return Err(Error::last_os_error().into()); } else if handle.is_null() { return Err(custom_error("ReferenceError", "null handle")); } @@ -131,7 +131,7 @@ fn op_set_raw( }; if handle == handleapi::INVALID_HANDLE_VALUE { - return Err(last_os_error()); + return Err(Error::last_os_error().into()); } else if handle.is_null() { return Err(custom_error("ReferenceError", "null handle")); } @@ -139,7 +139,7 @@ fn op_set_raw( if unsafe { consoleapi::GetConsoleMode(handle, &mut original_mode) } == FALSE { - return Err(last_os_error()); + return Err(Error::last_os_error().into()); } let new_mode = if is_raw { original_mode & !RAW_MODE_MASK @@ -147,7 +147,7 @@ fn op_set_raw( original_mode | RAW_MODE_MASK }; if unsafe { consoleapi::SetConsoleMode(handle, new_mode) } == FALSE { - return Err(last_os_error()); + return Err(Error::last_os_error().into()); } Ok(json!({})) @@ -298,7 +298,7 @@ fn op_console_size( &mut bufinfo, ) == 0 { - return Err(last_os_error()); + return Err(Error::last_os_error().into()); } Ok(ConsoleSize { @@ -316,7 +316,7 @@ fn op_console_size( unsafe { let mut size: libc::winsize = std::mem::zeroed(); if libc::ioctl(fd, libc::TIOCGWINSZ, &mut size as *mut _) != 0 { - return Err(last_os_error()); + return Err(Error::last_os_error().into()); } // TODO (caspervonb) return a tuple instead diff --git a/cli/ops/web_worker.rs b/runtime/ops/web_worker.rs similarity index 100% rename from cli/ops/web_worker.rs rename to runtime/ops/web_worker.rs diff --git a/cli/ops/websocket.rs b/runtime/ops/websocket.rs similarity index 100% rename from cli/ops/websocket.rs rename to runtime/ops/websocket.rs diff --git a/cli/ops/worker_host.rs b/runtime/ops/worker_host.rs similarity index 100% rename from cli/ops/worker_host.rs rename to runtime/ops/worker_host.rs diff --git a/cli/permissions.rs b/runtime/permissions.rs similarity index 96% rename from cli/permissions.rs rename to runtime/permissions.rs index cc3ce824237fba..88f9c717940bed 100644 --- a/cli/permissions.rs +++ b/runtime/permissions.rs @@ -1,7 +1,6 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
use crate::colors; -use crate::flags::Flags; use crate::fs_util::resolve_from_cwd; use deno_core::error::custom_error; use deno_core::error::uri_error; @@ -86,8 +85,22 @@ fn resolve_fs_allowlist(allowlist: &[PathBuf]) -> HashSet { .collect() } +#[derive(Clone, Debug, PartialEq, Default)] +pub struct PermissionsOptions { + pub allow_env: bool, + pub allow_hrtime: bool, + pub allow_net: bool, + pub allow_plugin: bool, + pub allow_read: bool, + pub allow_run: bool, + pub allow_write: bool, + pub net_allowlist: Vec, + pub read_allowlist: Vec, + pub write_allowlist: Vec, +} + impl Permissions { - pub fn from_flags(flags: &Flags) -> Self { + pub fn from_options(opts: &PermissionsOptions) -> Self { fn state_from_flag_bool(flag: bool) -> PermissionState { if flag { PermissionState::Granted @@ -97,24 +110,24 @@ impl Permissions { } Self { read: UnaryPermission:: { - global_state: state_from_flag_bool(flags.allow_read), - granted_list: resolve_fs_allowlist(&flags.read_allowlist), + global_state: state_from_flag_bool(opts.allow_read), + granted_list: resolve_fs_allowlist(&opts.read_allowlist), ..Default::default() }, write: UnaryPermission:: { - global_state: state_from_flag_bool(flags.allow_write), - granted_list: resolve_fs_allowlist(&flags.write_allowlist), + global_state: state_from_flag_bool(opts.allow_write), + granted_list: resolve_fs_allowlist(&opts.write_allowlist), ..Default::default() }, net: UnaryPermission:: { - global_state: state_from_flag_bool(flags.allow_net), - granted_list: flags.net_allowlist.iter().cloned().collect(), + global_state: state_from_flag_bool(opts.allow_net), + granted_list: opts.net_allowlist.iter().cloned().collect(), ..Default::default() }, - env: state_from_flag_bool(flags.allow_env), - run: state_from_flag_bool(flags.allow_run), - plugin: state_from_flag_bool(flags.allow_plugin), - hrtime: state_from_flag_bool(flags.allow_hrtime), + env: state_from_flag_bool(opts.allow_env), + run: state_from_flag_bool(opts.allow_run), + plugin: state_from_flag_bool(opts.allow_plugin), + hrtime: state_from_flag_bool(opts.allow_hrtime), } } @@ -693,7 +706,7 @@ mod tests { PathBuf::from("/b/c"), ]; - let perms = Permissions::from_flags(&Flags { + let perms = Permissions::from_options(&PermissionsOptions { read_allowlist: allowlist.clone(), write_allowlist: allowlist, ..Default::default() @@ -748,7 +761,7 @@ mod tests { #[test] fn test_check_net() { - let perms = Permissions::from_flags(&Flags { + let perms = Permissions::from_options(&PermissionsOptions { net_allowlist: svec![ "localhost", "deno.land", @@ -839,7 +852,7 @@ mod tests { } else { vec![PathBuf::from("/a")] }; - let perms = Permissions::from_flags(&Flags { + let perms = Permissions::from_options(&PermissionsOptions { read_allowlist, net_allowlist: svec!["localhost"], ..Default::default() diff --git a/cli/resolve_addr.rs b/runtime/resolve_addr.rs similarity index 100% rename from cli/resolve_addr.rs rename to runtime/resolve_addr.rs diff --git a/cli/rt/00_bootstrap_namespace.js b/runtime/rt/00_bootstrap_namespace.js similarity index 100% rename from cli/rt/00_bootstrap_namespace.js rename to runtime/rt/00_bootstrap_namespace.js diff --git a/cli/rt/01_build.js b/runtime/rt/01_build.js similarity index 100% rename from cli/rt/01_build.js rename to runtime/rt/01_build.js diff --git a/cli/rt/01_colors.js b/runtime/rt/01_colors.js similarity index 100% rename from cli/rt/01_colors.js rename to runtime/rt/01_colors.js diff --git a/cli/rt/01_errors.js b/runtime/rt/01_errors.js similarity index 100% rename from 
cli/rt/01_errors.js rename to runtime/rt/01_errors.js diff --git a/cli/rt/01_internals.js b/runtime/rt/01_internals.js similarity index 100% rename from cli/rt/01_internals.js rename to runtime/rt/01_internals.js diff --git a/cli/rt/01_version.js b/runtime/rt/01_version.js similarity index 100% rename from cli/rt/01_version.js rename to runtime/rt/01_version.js diff --git a/cli/rt/01_web_util.js b/runtime/rt/01_web_util.js similarity index 100% rename from cli/rt/01_web_util.js rename to runtime/rt/01_web_util.js diff --git a/cli/rt/02_console.js b/runtime/rt/02_console.js similarity index 100% rename from cli/rt/02_console.js rename to runtime/rt/02_console.js diff --git a/cli/rt/06_util.js b/runtime/rt/06_util.js similarity index 100% rename from cli/rt/06_util.js rename to runtime/rt/06_util.js diff --git a/cli/rt/10_dispatch_minimal.js b/runtime/rt/10_dispatch_minimal.js similarity index 100% rename from cli/rt/10_dispatch_minimal.js rename to runtime/rt/10_dispatch_minimal.js diff --git a/cli/rt/11_timers.js b/runtime/rt/11_timers.js similarity index 100% rename from cli/rt/11_timers.js rename to runtime/rt/11_timers.js diff --git a/cli/rt/11_workers.js b/runtime/rt/11_workers.js similarity index 100% rename from cli/rt/11_workers.js rename to runtime/rt/11_workers.js diff --git a/cli/rt/12_io.js b/runtime/rt/12_io.js similarity index 100% rename from cli/rt/12_io.js rename to runtime/rt/12_io.js diff --git a/cli/rt/13_buffer.js b/runtime/rt/13_buffer.js similarity index 100% rename from cli/rt/13_buffer.js rename to runtime/rt/13_buffer.js diff --git a/cli/rt/27_websocket.js b/runtime/rt/27_websocket.js similarity index 100% rename from cli/rt/27_websocket.js rename to runtime/rt/27_websocket.js diff --git a/cli/rt/30_files.js b/runtime/rt/30_files.js similarity index 100% rename from cli/rt/30_files.js rename to runtime/rt/30_files.js diff --git a/cli/rt/30_fs.js b/runtime/rt/30_fs.js similarity index 100% rename from cli/rt/30_fs.js rename to runtime/rt/30_fs.js diff --git a/cli/rt/30_metrics.js b/runtime/rt/30_metrics.js similarity index 100% rename from cli/rt/30_metrics.js rename to runtime/rt/30_metrics.js diff --git a/cli/rt/30_net.js b/runtime/rt/30_net.js similarity index 100% rename from cli/rt/30_net.js rename to runtime/rt/30_net.js diff --git a/cli/rt/30_os.js b/runtime/rt/30_os.js similarity index 100% rename from cli/rt/30_os.js rename to runtime/rt/30_os.js diff --git a/cli/rt/40_compiler_api.js b/runtime/rt/40_compiler_api.js similarity index 100% rename from cli/rt/40_compiler_api.js rename to runtime/rt/40_compiler_api.js diff --git a/cli/rt/40_diagnostics.js b/runtime/rt/40_diagnostics.js similarity index 100% rename from cli/rt/40_diagnostics.js rename to runtime/rt/40_diagnostics.js diff --git a/cli/rt/40_error_stack.js b/runtime/rt/40_error_stack.js similarity index 100% rename from cli/rt/40_error_stack.js rename to runtime/rt/40_error_stack.js diff --git a/cli/rt/40_fs_events.js b/runtime/rt/40_fs_events.js similarity index 100% rename from cli/rt/40_fs_events.js rename to runtime/rt/40_fs_events.js diff --git a/cli/rt/40_net_unstable.js b/runtime/rt/40_net_unstable.js similarity index 100% rename from cli/rt/40_net_unstable.js rename to runtime/rt/40_net_unstable.js diff --git a/cli/rt/40_performance.js b/runtime/rt/40_performance.js similarity index 100% rename from cli/rt/40_performance.js rename to runtime/rt/40_performance.js diff --git a/cli/rt/40_permissions.js b/runtime/rt/40_permissions.js similarity index 100% rename from cli/rt/40_permissions.js 
rename to runtime/rt/40_permissions.js diff --git a/cli/rt/40_plugins.js b/runtime/rt/40_plugins.js similarity index 100% rename from cli/rt/40_plugins.js rename to runtime/rt/40_plugins.js diff --git a/cli/rt/40_process.js b/runtime/rt/40_process.js similarity index 100% rename from cli/rt/40_process.js rename to runtime/rt/40_process.js diff --git a/cli/rt/40_read_file.js b/runtime/rt/40_read_file.js similarity index 100% rename from cli/rt/40_read_file.js rename to runtime/rt/40_read_file.js diff --git a/cli/rt/40_signals.js b/runtime/rt/40_signals.js similarity index 100% rename from cli/rt/40_signals.js rename to runtime/rt/40_signals.js diff --git a/cli/rt/40_testing.js b/runtime/rt/40_testing.js similarity index 100% rename from cli/rt/40_testing.js rename to runtime/rt/40_testing.js diff --git a/cli/rt/40_tls.js b/runtime/rt/40_tls.js similarity index 100% rename from cli/rt/40_tls.js rename to runtime/rt/40_tls.js diff --git a/cli/rt/40_tty.js b/runtime/rt/40_tty.js similarity index 100% rename from cli/rt/40_tty.js rename to runtime/rt/40_tty.js diff --git a/cli/rt/40_write_file.js b/runtime/rt/40_write_file.js similarity index 100% rename from cli/rt/40_write_file.js rename to runtime/rt/40_write_file.js diff --git a/cli/rt/41_prompt.js b/runtime/rt/41_prompt.js similarity index 100% rename from cli/rt/41_prompt.js rename to runtime/rt/41_prompt.js diff --git a/cli/rt/90_deno_ns.js b/runtime/rt/90_deno_ns.js similarity index 100% rename from cli/rt/90_deno_ns.js rename to runtime/rt/90_deno_ns.js diff --git a/cli/rt/99_main.js b/runtime/rt/99_main.js similarity index 100% rename from cli/rt/99_main.js rename to runtime/rt/99_main.js diff --git a/cli/rt/README.md b/runtime/rt/README.md similarity index 100% rename from cli/rt/README.md rename to runtime/rt/README.md diff --git a/runtime/tokio_util.rs b/runtime/tokio_util.rs new file mode 100644 index 00000000000000..b25a2994f4b582 --- /dev/null +++ b/runtime/tokio_util.rs @@ -0,0 +1,25 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +pub fn create_basic_runtime() -> tokio::runtime::Runtime { + tokio::runtime::Builder::new() + .basic_scheduler() + .enable_io() + .enable_time() + // This limits the number of threads for blocking operations (like for + // synchronous fs ops) or CPU bound tasks like when we run dprint in + // parallel for deno fmt. + // The default value is 512, which is an unhelpfully large thread pool. We + // don't ever want to have more than a couple dozen threads. + .max_threads(32) + .build() + .unwrap() +} + +// TODO(ry) rename to run_local ? +pub fn run_basic(future: F) -> R +where + F: std::future::Future, +{ + let mut rt = create_basic_runtime(); + rt.block_on(future) +} diff --git a/cli/web_worker.rs b/runtime/web_worker.rs similarity index 98% rename from cli/web_worker.rs rename to runtime/web_worker.rs index 18d391580c09a8..db97e36048669f 100644 --- a/cli/web_worker.rs +++ b/runtime/web_worker.rs @@ -18,6 +18,7 @@ use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::url::Url; use deno_core::v8; +use deno_core::GetErrorClassFn; use deno_core::JsErrorCreateFn; use deno_core::JsRuntime; use deno_core::ModuleLoader; @@ -122,8 +123,8 @@ pub struct WebWorker { // Following fields are pub because they are accessed // when creating a new WebWorker instance. 
pub(crate) internal_channels: WorkerChannelsInternal, - pub(crate) js_runtime: JsRuntime, - pub(crate) name: String, + pub js_runtime: JsRuntime, + pub name: String, waker: AtomicWaker, event_loop_idle: bool, terminate_rx: mpsc::Receiver<()>, @@ -152,6 +153,7 @@ pub struct WebWorkerOptions { pub ts_version: String, /// Sets `Deno.noColor` in JS runtime. pub no_color: bool, + pub get_error_class_fn: Option, } impl WebWorker { @@ -166,7 +168,7 @@ impl WebWorker { module_loader: Some(options.module_loader.clone()), startup_snapshot: Some(js::deno_isolate_init()), js_error_create_fn: options.js_error_create_fn.clone(), - get_error_class_fn: Some(&crate::errors::get_error_class_name), + get_error_class_fn: options.get_error_class_fn, ..Default::default() }); @@ -214,7 +216,11 @@ impl WebWorker { ops::web_worker::init(js_runtime, sender.clone(), handle); ops::runtime::init(js_runtime, main_module); - ops::fetch::init(js_runtime, options.ca_filepath.as_deref()); + ops::fetch::init( + js_runtime, + options.user_agent.clone(), + options.ca_filepath.as_deref(), + ); ops::timers::init(js_runtime); ops::worker_host::init( js_runtime, @@ -488,6 +494,7 @@ mod tests { runtime_version: "x".to_string(), ts_version: "x".to_string(), no_color: true, + get_error_class_fn: None, }; let mut worker = WebWorker::from_options( diff --git a/cli/worker.rs b/runtime/worker.rs similarity index 97% rename from cli/worker.rs rename to runtime/worker.rs index d1238df419e05f..a0e63afad00566 100644 --- a/cli/worker.rs +++ b/runtime/worker.rs @@ -13,6 +13,7 @@ use deno_core::futures::future::FutureExt; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::url::Url; +use deno_core::GetErrorClassFn; use deno_core::JsErrorCreateFn; use deno_core::JsRuntime; use deno_core::ModuleId; @@ -61,6 +62,7 @@ pub struct WorkerOptions { pub ts_version: String, /// Sets `Deno.noColor` in JS runtime. 
pub no_color: bool, + pub get_error_class_fn: Option, } impl MainWorker { @@ -73,7 +75,7 @@ impl MainWorker { module_loader: Some(options.module_loader.clone()), startup_snapshot: Some(js::deno_isolate_init()), js_error_create_fn: options.js_error_create_fn.clone(), - get_error_class_fn: Some(&crate::errors::get_error_class_name), + get_error_class_fn: options.get_error_class_fn, ..Default::default() }); @@ -108,7 +110,11 @@ impl MainWorker { } ops::runtime::init(js_runtime, main_module); - ops::fetch::init(js_runtime, options.ca_filepath.as_deref()); + ops::fetch::init( + js_runtime, + options.user_agent.clone(), + options.ca_filepath.as_deref(), + ); ops::timers::init(js_runtime); ops::worker_host::init( js_runtime, @@ -274,6 +280,7 @@ mod tests { runtime_version: "x".to_string(), ts_version: "x".to_string(), no_color: true, + get_error_class_fn: None, }; MainWorker::from_options(main_module, permissions, &options) From 389f492551972c271d26f95dcc15b05836bfe4b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sun, 13 Dec 2020 22:34:54 +0100 Subject: [PATCH 060/135] chore: release crates (#8744) --- Cargo.lock | 8 ++++---- cli/Cargo.toml | 8 ++++---- core/Cargo.toml | 2 +- op_crates/crypto/Cargo.toml | 4 ++-- op_crates/fetch/Cargo.toml | 4 ++-- op_crates/web/Cargo.toml | 4 ++-- runtime/Cargo.toml | 16 ++++++++-------- 7 files changed, 23 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1ed126df572d8c..e9578e4ba362a7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -510,7 +510,7 @@ dependencies = [ [[package]] name = "deno_core" -version = "0.70.0" +version = "0.71.0" dependencies = [ "anyhow", "futures", @@ -529,7 +529,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.4.0" +version = "0.5.0" dependencies = [ "deno_core", "rand 0.7.3", @@ -553,7 +553,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.13.0" +version = "0.14.0" dependencies = [ "deno_core", "reqwest", @@ -622,7 +622,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.21.0" +version = "0.22.0" dependencies = [ "deno_core", "futures", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 7ad0cc5ca10fa5..944025e13f07ec 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -20,9 +20,9 @@ harness = false path = "./bench/main.rs" [build-dependencies] -deno_core = { path = "../core", version = "0.70.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.13.0" } -deno_web = { path = "../op_crates/web", version = "0.21.0" } +deno_core = { path = "../core", version = "0.71.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.14.0" } +deno_web = { path = "../op_crates/web", version = "0.22.0" } regex = "1.3.9" serde = { version = "1.0.116", features = ["derive"] } @@ -31,7 +31,7 @@ winres = "0.1.11" winapi = "0.3.9" [dependencies] -deno_core = { path = "../core", version = "0.70.0" } +deno_core = { path = "../core", version = "0.71.0" } deno_doc = "0.1.18" deno_lint = "0.2.13" deno_runtime = { path = "../runtime", version = "0.1.0" } diff --git a/core/Cargo.toml b/core/Cargo.toml index c8f35aab1912dc..73eb1830513d31 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -1,7 +1,7 @@ # Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
[package] name = "deno_core" -version = "0.70.0" +version = "0.71.0" edition = "2018" description = "A secure JavaScript/TypeScript runtime built with V8, Rust, and Tokio" authors = ["the Deno authors"] diff --git a/op_crates/crypto/Cargo.toml b/op_crates/crypto/Cargo.toml index 64c057afbf8783..0bcb4e528fb3b7 100644 --- a/op_crates/crypto/Cargo.toml +++ b/op_crates/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.4.0" +version = "0.5.0" edition = "2018" description = "Collection of WebCrypto APIs" authors = ["the Deno authors"] @@ -14,6 +14,6 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.70.0", path = "../../core" } +deno_core = { version = "0.71.0", path = "../../core" } rand = "0.7.3" diff --git a/op_crates/fetch/Cargo.toml b/op_crates/fetch/Cargo.toml index 49987cb4a8e4aa..3433576287c00f 100644 --- a/op_crates/fetch/Cargo.toml +++ b/op_crates/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.13.0" +version = "0.14.0" edition = "2018" description = "provides fetch Web API to deno_core" authors = ["the Deno authors"] @@ -14,6 +14,6 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.70.0", path = "../../core" } +deno_core = { version = "0.71.0", path = "../../core" } reqwest = { version = "0.10.8", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli"] } serde = { version = "1.0.116", features = ["derive"] } diff --git a/op_crates/web/Cargo.toml b/op_crates/web/Cargo.toml index 560396a0cb4db1..18a5e85ea7f039 100644 --- a/op_crates/web/Cargo.toml +++ b/op_crates/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.21.0" +version = "0.22.0" edition = "2018" description = "Collection of Web APIs" authors = ["the Deno authors"] @@ -14,7 +14,7 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.70.0", path = "../../core" } +deno_core = { version = "0.71.0", path = "../../core" } idna = "0.2.0" serde = { version = "1.0.116", features = ["derive"] } diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index fa097064e5f784..02e605c32f54ab 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -18,20 +18,20 @@ name = "hello_runtime" path = "examples/hello_runtime.rs" [build-dependencies] -deno_crypto = { path = "../op_crates/crypto", version = "0.4.0" } -deno_core = { path = "../core", version = "0.70.0" } -deno_web = { path = "../op_crates/web", version = "0.21.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.13.0" } +deno_core = { path = "../core", version = "0.71.0" } +deno_crypto = { path = "../op_crates/crypto", version = "0.5.0" } +deno_web = { path = "../op_crates/web", version = "0.22.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.14.0" } [target.'cfg(windows)'.build-dependencies] winres = "0.1.11" winapi = "0.3.9" [dependencies] -deno_core = { path = "../core", version = "0.70.0" } -deno_crypto = { path = "../op_crates/crypto", version = "0.4.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.13.0" } -deno_web = { path = "../op_crates/web", version = "0.21.0" } +deno_core = { path = "../core", version = "0.71.0" } +deno_crypto = { path = "../op_crates/crypto", version = "0.5.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.14.0" } +deno_web = { path = "../op_crates/web", version = "0.22.0" } atty = "0.2.14" dlopen = "0.1.8" From 
2c778f89da29b5e90c0224a0120d60397852ed6d Mon Sep 17 00:00:00 2001 From: Valentin Anger Date: Sun, 13 Dec 2020 23:22:26 +0100 Subject: [PATCH 061/135] docs(tools): Move setup environment's community paragraph back (#8745) --- docs/getting_started/setup_your_environment.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/getting_started/setup_your_environment.md b/docs/getting_started/setup_your_environment.md index 51f29ab9a3a864..b987e4e63830b8 100644 --- a/docs/getting_started/setup_your_environment.md +++ b/docs/getting_started/setup_your_environment.md @@ -168,10 +168,6 @@ command = "deno" args = ["lsp"] ``` -If you don't see your favorite IDE on this list, maybe you can develop an -extension. Our [community Discord group](https://discord.gg/deno) can give you -some pointers on where to get started. - ##### Example for Vim/Neovim After installing the [`vim-lsp`](https://github.com/prabirshrestha/vim-lsp) LSP @@ -191,3 +187,7 @@ if executable("deno") augroup END endif ``` + +If you don't see your favorite IDE on this list, maybe you can develop an +extension. Our [community Discord group](https://discord.gg/deno) can give you +some pointers on where to get started. From b5b7c7ee0154a28e6e15ec139bc3e0819f9eb910 Mon Sep 17 00:00:00 2001 From: Vishal Pratap Singh Date: Mon, 14 Dec 2020 08:22:24 +0530 Subject: [PATCH 062/135] docs: fixes logo in README (#8740) --- README.md | 2 +- docs/images/deno3.png | Bin 0 -> 17540 bytes 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 docs/images/deno3.png diff --git a/README.md b/README.md index 035b59eaf53776..ae71067fdc883f 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![Build Status - Cirrus][]][Build status] [![Twitter handle][]][Twitter badge] - + Deno is a _simple_, _modern_ and _secure_ runtime for **JavaScript** and **TypeScript** that uses V8 and is built in Rust. 
diff --git a/docs/images/deno3.png b/docs/images/deno3.png new file mode 100644 index 0000000000000000000000000000000000000000..dcd89492033111844729410054f7c688ddaf7520 GIT binary patch literal 17540 zcmXtgby!pH|2HB%kQ$>?Vsyuc9^H)Y?hXk_WrVbJgS2!?DprD}ODJ#ioqo6zkKfSOpfGY={!vVk-Y)>VlS12eT z;-}X$l$<;=;Nr7a+KMtLbw8>0fgjKuVCpawl*adakw-omE{`nXm5tJS>vGlYI3vZ|dsQzxw6@ z0jRIn1Zz}HD}@5M{&wdlE@4@8sk)8l7${LprR}>^S>fwq z>x4W)Dp`4Q;$cuiAFp}ZGc%7g1TrgD5TUBW?cMwDVeMZ`&%cLLq~@EuR7Y1=@agA^ z#Mh@^Zu;w~PJ8}I)*Ia)d%Pk35OQr)-LvUp@i*rdN!ru?b9~msL*neJ{!K{!>3{F) zDcwFL&%fSG7_`rIV-{Kqe${x~x)e+8zpUljG|Y?~wRmj$jy)ys)FSc`493*0NiHg8 z?KS4+Tc6UD!W-PUnzJAs&EzJn@4l-t?C`d0eoIGYM8siwM*HuK*81;p8)?3%FA=qv zAGqtZyKCNe&TKHl#67$0mjK4M+x07TQl=PS(4OU(@EezsQU2ML`p2qYZ_b&)WyV(U z<}u)`Ywbbf^Z5vSQQ>b7dDLnQ_VQP|KZ_OL(^vx!Mi_L|s0bD3tYsJn-mayXHE9W| zVy%Uvf;$NzGEJE?4^kidAZ5W}{7H$!v>je!)XePTP?d})G*9=*{(3dY<80+L)PTyf z_nCFuiXX_V8VujxJu7QSdyP(yo#@qfmVEiW=kbc0d_a8b;bx8d9WFUT+oCf&x#@CB zCOi)FO_1f__mcOL#;d^>Klq~2hkJ=qS$Wij^f)uq+GWe>7&(&YcIa8ua~OlJ4w%BT zWQrgmIf|B*-%nP$MlZ&tEHEIsxJboG{wZ5l4kqQ~f=Dz~+l3~(^AuICWZ1Ga25-0w z=h0^>*k}1f3PF{hxx$(l?W@B&cnJ?SiE#y&*NRB z@#g?Z&xF<`cd>xG{g0!h=CA3&uJ|=LiABYw0}<=imH3Grlf3_Kze#O<#h2oEFM`DC z*odNpI2S?AdO2|>C(o;@>rzg;|JGghvMuPo($zi>K*W`BiO{4Z1^{!q9f3`}^+N0j zUV(0;j(JvL^Ik&#XTF$0==FlbB=3|hxdrP)a^ec5+VO5dkOqzavX^&Bf)Lv@vU=xv zW`#+c`}y5reWuG?UHXfSMRHCR=iGIMJud1_;pGSW@dD1Eo0I&%D2LB5sR%ImSTypW z@9=304(ARw=dI&7x8@sdwrL-KW9}#EHGR@|#$QPL(xD)VO5br<*Cq4e3-pR1j+EOy z^Z{S|x3u%PiG>9>CO!>E(B&R|&t(~R+4W~PYKw;hgY9U-ms?eO)upmSF)54ELg;1& z%9!wfp1b+}BO3XlcHKO#_(<$n0d#bbMx)REybnPwtg zN7gx#vFYdUE~hj{3nYRd-QdujvFsmjNRcVC7e7Bp_52CAfI<(RcI9>!ug%sw+E6HU zgG$D%iZ30tou_%omEJBnDE{nuF$(@TI zCa)lh^V$2I=-;E}Z464+q`z@%JMEi^VmbeYYS1EaRgK4UFX!FbR%8yRVON}Uwnp6+ zjJBz9%__`q#U?NJr?!QbTy?VIR(IAtbi^;l_$wtmw9q4;ANu?%gq_#6t)^fbF;!2a ztCaQ0<)G0{C#{dFGQeo(dfP}bX@eWcM136R&uS?C07Yc-UgAM zmrTi!&Loob_mX!?@xX=v{z8_|e+D94d~#}E2!hRUT>{m!uS$e;niv zC2Z4rZRR;vYdXc`B>P)$sD_*ViZuAQcdELOR{^%j@u#VNzeMqG8e+$3)f|zbxRgRQ ze;#hvfa*V8V8Hzaf4L4=n6=@EZ!h1A^BrMR5!db07Z-vjWw2L0MSlWAiKy>G60#J$ zE*9BN(;4{i@7`g9qO--OjOU&GVQm{EK;A!K@>8kLXaxm!o%-1?S^UVbkeUoKPxaI( zS{59o4Yaaz1q*g->G+7=!xjIvW6qG_*C6!gHRrM24TMsH=)Z*POh-R|O&fZSFAg^i zxtj5s8pV?br^{fgY~#5TWYEz}ebmH%UavNDf&Eg7#E?Bez70;0CZO}J1r-?p6pT1v zJ!;gLb?FZjdKPPw!LV27UF2|i>GS<(jT~ZVfhtwhwV5mRc=rCs+O82?^6_?a=aevw zgHNJ8Y9?tv1{sD)gVikU@%bz&8Ya8F$c+|BkhYGO&zw_!X>`_yNgLfA@;BC{{);{u zOgkAv{3B3AS_z!q(p9u4uy&4?;7i`9k5bU)I=wxt@97rV$+8*HH*sT%9UA5C;Rw7w zVg>f*OKYCaCJdDnhTI$!6WNObjwX&UJqD>iTdBVV#qah?%s;Rbgn6dWtPITu`TYEE zmrSY^IHrD_l900o*4_oz9%PC7I$vB&7RL7UT{eb|Qb;CZqkb$1bY`ocyXs?}8ct>T zmLDvlzLjTc0!hyBj+?z88&_6=Pm%Oqm~|U1MEMzNvlRqIz?G7Q?kz>e*PK zHa_O4cPChDs^f~4WQx<9W?~d71N17*S z@zB57Yd63KoM5X?N8?kKjl#v|)Swl=n$|%An}CEi^tPx0O>0=sWOx}UD~mn9 zEdP*s{n^fZHz><1Y#RU@^f2UEyWbIx)J(yOJqaykf-0NRNUXlS%FKwA8Du{Z;N%J> zLD4Q>11fdHTrAVvcdFxu;mE@8<MHXi}q~pwMPFY=*T_1cPGT znKvcp+n?PbHQ1xH2X^$>8WMexL2>=a!MLy1YN|KyS}u~tU{dL7=<-6ol{xGjci9zL z*b-{zQ0BtZ-7~Ye;!{HUQJYki88fV0G49>ToAB|sfl2DZE7P})oVCNBkaWuMQ#|XG zAqcu%rD3c}zT?5i6sO&`8cuMm&w(qA^sDvHgKEYp<)f)`a37aG=54ixKZ;U!$?dnu zxIv(GqQ&4)pU^3lUmBIAo8JjpeDm*NFj4FTihNr^6>>Sz%DOE&VSDVLV2{uq-otGO zzzyRSy4yBVL!mV^(saVXiJMY@Upg-^@}|9@9`lF|q(VC^#e&2pCNL?YU%VQZVCe=r z;W&}8d@*4#$tbezj$>*#8<;l=+FzDu1ZV5v_)+S~=P4BK_)X#mzIQ**_Yer^fNy}S zv!Sv1y3qbK!w~#(EKQxCZIwmx4bD6gP zx7hIq8V8sO+U-x(cGPS|Y??qRYZazk3rly4bf1Vmq^P0Gq=;Tp{f8>dY^D@k1AUAqZH zFS-gEA^1afd`e)iBdQQouUmd%-wGN ze4w_eB_VuI#o;)k9Y7(9&sEfv;{2^zJo74hnJ9Bcx_N$vxA|IO>Y#QzsLue7Bk)F&N8cJX_>c_-42hKa8xGg1Fl0K4$^I%p{pb*NQUhWOo8R zvLOHyV%^&TL!uR!wGUzhGyN#E@eymjIaxSE?_eemaV=R#An; 
Date: Mon, 14 Dec 2020 13:55:07 +0100
Subject: [PATCH 063/135] fix(cli): show canary string in long version (#8675)

---
 cli/flags.rs | 6
+++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/cli/flags.rs b/cli/flags.rs index bafd8fe91d1b71..d3fd18c32a7a06 100644 --- a/cli/flags.rs +++ b/cli/flags.rs @@ -226,7 +226,11 @@ lazy_static! { static ref LONG_VERSION: String = format!( "{} ({}, {})\nv8 {}\ntypescript {}", crate::version::deno(), - env!("PROFILE"), + if crate::version::is_canary() { + "canary" + } else { + env!("PROFILE") + }, env!("TARGET"), deno_core::v8_version(), crate::version::TYPESCRIPT From 502c77aad969fe8557aaf6d56432f94450c26a2a Mon Sep 17 00:00:00 2001 From: Ryan Dahl Date: Mon, 14 Dec 2020 08:28:56 -0500 Subject: [PATCH 064/135] build: Factor out common code into core/build_util.rs (#8756) --- cli/build.rs | 47 ++------------------------------ core/build_util.rs | 44 ++++++++++++++++++++++++++++++ core/lib.rs | 1 + runtime/build.rs | 68 ++++++++-------------------------------------- 4 files changed, 60 insertions(+), 100 deletions(-) create mode 100644 core/build_util.rs diff --git a/cli/build.rs b/cli/build.rs index 4be71bb9e182d0..35fadfa6422fb0 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -1,5 +1,7 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. +use deno_core::build_util::create_snapshot; +use deno_core::build_util::get_js_files; use deno_core::error::custom_error; use deno_core::json_op_sync; use deno_core::serde_json; @@ -13,35 +15,6 @@ use std::env; use std::path::Path; use std::path::PathBuf; -// TODO(bartlomieju): this module contains a lot of duplicated -// logic with `runtime/build.rs`, factor out to `deno_core`. -fn create_snapshot( - mut js_runtime: JsRuntime, - snapshot_path: &Path, - files: Vec, -) { - // TODO(nayeemrmn): https://github.com/rust-lang/cargo/issues/3946 to get the - // workspace root. - let display_root = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap(); - for file in files { - println!("cargo:rerun-if-changed={}", file.display()); - let display_path = file.strip_prefix(display_root).unwrap(); - let display_path_str = display_path.display().to_string(); - js_runtime - .execute( - &("deno:".to_string() + &display_path_str.replace('\\', "/")), - &std::fs::read_to_string(&file).unwrap(), - ) - .unwrap(); - } - - let snapshot = js_runtime.snapshot(); - let snapshot_slice: &[u8] = &*snapshot; - println!("Snapshot size: {}", snapshot_slice.len()); - std::fs::write(&snapshot_path, snapshot_slice).unwrap(); - println!("Snapshot written to: {} ", snapshot_path.display()); -} - #[derive(Debug, Deserialize)] struct LoadArgs { /// The fully qualified specifier that should be loaded. @@ -258,7 +231,7 @@ fn main() { // Main snapshot let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin"); - let js_files = get_js_files("tsc"); + let js_files = get_js_files(&c.join("tsc")); create_compiler_snapshot(&compiler_snapshot_path, js_files, &c); #[cfg(target_os = "windows")] @@ -272,17 +245,3 @@ fn main() { res.compile().unwrap(); } } - -fn get_js_files(d: &str) -> Vec { - let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); - let mut js_files = std::fs::read_dir(d) - .unwrap() - .map(|dir_entry| { - let file = dir_entry.unwrap(); - manifest_dir.join(file.path()) - }) - .filter(|path| path.extension().unwrap_or_default() == "js") - .collect::>(); - js_files.sort(); - js_files -} diff --git a/core/build_util.rs b/core/build_util.rs new file mode 100644 index 00000000000000..04a0f870bccfc2 --- /dev/null +++ b/core/build_util.rs @@ -0,0 +1,44 @@ +//! 
Helper module used in cli/build.rs and runtime/build.rs +use crate::JsRuntime; +use std::path::Path; +use std::path::PathBuf; + +pub fn create_snapshot( + mut js_runtime: JsRuntime, + snapshot_path: &Path, + files: Vec, +) { + // TODO(nayeemrmn): https://github.com/rust-lang/cargo/issues/3946 to get the + // workspace root. + let display_root = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap(); + for file in files { + println!("cargo:rerun-if-changed={}", file.display()); + let display_path = file.strip_prefix(display_root).unwrap(); + let display_path_str = display_path.display().to_string(); + js_runtime + .execute( + &("deno:".to_string() + &display_path_str.replace('\\', "/")), + &std::fs::read_to_string(&file).unwrap(), + ) + .unwrap(); + } + + let snapshot = js_runtime.snapshot(); + let snapshot_slice: &[u8] = &*snapshot; + println!("Snapshot size: {}", snapshot_slice.len()); + std::fs::write(&snapshot_path, snapshot_slice).unwrap(); + println!("Snapshot written to: {} ", snapshot_path.display()); +} + +pub fn get_js_files(d: &Path) -> Vec { + let mut js_files = std::fs::read_dir(d) + .unwrap() + .map(|dir_entry| { + let file = dir_entry.unwrap(); + d.join(file.path()) + }) + .filter(|path| path.extension().unwrap_or_default() == "js") + .collect::>(); + js_files.sort(); + js_files +} diff --git a/core/lib.rs b/core/lib.rs index 5846ad99d47703..0f7c23c26a45e2 100644 --- a/core/lib.rs +++ b/core/lib.rs @@ -8,6 +8,7 @@ extern crate log; mod async_cancel; mod async_cell; mod bindings; +pub mod build_util; pub mod error; mod flags; mod gotham_state; diff --git a/runtime/build.rs b/runtime/build.rs index 7c74c9793bcd84..e8f036eff778c1 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -1,51 +1,12 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. +use deno_core::build_util::create_snapshot; +use deno_core::build_util::get_js_files; use deno_core::JsRuntime; use deno_core::RuntimeOptions; use std::env; -use std::path::Path; use std::path::PathBuf; -// TODO(bartlomieju): this module contains a lot of duplicated -// logic with `cli/build.rs`, factor out to `deno_core`. -fn create_snapshot( - mut js_runtime: JsRuntime, - snapshot_path: &Path, - files: Vec, -) { - deno_web::init(&mut js_runtime); - deno_fetch::init(&mut js_runtime); - deno_crypto::init(&mut js_runtime); - // TODO(nayeemrmn): https://github.com/rust-lang/cargo/issues/3946 to get the - // workspace root. - let display_root = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap(); - for file in files { - println!("cargo:rerun-if-changed={}", file.display()); - let display_path = file.strip_prefix(display_root).unwrap(); - let display_path_str = display_path.display().to_string(); - js_runtime - .execute( - &("deno:".to_string() + &display_path_str.replace('\\', "/")), - &std::fs::read_to_string(&file).unwrap(), - ) - .unwrap(); - } - - let snapshot = js_runtime.snapshot(); - let snapshot_slice: &[u8] = &*snapshot; - println!("Snapshot size: {}", snapshot_slice.len()); - std::fs::write(&snapshot_path, snapshot_slice).unwrap(); - println!("Snapshot written to: {} ", snapshot_path.display()); -} - -fn create_runtime_snapshot(snapshot_path: &Path, files: Vec) { - let js_runtime = JsRuntime::new(RuntimeOptions { - will_snapshot: true, - ..Default::default() - }); - create_snapshot(js_runtime, snapshot_path, files); -} - fn main() { // Don't build V8 if "cargo doc" is being run. This is to support docs.rs. 
if env::var_os("RUSTDOCFLAGS").is_some() { @@ -62,20 +23,15 @@ fn main() { // Main snapshot let runtime_snapshot_path = o.join("CLI_SNAPSHOT.bin"); - let js_files = get_js_files("rt"); - create_runtime_snapshot(&runtime_snapshot_path, js_files); -} + let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); + let js_files = get_js_files(&c.join("rt")); -fn get_js_files(d: &str) -> Vec { - let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); - let mut js_files = std::fs::read_dir(d) - .unwrap() - .map(|dir_entry| { - let file = dir_entry.unwrap(); - manifest_dir.join(file.path()) - }) - .filter(|path| path.extension().unwrap_or_default() == "js") - .collect::>(); - js_files.sort(); - js_files + let mut js_runtime = JsRuntime::new(RuntimeOptions { + will_snapshot: true, + ..Default::default() + }); + deno_web::init(&mut js_runtime); + deno_fetch::init(&mut js_runtime); + deno_crypto::init(&mut js_runtime); + create_snapshot(js_runtime, &runtime_snapshot_path, js_files); } From a38b6b8acceabf0e4d877af19c0967d7c7a77cd7 Mon Sep 17 00:00:00 2001 From: Ryan Dahl Date: Mon, 14 Dec 2020 09:15:16 -0500 Subject: [PATCH 065/135] fix: docs.rs detection (#8755) --- cli/build.rs | 4 ++-- runtime/build.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cli/build.rs b/cli/build.rs index 35fadfa6422fb0..84c74683d9be1d 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -200,8 +200,8 @@ fn git_commit_hash() -> String { } fn main() { - // Don't build V8 if "cargo doc" is being run. This is to support docs.rs. - if env::var_os("RUSTDOCFLAGS").is_some() { + // Skip building from docs.rs. + if env::var_os("DOCS_RS").is_some() { return; } diff --git a/runtime/build.rs b/runtime/build.rs index e8f036eff778c1..3b8be413673773 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -8,8 +8,8 @@ use std::env; use std::path::PathBuf; fn main() { - // Don't build V8 if "cargo doc" is being run. This is to support docs.rs. - if env::var_os("RUSTDOCFLAGS").is_some() { + // Skip building from docs.rs. 
+ if env::var_os("DOCS_RS").is_some() { return; } From 3476d5434fa713f455e4c5d0bff66581065cc7f7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 14 Dec 2020 15:47:20 +0100 Subject: [PATCH 066/135] chore: release deno_runtime 0.2.0 (#8758) --- Cargo.lock | 2 +- cli/Cargo.toml | 2 +- runtime/Cargo.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e9578e4ba362a7..13cb78c6e66ff4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -580,7 +580,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.1.0" +version = "0.2.0" dependencies = [ "atty", "deno_core", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 944025e13f07ec..9f0dee41341157 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -34,7 +34,7 @@ winapi = "0.3.9" deno_core = { path = "../core", version = "0.71.0" } deno_doc = "0.1.18" deno_lint = "0.2.13" -deno_runtime = { path = "../runtime", version = "0.1.0" } +deno_runtime = { path = "../runtime", version = "0.2.0" } atty = "0.2.14" base64 = "0.12.3" diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 02e605c32f54ab..262dda036b1a52 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.1.0" +version = "0.2.0" license = "MIT" authors = ["the Deno authors"] edition = "2018" From 8f8749095c0dc5e71104228de650f671674f6fdc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 14 Dec 2020 17:11:17 +0100 Subject: [PATCH 067/135] chore: release crates (#8760) --- Cargo.lock | 10 +++++----- cli/Cargo.toml | 10 +++++----- core/Cargo.toml | 2 +- op_crates/crypto/Cargo.toml | 4 ++-- op_crates/fetch/Cargo.toml | 4 ++-- op_crates/web/Cargo.toml | 4 ++-- runtime/Cargo.toml | 18 +++++++++--------- 7 files changed, 26 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 13cb78c6e66ff4..b11ae0c5678c23 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -510,7 +510,7 @@ dependencies = [ [[package]] name = "deno_core" -version = "0.71.0" +version = "0.72.0" dependencies = [ "anyhow", "futures", @@ -529,7 +529,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.5.0" +version = "0.6.0" dependencies = [ "deno_core", "rand 0.7.3", @@ -553,7 +553,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.14.0" +version = "0.15.0" dependencies = [ "deno_core", "reqwest", @@ -580,7 +580,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.2.0" +version = "0.2.1" dependencies = [ "atty", "deno_core", @@ -622,7 +622,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.22.0" +version = "0.23.0" dependencies = [ "deno_core", "futures", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 9f0dee41341157..8c24dc7182e167 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -20,9 +20,9 @@ harness = false path = "./bench/main.rs" [build-dependencies] -deno_core = { path = "../core", version = "0.71.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.14.0" } -deno_web = { path = "../op_crates/web", version = "0.22.0" } +deno_core = { path = "../core", version = "0.72.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.15.0" } +deno_web = { path = "../op_crates/web", version = "0.23.0" } regex = "1.3.9" serde = { version = "1.0.116", features = ["derive"] } @@ -31,10 +31,10 @@ winres = "0.1.11" winapi = "0.3.9" [dependencies] -deno_core = { path = "../core", version = "0.71.0" } +deno_core = { path = "../core", version = "0.72.0" } deno_doc = "0.1.18" 
deno_lint = "0.2.13" -deno_runtime = { path = "../runtime", version = "0.2.0" } +deno_runtime = { path = "../runtime", version = "0.2.1" } atty = "0.2.14" base64 = "0.12.3" diff --git a/core/Cargo.toml b/core/Cargo.toml index 73eb1830513d31..78d56703e1a801 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -1,7 +1,7 @@ # Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. [package] name = "deno_core" -version = "0.71.0" +version = "0.72.0" edition = "2018" description = "A secure JavaScript/TypeScript runtime built with V8, Rust, and Tokio" authors = ["the Deno authors"] diff --git a/op_crates/crypto/Cargo.toml b/op_crates/crypto/Cargo.toml index 0bcb4e528fb3b7..7631271e9e0d35 100644 --- a/op_crates/crypto/Cargo.toml +++ b/op_crates/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.5.0" +version = "0.6.0" edition = "2018" description = "Collection of WebCrypto APIs" authors = ["the Deno authors"] @@ -14,6 +14,6 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.71.0", path = "../../core" } +deno_core = { version = "0.72.0", path = "../../core" } rand = "0.7.3" diff --git a/op_crates/fetch/Cargo.toml b/op_crates/fetch/Cargo.toml index 3433576287c00f..2a094f49c38bb6 100644 --- a/op_crates/fetch/Cargo.toml +++ b/op_crates/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.14.0" +version = "0.15.0" edition = "2018" description = "provides fetch Web API to deno_core" authors = ["the Deno authors"] @@ -14,6 +14,6 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.71.0", path = "../../core" } +deno_core = { version = "0.72.0", path = "../../core" } reqwest = { version = "0.10.8", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli"] } serde = { version = "1.0.116", features = ["derive"] } diff --git a/op_crates/web/Cargo.toml b/op_crates/web/Cargo.toml index 18a5e85ea7f039..60bcbeba3fcc48 100644 --- a/op_crates/web/Cargo.toml +++ b/op_crates/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.22.0" +version = "0.23.0" edition = "2018" description = "Collection of Web APIs" authors = ["the Deno authors"] @@ -14,7 +14,7 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.71.0", path = "../../core" } +deno_core = { version = "0.72.0", path = "../../core" } idna = "0.2.0" serde = { version = "1.0.116", features = ["derive"] } diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 262dda036b1a52..47b02bf25c6470 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.2.0" +version = "0.2.1" license = "MIT" authors = ["the Deno authors"] edition = "2018" @@ -18,20 +18,20 @@ name = "hello_runtime" path = "examples/hello_runtime.rs" [build-dependencies] -deno_core = { path = "../core", version = "0.71.0" } -deno_crypto = { path = "../op_crates/crypto", version = "0.5.0" } -deno_web = { path = "../op_crates/web", version = "0.22.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.14.0" } +deno_core = { path = "../core", version = "0.72.0" } +deno_crypto = { path = "../op_crates/crypto", version = "0.6.0" } +deno_web = { path = "../op_crates/web", version = "0.23.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.15.0" } [target.'cfg(windows)'.build-dependencies] winres = "0.1.11" winapi = "0.3.9" [dependencies] -deno_core 
= { path = "../core", version = "0.71.0" } -deno_crypto = { path = "../op_crates/crypto", version = "0.5.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.14.0" } -deno_web = { path = "../op_crates/web", version = "0.22.0" } +deno_core = { path = "../core", version = "0.72.0" } +deno_crypto = { path = "../op_crates/crypto", version = "0.6.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.15.0" } +deno_web = { path = "../op_crates/web", version = "0.23.0" } atty = "0.2.14" dlopen = "0.1.8" From ce6b738ac08b66b0ee8bfa3f17f8510ab094d5d9 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Tue, 15 Dec 2020 00:37:08 +0800 Subject: [PATCH 068/135] fix(repl): recover from invalid input (#8759) --- cli/tests/integration_tests.rs | 25 +++++++++++++++++++++++++ runtime/inspector.rs | 23 ++++++++++++++++++++++- 2 files changed, 47 insertions(+), 1 deletion(-) diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index cb6dfad6d93df2..e4de423ee06b36 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -1656,6 +1656,31 @@ fn repl_test_pty_unpaired_braces() { } } +#[cfg(unix)] +#[test] +fn repl_test_pty_bad_input() { + use std::io::Read; + use util::pty::fork::*; + let deno_exe = util::deno_exe_path(); + let fork = Fork::from_ptmx().unwrap(); + if let Ok(mut master) = fork.is_parent() { + master.write_all(b"'\\u{1f3b5}'[0]\n").unwrap(); + master.write_all(b"close();\n").unwrap(); + + let mut output = String::new(); + master.read_to_string(&mut output).unwrap(); + + assert!(output.contains("Unterminated string literal")); + + fork.wait().unwrap(); + } else { + std::env::set_var("NO_COLOR", "1"); + let err = exec::Command::new(deno_exe).arg("repl").exec(); + println!("err {}", err); + unreachable!() + } +} + #[test] #[ignore] fn run_watch_with_importmap_and_relative_paths() { diff --git a/runtime/inspector.rs b/runtime/inspector.rs index 89fd5bf576ddf9..fc0e793d9d4375 100644 --- a/runtime/inspector.rs +++ b/runtime/inspector.rs @@ -859,7 +859,28 @@ impl v8::inspector::ChannelImpl for InspectorSession { message: v8::UniquePtr, ) { let raw_message = message.unwrap().string().to_string(); - let message = serde_json::from_str(&raw_message).unwrap(); + let message: serde_json::Value = match serde_json::from_str(&raw_message) { + Ok(v) => v, + Err(error) => match error.classify() { + serde_json::error::Category::Syntax => json!({ + "id": call_id, + "result": { + "result": { + "type": "error", + "description": "Unterminated string literal", + "value": "Unterminated string literal", + }, + "exceptionDetails": { + "exceptionId": 0, + "text": "Unterminated string literal", + "lineNumber": 0, + "columnNumber": 0 + }, + }, + }), + _ => panic!("Could not parse inspector message"), + }, + }; self .response_tx_map From a19fea918f12249d2c248eb013b3b8baf2eacb84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 14 Dec 2020 18:42:40 +0100 Subject: [PATCH 069/135] Revert "build: Factor out common code into core/build_util.rs (#8756)" (#8763) This reverts commit 502c77aad969fe8557aaf6d56432f94450c26a2a. 
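For reference, the REPL fix above (#8759) hinges on serde_json's error classification: only syntax errors coming back from the V8 inspector (for example an unterminated string literal typed into the REPL) are converted into a synthetic error response, while any other parse failure still panics. A minimal stand-alone sketch of that pattern follows; the function name is illustrative only and is not part of the Deno codebase.

// Sketch of the error-classification pattern used in the inspector change
// above. Assumes a serde_json dependency; `classify_inspector_message` is a
// hypothetical helper, not a real Deno API.
fn classify_inspector_message(raw: &str) -> Result<serde_json::Value, String> {
  match serde_json::from_str::<serde_json::Value>(raw) {
    // Well-formed JSON passes through untouched.
    Ok(value) => Ok(value),
    // Syntax errors become a recoverable error instead of aborting the REPL.
    Err(err) if err.classify() == serde_json::error::Category::Syntax => {
      Err("Unterminated string literal".to_string())
    }
    // Anything else still indicates a bug in the protocol handling.
    Err(err) => panic!("Could not parse inspector message: {}", err),
  }
}
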
--- cli/build.rs | 47 ++++++++++++++++++++++++++++++-- core/build_util.rs | 44 ------------------------------ core/lib.rs | 1 - runtime/build.rs | 68 ++++++++++++++++++++++++++++++++++++++-------- 4 files changed, 100 insertions(+), 60 deletions(-) delete mode 100644 core/build_util.rs diff --git a/cli/build.rs b/cli/build.rs index 84c74683d9be1d..45221281ff37bc 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -1,7 +1,5 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -use deno_core::build_util::create_snapshot; -use deno_core::build_util::get_js_files; use deno_core::error::custom_error; use deno_core::json_op_sync; use deno_core::serde_json; @@ -15,6 +13,35 @@ use std::env; use std::path::Path; use std::path::PathBuf; +// TODO(bartlomieju): this module contains a lot of duplicated +// logic with `runtime/build.rs`, factor out to `deno_core`. +fn create_snapshot( + mut js_runtime: JsRuntime, + snapshot_path: &Path, + files: Vec, +) { + // TODO(nayeemrmn): https://github.com/rust-lang/cargo/issues/3946 to get the + // workspace root. + let display_root = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap(); + for file in files { + println!("cargo:rerun-if-changed={}", file.display()); + let display_path = file.strip_prefix(display_root).unwrap(); + let display_path_str = display_path.display().to_string(); + js_runtime + .execute( + &("deno:".to_string() + &display_path_str.replace('\\', "/")), + &std::fs::read_to_string(&file).unwrap(), + ) + .unwrap(); + } + + let snapshot = js_runtime.snapshot(); + let snapshot_slice: &[u8] = &*snapshot; + println!("Snapshot size: {}", snapshot_slice.len()); + std::fs::write(&snapshot_path, snapshot_slice).unwrap(); + println!("Snapshot written to: {} ", snapshot_path.display()); +} + #[derive(Debug, Deserialize)] struct LoadArgs { /// The fully qualified specifier that should be loaded. @@ -231,7 +258,7 @@ fn main() { // Main snapshot let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin"); - let js_files = get_js_files(&c.join("tsc")); + let js_files = get_js_files("tsc"); create_compiler_snapshot(&compiler_snapshot_path, js_files, &c); #[cfg(target_os = "windows")] @@ -245,3 +272,17 @@ fn main() { res.compile().unwrap(); } } + +fn get_js_files(d: &str) -> Vec { + let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); + let mut js_files = std::fs::read_dir(d) + .unwrap() + .map(|dir_entry| { + let file = dir_entry.unwrap(); + manifest_dir.join(file.path()) + }) + .filter(|path| path.extension().unwrap_or_default() == "js") + .collect::>(); + js_files.sort(); + js_files +} diff --git a/core/build_util.rs b/core/build_util.rs deleted file mode 100644 index 04a0f870bccfc2..00000000000000 --- a/core/build_util.rs +++ /dev/null @@ -1,44 +0,0 @@ -//! Helper module used in cli/build.rs and runtime/build.rs -use crate::JsRuntime; -use std::path::Path; -use std::path::PathBuf; - -pub fn create_snapshot( - mut js_runtime: JsRuntime, - snapshot_path: &Path, - files: Vec, -) { - // TODO(nayeemrmn): https://github.com/rust-lang/cargo/issues/3946 to get the - // workspace root. 
- let display_root = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap(); - for file in files { - println!("cargo:rerun-if-changed={}", file.display()); - let display_path = file.strip_prefix(display_root).unwrap(); - let display_path_str = display_path.display().to_string(); - js_runtime - .execute( - &("deno:".to_string() + &display_path_str.replace('\\', "/")), - &std::fs::read_to_string(&file).unwrap(), - ) - .unwrap(); - } - - let snapshot = js_runtime.snapshot(); - let snapshot_slice: &[u8] = &*snapshot; - println!("Snapshot size: {}", snapshot_slice.len()); - std::fs::write(&snapshot_path, snapshot_slice).unwrap(); - println!("Snapshot written to: {} ", snapshot_path.display()); -} - -pub fn get_js_files(d: &Path) -> Vec { - let mut js_files = std::fs::read_dir(d) - .unwrap() - .map(|dir_entry| { - let file = dir_entry.unwrap(); - d.join(file.path()) - }) - .filter(|path| path.extension().unwrap_or_default() == "js") - .collect::>(); - js_files.sort(); - js_files -} diff --git a/core/lib.rs b/core/lib.rs index 0f7c23c26a45e2..5846ad99d47703 100644 --- a/core/lib.rs +++ b/core/lib.rs @@ -8,7 +8,6 @@ extern crate log; mod async_cancel; mod async_cell; mod bindings; -pub mod build_util; pub mod error; mod flags; mod gotham_state; diff --git a/runtime/build.rs b/runtime/build.rs index 3b8be413673773..78f17f61fcf550 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -1,12 +1,51 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -use deno_core::build_util::create_snapshot; -use deno_core::build_util::get_js_files; use deno_core::JsRuntime; use deno_core::RuntimeOptions; use std::env; +use std::path::Path; use std::path::PathBuf; +// TODO(bartlomieju): this module contains a lot of duplicated +// logic with `cli/build.rs`, factor out to `deno_core`. +fn create_snapshot( + mut js_runtime: JsRuntime, + snapshot_path: &Path, + files: Vec, +) { + deno_web::init(&mut js_runtime); + deno_fetch::init(&mut js_runtime); + deno_crypto::init(&mut js_runtime); + // TODO(nayeemrmn): https://github.com/rust-lang/cargo/issues/3946 to get the + // workspace root. + let display_root = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap(); + for file in files { + println!("cargo:rerun-if-changed={}", file.display()); + let display_path = file.strip_prefix(display_root).unwrap(); + let display_path_str = display_path.display().to_string(); + js_runtime + .execute( + &("deno:".to_string() + &display_path_str.replace('\\', "/")), + &std::fs::read_to_string(&file).unwrap(), + ) + .unwrap(); + } + + let snapshot = js_runtime.snapshot(); + let snapshot_slice: &[u8] = &*snapshot; + println!("Snapshot size: {}", snapshot_slice.len()); + std::fs::write(&snapshot_path, snapshot_slice).unwrap(); + println!("Snapshot written to: {} ", snapshot_path.display()); +} + +fn create_runtime_snapshot(snapshot_path: &Path, files: Vec) { + let js_runtime = JsRuntime::new(RuntimeOptions { + will_snapshot: true, + ..Default::default() + }); + create_snapshot(js_runtime, snapshot_path, files); +} + fn main() { // Skip building from docs.rs. 
if env::var_os("DOCS_RS").is_some() { @@ -23,15 +62,20 @@ fn main() { // Main snapshot let runtime_snapshot_path = o.join("CLI_SNAPSHOT.bin"); - let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); - let js_files = get_js_files(&c.join("rt")); + let js_files = get_js_files("rt"); + create_runtime_snapshot(&runtime_snapshot_path, js_files); +} - let mut js_runtime = JsRuntime::new(RuntimeOptions { - will_snapshot: true, - ..Default::default() - }); - deno_web::init(&mut js_runtime); - deno_fetch::init(&mut js_runtime); - deno_crypto::init(&mut js_runtime); - create_snapshot(js_runtime, &runtime_snapshot_path, js_files); +fn get_js_files(d: &str) -> Vec { + let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); + let mut js_files = std::fs::read_dir(d) + .unwrap() + .map(|dir_entry| { + let file = dir_entry.unwrap(); + manifest_dir.join(file.path()) + }) + .filter(|path| path.extension().unwrap_or_default() == "js") + .collect::>(); + js_files.sort(); + js_files } From 025c0a1d372845b753fe682edd9fd050e1de1884 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 14 Dec 2020 19:47:28 +0100 Subject: [PATCH 070/135] chore: release crates (#8765) --- Cargo.lock | 10 +++++----- cli/Cargo.toml | 10 +++++----- core/Cargo.toml | 2 +- op_crates/crypto/Cargo.toml | 4 ++-- op_crates/fetch/Cargo.toml | 4 ++-- op_crates/web/Cargo.toml | 4 ++-- runtime/Cargo.toml | 18 +++++++++--------- 7 files changed, 26 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b11ae0c5678c23..c817b835169497 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -510,7 +510,7 @@ dependencies = [ [[package]] name = "deno_core" -version = "0.72.0" +version = "0.73.0" dependencies = [ "anyhow", "futures", @@ -529,7 +529,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.6.0" +version = "0.7.0" dependencies = [ "deno_core", "rand 0.7.3", @@ -553,7 +553,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.15.0" +version = "0.16.0" dependencies = [ "deno_core", "reqwest", @@ -580,7 +580,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.2.1" +version = "0.3.0" dependencies = [ "atty", "deno_core", @@ -622,7 +622,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.23.0" +version = "0.24.0" dependencies = [ "deno_core", "futures", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 8c24dc7182e167..ba17a1e88db577 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -20,9 +20,9 @@ harness = false path = "./bench/main.rs" [build-dependencies] -deno_core = { path = "../core", version = "0.72.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.15.0" } -deno_web = { path = "../op_crates/web", version = "0.23.0" } +deno_core = { path = "../core", version = "0.73.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.16.0" } +deno_web = { path = "../op_crates/web", version = "0.24.0" } regex = "1.3.9" serde = { version = "1.0.116", features = ["derive"] } @@ -31,10 +31,10 @@ winres = "0.1.11" winapi = "0.3.9" [dependencies] -deno_core = { path = "../core", version = "0.72.0" } +deno_core = { path = "../core", version = "0.73.0" } deno_doc = "0.1.18" deno_lint = "0.2.13" -deno_runtime = { path = "../runtime", version = "0.2.1" } +deno_runtime = { path = "../runtime", version = "0.3.0" } atty = "0.2.14" base64 = "0.12.3" diff --git a/core/Cargo.toml b/core/Cargo.toml index 78d56703e1a801..f9ba05192da526 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -1,7 +1,7 @@ # Copyright 2018-2020 the Deno authors. 
All rights reserved. MIT license. [package] name = "deno_core" -version = "0.72.0" +version = "0.73.0" edition = "2018" description = "A secure JavaScript/TypeScript runtime built with V8, Rust, and Tokio" authors = ["the Deno authors"] diff --git a/op_crates/crypto/Cargo.toml b/op_crates/crypto/Cargo.toml index 7631271e9e0d35..d94913ca92b74b 100644 --- a/op_crates/crypto/Cargo.toml +++ b/op_crates/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.6.0" +version = "0.7.0" edition = "2018" description = "Collection of WebCrypto APIs" authors = ["the Deno authors"] @@ -14,6 +14,6 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.72.0", path = "../../core" } +deno_core = { version = "0.73.0", path = "../../core" } rand = "0.7.3" diff --git a/op_crates/fetch/Cargo.toml b/op_crates/fetch/Cargo.toml index 2a094f49c38bb6..778513ed5acaeb 100644 --- a/op_crates/fetch/Cargo.toml +++ b/op_crates/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.15.0" +version = "0.16.0" edition = "2018" description = "provides fetch Web API to deno_core" authors = ["the Deno authors"] @@ -14,6 +14,6 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.72.0", path = "../../core" } +deno_core = { version = "0.73.0", path = "../../core" } reqwest = { version = "0.10.8", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli"] } serde = { version = "1.0.116", features = ["derive"] } diff --git a/op_crates/web/Cargo.toml b/op_crates/web/Cargo.toml index 60bcbeba3fcc48..b8f21cbc989193 100644 --- a/op_crates/web/Cargo.toml +++ b/op_crates/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.23.0" +version = "0.24.0" edition = "2018" description = "Collection of Web APIs" authors = ["the Deno authors"] @@ -14,7 +14,7 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.72.0", path = "../../core" } +deno_core = { version = "0.73.0", path = "../../core" } idna = "0.2.0" serde = { version = "1.0.116", features = ["derive"] } diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 47b02bf25c6470..d5a84a4b998280 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.2.1" +version = "0.3.0" license = "MIT" authors = ["the Deno authors"] edition = "2018" @@ -18,20 +18,20 @@ name = "hello_runtime" path = "examples/hello_runtime.rs" [build-dependencies] -deno_core = { path = "../core", version = "0.72.0" } -deno_crypto = { path = "../op_crates/crypto", version = "0.6.0" } -deno_web = { path = "../op_crates/web", version = "0.23.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.15.0" } +deno_core = { path = "../core", version = "0.73.0" } +deno_crypto = { path = "../op_crates/crypto", version = "0.7.0" } +deno_web = { path = "../op_crates/web", version = "0.24.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.16.0" } [target.'cfg(windows)'.build-dependencies] winres = "0.1.11" winapi = "0.3.9" [dependencies] -deno_core = { path = "../core", version = "0.72.0" } -deno_crypto = { path = "../op_crates/crypto", version = "0.6.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.15.0" } -deno_web = { path = "../op_crates/web", version = "0.23.0" } +deno_core = { path = "../core", version = "0.73.0" } +deno_crypto = { path = "../op_crates/crypto", version = "0.7.0" } 
+deno_fetch = { path = "../op_crates/fetch", version = "0.16.0" } +deno_web = { path = "../op_crates/web", version = "0.24.0" } atty = "0.2.14" dlopen = "0.1.8" From b6d5ae1ecd125ce8a14cddc877cb66dbcfc9bb04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Mon, 14 Dec 2020 21:25:05 +0100 Subject: [PATCH 071/135] v1.6.1 --- Cargo.lock | 2 +- Releases.md | 17 +++++++++++++++++ cli/Cargo.toml | 2 +- std/version.ts | 2 +- 4 files changed, 20 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c817b835169497..58c278e56dac3e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -450,7 +450,7 @@ dependencies = [ [[package]] name = "deno" -version = "1.6.0" +version = "1.6.1" dependencies = [ "atty", "base64 0.12.3", diff --git a/Releases.md b/Releases.md index da6720e2f9ac4a..57e97625c40f3e 100644 --- a/Releases.md +++ b/Releases.md @@ -6,6 +6,23 @@ https://github.com/denoland/deno/releases We also have one-line install commands at: https://github.com/denoland/deno_install +### 1.6.1 / 2020.12.14 + +- feat(lsp): support import maps (#8683) +- fix: show canary string in long version (#8675) +- fix: zsh completions (#8718) +- fix(compile): error when the output path already exists (#8681) +- fix(lsp): only resolve sources with supported schemas (#8696) +- fix(op_crates/fetch): support non-ascii response headers value (#8600) +- fix(repl): recover from invalid input (#8759) +- refactor: deno_runtime crate (#8640) +- upgrade: swc_ecmascript to 0.15.0 (#8688) + +Changes in std version 0.80.0: + +- fix(std/datetime): partsToDate (#8553) +- fix(std/wasi): disallow multiple starts (#8712) + ### 1.6.0 / 2020.12.08 - BREAKING: Make "isolatedModules" setting non-configurable (#8482) diff --git a/cli/Cargo.toml b/cli/Cargo.toml index ba17a1e88db577..b6979c638d9f23 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno" -version = "1.6.0" +version = "1.6.1" license = "MIT" authors = ["the Deno authors"] edition = "2018" diff --git a/std/version.ts b/std/version.ts index f9e88e58a04755..05603ffc5b0cb6 100644 --- a/std/version.ts +++ b/std/version.ts @@ -5,4 +5,4 @@ * the cli's API is stable. In the future when std becomes stable, likely we * will match versions with cli as we have in the past. 
*/ -export const VERSION = "0.80.0"; +export const VERSION = "0.81.0"; From 63a821b78b5a293c0eb5b3ecc18d67bde8331eda Mon Sep 17 00:00:00 2001 From: Kitson Kelly Date: Tue, 15 Dec 2020 16:52:55 +1100 Subject: [PATCH 072/135] fix(cli): make dynamic import errors catchable (#8750) Fixes #6259 --- cli/disk_cache.rs | 24 +- cli/module_graph.rs | 422 ++++++++++++------ cli/module_loader.rs | 18 +- cli/program_state.rs | 136 +++--- cli/specifier_handler.rs | 86 ++-- cli/tests/dynamic_import/b.js | 2 + cli/tests/dynamic_import/c.js | 2 + cli/tests/error_004_missing_module.ts | 2 + .../error_005_missing_dynamic_import.ts.out | 3 +- .../error_015_dynamic_import_permissions.out | 3 +- cli/tests/fix_dynamic_import_errors.js | 7 + cli/tests/fix_dynamic_import_errors.js.out | 2 + cli/tests/integration_tests.rs | 5 + cli/tests/redirect_cache.out | 1 - .../unsupported_dynamic_import_scheme.out | 2 +- core/modules.rs | 2 +- 16 files changed, 447 insertions(+), 270 deletions(-) create mode 100644 cli/tests/dynamic_import/b.js create mode 100644 cli/tests/dynamic_import/c.js create mode 100644 cli/tests/fix_dynamic_import_errors.js create mode 100644 cli/tests/fix_dynamic_import_errors.js.out diff --git a/cli/disk_cache.rs b/cli/disk_cache.rs index 96a4ff41aad2c0..233990903bc731 100644 --- a/cli/disk_cache.rs +++ b/cli/disk_cache.rs @@ -46,7 +46,7 @@ impl DiskCache { }) } - pub fn get_cache_filename(&self, url: &Url) -> PathBuf { + fn get_cache_filename(&self, url: &Url) -> Option { let mut out = PathBuf::new(); let scheme = url.scheme(); @@ -105,31 +105,25 @@ impl DiskCache { out = out.join(remaining_components); } - scheme => { - unimplemented!( - "Don't know how to create cache name for scheme: {}\n Url: {}", - scheme, - url - ); - } + _ => return None, }; - out + Some(out) } pub fn get_cache_filename_with_extension( &self, url: &Url, extension: &str, - ) -> PathBuf { - let base = self.get_cache_filename(url); + ) -> Option { + let base = self.get_cache_filename(url)?; match base.extension() { - None => base.with_extension(extension), + None => Some(base.with_extension(extension)), Some(ext) => { let original_extension = OsStr::to_str(ext).unwrap(); let final_extension = format!("{}.{}", original_extension, extension); - base.with_extension(final_extension) + Some(base.with_extension(final_extension)) } } } @@ -234,7 +228,7 @@ mod tests { for test_case in &test_cases { let cache_filename = cache.get_cache_filename(&Url::parse(test_case.0).unwrap()); - assert_eq!(cache_filename, PathBuf::from(test_case.1)); + assert_eq!(cache_filename, Some(PathBuf::from(test_case.1))); } } @@ -280,7 +274,7 @@ mod tests { &Url::parse(test_case.0).unwrap(), test_case.1 ), - PathBuf::from(test_case.2) + Some(PathBuf::from(test_case.2)) ) } } diff --git a/cli/module_graph.rs b/cli/module_graph.rs index 4144ee5ee9df75..f5e08882ed5209 100644 --- a/cli/module_graph.rs +++ b/cli/module_graph.rs @@ -28,6 +28,8 @@ use crate::tsc_config::TsConfig; use crate::version; use crate::AnyError; +use deno_core::error::anyhow; +use deno_core::error::custom_error; use deno_core::error::Context; use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::stream::StreamExt; @@ -38,6 +40,7 @@ use deno_core::serde::Serializer; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::ModuleResolutionError; +use deno_core::ModuleSource; use deno_core::ModuleSpecifier; use regex::Regex; use std::cell::RefCell; @@ -482,6 +485,10 @@ impl Module { Ok(specifier) } + pub fn set_emit(&mut self, code: String, 
maybe_map: Option) { + self.maybe_emit = Some(Emit::Cli((code, maybe_map))); + } + /// Calculate the hashed version of the module and update the `maybe_version`. pub fn set_version(&mut self, config: &[u8]) { self.maybe_version = @@ -523,6 +530,9 @@ pub struct ResultInfo { /// A structure which provides diagnostic information (usually from `tsc`) /// about the code in the module graph. pub diagnostics: Diagnostics, + /// A map of specifiers to the result of their resolution in the module graph. + pub loadable_modules: + HashMap>, /// Optionally ignored compiler options that represent any options that were /// ignored if there was a user provided configuration. pub maybe_ignored_options: Option, @@ -637,6 +647,18 @@ pub struct TranspileOptions { pub reload: bool, } +#[derive(Debug, Clone)] +enum ModuleSlot { + /// The module fetch resulted in a non-recoverable error. + Err(Rc), + /// The the fetch resulted in a module. + Module(Box), + /// Used to denote a module that isn't part of the graph. + None, + /// The fetch of the module is pending. + Pending, +} + /// A dependency graph of modules, were the modules that have been inserted via /// the builder will be loaded into the graph. Also provides an interface to /// be able to manipulate and handle the graph. @@ -649,7 +671,7 @@ pub struct Graph { /// invoked. maybe_tsbuildinfo: Option, /// The modules that are part of the graph. - modules: HashMap, + modules: HashMap, /// A map of redirects, where a module specifier is redirected to another /// module specifier by the handler. All modules references should be /// resolved internally via this, before attempting to access the module via @@ -667,6 +689,44 @@ pub struct Graph { maybe_lockfile: Option>>, } +/// Convert a specifier and a module slot in a result to the module source which +/// is needed by Deno core for loading the module. +fn to_module_result( + (specifier, module_slot): (&ModuleSpecifier, &ModuleSlot), +) -> (ModuleSpecifier, Result) { + match module_slot { + ModuleSlot::Err(err) => (specifier.clone(), Err(anyhow!(err.to_string()))), + ModuleSlot::Module(module) => ( + specifier.clone(), + if let Some(emit) = &module.maybe_emit { + match emit { + Emit::Cli((code, _)) => Ok(ModuleSource { + code: code.clone(), + module_url_found: module.specifier.to_string(), + module_url_specified: specifier.to_string(), + }), + } + } else { + match module.media_type { + MediaType::JavaScript | MediaType::Unknown => Ok(ModuleSource { + code: module.source.clone(), + module_url_found: module.specifier.to_string(), + module_url_specified: specifier.to_string(), + }), + _ => Err(custom_error( + "NotFound", + format!("Compiled module not found \"{}\"", specifier), + )), + } + }, + ), + _ => ( + specifier.clone(), + Err(anyhow!("Module \"{}\" unavailable.", specifier)), + ), + } +} + impl Graph { /// Create a new instance of a graph, ready to have modules loaded it. 
/// @@ -767,6 +827,7 @@ impl Graph { debug!("graph does not need to be checked or emitted."); return Ok(ResultInfo { maybe_ignored_options, + loadable_modules: self.get_loadable_modules(), ..Default::default() }); } @@ -834,9 +895,10 @@ impl Graph { } let config = config.as_bytes(); for (specifier, code) in codes.iter() { - if let Some(module) = graph.get_module_mut(specifier) { - module.maybe_emit = - Some(Emit::Cli((code.clone(), maps.get(specifier).cloned()))); + if let ModuleSlot::Module(module) = + graph.get_module_mut(specifier).unwrap() + { + module.set_emit(code.clone(), maps.get(specifier).cloned()); module.set_version(&config); module.is_dirty = true; } else { @@ -849,16 +911,12 @@ impl Graph { Ok(ResultInfo { diagnostics: response.diagnostics, + loadable_modules: graph.get_loadable_modules(), maybe_ignored_options, stats: response.stats, }) } - fn contains_module(&self, specifier: &ModuleSpecifier) -> bool { - let s = self.resolve_specifier(specifier); - self.modules.contains_key(s) - } - /// Emit the module graph in a specific format. This is specifically designed /// to be an "all-in-one" API for access by the runtime, allowing both /// emitting single modules as well as bundles, using Deno module resolution @@ -921,13 +979,13 @@ impl Graph { )?; let mut emitted_files = HashMap::new(); + let graph = graph.borrow(); match options.bundle_type { BundleType::Esm => { assert!( response.emitted_files.is_empty(), "No files should have been emitted from tsc." ); - let graph = graph.borrow(); assert_eq!( graph.roots.len(), 1, @@ -966,6 +1024,7 @@ impl Graph { emitted_files, ResultInfo { diagnostics: response.diagnostics, + loadable_modules: graph.get_loadable_modules(), maybe_ignored_options, stats: response.stats, }, @@ -1023,15 +1082,17 @@ impl Graph { /// any build info if present. 
fn flush(&mut self) -> Result<(), AnyError> { let mut handler = self.handler.borrow_mut(); - for (_, module) in self.modules.iter_mut() { - if module.is_dirty { - if let Some(emit) = &module.maybe_emit { - handler.set_cache(&module.specifier, emit)?; - } - if let Some(version) = &module.maybe_version { - handler.set_version(&module.specifier, version.clone())?; + for (_, module_slot) in self.modules.iter_mut() { + if let ModuleSlot::Module(module) = module_slot { + if module.is_dirty { + if let Some(emit) = &module.maybe_emit { + handler.set_cache(&module.specifier, emit)?; + } + if let Some(version) = &module.maybe_version { + handler.set_version(&module.specifier, version.clone())?; + } + module.is_dirty = false; } - module.is_dirty = false; } } for root_specifier in self.roots.iter() { @@ -1050,7 +1111,12 @@ impl Graph { totals: &mut HashMap, ) -> ModuleInfo { let not_seen = seen.insert(specifier.clone()); - let module = self.get_module(specifier).unwrap(); + let module = if let ModuleSlot::Module(module) = self.get_module(specifier) + { + module + } else { + unreachable!(); + }; let mut deps = Vec::new(); let mut total_size = None; @@ -1097,50 +1163,79 @@ impl Graph { let map = self .modules .iter() - .map(|(specifier, module)| { - let mut deps = BTreeSet::new(); - for (_, dep) in module.dependencies.iter() { - if let Some(code_dep) = &dep.maybe_code { - deps.insert(code_dep.clone()); + .filter_map(|(specifier, module_slot)| { + if let ModuleSlot::Module(module) = module_slot { + let mut deps = BTreeSet::new(); + for (_, dep) in module.dependencies.iter() { + if let Some(code_dep) = &dep.maybe_code { + deps.insert(code_dep.clone()); + } + if let Some(type_dep) = &dep.maybe_type { + deps.insert(type_dep.clone()); + } } - if let Some(type_dep) = &dep.maybe_type { - deps.insert(type_dep.clone()); + if let Some((_, types_dep)) = &module.maybe_types { + deps.insert(types_dep.clone()); } + let item = ModuleInfoMapItem { + deps: deps.into_iter().collect(), + size: module.size(), + }; + Some((specifier.clone(), item)) + } else { + None } - if let Some((_, types_dep)) = &module.maybe_types { - deps.insert(types_dep.clone()); - } - let item = ModuleInfoMapItem { - deps: deps.into_iter().collect(), - size: module.size(), - }; - (specifier.clone(), item) }) .collect(); ModuleInfoMap::new(map) } + /// Retrieve a map that contains a representation of each module in the graph + /// which can be used to provide code to a module loader without holding all + /// the state to be able to operate on the graph. 
+ pub fn get_loadable_modules( + &self, + ) -> HashMap> { + let mut loadable_modules: HashMap< + ModuleSpecifier, + Result, + > = self.modules.iter().map(to_module_result).collect(); + for (specifier, _) in self.redirects.iter() { + if let Some(module_slot) = + self.modules.get(self.resolve_specifier(specifier)) + { + let (_, result) = to_module_result((specifier, module_slot)); + loadable_modules.insert(specifier.clone(), result); + } + } + loadable_modules + } + pub fn get_media_type( &self, specifier: &ModuleSpecifier, ) -> Option { - if let Some(module) = self.get_module(specifier) { + if let ModuleSlot::Module(module) = self.get_module(specifier) { Some(module.media_type) } else { None } } - fn get_module(&self, specifier: &ModuleSpecifier) -> Option<&Module> { + fn get_module(&self, specifier: &ModuleSpecifier) -> &ModuleSlot { let s = self.resolve_specifier(specifier); - self.modules.get(s) + if let Some(module_slot) = self.modules.get(s) { + module_slot + } else { + &ModuleSlot::None + } } fn get_module_mut( &mut self, specifier: &ModuleSpecifier, - ) -> Option<&mut Module> { + ) -> Option<&mut ModuleSlot> { // this is duplicated code because `.resolve_specifier` requires an // immutable borrow, but if `.resolve_specifier` is mut, then everything // that calls it is is mut @@ -1174,12 +1269,14 @@ impl Graph { // files will not get emitted. To counter act that behavior, we will // include all modules that are emittable. let mut specifiers = HashSet::<&ModuleSpecifier>::new(); - for (_, module) in self.modules.iter() { - if module.media_type == MediaType::JSX - || module.media_type == MediaType::TypeScript - || module.media_type == MediaType::TSX - { - specifiers.insert(&module.specifier); + for (_, module_slot) in self.modules.iter() { + if let ModuleSlot::Module(module) = module_slot { + if module.media_type == MediaType::JSX + || module.media_type == MediaType::TypeScript + || module.media_type == MediaType::TSX + { + specifiers.insert(&module.specifier); + } } } // We should include all the original roots as well. @@ -1196,7 +1293,12 @@ impl Graph { // if the root module has a types specifier, we should be sending that // to tsc instead of the original specifier let specifier = self.resolve_specifier(ms); - let module = self.get_module(specifier).unwrap(); + let module = + if let ModuleSlot::Module(module) = self.get_module(specifier) { + module + } else { + panic!("missing module"); + }; let specifier = if let Some((_, types_specifier)) = &module.maybe_types { self.resolve_specifier(types_specifier) @@ -1216,7 +1318,7 @@ impl Graph { /// Get the source for a given module specifier. If the module is not part /// of the graph, the result will be `None`. 
pub fn get_source(&self, specifier: &ModuleSpecifier) -> Option { - if let Some(module) = self.get_module(specifier) { + if let ModuleSlot::Module(module) = self.get_module(specifier) { Some(module.source.clone()) } else { None @@ -1232,7 +1334,11 @@ impl Graph { } let module = self.roots[0].clone(); - let m = self.get_module(&module).unwrap(); + let m = if let ModuleSlot::Module(module) = self.get_module(&module) { + module + } else { + return Err(GraphError::MissingSpecifier(module.clone()).into()); + }; let mut seen = HashSet::new(); let mut totals = HashMap::new(); @@ -1247,9 +1353,19 @@ impl Graph { (None, None) }; + let dep_count = self + .modules + .iter() + .filter_map(|(_, m)| match m { + ModuleSlot::Module(_) => Some(1), + _ => None, + }) + .count() + - 1; + Ok(ModuleGraphInfo { compiled, - dep_count: self.modules.len() - 1, + dep_count, file_type: m.media_type, files, info, @@ -1267,15 +1383,19 @@ impl Graph { let check_js = config.get_check_js(); let config = config.as_bytes(); self.modules.iter().all(|(_, m)| { - let needs_emit = match m.media_type { - MediaType::TypeScript | MediaType::TSX | MediaType::JSX => true, - MediaType::JavaScript => check_js, - _ => false, - }; - if needs_emit { - m.is_emit_valid(&config) + if let ModuleSlot::Module(m) = m { + let needs_emit = match m.media_type { + MediaType::TypeScript | MediaType::TSX | MediaType::JSX => true, + MediaType::JavaScript => check_js, + _ => false, + }; + if needs_emit { + m.is_emit_valid(&config) + } else { + true + } } else { - true + false } }) } @@ -1286,15 +1406,17 @@ impl Graph { pub fn lock(&self) { if let Some(lf) = self.maybe_lockfile.as_ref() { let mut lockfile = lf.lock().unwrap(); - for (ms, module) in self.modules.iter() { - let specifier = module.specifier.to_string(); - let valid = lockfile.check_or_insert(&specifier, &module.source); - if !valid { - eprintln!( - "{}", - GraphError::InvalidSource(ms.clone(), lockfile.filename.clone()) - ); - std::process::exit(10); + for (ms, module_slot) in self.modules.iter() { + if let ModuleSlot::Module(module) = module_slot { + let specifier = module.specifier.to_string(); + let valid = lockfile.check_or_insert(&specifier, &module.source); + if !valid { + eprintln!( + "{}", + GraphError::InvalidSource(ms.clone(), lockfile.filename.clone()) + ); + std::process::exit(10); + } } } } @@ -1305,9 +1427,12 @@ impl Graph { /// checked to determine if it is valid. 
fn needs_emit(&self, config: &TsConfig) -> bool { let check_js = config.get_check_js(); - self.modules.iter().any(|(_, m)| match m.media_type { - MediaType::TypeScript | MediaType::TSX | MediaType::JSX => true, - MediaType::JavaScript => check_js, + self.modules.iter().any(|(_, m)| match m { + ModuleSlot::Module(m) => match m.media_type { + MediaType::TypeScript | MediaType::TSX | MediaType::JSX => true, + MediaType::JavaScript => check_js, + _ => false, + }, _ => false, }) } @@ -1332,10 +1457,11 @@ impl Graph { referrer: &ModuleSpecifier, prefer_types: bool, ) -> Result { - if !self.contains_module(referrer) { - return Err(GraphError::MissingSpecifier(referrer.to_owned()).into()); - } - let module = self.get_module(referrer).unwrap(); + let module = if let ModuleSlot::Module(module) = self.get_module(referrer) { + module + } else { + return Err(GraphError::MissingSpecifier(referrer.clone()).into()); + }; if !module.dependencies.contains_key(specifier) { return Err( GraphError::MissingDependency( @@ -1363,7 +1489,11 @@ impl Graph { .into(), ); }; - if !self.contains_module(&resolved_specifier) { + let dep_module = if let ModuleSlot::Module(dep_module) = + self.get_module(&resolved_specifier) + { + dep_module + } else { return Err( GraphError::MissingDependency( referrer.to_owned(), @@ -1371,8 +1501,7 @@ impl Graph { ) .into(), ); - } - let dep_module = self.get_module(&resolved_specifier).unwrap(); + }; // In the case that there is a X-TypeScript-Types or a triple-slash types, // then the `maybe_types` specifier will be populated and we should use that // instead. @@ -1424,7 +1553,7 @@ impl Graph { pub fn transpile( &mut self, options: TranspileOptions, - ) -> Result<(Stats, Option), AnyError> { + ) -> Result { let start = Instant::now(); let mut ts_config = TsConfig::new(json!({ @@ -1443,37 +1572,39 @@ impl Graph { let mut emit_count: u128 = 0; let config = ts_config.as_bytes(); - for (_, module) in self.modules.iter_mut() { - // TODO(kitsonk) a lot of this logic should be refactored into `Module` as - // we start to support other methods on the graph. Especially managing - // the dirty state is something the module itself should "own". - - // if the module is a Dts file we should skip it - if module.media_type == MediaType::Dts { - continue; - } - // if we don't have check_js enabled, we won't touch non TypeScript or JSX - // modules - if !(emit_options.check_js - || module.media_type == MediaType::JSX - || module.media_type == MediaType::TSX - || module.media_type == MediaType::TypeScript) - { - continue; - } - // skip modules that already have a valid emit - if !options.reload && module.is_emit_valid(&config) { - continue; - } - if module.maybe_parsed_module.is_none() { - module.parse()?; + for (_, module_slot) in self.modules.iter_mut() { + if let ModuleSlot::Module(module) = module_slot { + // TODO(kitsonk) a lot of this logic should be refactored into `Module` as + // we start to support other methods on the graph. Especially managing + // the dirty state is something the module itself should "own". 
+ + // if the module is a Dts file we should skip it + if module.media_type == MediaType::Dts { + continue; + } + // if we don't have check_js enabled, we won't touch non TypeScript or JSX + // modules + if !(emit_options.check_js + || module.media_type == MediaType::JSX + || module.media_type == MediaType::TSX + || module.media_type == MediaType::TypeScript) + { + continue; + } + // skip modules that already have a valid emit + if !options.reload && module.is_emit_valid(&config) { + continue; + } + if module.maybe_parsed_module.is_none() { + module.parse()?; + } + let parsed_module = module.maybe_parsed_module.clone().unwrap(); + let emit = parsed_module.transpile(&emit_options)?; + emit_count += 1; + module.maybe_emit = Some(Emit::Cli(emit)); + module.set_version(&config); + module.is_dirty = true; } - let parsed_module = module.maybe_parsed_module.clone().unwrap(); - let emit = parsed_module.transpile(&emit_options)?; - emit_count += 1; - module.maybe_emit = Some(Emit::Cli(emit)); - module.set_version(&config); - module.is_dirty = true; } self.flush()?; @@ -1483,7 +1614,12 @@ impl Graph { ("Total time".to_string(), start.elapsed().as_millis()), ]); - Ok((stats, maybe_ignored_options)) + Ok(ResultInfo { + diagnostics: Default::default(), + loadable_modules: self.get_loadable_modules(), + maybe_ignored_options, + stats, + }) } } @@ -1510,7 +1646,6 @@ impl swc_bundler::Resolve for Graph { /// A structure for building a dependency graph of modules. pub struct GraphBuilder { - fetched: HashSet, graph: Graph, maybe_import_map: Option>>, pending: FuturesUnordered, @@ -1529,7 +1664,6 @@ impl GraphBuilder { }; GraphBuilder { graph: Graph::new(handler, maybe_lockfile), - fetched: HashSet::new(), maybe_import_map: internal_import_map, pending: FuturesUnordered::new(), } @@ -1543,12 +1677,22 @@ impl GraphBuilder { specifier: &ModuleSpecifier, is_dynamic: bool, ) -> Result<(), AnyError> { - self.fetch(specifier, &None, is_dynamic)?; + self.fetch(specifier, &None, is_dynamic); loop { - let cached_module = self.pending.next().await.unwrap()?; - let is_root = &cached_module.specifier == specifier; - self.visit(cached_module, is_root)?; + match self.pending.next().await { + Some(Err((specifier, err))) => { + self + .graph + .modules + .insert(specifier, ModuleSlot::Err(Rc::new(err))); + } + Some(Ok(cached_module)) => { + let is_root = &cached_module.specifier == specifier; + self.visit(cached_module, is_root)?; + } + _ => {} + } if self.pending.is_empty() { break; } @@ -1573,20 +1717,19 @@ impl GraphBuilder { specifier: &ModuleSpecifier, maybe_referrer: &Option, is_dynamic: bool, - ) -> Result<(), AnyError> { - if self.fetched.contains(&specifier) { - return Ok(()); + ) { + if !self.graph.modules.contains_key(&specifier) { + self + .graph + .modules + .insert(specifier.clone(), ModuleSlot::Pending); + let future = self.graph.handler.borrow_mut().fetch( + specifier.clone(), + maybe_referrer.clone(), + is_dynamic, + ); + self.pending.push(future); } - - self.fetched.insert(specifier.clone()); - let future = self.graph.handler.borrow_mut().fetch( - specifier.clone(), - maybe_referrer.clone(), - is_dynamic, - ); - self.pending.push(future); - - Ok(()) } /// Visit a module that has been fetched, hydrating the module, analyzing its @@ -1632,14 +1775,14 @@ impl GraphBuilder { for (_, dep) in module.dependencies.iter() { let maybe_referrer = Some(dep.location.clone()); if let Some(specifier) = dep.maybe_code.as_ref() { - self.fetch(specifier, &maybe_referrer, dep.is_dynamic)?; + self.fetch(specifier, 
&maybe_referrer, dep.is_dynamic); } if let Some(specifier) = dep.maybe_type.as_ref() { - self.fetch(specifier, &maybe_referrer, dep.is_dynamic)?; + self.fetch(specifier, &maybe_referrer, dep.is_dynamic); } } if let Some((_, specifier)) = module.maybe_types.as_ref() { - self.fetch(specifier, &None, false)?; + self.fetch(specifier, &None, false); } if specifier != requested_specifier { self @@ -1647,7 +1790,10 @@ impl GraphBuilder { .redirects .insert(requested_specifier, specifier.clone()); } - self.graph.modules.insert(specifier, module); + self + .graph + .modules + .insert(specifier, ModuleSlot::Module(Box::new(module))); Ok(()) } @@ -1702,7 +1848,7 @@ pub mod tests { fn get_cache( &self, specifier: ModuleSpecifier, - ) -> Result { + ) -> Result { let specifier_text = specifier .to_string() .replace(":///", "_") @@ -1710,7 +1856,8 @@ pub mod tests { .replace("/", "-"); let source_path = self.fixtures.join(specifier_text); let media_type = MediaType::from(&source_path); - let source = fs::read_to_string(&source_path)?; + let source = fs::read_to_string(&source_path) + .map_err(|err| (specifier.clone(), err.into()))?; let is_remote = specifier.as_url().scheme() != "file"; Ok(CachedModule { @@ -2280,10 +2427,9 @@ pub mod tests { ModuleSpecifier::resolve_url_or_path("file:///tests/main.ts") .expect("could not resolve module"); let (mut graph, handler) = setup(specifier).await; - let (stats, maybe_ignored_options) = - graph.transpile(TranspileOptions::default()).unwrap(); - assert_eq!(stats.0.len(), 3); - assert_eq!(maybe_ignored_options, None); + let result_info = graph.transpile(TranspileOptions::default()).unwrap(); + assert_eq!(result_info.stats.0.len(), 3); + assert_eq!(result_info.maybe_ignored_options, None); let h = handler.borrow(); assert_eq!(h.cache_calls.len(), 2); match &h.cache_calls[0].1 { @@ -2334,7 +2480,7 @@ pub mod tests { ModuleSpecifier::resolve_url_or_path("https://deno.land/x/transpile.tsx") .expect("could not resolve module"); let (mut graph, handler) = setup(specifier).await; - let (_, maybe_ignored_options) = graph + let result_info = graph .transpile(TranspileOptions { debug: false, maybe_config_path: Some("tests/module_graph/tsconfig.json".to_string()), @@ -2342,7 +2488,7 @@ pub mod tests { }) .unwrap(); assert_eq!( - maybe_ignored_options.unwrap().items, + result_info.maybe_ignored_options.unwrap().items, vec!["target".to_string()], "the 'target' options should have been ignored" ); diff --git a/cli/module_loader.rs b/cli/module_loader.rs index da75b8510a5802..aab951c4a5fa1e 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -94,26 +94,14 @@ impl ModuleLoader for CliModuleLoader { maybe_referrer: Option, _is_dynamic: bool, ) -> Pin> { - let module_specifier = module_specifier.to_owned(); - let module_url_specified = module_specifier.to_string(); + let module_specifier = module_specifier.clone(); let program_state = self.program_state.clone(); // NOTE: this block is async only because of `deno_core` // interface requirements; module was already loaded // when constructing module graph during call to `prepare_load`. - let fut = async move { - let compiled_module = program_state - .fetch_compiled_module(module_specifier, maybe_referrer)?; - Ok(deno_core::ModuleSource { - // Real module name, might be different from initial specifier - // due to redirections. 
- code: compiled_module.code, - module_url_specified, - module_url_found: compiled_module.name, - }) - }; - - fut.boxed_local() + async move { program_state.load(module_specifier, maybe_referrer) } + .boxed_local() } fn prepare_load( diff --git a/cli/program_state.rs b/cli/program_state.rs index 223d043bae17ee..008244b5f6c666 100644 --- a/cli/program_state.rs +++ b/cli/program_state.rs @@ -8,7 +8,6 @@ use crate::http_cache; use crate::http_util; use crate::import_map::ImportMap; use crate::lockfile::Lockfile; -use crate::media_type::MediaType; use crate::module_graph::CheckOptions; use crate::module_graph::GraphBuilder; use crate::module_graph::TranspileOptions; @@ -18,11 +17,14 @@ use crate::specifier_handler::FetchHandler; use deno_runtime::inspector::InspectorServer; use deno_runtime::permissions::Permissions; -use deno_core::error::generic_error; +use deno_core::error::anyhow; +use deno_core::error::get_custom_error_class; use deno_core::error::AnyError; use deno_core::url::Url; +use deno_core::ModuleSource; use deno_core::ModuleSpecifier; use std::cell::RefCell; +use std::collections::HashMap; use std::env; use std::rc::Rc; use std::sync::Arc; @@ -36,12 +38,6 @@ pub fn exit_unstable(api_name: &str) { std::process::exit(70); } -// TODO(@kitsonk) probably can refactor this better with the graph. -pub struct CompiledModule { - pub code: String, - pub name: String, -} - /// This structure represents state of single "deno" program. /// /// It is shared by all created workers (thus V8 isolates). @@ -50,6 +46,8 @@ pub struct ProgramState { pub flags: flags::Flags, pub dir: deno_dir::DenoDir, pub file_fetcher: FileFetcher, + pub modules: + Arc>>>, pub lockfile: Option>>, pub maybe_import_map: Option, pub maybe_inspector_server: Option>, @@ -111,6 +109,7 @@ impl ProgramState { dir, flags, file_fetcher, + modules: Default::default(), lockfile, maybe_import_map, maybe_inspector_server, @@ -146,17 +145,17 @@ impl ProgramState { let debug = self.flags.log_level == Some(log::Level::Debug); let maybe_config_path = self.flags.config_path.clone(); - if self.flags.no_check { - let (stats, maybe_ignored_options) = - graph.transpile(TranspileOptions { - debug, - maybe_config_path, - reload: self.flags.reload, - })?; - debug!("{}", stats); - if let Some(ignored_options) = maybe_ignored_options { - eprintln!("{}", ignored_options); + let result_modules = if self.flags.no_check { + let result_info = graph.transpile(TranspileOptions { + debug, + maybe_config_path, + reload: self.flags.reload, + })?; + debug!("{}", result_info.stats); + if let Some(ignored_options) = result_info.maybe_ignored_options { + warn!("{}", ignored_options); } + result_info.loadable_modules } else { let result_info = graph.check(CheckOptions { debug, @@ -171,10 +170,14 @@ impl ProgramState { eprintln!("{}", ignored_options); } if !result_info.diagnostics.is_empty() { - return Err(generic_error(result_info.diagnostics.to_string())); + return Err(anyhow!(result_info.diagnostics)); } + result_info.loadable_modules }; + let mut loadable_modules = self.modules.lock().unwrap(); + loadable_modules.extend(result_modules); + if let Some(ref lockfile) = self.lockfile { let g = lockfile.lock().unwrap(); g.write()?; @@ -183,56 +186,55 @@ impl ProgramState { Ok(()) } - pub fn fetch_compiled_module( + pub fn load( &self, - module_specifier: ModuleSpecifier, + specifier: ModuleSpecifier, maybe_referrer: Option, - ) -> Result { - // TODO(@kitsonk) this really needs to be avoided and refactored out, as we - // really should just be getting this from 
the module graph. - let out = self - .file_fetcher - .get_source(&module_specifier) - .expect("Cached source file doesn't exist"); - - let specifier = out.specifier.clone(); - let compiled_module = if let Some((code, _)) = - self.get_emit(&specifier.as_url()) - { - CompiledModule { - code: String::from_utf8(code).unwrap(), - name: specifier.as_url().to_string(), - } - // We expect a compiled source for any non-JavaScript files, except for - // local files that have an unknown media type and no referrer (root modules - // that do not have an extension.) - } else if out.media_type != MediaType::JavaScript - && !(out.media_type == MediaType::Unknown - && maybe_referrer.is_none() - && specifier.as_url().scheme() == "file") - { - let message = if let Some(referrer) = maybe_referrer { - format!("Compiled module not found \"{}\"\n From: {}\n If the source module contains only types, use `import type` and `export type` to import it instead.", module_specifier, referrer) - } else { - format!("Compiled module not found \"{}\"\n If the source module contains only types, use `import type` and `export type` to import it instead.", module_specifier) - }; - info!("{}: {}", crate::colors::yellow("warning"), message); - CompiledModule { - code: "".to_string(), - name: specifier.as_url().to_string(), - } - } else { - CompiledModule { - code: out.source, - name: specifier.as_url().to_string(), - } - }; - - Ok(compiled_module) + ) -> Result { + let modules = self.modules.lock().unwrap(); + modules + .get(&specifier) + .map(|r| match r { + Ok(module_source) => Ok(module_source.clone()), + Err(err) => { + // TODO(@kitsonk) this feels a bit hacky but it works, without + // introducing another enum to have to try to deal with. + if get_custom_error_class(err) == Some("NotFound") { + let message = if let Some(referrer) = &maybe_referrer { + format!("{}\n From: {}\n If the source module contains only types, use `import type` and `export type` to import it instead.", err, referrer) + } else { + format!("{}\n If the source module contains only types, use `import type` and `export type` to import it instead.", err) + }; + warn!("{}: {}", crate::colors::yellow("warning"), message); + Ok(ModuleSource { + code: "".to_string(), + module_url_found: specifier.to_string(), + module_url_specified: specifier.to_string(), + }) + } else { + // anyhow errors don't support cloning, so we have to manage this + // ourselves + Err(anyhow!(err.to_string())) + } + }, + }) + .unwrap_or_else(|| { + if let Some(referrer) = maybe_referrer { + Err(anyhow!( + "Module \"{}\" is missing from the graph.\n From: {}", + specifier, + referrer + )) + } else { + Err(anyhow!( + "Module \"{}\" is missing from the graph.", + specifier + )) + } + }) } - // TODO(@kitsonk) this should be a straight forward API on file_fetcher or - // whatever future refactors do... 
+ // TODO(@kitsonk) this should be refactored to get it from the module graph fn get_emit(&self, url: &Url) -> Option<(Vec, Option>)> { match url.scheme() { // we should only be looking for emits for schemes that denote external @@ -245,11 +247,11 @@ impl ProgramState { let emit_path = self .dir .gen_cache - .get_cache_filename_with_extension(&url, "js"); + .get_cache_filename_with_extension(&url, "js")?; let emit_map_path = self .dir .gen_cache - .get_cache_filename_with_extension(&url, "js.map"); + .get_cache_filename_with_extension(&url, "js.map")?; if let Ok(code) = self.dir.gen_cache.get(&emit_path) { let maybe_map = if let Ok(map) = self.dir.gen_cache.get(&emit_map_path) { Some(map) diff --git a/cli/specifier_handler.rs b/cli/specifier_handler.rs index 02a1196d3a8e79..752316467502b8 100644 --- a/cli/specifier_handler.rs +++ b/cli/specifier_handler.rs @@ -25,8 +25,12 @@ use std::pin::Pin; use std::sync::Arc; pub type DependencyMap = HashMap; -pub type FetchFuture = - Pin> + 'static)>>; +pub type FetchFuture = Pin< + Box< + (dyn Future> + + 'static), + >, +>; /// A group of errors that represent errors that can occur with an /// an implementation of `SpecifierHandler`. @@ -287,23 +291,30 @@ impl SpecifierHandler for FetchHandler { // they cannot actually get to the source code that is quoted, as // it only exists in the runtime memory of Deno. if !location.filename.contains("$deno$") { - HandlerError::FetchErrorWithLocation(err.to_string(), location) - .into() + ( + requested_specifier.clone(), + HandlerError::FetchErrorWithLocation(err.to_string(), location) + .into(), + ) } else { - err + (requested_specifier.clone(), err) } } else { - err + (requested_specifier.clone(), err) } })?; let url = source_file.specifier.as_url(); let is_remote = url.scheme() != "file"; let filename = disk_cache.get_cache_filename_with_extension(url, "meta"); - let maybe_version = if let Ok(bytes) = disk_cache.get(&filename) { - if let Ok(compiled_file_metadata) = - CompiledFileMetadata::from_bytes(&bytes) - { - Some(compiled_file_metadata.version_hash) + let maybe_version = if let Some(filename) = filename { + if let Ok(bytes) = disk_cache.get(&filename) { + if let Ok(compiled_file_metadata) = + CompiledFileMetadata::from_bytes(&bytes) + { + Some(compiled_file_metadata.version_hash) + } else { + None + } } else { None } @@ -314,19 +325,26 @@ impl SpecifierHandler for FetchHandler { let mut maybe_map_path = None; let map_path = disk_cache.get_cache_filename_with_extension(&url, "js.map"); - let maybe_map = if let Ok(map) = disk_cache.get(&map_path) { - maybe_map_path = Some(disk_cache.location.join(map_path)); - Some(String::from_utf8(map)?) 
+ let maybe_map = if let Some(map_path) = map_path { + if let Ok(map) = disk_cache.get(&map_path) { + maybe_map_path = Some(disk_cache.location.join(map_path)); + Some(String::from_utf8(map).unwrap()) + } else { + None + } } else { None }; let mut maybe_emit = None; let mut maybe_emit_path = None; let emit_path = disk_cache.get_cache_filename_with_extension(&url, "js"); - if let Ok(code) = disk_cache.get(&emit_path) { - maybe_emit = Some(Emit::Cli((String::from_utf8(code)?, maybe_map))); - maybe_emit_path = - Some((disk_cache.location.join(emit_path), maybe_map_path)); + if let Some(emit_path) = emit_path { + if let Ok(code) = disk_cache.get(&emit_path) { + maybe_emit = + Some(Emit::Cli((String::from_utf8(code).unwrap(), maybe_map))); + maybe_emit_path = + Some((disk_cache.location.join(emit_path), maybe_map_path)); + } }; Ok(CachedModule { @@ -353,8 +371,12 @@ impl SpecifierHandler for FetchHandler { let filename = self .disk_cache .get_cache_filename_with_extension(specifier.as_url(), "buildinfo"); - if let Ok(tsbuildinfo) = self.disk_cache.get(&filename) { - Ok(Some(String::from_utf8(tsbuildinfo)?)) + if let Some(filename) = filename { + if let Ok(tsbuildinfo) = self.disk_cache.get(&filename) { + Ok(Some(String::from_utf8(tsbuildinfo)?)) + } else { + Ok(None) + } } else { Ok(None) } @@ -367,7 +389,8 @@ impl SpecifierHandler for FetchHandler { ) -> Result<(), AnyError> { let filename = self .disk_cache - .get_cache_filename_with_extension(specifier.as_url(), "buildinfo"); + .get_cache_filename_with_extension(specifier.as_url(), "buildinfo") + .unwrap(); debug!("set_tsbuildinfo - filename {:?}", filename); self .disk_cache @@ -383,14 +406,17 @@ impl SpecifierHandler for FetchHandler { match emit { Emit::Cli((code, maybe_map)) => { let url = specifier.as_url(); - let filename = - self.disk_cache.get_cache_filename_with_extension(url, "js"); + let filename = self + .disk_cache + .get_cache_filename_with_extension(url, "js") + .unwrap(); self.disk_cache.set(&filename, code.as_bytes())?; if let Some(map) = maybe_map { let filename = self .disk_cache - .get_cache_filename_with_extension(url, "js.map"); + .get_cache_filename_with_extension(url, "js.map") + .unwrap(); self.disk_cache.set(&filename, map.as_bytes())?; } } @@ -425,7 +451,8 @@ impl SpecifierHandler for FetchHandler { let compiled_file_metadata = CompiledFileMetadata { version_hash }; let filename = self .disk_cache - .get_cache_filename_with_extension(specifier.as_url(), "meta"); + .get_cache_filename_with_extension(specifier.as_url(), "meta") + .unwrap(); self .disk_cache @@ -475,9 +502,12 @@ impl SpecifierHandler for MemoryHandler { ..Default::default() }) } else { - Err(custom_error( - "NotFound", - format!("Unable to find specifier in sources: {}", specifier), + Err(( + specifier.clone(), + custom_error( + "NotFound", + format!("Unable to find specifier in sources: {}", specifier), + ), )) }; diff --git a/cli/tests/dynamic_import/b.js b/cli/tests/dynamic_import/b.js new file mode 100644 index 00000000000000..6ea50d36080e5b --- /dev/null +++ b/cli/tests/dynamic_import/b.js @@ -0,0 +1,2 @@ +import "./bad.mjs"; +export default () => "error"; diff --git a/cli/tests/dynamic_import/c.js b/cli/tests/dynamic_import/c.js new file mode 100644 index 00000000000000..20546455ea7d9e --- /dev/null +++ b/cli/tests/dynamic_import/c.js @@ -0,0 +1,2 @@ +await import("./bad2.mjs"); +export default () => "error"; diff --git a/cli/tests/error_004_missing_module.ts b/cli/tests/error_004_missing_module.ts index 24ae52cf72c804..ab5350408ccb62 100644 --- 
a/cli/tests/error_004_missing_module.ts +++ b/cli/tests/error_004_missing_module.ts @@ -1,2 +1,4 @@ // eslint-disable-next-line import * as badModule from "./bad-module.ts"; + +console.log(badModule); diff --git a/cli/tests/error_005_missing_dynamic_import.ts.out b/cli/tests/error_005_missing_dynamic_import.ts.out index 79c61e7c898776..bffa9079618348 100644 --- a/cli/tests/error_005_missing_dynamic_import.ts.out +++ b/cli/tests/error_005_missing_dynamic_import.ts.out @@ -1,2 +1 @@ -error: Cannot resolve module "[WILDCARD]/bad-module.ts" from "[WILDCARD]/error_005_missing_dynamic_import.ts". - at file:///[WILDCARD]/error_005_missing_dynamic_import.ts:3:26 +error: Uncaught (in promise) TypeError: Cannot resolve module "[WILDCARD]/cli/tests/bad-module.ts". diff --git a/cli/tests/error_015_dynamic_import_permissions.out b/cli/tests/error_015_dynamic_import_permissions.out index b385da0d45c32d..577dbcc5ce8db0 100644 --- a/cli/tests/error_015_dynamic_import_permissions.out +++ b/cli/tests/error_015_dynamic_import_permissions.out @@ -1,2 +1 @@ -error: network access to "http://localhost:4545/cli/tests/subdir/mod4.js", run again with the --allow-net flag - at file:///[WILDCARD]cli/tests/error_015_dynamic_import_permissions.js:[WILDCARD] +error: Uncaught (in promise) TypeError: network access to "http://localhost:4545/cli/tests/subdir/mod4.js", run again with the --allow-net flag diff --git a/cli/tests/fix_dynamic_import_errors.js b/cli/tests/fix_dynamic_import_errors.js new file mode 100644 index 00000000000000..317047ccb5deb1 --- /dev/null +++ b/cli/tests/fix_dynamic_import_errors.js @@ -0,0 +1,7 @@ +import("./dynamic_import/b.js").catch(() => { + console.log("caught import error from b.js"); +}); + +import("./dynamic_import/c.js").catch(() => { + console.log("caught import error from c.js"); +}); diff --git a/cli/tests/fix_dynamic_import_errors.js.out b/cli/tests/fix_dynamic_import_errors.js.out new file mode 100644 index 00000000000000..e7856fb9ce2a19 --- /dev/null +++ b/cli/tests/fix_dynamic_import_errors.js.out @@ -0,0 +1,2 @@ +caught import error from [WILDCARD].js +caught import error from [WILDCARD].js diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index e4de423ee06b36..2f23e2dee62247 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -3227,6 +3227,11 @@ itest!(tsx_imports { output: "tsx_imports.ts.out", }); +itest!(fix_dynamic_import_errors { + args: "run --reload fix_dynamic_import_errors.js", + output: "fix_dynamic_import_errors.js.out", +}); + itest!(fix_emittable_skipped { args: "run --reload fix_emittable_skipped.js", output: "fix_emittable_skipped.ts.out", diff --git a/cli/tests/redirect_cache.out b/cli/tests/redirect_cache.out index ad26d01086eb20..609e523f7fec76 100644 --- a/cli/tests/redirect_cache.out +++ b/cli/tests/redirect_cache.out @@ -2,5 +2,4 @@ Download http://localhost:4548/cli/tests/subdir/redirects/a.ts Download http://localhost:4546/cli/tests/subdir/redirects/a.ts Download http://localhost:4545/cli/tests/subdir/redirects/a.ts Download http://localhost:4545/cli/tests/subdir/redirects/b.ts -Download http://localhost:4545/cli/tests/subdir/redirects/a.ts Check http://localhost:4548/cli/tests/subdir/redirects/a.ts diff --git a/cli/tests/unsupported_dynamic_import_scheme.out b/cli/tests/unsupported_dynamic_import_scheme.out index 0161b7a99f7fa9..434f14c4c1569e 100644 --- a/cli/tests/unsupported_dynamic_import_scheme.out +++ b/cli/tests/unsupported_dynamic_import_scheme.out @@ -1,4 +1,4 @@ -error: Unsupported 
scheme "xxx" for module "xxx:". Supported schemes: [ +error: Uncaught (in promise) TypeError: Unsupported scheme "xxx" for module "xxx:". Supported schemes: [ "http", "https", "file", diff --git a/core/modules.rs b/core/modules.rs index 6f330f55949b54..546f2464f89400 100644 --- a/core/modules.rs +++ b/core/modules.rs @@ -43,7 +43,7 @@ pub type ModuleLoadId = i32; // that happened; not only first and final target. It would simplify a lot // of things throughout the codebase otherwise we may end up requesting // intermediate redirects from file loader. -#[derive(Debug, Eq, PartialEq)] +#[derive(Debug, Clone, Eq, PartialEq)] pub struct ModuleSource { pub code: String, pub module_url_specified: String, From a5a151389e58f2715d8afe3fd4a8009979943ddc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=BF=B7=E6=B8=A1?= Date: Tue, 15 Dec 2020 17:56:41 +0800 Subject: [PATCH 073/135] fix std version (#8773) --- Releases.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Releases.md b/Releases.md index 57e97625c40f3e..7127d03718cd39 100644 --- a/Releases.md +++ b/Releases.md @@ -18,7 +18,7 @@ https://github.com/denoland/deno_install - refactor: deno_runtime crate (#8640) - upgrade: swc_ecmascript to 0.15.0 (#8688) -Changes in std version 0.80.0: +Changes in std version 0.81.0: - fix(std/datetime): partsToDate (#8553) - fix(std/wasi): disallow multiple starts (#8712) From 7a9766dd18cc85053c984cb991dc3debac92530c Mon Sep 17 00:00:00 2001 From: MVEMCJSUNPE <2frac.d.x@gmail.com> Date: Tue, 15 Dec 2020 04:13:22 -0600 Subject: [PATCH 074/135] feat(std/node): Added os.type (#8591) --- std/node/os.ts | 13 +++++++++++-- std/node/os_test.ts | 14 +++++++------- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/std/node/os.ts b/std/node/os.ts index bec3de5ee16d8c..776eff92d6ef10 100644 --- a/std/node/os.ts +++ b/std/node/os.ts @@ -190,9 +190,18 @@ export function totalmem(): number { return Deno.systemMemoryInfo().total; } -/** Not yet implemented */ +/** Returns operating system type (i.e. 'Windows_NT', 'Linux', 'Darwin') */ export function type(): string { - notImplemented(SEE_GITHUB_ISSUE); + switch (Deno.build.os) { + case "windows": + return "Windows_NT"; + case "linux": + return "Linux"; + case "darwin": + return "Darwin"; + default: + throw Error("unreachable"); + } } /** Not yet implemented */ diff --git a/std/node/os_test.ts b/std/node/os_test.ts index 8879f8345a109a..3200083a138d75 100644 --- a/std/node/os_test.ts +++ b/std/node/os_test.ts @@ -47,6 +47,13 @@ Deno.test({ }, }); +Deno.test({ + name: "type is a string", + fn() { + assertEquals(typeof os.type(), "string"); + }, +}); + Deno.test({ name: "getPriority(): PID must be a 32 bit integer", fn() { @@ -245,13 +252,6 @@ Deno.test({ Error, "Not implemented", ); - assertThrows( - () => { - os.type(); - }, - Error, - "Not implemented", - ); assertThrows( () => { os.uptime(); From 6356345365e766d984f591506fb475d7935685de Mon Sep 17 00:00:00 2001 From: Yusuke Tanaka Date: Tue, 15 Dec 2020 21:02:26 +0900 Subject: [PATCH 075/135] fix: make DNS resolution async (#8743) This commit adds a new function that is an asynchronous version of `resolve_addr` using `tokio::net::lookup_host`, and accordingly, renames the synchronous version to `resolve_addr_sync`. This allows async ops to resolve hosts with non-blocking. 
--- runtime/ops/net.rs | 7 +-- runtime/ops/tls.rs | 5 +- runtime/resolve_addr.rs | 110 +++++++++++++++++++++++++++++++--------- 3 files changed, 93 insertions(+), 29 deletions(-) diff --git a/runtime/ops/net.rs b/runtime/ops/net.rs index 98ff83fc023d63..8770ef103bb102 100644 --- a/runtime/ops/net.rs +++ b/runtime/ops/net.rs @@ -4,6 +4,7 @@ use crate::ops::io::StreamResource; use crate::ops::io::StreamResourceHolder; use crate::permissions::Permissions; use crate::resolve_addr::resolve_addr; +use crate::resolve_addr::resolve_addr_sync; use deno_core::error::bad_resource; use deno_core::error::bad_resource_id; use deno_core::error::custom_error; @@ -205,7 +206,7 @@ async fn op_datagram_send( s.borrow::() .check_net(&args.hostname, args.port)?; } - let addr = resolve_addr(&args.hostname, args.port)?; + let addr = resolve_addr(&args.hostname, args.port).await?; poll_fn(move |cx| { let mut state = state.borrow_mut(); let resource = state @@ -272,7 +273,7 @@ async fn op_connect( .borrow::() .check_net(&args.hostname, args.port)?; } - let addr = resolve_addr(&args.hostname, args.port)?; + let addr = resolve_addr(&args.hostname, args.port).await?; let tcp_stream = TcpStream::connect(&addr).await?; let local_addr = tcp_stream.local_addr()?; let remote_addr = tcp_stream.peer_addr()?; @@ -505,7 +506,7 @@ fn op_listen( } permissions.check_net(&args.hostname, args.port)?; } - let addr = resolve_addr(&args.hostname, args.port)?; + let addr = resolve_addr_sync(&args.hostname, args.port)?; let (rid, local_addr) = if transport == "tcp" { listen_tcp(state, addr)? } else { diff --git a/runtime/ops/tls.rs b/runtime/ops/tls.rs index 37fd8f206801da..b59650ab0ef07a 100644 --- a/runtime/ops/tls.rs +++ b/runtime/ops/tls.rs @@ -3,6 +3,7 @@ use super::io::{StreamResource, StreamResourceHolder}; use crate::permissions::Permissions; use crate::resolve_addr::resolve_addr; +use crate::resolve_addr::resolve_addr_sync; use deno_core::error::bad_resource; use deno_core::error::bad_resource_id; use deno_core::error::custom_error; @@ -160,7 +161,7 @@ async fn op_connect_tls( domain.push_str("localhost"); } - let addr = resolve_addr(&args.hostname, args.port)?; + let addr = resolve_addr(&args.hostname, args.port).await?; let tcp_stream = TcpStream::connect(&addr).await?; let local_addr = tcp_stream.local_addr()?; let remote_addr = tcp_stream.peer_addr()?; @@ -334,7 +335,7 @@ fn op_listen_tls( .set_single_cert(load_certs(&cert_file)?, load_keys(&key_file)?.remove(0)) .expect("invalid key or certificate"); let tls_acceptor = TlsAcceptor::from(Arc::new(config)); - let addr = resolve_addr(&args.hostname, args.port)?; + let addr = resolve_addr_sync(&args.hostname, args.port)?; let std_listener = std::net::TcpListener::bind(&addr)?; let listener = TcpListener::from_std(std_listener)?; let local_addr = listener.local_addr()?; diff --git a/runtime/resolve_addr.rs b/runtime/resolve_addr.rs index c3dc52f8fef3d5..d4f500802321ef 100644 --- a/runtime/resolve_addr.rs +++ b/runtime/resolve_addr.rs @@ -1,29 +1,45 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. use deno_core::error::AnyError; +use deno_core::error::Context; use std::net::SocketAddr; use std::net::ToSocketAddrs; +use tokio::net::lookup_host; -/// Resolve network address. Returns a future. -pub fn resolve_addr(hostname: &str, port: u16) -> Result { +/// Resolve network address *asynchronously*. 
+pub async fn resolve_addr( + hostname: &str, + port: u16, +) -> Result { + let addr_port_pair = make_addr_port_pair(hostname, port); + lookup_host(addr_port_pair) + .await? + .next() + .context("No resolved address found") +} + +/// Resolve network address *synchronously*. +pub fn resolve_addr_sync( + hostname: &str, + port: u16, +) -> Result { + let addr_port_pair = make_addr_port_pair(hostname, port); + addr_port_pair + .to_socket_addrs()? + .next() + .context("No resolved address found") +} + +fn make_addr_port_pair(hostname: &str, port: u16) -> (&str, u16) { // Default to localhost if given just the port. Example: ":80" - let addr: &str = if !hostname.is_empty() { - &hostname - } else { - "0.0.0.0" - }; + if hostname.is_empty() { + return ("0.0.0.0", port); + } // If this looks like an ipv6 IP address. Example: "[2001:db8::1]" // Then we remove the brackets. - let addr = if addr.starts_with('[') && addr.ends_with(']') { - let l = addr.len() - 1; - addr.get(1..l).unwrap() - } else { - addr - }; - let addr_port_pair = (addr, port); - let mut iter = addr_port_pair.to_socket_addrs()?; - Ok(iter.next().unwrap()) + let addr = hostname.trim_start_matches('[').trim_end_matches(']'); + (addr, port) } #[cfg(test)] @@ -34,39 +50,85 @@ mod tests { use std::net::SocketAddrV4; use std::net::SocketAddrV6; + #[tokio::test] + async fn resolve_addr1() { + let expected = + SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(127, 0, 0, 1), 80)); + let actual = resolve_addr("127.0.0.1", 80).await.unwrap(); + assert_eq!(actual, expected); + } + + #[tokio::test] + async fn resolve_addr2() { + let expected = + SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), 80)); + let actual = resolve_addr("", 80).await.unwrap(); + assert_eq!(actual, expected); + } + + #[tokio::test] + async fn resolve_addr3() { + let expected = + SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(192, 0, 2, 1), 25)); + let actual = resolve_addr("192.0.2.1", 25).await.unwrap(); + assert_eq!(actual, expected); + } + + #[tokio::test] + async fn resolve_addr_ipv6() { + let expected = SocketAddr::V6(SocketAddrV6::new( + Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 1), + 8080, + 0, + 0, + )); + let actual = resolve_addr("[2001:db8::1]", 8080).await.unwrap(); + assert_eq!(actual, expected); + } + + #[tokio::test] + async fn resolve_addr_err() { + assert!(resolve_addr("INVALID ADDR", 1234).await.is_err()); + } + #[test] - fn resolve_addr1() { + fn resolve_addr_sync1() { let expected = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(127, 0, 0, 1), 80)); - let actual = resolve_addr("127.0.0.1", 80).unwrap(); + let actual = resolve_addr_sync("127.0.0.1", 80).unwrap(); assert_eq!(actual, expected); } #[test] - fn resolve_addr2() { + fn resolve_addr_sync2() { let expected = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), 80)); - let actual = resolve_addr("", 80).unwrap(); + let actual = resolve_addr_sync("", 80).unwrap(); assert_eq!(actual, expected); } #[test] - fn resolve_addr3() { + fn resolve_addr_sync3() { let expected = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(192, 0, 2, 1), 25)); - let actual = resolve_addr("192.0.2.1", 25).unwrap(); + let actual = resolve_addr_sync("192.0.2.1", 25).unwrap(); assert_eq!(actual, expected); } #[test] - fn resolve_addr_ipv6() { + fn resolve_addr_sync_ipv6() { let expected = SocketAddr::V6(SocketAddrV6::new( Ipv6Addr::new(0x2001, 0xdb8, 0, 0, 0, 0, 0, 1), 8080, 0, 0, )); - let actual = resolve_addr("[2001:db8::1]", 8080).unwrap(); + let actual = resolve_addr_sync("[2001:db8::1]", 8080).unwrap(); 
assert_eq!(actual, expected); } + + #[test] + fn resolve_addr_sync_err() { + assert!(resolve_addr_sync("INVALID ADDR", 1234).is_err()); + } } From 892d6cc99749247884fdbe2d89ca13def3af8bdb Mon Sep 17 00:00:00 2001 From: Kitson Kelly Date: Wed, 16 Dec 2020 06:34:39 +1100 Subject: [PATCH 076/135] refactor(lsp): optimise static assets (#8771) Fixes #8158 --- cli/lsp/handlers.rs | 16 ++- cli/lsp/mod.rs | 6 +- cli/lsp/state.rs | 4 + cli/lsp/tsc.rs | 196 ++++++++++++++++++++---------------- cli/tsc.rs | 2 +- cli/tsc/99_main_compiler.js | 10 ++ cli/tsc/compiler.d.ts | 6 ++ 7 files changed, 142 insertions(+), 98 deletions(-) diff --git a/cli/lsp/handlers.rs b/cli/lsp/handlers.rs index ccda69f7d5a058..69cdd8041f1912 100644 --- a/cli/lsp/handlers.rs +++ b/cli/lsp/handlers.rs @@ -31,8 +31,11 @@ fn get_line_index( specifier: &ModuleSpecifier, ) -> Result, AnyError> { let line_index = if specifier.as_url().scheme() == "asset" { - if let Some(source) = tsc::get_asset(specifier.as_url().path()) { - text::index_lines(source) + let server_state = state.snapshot(); + if let Some(source) = + tsc::get_asset(specifier, &mut state.ts_runtime, &server_state)? + { + text::index_lines(&source) } else { return Err(custom_error( "NotFound", @@ -256,7 +259,7 @@ pub fn handle_references( } pub fn handle_virtual_text_document( - state: ServerStateSnapshot, + state: &mut ServerState, params: lsp_extensions::VirtualTextDocumentParams, ) -> Result { let specifier = utils::normalize_url(params.text_document.uri); @@ -274,8 +277,11 @@ pub fn handle_virtual_text_document( } else { match url.scheme() { "asset" => { - if let Some(text) = tsc::get_asset(url.path()) { - text.to_string() + let server_state = state.snapshot(); + if let Some(text) = + tsc::get_asset(&specifier, &mut state.ts_runtime, &server_state)? + { + text } else { error!("Missing asset: {}", specifier); "".to_string() diff --git a/cli/lsp/mod.rs b/cli/lsp/mod.rs index 784f3503dd5b8b..0f83e4ab2c178c 100644 --- a/cli/lsp/mod.rs +++ b/cli/lsp/mod.rs @@ -408,10 +408,10 @@ impl ServerState { .on_sync::(handlers::handle_hover)? .on_sync::(handlers::handle_completion)? .on_sync::(handlers::handle_references)? - .on::(handlers::handle_formatting) - .on::( + .on_sync::( handlers::handle_virtual_text_document, - ) + )? + .on::(handlers::handle_formatting) .finish(); Ok(()) diff --git a/cli/lsp/state.rs b/cli/lsp/state.rs index 579a749f6bc4d2..ceb4325a193eb7 100644 --- a/cli/lsp/state.rs +++ b/cli/lsp/state.rs @@ -192,6 +192,7 @@ impl DocumentData { /// An immutable snapshot of the server state at a point in time. 
#[derive(Debug, Clone, Default)] pub struct ServerStateSnapshot { + pub assets: Arc>>>, pub config: Config, pub diagnostics: DiagnosticCollection, pub doc_data: HashMap, @@ -200,6 +201,7 @@ pub struct ServerStateSnapshot { } pub struct ServerState { + pub assets: Arc>>>, pub config: Config, pub diagnostics: DiagnosticCollection, pub doc_data: HashMap, @@ -230,6 +232,7 @@ impl ServerState { let ts_runtime = tsc::start(false).expect("could not start tsc"); Self { + assets: Default::default(), config, diagnostics: Default::default(), doc_data: Default::default(), @@ -315,6 +318,7 @@ impl ServerState { pub fn snapshot(&self) -> ServerStateSnapshot { ServerStateSnapshot { + assets: Arc::clone(&self.assets), config: self.config.clone(), diagnostics: self.diagnostics.clone(), doc_data: self.doc_data.clone(), diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 649dd1bb5a97b7..5cbf1ecc56f9ff 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -7,6 +7,7 @@ use super::utils; use crate::js; use crate::media_type::MediaType; +use crate::tsc; use crate::tsc::ResolveArgs; use crate::tsc_config::TsConfig; @@ -27,92 +28,26 @@ use regex::Regex; use std::borrow::Cow; use std::collections::HashMap; -/// Provide static assets for the language server. -/// -/// TODO(@kitsonk) this should be DRY'ed up with `cli/tsc.rs` and the -/// `cli/build.rs` -pub fn get_asset(asset: &str) -> Option<&'static str> { - macro_rules! inc { - ($e:expr) => { - Some(include_str!(concat!("../dts/", $e))) - }; - } - match asset { - // These are not included in the snapshot - "/lib.dom.d.ts" => inc!("lib.dom.d.ts"), - "/lib.dom.iterable.d.ts" => inc!("lib.dom.iterable.d.ts"), - "/lib.es6.d.ts" => inc!("lib.es6.d.ts"), - "/lib.es2016.full.d.ts" => inc!("lib.es2016.full.d.ts"), - "/lib.es2017.full.d.ts" => inc!("lib.es2017.full.d.ts"), - "/lib.es2018.full.d.ts" => inc!("lib.es2018.full.d.ts"), - "/lib.es2019.full.d.ts" => inc!("lib.es2019.full.d.ts"), - "/lib.es2020.full.d.ts" => inc!("lib.es2020.full.d.ts"), - "/lib.esnext.full.d.ts" => inc!("lib.esnext.full.d.ts"), - "/lib.scripthost.d.ts" => inc!("lib.scripthost.d.ts"), - "/lib.webworker.d.ts" => inc!("lib.webworker.d.ts"), - "/lib.webworker.importscripts.d.ts" => { - inc!("lib.webworker.importscripts.d.ts") - } - "/lib.webworker.iterable.d.ts" => inc!("lib.webworker.iterable.d.ts"), - // These come from op crates - // TODO(@kitsonk) these is even hackier than the rest of this... - "/lib.deno.web.d.ts" => Some(js::DENO_WEB_LIB), - "/lib.deno.fetch.d.ts" => Some(js::DENO_FETCH_LIB), - // These are included in the snapshot for TypeScript, and could be retrieved - // from there? 
- "/lib.d.ts" => inc!("lib.d.ts"), - "/lib.deno.ns.d.ts" => inc!("lib.deno.ns.d.ts"), - "/lib.deno.shared_globals.d.ts" => inc!("lib.deno.shared_globals.d.ts"), - "/lib.deno.unstable.d.ts" => inc!("lib.deno.unstable.d.ts"), - "/lib.deno.window.d.ts" => inc!("lib.deno.window.d.ts"), - "/lib.deno.worker.d.ts" => inc!("lib.deno.worker.d.ts"), - "/lib.es5.d.ts" => inc!("lib.es5.d.ts"), - "/lib.es2015.collection.d.ts" => inc!("lib.es2015.collection.d.ts"), - "/lib.es2015.core.d.ts" => inc!("lib.es2015.core.d.ts"), - "/lib.es2015.d.ts" => inc!("lib.es2015.d.ts"), - "/lib.es2015.generator.d.ts" => inc!("lib.es2015.generator.d.ts"), - "/lib.es2015.iterable.d.ts" => inc!("lib.es2015.iterable.d.ts"), - "/lib.es2015.promise.d.ts" => inc!("lib.es2015.promise.d.ts"), - "/lib.es2015.proxy.d.ts" => inc!("lib.es2015.proxy.d.ts"), - "/lib.es2015.reflect.d.ts" => inc!("lib.es2015.reflect.d.ts"), - "/lib.es2015.symbol.d.ts" => inc!("lib.es2015.symbol.d.ts"), - "/lib.es2015.symbol.wellknown.d.ts" => { - inc!("lib.es2015.symbol.wellknown.d.ts") - } - "/lib.es2016.array.include.d.ts" => inc!("lib.es2016.array.include.d.ts"), - "/lib.es2016.d.ts" => inc!("lib.es2016.d.ts"), - "/lib.es2017.d.ts" => inc!("lib.es2017.d.ts"), - "/lib.es2017.intl.d.ts" => inc!("lib.es2017.intl.d.ts"), - "/lib.es2017.object.d.ts" => inc!("lib.es2017.object.d.ts"), - "/lib.es2017.sharedmemory.d.ts" => inc!("lib.es2017.sharedmemory.d.ts"), - "/lib.es2017.string.d.ts" => inc!("lib.es2017.string.d.ts"), - "/lib.es2017.typedarrays.d.ts" => inc!("lib.es2017.typedarrays.d.ts"), - "/lib.es2018.asyncgenerator.d.ts" => inc!("lib.es2018.asyncgenerator.d.ts"), - "/lib.es2018.asynciterable.d.ts" => inc!("lib.es2018.asynciterable.d.ts"), - "/lib.es2018.d.ts" => inc!("lib.es2018.d.ts"), - "/lib.es2018.intl.d.ts" => inc!("lib.es2018.intl.d.ts"), - "/lib.es2018.promise.d.ts" => inc!("lib.es2018.promise.d.ts"), - "/lib.es2018.regexp.d.ts" => inc!("lib.es2018.regexp.d.ts"), - "/lib.es2019.array.d.ts" => inc!("lib.es2019.array.d.ts"), - "/lib.es2019.d.ts" => inc!("lib.es2019.d.ts"), - "/lib.es2019.object.d.ts" => inc!("lib.es2019.object.d.ts"), - "/lib.es2019.string.d.ts" => inc!("lib.es2019.string.d.ts"), - "/lib.es2019.symbol.d.ts" => inc!("lib.es2019.symbol.d.ts"), - "/lib.es2020.bigint.d.ts" => inc!("lib.es2020.bigint.d.ts"), - "/lib.es2020.d.ts" => inc!("lib.es2020.d.ts"), - "/lib.es2020.intl.d.ts" => inc!("lib.es2020.intl.d.ts"), - "/lib.es2020.promise.d.ts" => inc!("lib.es2020.promise.d.ts"), - "/lib.es2020.sharedmemory.d.ts" => inc!("lib.es2020.sharedmemory.d.ts"), - "/lib.es2020.string.d.ts" => inc!("lib.es2020.string.d.ts"), - "/lib.es2020.symbol.wellknown.d.ts" => { - inc!("lib.es2020.symbol.wellknown.d.ts") +/// Optionally returns an internal asset, first checking for any static assets +/// in Rust, then checking any previously retrieved static assets from the +/// isolate, and then finally, the tsc isolate itself. 
+pub fn get_asset( + specifier: &ModuleSpecifier, + runtime: &mut JsRuntime, + server_state: &ServerStateSnapshot, +) -> Result, AnyError> { + let specifier_str = specifier.to_string().replace("asset:///", ""); + if let Some(asset_text) = tsc::get_asset(&specifier_str) { + Ok(Some(asset_text.to_string())) + } else { + let mut assets = server_state.assets.write().unwrap(); + if let Some(asset) = assets.get(specifier) { + Ok(asset.clone()) + } else { + let asset = request_asset(specifier, runtime, server_state)?; + assets.insert(specifier.clone(), asset.clone()); + Ok(asset) } - "/lib.esnext.d.ts" => inc!("lib.esnext.d.ts"), - "/lib.esnext.intl.d.ts" => inc!("lib.esnext.intl.d.ts"), - "/lib.esnext.promise.d.ts" => inc!("lib.esnext.promise.d.ts"), - "/lib.esnext.string.d.ts" => inc!("lib.esnext.string.d.ts"), - "/lib.esnext.weakref.d.ts" => inc!("lib.esnext.weakref.d.ts"), - _ => None, } } @@ -661,6 +596,7 @@ struct Response { } struct State<'a> { + asset: Option, last_id: usize, response: Option, server_state: ServerStateSnapshot, @@ -670,6 +606,7 @@ struct State<'a> { impl<'a> State<'a> { fn new(server_state: ServerStateSnapshot) -> Self { Self { + asset: None, last_id: 1, response: None, server_state, @@ -928,6 +865,18 @@ fn script_version(state: &mut State, args: Value) -> Result { Ok(json!(None::)) } +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +struct SetAssetArgs { + text: Option, +} + +fn set_asset(state: &mut State, args: Value) -> Result { + let v: SetAssetArgs = serde_json::from_value(args)?; + state.asset = v.text; + Ok(json!(true)) +} + /// Create and setup a JsRuntime based on a snapshot. It is expected that the /// supplied snapshot is an isolate that contains the TypeScript language /// server. @@ -951,6 +900,7 @@ pub fn start(debug: bool) -> Result { runtime.register_op("op_respond", op(respond)); runtime.register_op("op_script_names", op(script_names)); runtime.register_op("op_script_version", op(script_version)); + runtime.register_op("op_set_asset", op(set_asset)); let init_config = json!({ "debug": debug }); let init_src = format!("globalThis.serverInit({});", init_config); @@ -1028,6 +978,8 @@ pub struct UserPreferences { pub enum RequestMethod { /// Configure the compilation settings for the server. Configure(TsConfig), + /// Retrieve the text of an assets that exists in memory in the isolate. + GetAsset(ModuleSpecifier), /// Return semantic diagnostics for given file. GetSemanticDiagnostics(ModuleSpecifier), /// Returns suggestion diagnostics for given file. 
@@ -1054,6 +1006,11 @@ impl RequestMethod { "method": "configure", "compilerOptions": config, }), + RequestMethod::GetAsset(specifier) => json!({ + "id": id, + "method": "getAsset", + "specifier": specifier, + }), RequestMethod::GetSemanticDiagnostics(specifier) => json!({ "id": id, "method": "getSemanticDiagnostics", @@ -1144,6 +1101,30 @@ pub fn request( } } +fn request_asset( + specifier: &ModuleSpecifier, + runtime: &mut JsRuntime, + server_state: &ServerStateSnapshot, +) -> Result, AnyError> { + let id = { + let op_state = runtime.op_state(); + let mut op_state = op_state.borrow_mut(); + let state = op_state.borrow_mut::(); + state.server_state = server_state.clone(); + state.last_id += 1; + state.last_id + }; + let request_params = RequestMethod::GetAsset(specifier.clone()).to_value(id); + let request_src = format!("globalThis.serverRequest({});", request_params); + runtime.execute("[native_code]", &request_src)?; + + let op_state = runtime.op_state(); + let mut op_state = op_state.borrow_mut(); + let state = op_state.borrow_mut::(); + + Ok(state.asset.clone()) +} + #[cfg(test)] mod tests { use super::super::memory_cache::MemoryCache; @@ -1167,6 +1148,7 @@ mod tests { } let file_cache = Arc::new(RwLock::new(file_cache)); ServerStateSnapshot { + assets: Default::default(), config: Default::default(), diagnostics: Default::default(), doc_data, @@ -1422,9 +1404,45 @@ mod tests { &server_state, RequestMethod::GetSyntacticDiagnostics(specifier), ); - println!("{:?}", result); - // assert!(result.is_ok()); - // let response = result.unwrap(); - // assert_eq!(response, json!([])); + assert!(result.is_ok()); + let response = result.unwrap(); + assert_eq!( + response, + json!([{ + "start": { + "line": 8, + "character": 29 + }, + "end": { + "line": 8, + "character": 29 + }, + "fileName": "file:///a.ts", + "messageText": "Expression expected.", + "sourceLine": " import * as test from", + "category": 1, + "code": 1109 + }]) + ); + } + + #[test] + fn test_request_asset() { + let (mut runtime, server_state) = setup( + false, + json!({ + "target": "esnext", + "module": "esnext", + "lib": ["deno.ns", "deno.window"], + "noEmit": true, + }), + vec![], + ); + let specifier = ModuleSpecifier::resolve_url("asset:///lib.esnext.d.ts") + .expect("could not resolve url"); + let result = request_asset(&specifier, &mut runtime, &server_state); + assert!(result.is_ok()); + let response = result.unwrap(); + assert!(response.is_some()); } } diff --git a/cli/tsc.rs b/cli/tsc.rs index 69373b2fa84159..d6de4e1228f274 100644 --- a/cli/tsc.rs +++ b/cli/tsc.rs @@ -26,7 +26,7 @@ use std::path::PathBuf; use std::rc::Rc; /// Provide static assets that are not preloaded in the compiler snapshot. -fn get_asset(asset: &str) -> Option<&'static str> { +pub fn get_asset(asset: &str) -> Option<&'static str> { macro_rules! 
inc { ($e:expr) => { Some(include_str!(concat!("dts/", $e))) diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index a78b85203ec328..0be0fdc2c7f3a9 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -487,6 +487,16 @@ delete Object.prototype.__proto__; compilationSettings = options; return respond(id, true); } + case "getAsset": { + const sourceFile = host.getSourceFile( + request.specifier, + ts.ScriptTarget.ESNext, + ); + return core.jsonOpSync( + "op_set_asset", + { text: sourceFile && sourceFile.text }, + ); + } case "getSemanticDiagnostics": { const diagnostics = languageService.getSemanticDiagnostics( request.specifier, diff --git a/cli/tsc/compiler.d.ts b/cli/tsc/compiler.d.ts index a1f4e851cb3982..39afbe884e9509 100644 --- a/cli/tsc/compiler.d.ts +++ b/cli/tsc/compiler.d.ts @@ -42,6 +42,7 @@ declare global { type LanguageServerRequest = | ConfigureRequest + | GetAsset | GetSyntacticDiagnosticsRequest | GetSemanticDiagnosticsRequest | GetSuggestionDiagnosticsRequest @@ -62,6 +63,11 @@ declare global { compilerOptions: Record; } + interface GetAsset extends BaseLanguageServerRequest { + method: "getAsset"; + specifier: string; + } + interface GetSyntacticDiagnosticsRequest extends BaseLanguageServerRequest { method: "getSyntacticDiagnostics"; specifier: string; From 9fe26f8ca189ac81d9c20c454b9dbfa5e1011c3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 15 Dec 2020 21:45:29 +0100 Subject: [PATCH 077/135] refactor: remove dead code (#8781) --- Cargo.lock | 2 -- cli/Cargo.toml | 5 +--- cli/http_util.rs | 74 ---------------------------------------------- runtime/Cargo.toml | 3 ++ 4 files changed, 4 insertions(+), 80 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 58c278e56dac3e..4f12da908a597d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -455,7 +455,6 @@ dependencies = [ "atty", "base64 0.12.3", "byteorder", - "bytes 0.5.6", "chrono", "clap", "crossbeam-channel 0.5.0", @@ -503,7 +502,6 @@ dependencies = [ "tokio-tungstenite", "uuid", "walkdir", - "warp", "winapi 0.3.9", "winres", ] diff --git a/cli/Cargo.toml b/cli/Cargo.toml index b6979c638d9f23..b0dc9e5093f24c 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -38,7 +38,6 @@ deno_runtime = { path = "../runtime", version = "0.3.0" } atty = "0.2.14" base64 = "0.12.3" -bytes = "0.5.6" byteorder = "1.3.4" clap = "2.33.3" crossbeam-channel = "0.5.0" @@ -72,11 +71,8 @@ tempfile = "3.1.0" termcolor = "1.1.0" tokio = { version = "0.2.22", features = ["full"] } tokio-rustls = "0.14.1" -# Keep in-sync with warp. -tokio-tungstenite = "0.11.0" uuid = { version = "0.8.1", features = ["v4"] } walkdir = "2.3.1" -warp = { version = "0.2.5", features = ["tls"] } [target.'cfg(windows)'.dependencies] winapi = { version = "0.3.9", features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] } @@ -90,6 +86,7 @@ nix = "0.19.0" chrono = "0.4.15" os_pipe = "0.9.2" test_util = { path = "../test_util" } +tokio-tungstenite = "0.11.0" [target.'cfg(unix)'.dev-dependencies] exec = "0.3.1" # Used in test_raw_tty diff --git a/cli/http_util.rs b/cli/http_util.rs index 97e3453ec4a3a4..f6f8095a0694d2 100644 --- a/cli/http_util.rs +++ b/cli/http_util.rs @@ -1,10 +1,8 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
use crate::version; -use bytes::Bytes; use deno_core::error::generic_error; use deno_core::error::AnyError; -use deno_core::futures; use deno_core::url::Url; use deno_runtime::deno_fetch::reqwest; use deno_runtime::deno_fetch::reqwest::header::HeaderMap; @@ -14,18 +12,10 @@ use deno_runtime::deno_fetch::reqwest::header::LOCATION; use deno_runtime::deno_fetch::reqwest::header::USER_AGENT; use deno_runtime::deno_fetch::reqwest::redirect::Policy; use deno_runtime::deno_fetch::reqwest::Client; -use deno_runtime::deno_fetch::reqwest::Response; use deno_runtime::deno_fetch::reqwest::StatusCode; -use std::cmp::min; use std::collections::HashMap; use std::fs::File; -use std::future::Future; -use std::io; use std::io::Read; -use std::pin::Pin; -use std::task::Context; -use std::task::Poll; -use tokio::io::AsyncRead; pub fn get_user_agent() -> String { format!("Deno/{}", version::deno()) @@ -166,70 +156,6 @@ pub async fn fetch_once( Ok(FetchOnceResult::Code(body, headers_)) } -/// Wraps reqwest `Response` so that it can be exposed as an `AsyncRead` and integrated -/// into resources more easily. -pub struct HttpBody { - response: Response, - chunk: Option, - pos: usize, -} - -impl AsyncRead for HttpBody { - fn poll_read( - self: Pin<&mut Self>, - cx: &mut Context, - buf: &mut [u8], - ) -> Poll> { - let mut inner = self.get_mut(); - if let Some(chunk) = inner.chunk.take() { - debug!( - "HttpBody Fake Read buf {} chunk {} pos {}", - buf.len(), - chunk.len(), - inner.pos - ); - let n = min(buf.len(), chunk.len() - inner.pos); - { - let rest = &chunk[inner.pos..]; - buf[..n].copy_from_slice(&rest[..n]); - } - inner.pos += n; - if inner.pos == chunk.len() { - inner.pos = 0; - } else { - inner.chunk = Some(chunk); - } - return Poll::Ready(Ok(n)); - } else { - assert_eq!(inner.pos, 0); - } - - let chunk_future = inner.response.chunk(); - futures::pin_mut!(chunk_future); - - let result = match futures::ready!(chunk_future.poll(cx)) { - Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)), - Ok(Some(chunk)) => { - debug!( - "HttpBody Real Read buf {} chunk {} pos {}", - buf.len(), - chunk.len(), - inner.pos - ); - let n = min(buf.len(), chunk.len()); - buf[..n].copy_from_slice(&chunk[..n]); - if buf.len() < chunk.len() { - inner.pos = n; - inner.chunk = Some(chunk); - } - Ok(n) - } - Ok(None) => Ok(0), - }; - result.into() - } -} - #[cfg(test)] mod tests { use super::*; diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index d5a84a4b998280..b7bfc494e92ac1 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -58,6 +58,9 @@ tokio-rustls = "0.14.1" # Keep in-sync with warp. tokio-tungstenite = "0.11.0" uuid = { version = "0.8.1", features = ["v4"] } +# TODO(bartlomieju): remove dependency on warp, it's only used +# for a WebSocket server in inspector.rs +# Keep in-sync with tokio-tungestenite. warp = { version = "0.2.5", features = ["tls"] } webpki = "0.21.3" webpki-roots = "=0.19.0" # Pinned to v0.19.0 to match 'reqwest'. From 6984b63f2f3c8d0819fe2dced8252a81f3400ae7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Wed, 16 Dec 2020 17:14:12 +0100 Subject: [PATCH 078/135] refactor: rewrite ops to use ResourceTable2 (#8512) This commit migrates all ops to use new resource table and "AsyncRefCell". Old implementation of resource table was completely removed and all code referencing it was updated to use new system. 
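To make the new pattern concrete, here is a rough sketch (not taken verbatim from the patch; the struct name and the use of tokio's split TCP halves are illustrative, modeled on the `http_bench` example resources touched below) of how a resource can keep its state in `AsyncRefCell` so that concurrent ops await exclusive access instead of failing with borrow errors:

  use std::rc::Rc;

  use deno_core::AsyncRefCell;
  use deno_core::RcRef;
  use tokio::io::AsyncReadExt;
  use tokio::net::tcp::OwnedReadHalf;
  use tokio::net::tcp::OwnedWriteHalf;

  // Hypothetical resource type for illustration only.
  struct TcpStreamResource {
    rd: AsyncRefCell<OwnedReadHalf>,
    wr: AsyncRefCell<OwnedWriteHalf>,
  }

  impl TcpStreamResource {
    // Only the read half is borrowed here, so a concurrent write op can still
    // make progress by borrowing `wr` independently; a second read simply
    // awaits until this borrow is released.
    async fn read(self: Rc<Self>, buf: &mut [u8]) -> std::io::Result<usize> {
      let mut rd = RcRef::map(&self, |r| &r.rd).borrow_mut().await;
      rd.read(buf).await
    }
  }

A resource like this is registered with `state.resource_table.add(...)` and later looked up with `resource_table.get::<...>(rid)`, as the updated ops below do with their own resource types.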
--- cli/tests/unit/net_test.ts | 60 ++- core/async_cell.rs | 15 + core/examples/http_bench_bin_ops.rs | 12 +- core/examples/http_bench_json_ops.rs | 12 +- core/lib.rs | 6 +- core/ops.rs | 15 +- core/resources.rs | 253 +++++------ core/resources2.rs | 146 ------ op_crates/fetch/lib.rs | 68 +-- runtime/errors.rs | 6 + runtime/ops/fs.rs | 20 +- runtime/ops/fs_events.rs | 58 ++- runtime/ops/io.rs | 633 ++++++++++++++++----------- runtime/ops/net.rs | 255 +++++------ runtime/ops/net_unix.rs | 106 +++-- runtime/ops/plugin.rs | 12 +- runtime/ops/process.rs | 74 ++-- runtime/ops/signal.rs | 74 ++-- runtime/ops/tls.rs | 245 +++++------ runtime/ops/tty.rs | 127 +++--- runtime/ops/websocket.rs | 173 ++++---- runtime/rt/30_net.js | 32 +- runtime/rt/40_fs_events.js | 2 + runtime/rt/40_signals.js | 11 +- runtime/web_worker.rs | 7 +- runtime/worker.rs | 6 +- 26 files changed, 1222 insertions(+), 1206 deletions(-) delete mode 100644 core/resources2.rs diff --git a/cli/tests/unit/net_test.ts b/cli/tests/unit/net_test.ts index abbb23b339ef91..62b00e43c5c3f9 100644 --- a/cli/tests/unit/net_test.ts +++ b/cli/tests/unit/net_test.ts @@ -21,8 +21,6 @@ unitTest({ perms: { net: true } }, function netTcpListenClose(): void { unitTest( { perms: { net: true }, - // TODO: - ignore: Deno.build.os === "windows", }, function netUdpListenClose(): void { const socket = Deno.listenDatagram({ @@ -257,7 +255,7 @@ unitTest( ); unitTest( - { ignore: Deno.build.os === "windows", perms: { net: true } }, + { perms: { net: true } }, async function netUdpSendReceive(): Promise { const alice = Deno.listenDatagram({ port: 3500, transport: "udp" }); assert(alice.addr.transport === "udp"); @@ -287,7 +285,31 @@ unitTest( ); unitTest( - { ignore: Deno.build.os === "windows", perms: { net: true } }, + { perms: { net: true } }, + async function netUdpConcurrentSendReceive(): Promise { + const socket = Deno.listenDatagram({ port: 3500, transport: "udp" }); + assert(socket.addr.transport === "udp"); + assertEquals(socket.addr.port, 3500); + assertEquals(socket.addr.hostname, "127.0.0.1"); + + const recvPromise = socket.receive(); + + const sendBuf = new Uint8Array([1, 2, 3]); + const sendLen = await socket.send(sendBuf, socket.addr); + assertEquals(sendLen, 3); + + const [recvBuf, recvAddr] = await recvPromise; + assertEquals(recvBuf.length, 3); + assertEquals(1, recvBuf[0]); + assertEquals(2, recvBuf[1]); + assertEquals(3, recvBuf[2]); + + socket.close(); + }, +); + +unitTest( + { perms: { net: true } }, async function netUdpBorrowMutError(): Promise { const socket = Deno.listenDatagram({ port: 4501, @@ -335,6 +357,34 @@ unitTest( }, ); +// TODO(piscisaureus): Enable after Tokio v0.3/v1.0 upgrade. 
+unitTest( + { ignore: true, perms: { read: true, write: true } }, + async function netUnixPacketConcurrentSendReceive(): Promise { + const filePath = await Deno.makeTempFile(); + const socket = Deno.listenDatagram({ + path: filePath, + transport: "unixpacket", + }); + assert(socket.addr.transport === "unixpacket"); + assertEquals(socket.addr.path, filePath); + + const recvPromise = socket.receive(); + + const sendBuf = new Uint8Array([1, 2, 3]); + const sendLen = await socket.send(sendBuf, socket.addr); + assertEquals(sendLen, 3); + + const [recvBuf, recvAddr] = await recvPromise; + assertEquals(recvBuf.length, 3); + assertEquals(1, recvBuf[0]); + assertEquals(2, recvBuf[1]); + assertEquals(3, recvBuf[2]); + + socket.close(); + }, +); + unitTest( { perms: { net: true } }, async function netTcpListenIteratorBreakClosesResource(): Promise { @@ -385,7 +435,7 @@ unitTest( ); unitTest( - { ignore: Deno.build.os === "windows", perms: { net: true } }, + { perms: { net: true } }, async function netUdpListenCloseWhileIterating(): Promise { const socket = Deno.listenDatagram({ port: 8000, transport: "udp" }); const nextWhileClosing = socket[Symbol.asyncIterator]().next(); diff --git a/core/async_cell.rs b/core/async_cell.rs index d11b839325579f..cd6c51682883af 100644 --- a/core/async_cell.rs +++ b/core/async_cell.rs @@ -1,10 +1,14 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. +use std::any::type_name; use std::any::Any; use std::borrow::Borrow; use std::cell::Cell; use std::cell::UnsafeCell; use std::collections::VecDeque; +use std::fmt; +use std::fmt::Debug; +use std::fmt::Formatter; use std::ops::Deref; use std::rc::Rc; @@ -45,6 +49,17 @@ impl AsyncRefCell { pub fn as_ptr(&self) -> *mut T { self.value.get() } + + pub fn into_inner(self) -> T { + assert!(self.borrow_count.get().is_empty()); + self.value.into_inner() + } +} + +impl Debug for AsyncRefCell { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + write!(f, "AsyncRefCell<{}>", type_name::()) + } } impl Default for AsyncRefCell { diff --git a/core/examples/http_bench_bin_ops.rs b/core/examples/http_bench_bin_ops.rs index 1d7a76c3d03fae..32529652b3d0bb 100644 --- a/core/examples/http_bench_bin_ops.rs +++ b/core/examples/http_bench_bin_ops.rs @@ -170,7 +170,7 @@ fn op_listen( let std_listener = std::net::TcpListener::bind(&addr)?; std_listener.set_nonblocking(true)?; let listener = TcpListener::try_from(std_listener)?; - let rid = state.resource_table_2.add(listener); + let rid = state.resource_table.add(listener); Ok(rid) } @@ -181,7 +181,7 @@ fn op_close( ) -> Result { debug!("close rid={}", rid); state - .resource_table_2 + .resource_table .close(rid) .map(|_| 0) .ok_or_else(bad_resource_id) @@ -196,11 +196,11 @@ async fn op_accept( let listener = state .borrow() - .resource_table_2 + .resource_table .get::(rid) .ok_or_else(bad_resource_id)?; let stream = listener.accept().await?; - let rid = state.borrow_mut().resource_table_2.add(stream); + let rid = state.borrow_mut().resource_table.add(stream); Ok(rid) } @@ -214,7 +214,7 @@ async fn op_read( let stream = state .borrow() - .resource_table_2 + .resource_table .get::(rid) .ok_or_else(bad_resource_id)?; stream.read(&mut bufs[0]).await @@ -230,7 +230,7 @@ async fn op_write( let stream = state .borrow() - .resource_table_2 + .resource_table .get::(rid) .ok_or_else(bad_resource_id)?; stream.write(&bufs[0]).await diff --git a/core/examples/http_bench_json_ops.rs b/core/examples/http_bench_json_ops.rs index c4fcd636367fbb..8cf4061cc08900 100644 --- 
a/core/examples/http_bench_json_ops.rs +++ b/core/examples/http_bench_json_ops.rs @@ -134,7 +134,7 @@ fn op_listen( let std_listener = std::net::TcpListener::bind(&addr)?; std_listener.set_nonblocking(true)?; let listener = TcpListener::try_from(std_listener)?; - let rid = state.resource_table_2.add(listener); + let rid = state.resource_table.add(listener); Ok(serde_json::json!({ "rid": rid })) } @@ -152,7 +152,7 @@ fn op_close( .unwrap(); debug!("close rid={}", rid); state - .resource_table_2 + .resource_table .close(rid) .map(|_| serde_json::json!(())) .ok_or_else(bad_resource_id) @@ -174,11 +174,11 @@ async fn op_accept( let listener = state .borrow() - .resource_table_2 + .resource_table .get::(rid) .ok_or_else(bad_resource_id)?; let stream = listener.accept().await?; - let rid = state.borrow_mut().resource_table_2.add(stream); + let rid = state.borrow_mut().resource_table.add(stream); Ok(serde_json::json!({ "rid": rid })) } @@ -199,7 +199,7 @@ async fn op_read( let stream = state .borrow() - .resource_table_2 + .resource_table .get::(rid) .ok_or_else(bad_resource_id)?; let nread = stream.read(&mut bufs[0]).await?; @@ -223,7 +223,7 @@ async fn op_write( let stream = state .borrow() - .resource_table_2 + .resource_table .get::(rid) .ok_or_else(bad_resource_id)?; let nwritten = stream.write(&bufs[0]).await?; diff --git a/core/lib.rs b/core/lib.rs index 5846ad99d47703..6cecd4d75746aa 100644 --- a/core/lib.rs +++ b/core/lib.rs @@ -17,7 +17,6 @@ mod normalize_path; mod ops; pub mod plugin_api; mod resources; -mod resources2; mod runtime; mod shared_queue; mod zero_copy_buf; @@ -64,10 +63,9 @@ pub use crate::ops::OpFn; pub use crate::ops::OpId; pub use crate::ops::OpState; pub use crate::ops::OpTable; +pub use crate::resources::Resource; +pub use crate::resources::ResourceId; pub use crate::resources::ResourceTable; -pub use crate::resources2::Resource; -pub use crate::resources2::ResourceId; -pub use crate::resources2::ResourceTable2; pub use crate::runtime::GetErrorClassFn; pub use crate::runtime::JsErrorCreateFn; pub use crate::runtime::JsRuntime; diff --git a/core/ops.rs b/core/ops.rs index bf10d3d86e919b..2907d25525a3d5 100644 --- a/core/ops.rs +++ b/core/ops.rs @@ -4,6 +4,8 @@ use crate::error::bad_resource_id; use crate::error::type_error; use crate::error::AnyError; use crate::gotham_state::GothamState; +use crate::resources::ResourceTable; +use crate::runtime::GetErrorClassFn; use crate::BufVec; use crate::ZeroCopyBuf; use futures::Future; @@ -33,10 +35,9 @@ pub enum Op { /// Maintains the resources and ops inside a JS runtime. 
pub struct OpState { - pub resource_table: crate::ResourceTable, - pub resource_table_2: crate::resources2::ResourceTable, + pub resource_table: ResourceTable, pub op_table: OpTable, - pub get_error_class_fn: crate::runtime::GetErrorClassFn, + pub get_error_class_fn: GetErrorClassFn, gotham_state: GothamState, } @@ -47,7 +48,6 @@ impl Default for OpState { fn default() -> OpState { OpState { resource_table: Default::default(), - resource_table_2: Default::default(), op_table: OpTable::default(), get_error_class_fn: &|_| "Error", gotham_state: Default::default(), @@ -279,7 +279,11 @@ pub fn op_resources( _args: Value, _zero_copy: &mut [ZeroCopyBuf], ) -> Result { - let serialized_resources = state.resource_table.entries(); + let serialized_resources: HashMap = state + .resource_table + .names() + .map(|(rid, name)| (rid, name.to_string())) + .collect(); Ok(json!(serialized_resources)) } @@ -300,5 +304,6 @@ pub fn op_close( .resource_table .close(rid as u32) .ok_or_else(bad_resource_id)?; + Ok(json!({})) } diff --git a/core/resources.rs b/core/resources.rs index 753fa97139a294..da3b634fcff36a 100644 --- a/core/resources.rs +++ b/core/resources.rs @@ -1,20 +1,63 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -// Think of Resources as File Descriptors. They are integers that are allocated by -// the privileged side of Deno to refer to various rust objects that need to be -// referenced between multiple ops. For example, network sockets are resources. -// Resources may or may not correspond to a real operating system file -// descriptor (hence the different name). +// Think of Resources as File Descriptors. They are integers that are allocated +// by the privileged side of Deno which refer to various rust objects that need +// to be persisted between various ops. For example, network sockets are +// resources. Resources may or may not correspond to a real operating system +// file descriptor (hence the different name). -use crate::resources2::ResourceId; +use std::any::type_name; use std::any::Any; +use std::any::TypeId; +use std::borrow::Cow; use std::collections::HashMap; +use std::iter::Iterator; +use std::rc::Rc; + +/// All objects that can be store in the resource table should implement the +/// `Resource` trait. +pub trait Resource: Any + 'static { + /// Returns a string representation of the resource which is made available + /// to JavaScript code through `op_resources`. The default implementation + /// returns the Rust type name, but specific resource types may override this + /// trait method. + fn name(&self) -> Cow { + type_name::().into() + } + + /// Resources may implement the `close()` trait method if they need to do + /// resource specific clean-ups, such as cancelling pending futures, after a + /// resource has been removed from the resource table. + fn close(self: Rc) {} +} + +impl dyn Resource { + #[inline(always)] + fn is(&self) -> bool { + self.type_id() == TypeId::of::() + } -/// These store Deno's file descriptors. These are not necessarily the operating -/// system ones. -type ResourceMap = HashMap)>; + #[inline(always)] + #[allow(clippy::needless_lifetimes)] + pub fn downcast_rc<'a, T: Resource>(self: &'a Rc) -> Option<&'a Rc> { + if self.is::() { + let ptr = self as *const Rc<_> as *const Rc; + Some(unsafe { &*ptr }) + } else { + None + } + } +} -/// Map-like data structure storing Deno's resources (equivalent to file descriptors). +/// A `ResourceId` is an integer value referencing a resource. 
It could be +/// considered to be the Deno equivalent of a `file descriptor` in POSIX like +/// operating systems. Elsewhere in the code base it is commonly abbreviated +/// to `rid`. +// TODO: use `u64` instead? +pub type ResourceId = u32; + +/// Map-like data structure storing Deno's resources (equivalent to file +/// descriptors). /// /// Provides basic methods for element access. A resource can be of any type. /// Different types of resources can be stored in the same map, and provided @@ -24,156 +67,98 @@ type ResourceMap = HashMap)>; /// the key in the map. #[derive(Default)] pub struct ResourceTable { - map: ResourceMap, - next_id: u32, + index: HashMap>, + next_rid: ResourceId, } impl ResourceTable { - /// Checks if the given resource ID is contained. - pub fn has(&self, rid: ResourceId) -> bool { - self.map.contains_key(&rid) - } - - /// Returns a shared reference to a resource. + /// Inserts resource into the resource table, which takes ownership of it. /// - /// Returns `None`, if `rid` is not stored or has a type different from `T`. - pub fn get(&self, rid: ResourceId) -> Option<&T> { - let (_, resource) = self.map.get(&rid)?; - resource.downcast_ref::() - } - - /// Returns a mutable reference to a resource. + /// The resource type is erased at runtime and must be statically known + /// when retrieving it through `get()`. /// - /// Returns `None`, if `rid` is not stored or has a type different from `T`. - pub fn get_mut(&mut self, rid: ResourceId) -> Option<&mut T> { - let (_, resource) = self.map.get_mut(&rid)?; - resource.downcast_mut::() - } - - // TODO: resource id allocation should probably be randomized for security. - fn next_rid(&mut self) -> ResourceId { - let next_rid = self.next_id; - self.next_id += 1; - next_rid as ResourceId + /// Returns a unique resource ID, which acts as a key for this resource. + pub fn add(&mut self, resource: T) -> ResourceId { + self.add_rc(Rc::new(resource)) } - /// Inserts a resource, taking ownership of it. + /// Inserts a `Rc`-wrapped resource into the resource table. /// /// The resource type is erased at runtime and must be statically known /// when retrieving it through `get()`. /// /// Returns a unique resource ID, which acts as a key for this resource. - pub fn add(&mut self, name: &str, resource: Box) -> ResourceId { - let rid = self.next_rid(); - let r = self.map.insert(rid, (name.to_string(), resource)); - assert!(r.is_none()); + pub fn add_rc(&mut self, resource: Rc) -> ResourceId { + let resource = resource as Rc; + let rid = self.next_rid; + let removed_resource = self.index.insert(rid, resource); + assert!(removed_resource.is_none()); + self.next_rid += 1; rid } - /// Returns a map of resource IDs to names. - /// - /// The name is the one specified during `add()`. To access resources themselves, - /// use the `get()` or `get_mut()` functions. - pub fn entries(&self) -> HashMap { - self - .map - .iter() - .map(|(key, (name, _resource))| (*key, name.clone())) - .collect() - } - - // close(2) is done by dropping the value. Therefore we just need to remove - // the resource from the resource table. - pub fn close(&mut self, rid: ResourceId) -> Option<()> { - self.map.remove(&rid).map(|(_name, _resource)| ()) - } - - /// Removes the resource identified by `rid` and returns it. - /// - /// When the provided `rid` is stored, the associated resource will be removed. - /// Otherwise, nothing happens and `None` is returned. - /// - /// If the type `T` matches the resource's type, the resource will be returned. 
- /// If the type mismatches, `None` is returned, but the resource is still removed. - pub fn remove(&mut self, rid: ResourceId) -> Option> { - if let Some((_name, resource)) = self.map.remove(&rid) { - let res = match resource.downcast::() { - Ok(res) => Some(res), - Err(_e) => None, - }; - return res; - } - None - } -} - -#[cfg(test)] -mod tests { - use super::*; - - struct FakeResource { - not_empty: u128, - } - - impl FakeResource { - fn new(value: u128) -> FakeResource { - FakeResource { not_empty: value } - } + /// Returns true if any resource with the given `rid` exists. + pub fn has(&self, rid: ResourceId) -> bool { + self.index.contains_key(&rid) } - #[test] - fn test_create_resource_table_default() { - let table = ResourceTable::default(); - assert_eq!(table.map.len(), 0); + /// Returns a reference counted pointer to the resource of type `T` with the + /// given `rid`. If `rid` is not present or has a type different than `T`, + /// this function returns `None`. + pub fn get(&self, rid: ResourceId) -> Option> { + self + .index + .get(&rid) + .and_then(|rc| rc.downcast_rc::()) + .map(Clone::clone) } - #[test] - fn test_add_to_resource_table_not_empty() { - let mut table = ResourceTable::default(); - table.add("fake1", Box::new(FakeResource::new(1))); - table.add("fake2", Box::new(FakeResource::new(2))); - assert_eq!(table.map.len(), 2); + pub fn get_any(&self, rid: ResourceId) -> Option> { + self.index.get(&rid).map(Clone::clone) } - #[test] - fn test_add_to_resource_table_are_contiguous() { - let mut table = ResourceTable::default(); - let rid1 = table.add("fake1", Box::new(FakeResource::new(1))); - let rid2 = table.add("fake2", Box::new(FakeResource::new(2))); - assert_eq!(rid1 + 1, rid2); + /// Removes a resource of type `T` from the resource table and returns it. + /// If a resource with the given `rid` exists but its type does not match `T`, + /// it is not removed from the resource table. Note that the resource's + /// `close()` method is *not* called. + pub fn take(&mut self, rid: ResourceId) -> Option> { + let resource = self.get::(rid)?; + self.index.remove(&rid); + Some(resource) } - #[test] - fn test_get_from_resource_table_is_what_was_given() { - let mut table = ResourceTable::default(); - let rid = table.add("fake", Box::new(FakeResource::new(7))); - let resource = table.get::(rid); - assert_eq!(resource.unwrap().not_empty, 7); + /// Removes a resource from the resource table and returns it. Note that the + /// resource's `close()` method is *not* called. + pub fn take_any(&mut self, rid: ResourceId) -> Option> { + self.index.remove(&rid) } - #[test] - fn test_remove_from_resource_table() { - let mut table = ResourceTable::default(); - let rid1 = table.add("fake1", Box::new(FakeResource::new(1))); - let rid2 = table.add("fake2", Box::new(FakeResource::new(2))); - assert_eq!(table.map.len(), 2); - table.close(rid1); - assert_eq!(table.map.len(), 1); - table.close(rid2); - assert_eq!(table.map.len(), 0); + /// Removes the resource with the given `rid` from the resource table. If the + /// only reference to this resource existed in the resource table, this will + /// cause the resource to be dropped. However, since resources are reference + /// counted, therefore pending ops are not automatically cancelled. A resource + /// may implement the `close()` method to perform clean-ups such as canceling + /// ops. 
+ pub fn close(&mut self, rid: ResourceId) -> Option<()> { + self.index.remove(&rid).map(|resource| resource.close()) } - #[test] - fn test_take_from_resource_table() { - let mut table = ResourceTable::default(); - let rid1 = table.add("fake1", Box::new(FakeResource::new(1))); - let rid2 = table.add("fake2", Box::new(FakeResource::new(2))); - assert_eq!(table.map.len(), 2); - let res1 = table.remove::(rid1); - assert_eq!(table.map.len(), 1); - assert!(res1.is_some()); - let res2 = table.remove::(rid2); - assert_eq!(table.map.len(), 0); - assert!(res2.is_some()); + /// Returns an iterator that yields a `(id, name)` pair for every resource + /// that's currently in the resource table. This can be used for debugging + /// purposes or to implement the `op_resources` op. Note that the order in + /// which items appear is not specified. + /// + /// # Example + /// + /// ``` + /// # use deno_core::ResourceTable; + /// # let resource_table = ResourceTable::default(); + /// let resource_names = resource_table.names().collect::>(); + /// ``` + pub fn names(&self) -> impl Iterator)> { + self + .index + .iter() + .map(|(&id, resource)| (id, resource.name())) } } diff --git a/core/resources2.rs b/core/resources2.rs deleted file mode 100644 index 989ea83280e756..00000000000000 --- a/core/resources2.rs +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. - -// Think of Resources as File Descriptors. They are integers that are allocated -// by the privileged side of Deno which refer to various rust objects that need -// to be persisted between various ops. For example, network sockets are -// resources. Resources may or may not correspond to a real operating system -// file descriptor (hence the different name). - -use std::any::type_name; -use std::any::Any; -use std::any::TypeId; -use std::borrow::Cow; -use std::collections::HashMap; -use std::iter::Iterator; -use std::rc::Rc; - -/// All objects that can be store in the resource table should implement the -/// `Resource` trait. -pub trait Resource: Any + 'static { - /// Returns a string representation of the resource which is made available - /// to JavaScript code through `op_resources`. The default implementation - /// returns the Rust type name, but specific resource types may override this - /// trait method. - fn name(&self) -> Cow { - type_name::().into() - } - - /// Resources may implement the `close()` trait method if they need to do - /// resource specific clean-ups, such as cancelling pending futures, after a - /// resource has been removed from the resource table. - fn close(self: Rc) {} -} - -impl dyn Resource { - #[inline(always)] - fn is(&self) -> bool { - self.type_id() == TypeId::of::() - } - - #[inline(always)] - #[allow(clippy::needless_lifetimes)] - fn downcast_rc<'a, T: Resource>(self: &'a Rc) -> Option<&'a Rc> { - if self.is::() { - let ptr = self as *const Rc<_> as *const Rc; - Some(unsafe { &*ptr }) - } else { - None - } - } -} - -/// A `ResourceId` is an integer value referencing a resource. It could be -/// considered to be the Deno equivalent of a `file descriptor` in POSIX like -/// operating systems. Elsewhere in the code base it is commonly abbreviated -/// to `rid`. -// TODO: use `u64` instead? -pub type ResourceId = u32; - -/// Temporary alias for `crate::resources2::ResourceTable`. -// TODO: remove this when the old `ResourceTable` is obsolete. 
-pub type ResourceTable2 = ResourceTable; - -/// Map-like data structure storing Deno's resources (equivalent to file -/// descriptors). -/// -/// Provides basic methods for element access. A resource can be of any type. -/// Different types of resources can be stored in the same map, and provided -/// with a name for description. -/// -/// Each resource is identified through a _resource ID (rid)_, which acts as -/// the key in the map. -#[derive(Default)] -pub struct ResourceTable { - index: HashMap>, - next_rid: ResourceId, -} - -impl ResourceTable { - /// Returns true if any resource with the given `rid` is exists. - pub fn has(&self, rid: ResourceId) -> bool { - self.index.contains_key(&rid) - } - - /// Returns a reference counted pointer to the resource of type `T` with the - /// given `rid`. If `rid` is not present or has a type different than `T`, - /// this function returns `None`. - pub fn get(&self, rid: ResourceId) -> Option> { - self - .index - .get(&rid) - .and_then(|resource| resource.downcast_rc::()) - .map(Clone::clone) - } - - /// Inserts resource into the resource table, which takes ownership of it. - /// - /// The resource type is erased at runtime and must be statically known - /// when retrieving it through `get()`. - /// - /// Returns a unique resource ID, which acts as a key for this resource. - pub fn add(&mut self, resource: T) -> ResourceId { - self.add_rc(Rc::new(resource)) - } - - /// Inserts a `Rc`-wrapped resource into the resource table. - /// - /// The resource type is erased at runtime and must be statically known - /// when retrieving it through `get()`. - /// - /// Returns a unique resource ID, which acts as a key for this resource. - pub fn add_rc(&mut self, resource: Rc) -> ResourceId { - let resource = resource as Rc; - let rid = self.next_rid; - let removed_resource = self.index.insert(rid, resource); - assert!(removed_resource.is_none()); - self.next_rid += 1; - rid - } - - /// Removes the resource with the given `rid` from the resource table. If the - /// only reference to this resource existed in the resource table, this will - /// cause the resource to be dropped. However, since resources are reference - /// counted, therefore pending ops are not automatically cancelled. - pub fn close(&mut self, rid: ResourceId) -> Option<()> { - self.index.remove(&rid).map(|resource| resource.close()) - } - - /// Returns an iterator that yields a `(id, name)` pair for every resource - /// that's currently in the resource table. This can be used for debugging - /// purposes or to implement the `op_resources` op. Note that the order in - /// which items appear is not specified. 
- /// - /// # Example - /// - /// ``` - /// # use deno_core::ResourceTable2; - /// # let resource_table = ResourceTable2::default(); - /// let resource_names = resource_table.names().collect::>(); - /// ``` - pub fn names(&self) -> impl Iterator)> { - self - .index - .iter() - .map(|(&id, resource)| (id, resource.name())) - } -} diff --git a/op_crates/fetch/lib.rs b/op_crates/fetch/lib.rs index 8a4c1ee16915b0..c2c08d2cff567e 100644 --- a/op_crates/fetch/lib.rs +++ b/op_crates/fetch/lib.rs @@ -5,15 +5,19 @@ use deno_core::error::bad_resource_id; use deno_core::error::type_error; use deno_core::error::AnyError; -use deno_core::futures; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::url; use deno_core::url::Url; +use deno_core::AsyncRefCell; use deno_core::BufVec; +use deno_core::CancelFuture; +use deno_core::CancelHandle; use deno_core::JsRuntime; use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; use deno_core::ZeroCopyBuf; use reqwest::header::HeaderName; @@ -23,6 +27,7 @@ use reqwest::Client; use reqwest::Method; use reqwest::Response; use serde::Deserialize; +use std::borrow::Cow; use std::cell::RefCell; use std::convert::From; use std::fs::File; @@ -172,10 +177,10 @@ where } } - let rid = state - .borrow_mut() - .resource_table - .add("httpBody", Box::new(res)); + let rid = state.borrow_mut().resource_table.add(HttpBodyResource { + response: AsyncRefCell::new(res), + cancel: Default::default(), + }); Ok(json!({ "bodyRid": rid, @@ -199,32 +204,43 @@ pub async fn op_fetch_read( let args: Args = serde_json::from_value(args)?; let rid = args.rid; - use futures::future::poll_fn; - use futures::ready; - use futures::FutureExt; - let f = poll_fn(move |cx| { - let mut state = state.borrow_mut(); - let response = state - .resource_table - .get_mut::(rid as u32) - .ok_or_else(bad_resource_id)?; + let resource = state + .borrow() + .resource_table + .get::(rid as u32) + .ok_or_else(bad_resource_id)?; + let mut response = RcRef::map(&resource, |r| &r.response).borrow_mut().await; + let cancel = RcRef::map(resource, |r| &r.cancel); + let maybe_chunk = response.chunk().or_cancel(cancel).await??; + if let Some(chunk) = maybe_chunk { + // TODO(ry) This is terribly inefficient. Make this zero-copy. + Ok(json!({ "chunk": &*chunk })) + } else { + Ok(json!({ "chunk": null })) + } +} - let mut chunk_fut = response.chunk().boxed_local(); - let r = ready!(chunk_fut.poll_unpin(cx))?; - if let Some(chunk) = r { - // TODO(ry) This is terribly inefficient. Make this zero-copy. 
- Ok(json!({ "chunk": &*chunk })).into() - } else { - Ok(json!({ "chunk": null })).into() - } - }); - f.await +struct HttpBodyResource { + response: AsyncRefCell, + cancel: CancelHandle, +} + +impl Resource for HttpBodyResource { + fn name(&self) -> Cow { + "httpBody".into() + } } struct HttpClientResource { client: Client, } +impl Resource for HttpClientResource { + fn name(&self) -> Cow { + "httpClient".into() + } +} + impl HttpClientResource { fn new(client: Client) -> Self { Self { client } @@ -255,9 +271,7 @@ where let client = create_http_client(args.ca_file.as_deref()).unwrap(); - let rid = state - .resource_table - .add("httpClient", Box::new(HttpClientResource::new(client))); + let rid = state.resource_table.add(HttpClientResource::new(client)); Ok(json!(rid)) } diff --git a/runtime/errors.rs b/runtime/errors.rs index f8f71a8594b722..f82d95ed8c1636 100644 --- a/runtime/errors.rs +++ b/runtime/errors.rs @@ -168,6 +168,12 @@ pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { e.downcast_ref::() .map(get_dlopen_error_class) }) + .or_else(|| { + e.downcast_ref::().map(|e| { + let io_err: io::Error = e.to_owned().into(); + get_io_error_class(&io_err) + }) + }) .or_else(|| { e.downcast_ref::() .map(get_env_var_error_class) diff --git a/runtime/ops/fs.rs b/runtime/ops/fs.rs index 865c5bccaf2cde..d6d7d7e787a572 100644 --- a/runtime/ops/fs.rs +++ b/runtime/ops/fs.rs @@ -1,7 +1,7 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. // Some deserializer fields are only used on Unix and Windows build fails without it use super::io::std_file_resource; -use super::io::{FileMetadata, StreamResource, StreamResourceHolder}; +use super::io::StreamResource; use crate::fs_util::canonicalize_path; use crate::permissions::Permissions; use deno_core::error::custom_error; @@ -185,13 +185,8 @@ fn op_open_sync( let (path, open_options) = open_helper(state, args)?; let std_file = open_options.open(path)?; let tokio_file = tokio::fs::File::from_std(std_file); - let rid = state.resource_table.add( - "fsFile", - Box::new(StreamResourceHolder::new(StreamResource::FsFile(Some(( - tokio_file, - FileMetadata::default(), - ))))), - ); + let resource = StreamResource::fs_file(tokio_file); + let rid = state.resource_table.add(resource); Ok(json!(rid)) } @@ -204,13 +199,8 @@ async fn op_open_async( let tokio_file = tokio::fs::OpenOptions::from(open_options) .open(path) .await?; - let rid = state.borrow_mut().resource_table.add( - "fsFile", - Box::new(StreamResourceHolder::new(StreamResource::FsFile(Some(( - tokio_file, - FileMetadata::default(), - ))))), - ); + let resource = StreamResource::fs_file(tokio_file); + let rid = state.borrow_mut().resource_table.add(resource); Ok(json!(rid)) } diff --git a/runtime/ops/fs_events.rs b/runtime/ops/fs_events.rs index 4832c915cad78e..38661e1d475bd8 100644 --- a/runtime/ops/fs_events.rs +++ b/runtime/ops/fs_events.rs @@ -3,12 +3,16 @@ use crate::permissions::Permissions; use deno_core::error::bad_resource_id; use deno_core::error::AnyError; -use deno_core::futures::future::poll_fn; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; +use deno_core::AsyncRefCell; use deno_core::BufVec; +use deno_core::CancelFuture; +use deno_core::CancelHandle; use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; use deno_core::ZeroCopyBuf; use notify::event::Event as NotifyEvent; use notify::Error as NotifyError; @@ -18,6 +22,7 @@ use notify::RecursiveMode; use notify::Watcher; use 
serde::Deserialize; use serde::Serialize; +use std::borrow::Cow; use std::cell::RefCell; use std::convert::From; use std::path::PathBuf; @@ -32,7 +37,18 @@ pub fn init(rt: &mut deno_core::JsRuntime) { struct FsEventsResource { #[allow(unused)] watcher: RecommendedWatcher, - receiver: mpsc::Receiver>, + receiver: AsyncRefCell>>, + cancel: CancelHandle, +} + +impl Resource for FsEventsResource { + fn name(&self) -> Cow { + "fsEvents".into() + } + + fn close(self: Rc) { + self.cancel.cancel(); + } } /// Represents a file system event. @@ -99,8 +115,12 @@ fn op_fs_events_open( .check_read(&PathBuf::from(path))?; watcher.watch(path, recursive_mode)?; } - let resource = FsEventsResource { watcher, receiver }; - let rid = state.resource_table.add("fsEvents", Box::new(resource)); + let resource = FsEventsResource { + watcher, + receiver: AsyncRefCell::new(receiver), + cancel: Default::default(), + }; + let rid = state.resource_table.add(resource); Ok(json!(rid)) } @@ -114,20 +134,18 @@ async fn op_fs_events_poll( rid: u32, } let PollArgs { rid } = serde_json::from_value(args)?; - poll_fn(move |cx| { - let mut state = state.borrow_mut(); - let watcher = state - .resource_table - .get_mut::(rid) - .ok_or_else(bad_resource_id)?; - watcher - .receiver - .poll_recv(cx) - .map(|maybe_result| match maybe_result { - Some(Ok(value)) => Ok(json!({ "value": value, "done": false })), - Some(Err(err)) => Err(err), - None => Ok(json!({ "done": true })), - }) - }) - .await + + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(bad_resource_id)?; + let mut receiver = RcRef::map(&resource, |r| &r.receiver).borrow_mut().await; + let cancel = RcRef::map(resource, |r| &r.cancel); + let maybe_result = receiver.recv().or_cancel(cancel).await?; + match maybe_result { + Some(Ok(value)) => Ok(json!({ "value": value, "done": false })), + Some(Err(err)) => Err(err), + None => Ok(json!({ "done": true })), + } } diff --git a/runtime/ops/io.rs b/runtime/ops/io.rs index 0f8af905a57a6d..de56f5b557d417 100644 --- a/runtime/ops/io.rs +++ b/runtime/ops/io.rs @@ -7,26 +7,29 @@ use deno_core::error::bad_resource_id; use deno_core::error::resource_unavailable; use deno_core::error::type_error; use deno_core::error::AnyError; -use deno_core::futures; -use deno_core::futures::future::poll_fn; use deno_core::futures::future::FutureExt; -use deno_core::futures::ready; +use deno_core::AsyncMutFuture; +use deno_core::AsyncRefCell; use deno_core::BufVec; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; use deno_core::JsRuntime; use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; +use std::borrow::Cow; use std::cell::RefCell; -use std::collections::HashMap; -use std::pin::Pin; use std::rc::Rc; -use std::sync::atomic::{AtomicUsize, Ordering}; -use std::task::Context; -use std::task::Poll; -use tokio::io::{AsyncRead, AsyncWrite}; +use tokio::io::AsyncRead; +use tokio::io::AsyncReadExt; +use tokio::io::AsyncWrite; +use tokio::io::AsyncWriteExt; +use tokio::net::tcp; use tokio::net::TcpStream; use tokio_rustls::client::TlsStream as ClientTlsStream; use tokio_rustls::server::TlsStream as ServerTlsStream; -#[cfg(not(windows))] +#[cfg(unix)] use std::os::unix::io::FromRawFd; #[cfg(windows)] @@ -94,26 +97,28 @@ pub fn init(rt: &mut JsRuntime) { } pub fn get_stdio() -> ( - Option, - Option, - Option, + Option, + Option, + Option, ) { - let stdin = get_stdio_stream(&STDIN_HANDLE); - let stdout = get_stdio_stream(&STDOUT_HANDLE); - let stderr = get_stdio_stream(&STDERR_HANDLE); + let stdin = 
get_stdio_stream(&STDIN_HANDLE, "stdin"); + let stdout = get_stdio_stream(&STDOUT_HANDLE, "stdout"); + let stderr = get_stdio_stream(&STDERR_HANDLE, "stderr"); (stdin, stdout, stderr) } fn get_stdio_stream( handle: &Option, -) -> Option { + name: &str, +) -> Option { match handle { None => None, Some(file_handle) => match file_handle.try_clone() { - Ok(clone) => Some(StreamResourceHolder::new(StreamResource::FsFile( - Some((tokio::fs::File::from_std(clone), FileMetadata::default())), - ))), + Ok(clone) => { + let tokio_file = tokio::fs::File::from_std(clone); + Some(StreamResource::stdio(tokio_file, name)) + } Err(_e) => None, }, } @@ -137,100 +142,317 @@ pub struct FileMetadata { pub tty: TTYMetadata, } -pub struct StreamResourceHolder { - pub resource: StreamResource, - waker: HashMap, - waker_counter: AtomicUsize, +#[derive(Debug)] +pub struct FullDuplexResource { + rd: AsyncRefCell, + wr: AsyncRefCell, + // When a full-duplex resource is closed, all pending 'read' ops are + // canceled, while 'write' ops are allowed to complete. Therefore only + // 'read' futures should be attached to this cancel handle. + cancel_handle: CancelHandle, } -impl StreamResourceHolder { - pub fn new(resource: StreamResource) -> StreamResourceHolder { - StreamResourceHolder { - resource, - // Atleast one task is expecter for the resource - waker: HashMap::with_capacity(1), - // Tracks wakers Ids - waker_counter: AtomicUsize::new(0), +impl FullDuplexResource { + pub fn new((rd, wr): (R, W)) -> Self { + Self { + rd: rd.into(), + wr: wr.into(), + cancel_handle: Default::default(), } } -} -impl Drop for StreamResourceHolder { - fn drop(&mut self) { - self.wake_tasks(); + pub fn into_inner(self) -> (R, W) { + (self.rd.into_inner(), self.wr.into_inner()) + } + + pub fn rd_borrow_mut(self: &Rc) -> AsyncMutFuture { + RcRef::map(self, |r| &r.rd).borrow_mut() + } + + pub fn wr_borrow_mut(self: &Rc) -> AsyncMutFuture { + RcRef::map(self, |r| &r.wr).borrow_mut() + } + + pub fn cancel_handle(self: &Rc) -> RcRef { + RcRef::map(self, |r| &r.cancel_handle) + } + + pub fn cancel_read_ops(&self) { + self.cancel_handle.cancel() } } -impl StreamResourceHolder { - pub fn track_task(&mut self, cx: &Context) -> Result { - let waker = futures::task::AtomicWaker::new(); - waker.register(cx.waker()); - // Its OK if it overflows - let task_waker_id = self.waker_counter.fetch_add(1, Ordering::Relaxed); - self.waker.insert(task_waker_id, waker); - Ok(task_waker_id) +impl FullDuplexResource +where + R: AsyncRead + Unpin + 'static, + W: AsyncWrite + Unpin + 'static, +{ + async fn read(self: &Rc, buf: &mut [u8]) -> Result { + let mut rd = self.rd_borrow_mut().await; + let nread = rd.read(buf).try_or_cancel(self.cancel_handle()).await?; + Ok(nread) } - pub fn wake_tasks(&mut self) { - for waker in self.waker.values() { - waker.wake(); - } + async fn write(self: &Rc, buf: &[u8]) -> Result { + let mut wr = self.wr_borrow_mut().await; + let nwritten = wr.write(buf).await?; + Ok(nwritten) } +} - pub fn untrack_task(&mut self, task_waker_id: usize) { - self.waker.remove(&task_waker_id); +pub type TcpStreamResource = + FullDuplexResource; + +impl Resource for TcpStreamResource { + fn name(&self) -> Cow { + "tcpStream".into() + } + + fn close(self: Rc) { + self.cancel_read_ops(); } } -pub enum StreamResource { - FsFile(Option<(tokio::fs::File, FileMetadata)>), - TcpStream(Option), - #[cfg(not(windows))] - UnixStream(tokio::net::UnixStream), - ServerTlsStream(Box>), - ClientTlsStream(Box>), - ChildStdin(tokio::process::ChildStdin), - 
ChildStdout(tokio::process::ChildStdout), - ChildStderr(tokio::process::ChildStderr), +#[derive(Default)] +pub struct StreamResource { + pub fs_file: + Option, Option)>>, + + #[cfg(unix)] + pub unix_stream: Option>, + + child_stdin: Option>, + + child_stdout: Option>, + + child_stderr: Option>, + + client_tls_stream: Option>>, + + server_tls_stream: Option>>, + + cancel: CancelHandle, + name: String, +} + +impl std::fmt::Debug for StreamResource { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "StreamResource") + } } -trait UnpinAsyncRead: AsyncRead + Unpin {} -trait UnpinAsyncWrite: AsyncWrite + Unpin {} +impl StreamResource { + pub fn stdio(fs_file: tokio::fs::File, name: &str) -> Self { + Self { + fs_file: Some(AsyncRefCell::new(( + Some(fs_file), + Some(FileMetadata::default()), + ))), + name: name.to_string(), + ..Default::default() + } + } + + pub fn fs_file(fs_file: tokio::fs::File) -> Self { + Self { + fs_file: Some(AsyncRefCell::new(( + Some(fs_file), + Some(FileMetadata::default()), + ))), + name: "fsFile".to_string(), + ..Default::default() + } + } + + #[cfg(unix)] + pub fn unix_stream(unix_stream: tokio::net::UnixStream) -> Self { + Self { + unix_stream: Some(AsyncRefCell::new(unix_stream)), + name: "unixStream".to_string(), + ..Default::default() + } + } + + pub fn child_stdout(child: tokio::process::ChildStdout) -> Self { + Self { + child_stdout: Some(AsyncRefCell::new(child)), + name: "childStdout".to_string(), + ..Default::default() + } + } -impl UnpinAsyncRead for T {} -impl UnpinAsyncWrite for T {} + pub fn child_stderr(child: tokio::process::ChildStderr) -> Self { + Self { + child_stderr: Some(AsyncRefCell::new(child)), + name: "childStderr".to_string(), + ..Default::default() + } + } + + pub fn child_stdin(child: tokio::process::ChildStdin) -> Self { + Self { + child_stdin: Some(AsyncRefCell::new(child)), + name: "childStdin".to_string(), + ..Default::default() + } + } + + pub fn client_tls_stream(stream: ClientTlsStream) -> Self { + Self { + client_tls_stream: Some(AsyncRefCell::new(stream)), + name: "clientTlsStream".to_string(), + ..Default::default() + } + } -/// `DenoAsyncRead` is the same as the `tokio_io::AsyncRead` trait -/// but uses an `AnyError` error instead of `std::io:Error` -pub trait DenoAsyncRead { - fn poll_read( - &mut self, - cx: &mut Context, - buf: &mut [u8], - ) -> Poll>; + pub fn server_tls_stream(stream: ServerTlsStream) -> Self { + Self { + server_tls_stream: Some(AsyncRefCell::new(stream)), + name: "serverTlsStream".to_string(), + ..Default::default() + } + } + + async fn read(self: Rc, buf: &mut [u8]) -> Result { + // TODO(bartlomieju): in the future, it would be better for `StreamResource` + // to be an enum instead a struct with many `Option` fields, however I + // wasn't able to get it to work with `AsyncRefCell`s. 
+ if self.fs_file.is_some() { + debug_assert!(self.child_stdin.is_none()); + debug_assert!(self.child_stdout.is_none()); + debug_assert!(self.child_stderr.is_none()); + debug_assert!(self.server_tls_stream.is_none()); + debug_assert!(self.client_tls_stream.is_none()); + let mut fs_file = RcRef::map(&self, |r| r.fs_file.as_ref().unwrap()) + .borrow_mut() + .await; + let nwritten = (*fs_file).0.as_mut().unwrap().read(buf).await?; + return Ok(nwritten); + } else if self.child_stdout.is_some() { + debug_assert!(self.child_stdin.is_none()); + debug_assert!(self.child_stderr.is_none()); + debug_assert!(self.server_tls_stream.is_none()); + debug_assert!(self.client_tls_stream.is_none()); + let mut child_stdout = + RcRef::map(&self, |r| r.child_stdout.as_ref().unwrap()) + .borrow_mut() + .await; + let cancel = RcRef::map(self, |r| &r.cancel); + let nread = child_stdout.read(buf).try_or_cancel(cancel).await?; + return Ok(nread); + } else if self.child_stderr.is_some() { + debug_assert!(self.child_stdin.is_none()); + debug_assert!(self.server_tls_stream.is_none()); + debug_assert!(self.client_tls_stream.is_none()); + let mut child_stderr = + RcRef::map(&self, |r| r.child_stderr.as_ref().unwrap()) + .borrow_mut() + .await; + let cancel = RcRef::map(self, |r| &r.cancel); + let nread = child_stderr.read(buf).try_or_cancel(cancel).await?; + return Ok(nread); + } else if self.client_tls_stream.is_some() { + debug_assert!(self.server_tls_stream.is_none()); + let mut client_tls_stream = + RcRef::map(&self, |r| r.client_tls_stream.as_ref().unwrap()) + .borrow_mut() + .await; + let cancel = RcRef::map(self, |r| &r.cancel); + let nread = client_tls_stream.read(buf).try_or_cancel(cancel).await?; + return Ok(nread); + } else if self.server_tls_stream.is_some() { + let mut server_tls_stream = + RcRef::map(&self, |r| r.server_tls_stream.as_ref().unwrap()) + .borrow_mut() + .await; + let cancel = RcRef::map(self, |r| &r.cancel); + let nread = server_tls_stream.read(buf).try_or_cancel(cancel).await?; + return Ok(nread); + } + + #[cfg(unix)] + if self.unix_stream.is_some() { + let mut unix_stream = + RcRef::map(&self, |r| r.unix_stream.as_ref().unwrap()) + .borrow_mut() + .await; + let cancel = RcRef::map(self, |r| &r.cancel); + let nread = unix_stream.read(buf).try_or_cancel(cancel).await?; + return Ok(nread); + } + + Err(bad_resource_id()) + } + + async fn write(self: Rc, buf: &[u8]) -> Result { + // TODO(bartlomieju): in the future, it would be better for `StreamResource` + // to be an enum instead a struct with many `Option` fields, however I + // wasn't able to get it to work with `AsyncRefCell`s. 
+ if self.fs_file.is_some() { + debug_assert!(self.child_stdin.is_none()); + debug_assert!(self.child_stdout.is_none()); + debug_assert!(self.child_stderr.is_none()); + debug_assert!(self.server_tls_stream.is_none()); + debug_assert!(self.client_tls_stream.is_none()); + let mut fs_file = RcRef::map(&self, |r| r.fs_file.as_ref().unwrap()) + .borrow_mut() + .await; + let nwritten = (*fs_file).0.as_mut().unwrap().write(buf).await?; + (*fs_file).0.as_mut().unwrap().flush().await?; + return Ok(nwritten); + } else if self.child_stdin.is_some() { + debug_assert!(self.child_stdout.is_none()); + debug_assert!(self.child_stderr.is_none()); + debug_assert!(self.server_tls_stream.is_none()); + debug_assert!(self.client_tls_stream.is_none()); + let mut child_stdin = + RcRef::map(&self, |r| r.child_stdin.as_ref().unwrap()) + .borrow_mut() + .await; + let nwritten = child_stdin.write(buf).await?; + child_stdin.flush().await?; + return Ok(nwritten); + } else if self.client_tls_stream.is_some() { + debug_assert!(self.server_tls_stream.is_none()); + let mut client_tls_stream = + RcRef::map(&self, |r| r.client_tls_stream.as_ref().unwrap()) + .borrow_mut() + .await; + let nwritten = client_tls_stream.write(buf).await?; + client_tls_stream.flush().await?; + return Ok(nwritten); + } else if self.server_tls_stream.is_some() { + let mut server_tls_stream = + RcRef::map(&self, |r| r.server_tls_stream.as_ref().unwrap()) + .borrow_mut() + .await; + let nwritten = server_tls_stream.write(buf).await?; + server_tls_stream.flush().await?; + return Ok(nwritten); + } + + #[cfg(unix)] + if self.unix_stream.is_some() { + let mut unix_stream = + RcRef::map(&self, |r| r.unix_stream.as_ref().unwrap()) + .borrow_mut() + .await; + let nwritten = unix_stream.write(buf).await?; + unix_stream.flush().await?; + return Ok(nwritten); + } + + Err(bad_resource_id()) + } } -impl DenoAsyncRead for StreamResource { - fn poll_read( - &mut self, - cx: &mut Context, - buf: &mut [u8], - ) -> Poll> { - use StreamResource::*; - let f: &mut dyn UnpinAsyncRead = match self { - FsFile(Some((f, _))) => f, - FsFile(None) => return Poll::Ready(Err(resource_unavailable())), - TcpStream(Some(f)) => f, - #[cfg(not(windows))] - UnixStream(f) => f, - ClientTlsStream(f) => f, - ServerTlsStream(f) => f, - ChildStdout(f) => f, - ChildStderr(f) => f, - _ => return Err(bad_resource_id()).into(), - }; - let v = ready!(Pin::new(f).poll_read(cx, buf))?; - Ok(v).into() +impl Resource for StreamResource { + fn name(&self) -> Cow { + self.name.clone().into() + } + + fn close(self: Rc) { + self.cancel.cancel() } } @@ -263,92 +485,26 @@ pub fn op_read( }) } else { let mut zero_copy = zero_copy[0].clone(); - MinimalOp::Async( - poll_fn(move |cx| { - let mut state = state.borrow_mut(); - let resource_holder = state + MinimalOp::Async({ + async move { + let resource = state + .borrow() .resource_table - .get_mut::(rid as u32) + .get_any(rid as u32) .ok_or_else(bad_resource_id)?; - - let mut task_tracker_id: Option = None; - let nread = match resource_holder.resource.poll_read(cx, &mut zero_copy) + let nread = if let Some(stream) = + resource.downcast_rc::() { - Poll::Ready(t) => { - if let Some(id) = task_tracker_id { - resource_holder.untrack_task(id); - } - t - } - Poll::Pending => { - task_tracker_id.replace(resource_holder.track_task(cx)?); - return Poll::Pending; - } - }?; - Poll::Ready(Ok(nread as i32)) - }) - .boxed_local(), - ) - } -} - -/// `DenoAsyncWrite` is the same as the `tokio_io::AsyncWrite` trait -/// but uses an `AnyError` error instead of 
`std::io:Error` -pub trait DenoAsyncWrite { - fn poll_write( - &mut self, - cx: &mut Context, - buf: &[u8], - ) -> Poll>; - - fn poll_close(&mut self, cx: &mut Context) -> Poll>; - - fn poll_flush(&mut self, cx: &mut Context) -> Poll>; -} - -impl DenoAsyncWrite for StreamResource { - fn poll_write( - &mut self, - cx: &mut Context, - buf: &[u8], - ) -> Poll> { - use StreamResource::*; - let f: &mut dyn UnpinAsyncWrite = match self { - FsFile(Some((f, _))) => f, - FsFile(None) => return Poll::Pending, - TcpStream(Some(f)) => f, - #[cfg(not(windows))] - UnixStream(f) => f, - ClientTlsStream(f) => f, - ServerTlsStream(f) => f, - ChildStdin(f) => f, - _ => return Err(bad_resource_id()).into(), - }; - - let v = ready!(Pin::new(f).poll_write(cx, buf))?; - Ok(v).into() - } - - fn poll_flush(&mut self, cx: &mut Context) -> Poll> { - use StreamResource::*; - let f: &mut dyn UnpinAsyncWrite = match self { - FsFile(Some((f, _))) => f, - FsFile(None) => return Poll::Pending, - TcpStream(Some(f)) => f, - #[cfg(not(windows))] - UnixStream(f) => f, - ClientTlsStream(f) => f, - ServerTlsStream(f) => f, - ChildStdin(f) => f, - _ => return Err(bad_resource_id()).into(), - }; - - ready!(Pin::new(f).poll_flush(cx))?; - Ok(()).into() - } - - fn poll_close(&mut self, _cx: &mut Context) -> Poll> { - unimplemented!() + stream.read(&mut zero_copy).await? + } else if let Some(stream) = resource.downcast_rc::() { + stream.clone().read(&mut zero_copy).await? + } else { + return Err(bad_resource_id()); + }; + Ok(nread as i32) + } + .boxed_local() + }) } } @@ -381,93 +537,76 @@ pub fn op_write( }) } else { let zero_copy = zero_copy[0].clone(); - MinimalOp::Async( + MinimalOp::Async({ async move { - let nwritten = poll_fn(|cx| { - let mut state = state.borrow_mut(); - let resource_holder = state - .resource_table - .get_mut::(rid as u32) - .ok_or_else(bad_resource_id)?; - resource_holder.resource.poll_write(cx, &zero_copy) - }) - .await?; - - // TODO(bartlomieju): this step was added during upgrade to Tokio 0.2 - // and the reasons for the need to explicitly flush are not fully known. - // Figure out why it's needed and preferably remove it. - // https://github.com/denoland/deno/issues/3565 - poll_fn(|cx| { - let mut state = state.borrow_mut(); - let resource_holder = state - .resource_table - .get_mut::(rid as u32) - .ok_or_else(bad_resource_id)?; - resource_holder.resource.poll_flush(cx) - }) - .await?; - + let resource = state + .borrow() + .resource_table + .get_any(rid as u32) + .ok_or_else(bad_resource_id)?; + let nwritten = if let Some(stream) = + resource.downcast_rc::() + { + stream.write(&zero_copy).await? + } else if let Some(stream) = resource.downcast_rc::() { + stream.clone().write(&zero_copy).await? + } else { + return Err(bad_resource_id()); + }; Ok(nwritten as i32) } - .boxed_local(), - ) + .boxed_local() + }) } } -/// Helper function for operating on a std::fs::File stored in the resource table. -/// -/// We store file system file resources as tokio::fs::File, so this is a little -/// utility function that gets a std::fs:File when you need to do blocking -/// operations. -/// -/// Returns ErrorKind::Busy if the resource is being used by another op. pub fn std_file_resource( state: &mut OpState, rid: u32, mut f: F, ) -> Result where - F: FnMut( - Result<&mut std::fs::File, &mut StreamResource>, - ) -> Result, + F: FnMut(Result<&mut std::fs::File, ()>) -> Result, { // First we look up the rid in the resource table. 
- let mut r = state.resource_table.get_mut::(rid); - if let Some(ref mut resource_holder) = r { - // Sync write only works for FsFile. It doesn't make sense to do this - // for non-blocking sockets. So we error out if not FsFile. - match &mut resource_holder.resource { - StreamResource::FsFile(option_file_metadata) => { - // The object in the resource table is a tokio::fs::File - but in - // order to do a blocking write on it, we must turn it into a - // std::fs::File. Hopefully this code compiles down to nothing. - if let Some((tokio_file, metadata)) = option_file_metadata.take() { - match tokio_file.try_into_std() { - Ok(mut std_file) => { - let result = f(Ok(&mut std_file)); - // Turn the std_file handle back into a tokio file, put it back - // in the resource table. - let tokio_file = tokio::fs::File::from_std(std_file); - resource_holder.resource = - StreamResource::FsFile(Some((tokio_file, metadata))); - // return the result. - result - } - Err(tokio_file) => { - // This function will return an error containing the file if - // some operation is in-flight. - resource_holder.resource = - StreamResource::FsFile(Some((tokio_file, metadata))); - Err(resource_unavailable()) - } - } - } else { - Err(resource_unavailable()) - } + let resource = state + .resource_table + .get::(rid) + .ok_or_else(bad_resource_id)?; + + // Sync write only works for FsFile. It doesn't make sense to do this + // for non-blocking sockets. So we error out if not FsFile. + if resource.fs_file.is_none() { + return f(Err(())); + } + + // The object in the resource table is a tokio::fs::File - but in + // order to do a blocking write on it, we must turn it into a + // std::fs::File. Hopefully this code compiles down to nothing. + + let fs_file_resource = + RcRef::map(&resource, |r| r.fs_file.as_ref().unwrap()).try_borrow_mut(); + + if let Some(mut fs_file) = fs_file_resource { + let tokio_file = fs_file.0.take().unwrap(); + match tokio_file.try_into_std() { + Ok(mut std_file) => { + let result = f(Ok(&mut std_file)); + // Turn the std_file handle back into a tokio file, put it back + // in the resource table. + let tokio_file = tokio::fs::File::from_std(std_file); + fs_file.0 = Some(tokio_file); + // return the result. + result + } + Err(tokio_file) => { + // This function will return an error containing the file if + // some operation is in-flight. + fs_file.0 = Some(tokio_file); + Err(resource_unavailable()) } - _ => f(Err(&mut resource_holder.resource)), } } else { - Err(bad_resource_id()) + Err(resource_unavailable()) } } diff --git a/runtime/ops/net.rs b/runtime/ops/net.rs index 8770ef103bb102..a4bda585b4d7e3 100644 --- a/runtime/ops/net.rs +++ b/runtime/ops/net.rs @@ -1,7 +1,7 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
-use crate::ops::io::StreamResource; -use crate::ops::io::StreamResourceHolder; +use crate::ops::io::FullDuplexResource; +use crate::ops::io::TcpStreamResource; use crate::permissions::Permissions; use crate::resolve_addr::resolve_addr; use crate::resolve_addr::resolve_addr_sync; @@ -11,21 +11,24 @@ use deno_core::error::custom_error; use deno_core::error::generic_error; use deno_core::error::type_error; use deno_core::error::AnyError; -use deno_core::futures; -use deno_core::futures::future::poll_fn; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; +use deno_core::AsyncRefCell; use deno_core::BufVec; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; use deno_core::ZeroCopyBuf; use serde::Deserialize; +use std::borrow::Cow; use std::cell::RefCell; use std::net::Shutdown; use std::net::SocketAddr; use std::rc::Rc; -use std::task::Context; -use std::task::Poll; +use tokio::net::udp; use tokio::net::TcpListener; use tokio::net::TcpStream; use tokio::net::UdpSocket; @@ -33,12 +36,14 @@ use tokio::net::UdpSocket; #[cfg(unix)] use super::net_unix; #[cfg(unix)] +use crate::ops::io::StreamResource; +#[cfg(unix)] use std::path::Path; pub fn init(rt: &mut deno_core::JsRuntime) { super::reg_json_async(rt, "op_accept", op_accept); super::reg_json_async(rt, "op_connect", op_connect); - super::reg_json_sync(rt, "op_shutdown", op_shutdown); + super::reg_json_async(rt, "op_shutdown", op_shutdown); super::reg_json_sync(rt, "op_listen", op_listen); super::reg_json_async(rt, "op_datagram_receive", op_datagram_receive); super::reg_json_async(rt, "op_datagram_send", op_datagram_send); @@ -57,39 +62,31 @@ async fn accept_tcp( ) -> Result { let rid = args.rid as u32; - let accept_fut = poll_fn(|cx| { - let mut state = state.borrow_mut(); - let listener_resource = state - .resource_table - .get_mut::(rid) - .ok_or_else(|| bad_resource("Listener has been closed"))?; - let listener = &mut listener_resource.listener; - match listener.poll_accept(cx).map_err(AnyError::from) { - Poll::Ready(Ok((stream, addr))) => { - listener_resource.untrack_task(); - Poll::Ready(Ok((stream, addr))) - } - Poll::Pending => { - listener_resource.track_task(cx)?; - Poll::Pending - } - Poll::Ready(Err(e)) => { - listener_resource.untrack_task(); - Poll::Ready(Err(e)) + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(|| bad_resource("Listener has been closed"))?; + let mut listener = RcRef::map(&resource, |r| &r.listener) + .try_borrow_mut() + .ok_or_else(|| custom_error("Busy", "Another accept task is ongoing"))?; + let cancel = RcRef::map(resource, |r| &r.cancel); + let (tcp_stream, _socket_addr) = + listener.accept().try_or_cancel(cancel).await.map_err(|e| { + // FIXME(bartlomieju): compatibility with current JS implementation + if let std::io::ErrorKind::Interrupted = e.kind() { + bad_resource("Listener has been closed") + } else { + e.into() } - } - }); - let (tcp_stream, _socket_addr) = accept_fut.await?; + })?; let local_addr = tcp_stream.local_addr()?; let remote_addr = tcp_stream.peer_addr()?; let mut state = state.borrow_mut(); - let rid = state.resource_table.add( - "tcpStream", - Box::new(StreamResourceHolder::new(StreamResource::TcpStream(Some( - tcp_stream, - )))), - ); + let rid = state + .resource_table + .add(TcpStreamResource::new(tcp_stream.into_split())); Ok(json!({ "rid": rid, "localAddr": { @@ -138,18 +135,17 @@ async fn receive_udp( let rid = args.rid as 
u32; - let receive_fut = poll_fn(|cx| { - let mut state = state.borrow_mut(); - let resource = state - .resource_table - .get_mut::(rid) - .ok_or_else(|| bad_resource("Socket has been closed"))?; - let socket = &mut resource.socket; - socket - .poll_recv_from(cx, &mut zero_copy) - .map_err(AnyError::from) - }); - let (size, remote_addr) = receive_fut.await?; + let resource = state + .borrow_mut() + .resource_table + .get::(rid) + .ok_or_else(|| bad_resource("Socket has been closed"))?; + let (size, remote_addr) = resource + .rd_borrow_mut() + .await + .recv_from(&mut zero_copy) + .try_or_cancel(resource.cancel_handle()) + .await?; Ok(json!({ "size": size, "remoteAddr": { @@ -207,19 +203,18 @@ async fn op_datagram_send( .check_net(&args.hostname, args.port)?; } let addr = resolve_addr(&args.hostname, args.port).await?; - poll_fn(move |cx| { - let mut state = state.borrow_mut(); - let resource = state - .resource_table - .get_mut::(rid as u32) - .ok_or_else(|| bad_resource("Socket has been closed"))?; - resource - .socket - .poll_send_to(cx, &zero_copy, &addr) - .map_ok(|byte_length| json!(byte_length)) - .map_err(AnyError::from) - }) - .await + + let resource = state + .borrow_mut() + .resource_table + .get::(rid as u32) + .ok_or_else(|| bad_resource("Socket has been closed"))?; + let byte_length = resource + .wr_borrow_mut() + .await + .send_to(&zero_copy, &addr) + .await?; + Ok(json!(byte_length)) } #[cfg(unix)] SendArgs { @@ -232,18 +227,17 @@ async fn op_datagram_send( let s = state.borrow(); s.borrow::().check_write(&address_path)?; } - let mut state = state.borrow_mut(); let resource = state + .borrow() .resource_table - .get_mut::(rid as u32) + .get::(rid as u32) .ok_or_else(|| { custom_error("NotConnected", "Socket has been closed") })?; - let socket = &mut resource.socket; - let byte_length = socket - .send_to(&zero_copy, &resource.local_addr.as_pathname().unwrap()) - .await?; - + let mut socket = RcRef::map(&resource, |r| &r.socket) + .try_borrow_mut() + .ok_or_else(|| custom_error("Busy", "Socket already in use"))?; + let byte_length = socket.send_to(&zero_copy, address_path).await?; Ok(json!(byte_length)) } _ => Err(type_error("Wrong argument format!")), @@ -279,12 +273,9 @@ async fn op_connect( let remote_addr = tcp_stream.peer_addr()?; let mut state_ = state.borrow_mut(); - let rid = state_.resource_table.add( - "tcpStream", - Box::new(StreamResourceHolder::new(StreamResource::TcpStream(Some( - tcp_stream, - )))), - ); + let rid = state_ + .resource_table + .add(TcpStreamResource::new(tcp_stream.into_split())); Ok(json!({ "rid": rid, "localAddr": { @@ -317,12 +308,8 @@ async fn op_connect( let remote_addr = unix_stream.peer_addr()?; let mut state_ = state.borrow_mut(); - let rid = state_.resource_table.add( - "unixStream", - Box::new(StreamResourceHolder::new(StreamResource::UnixStream( - unix_stream, - ))), - ); + let resource = StreamResource::unix_stream(unix_stream); + let rid = state_.resource_table.add(resource); Ok(json!({ "rid": rid, "localAddr": { @@ -345,12 +332,12 @@ struct ShutdownArgs { how: i32, } -fn op_shutdown( - state: &mut OpState, +async fn op_shutdown( + state: Rc>, args: Value, - _zero_copy: &mut [ZeroCopyBuf], + _zero_copy: BufVec, ) -> Result { - super::check_unstable(state, "Deno.shutdown"); + super::check_unstable2(&state, "Deno.shutdown"); let args: ShutdownArgs = serde_json::from_value(args)?; @@ -358,80 +345,61 @@ fn op_shutdown( let how = args.how; let shutdown_mode = match how { - 0 => Shutdown::Read, + 0 => Shutdown::Read, // TODO: nonsense, 
remove me. 1 => Shutdown::Write, _ => unimplemented!(), }; - let resource_holder = state + let resource = state + .borrow() .resource_table - .get_mut::(rid) + .get_any(rid) .ok_or_else(bad_resource_id)?; - match resource_holder.resource { - StreamResource::TcpStream(Some(ref mut stream)) => { - TcpStream::shutdown(stream, shutdown_mode)?; - } - #[cfg(unix)] - StreamResource::UnixStream(ref mut stream) => { - net_unix::UnixStream::shutdown(stream, shutdown_mode)?; + if let Some(stream) = resource.downcast_rc::() { + let wr = stream.wr_borrow_mut().await; + TcpStream::shutdown((*wr).as_ref(), shutdown_mode)?; + return Ok(json!({})); + } + + #[cfg(unix)] + if let Some(stream) = resource.downcast_rc::() { + if stream.unix_stream.is_some() { + let wr = RcRef::map(stream, |r| r.unix_stream.as_ref().unwrap()) + .borrow_mut() + .await; + net_unix::UnixStream::shutdown(&*wr, shutdown_mode)?; + return Ok(json!({})); } - _ => return Err(bad_resource_id()), } - Ok(json!({})) + Err(bad_resource_id()) } -#[allow(dead_code)] struct TcpListenerResource { - listener: TcpListener, - waker: Option, - local_addr: SocketAddr, + listener: AsyncRefCell, + cancel: CancelHandle, } -impl Drop for TcpListenerResource { - fn drop(&mut self) { - self.wake_task(); +impl Resource for TcpListenerResource { + fn name(&self) -> Cow { + "tcpListener".into() } -} - -impl TcpListenerResource { - /// Track the current task so future awaiting for connection - /// can be notified when listener is closed. - /// - /// Throws an error if another task is already tracked. - pub fn track_task(&mut self, cx: &Context) -> Result<(), AnyError> { - // Currently, we only allow tracking a single accept task for a listener. - // This might be changed in the future with multiple workers. - // Caveat: TcpListener by itself also only tracks an accept task at a time. - // See https://github.com/tokio-rs/tokio/issues/846#issuecomment-454208883 - if self.waker.is_some() { - return Err(custom_error("Busy", "Another accept task is ongoing")); - } - let waker = futures::task::AtomicWaker::new(); - waker.register(cx.waker()); - self.waker.replace(waker); - Ok(()) + fn close(self: Rc) { + self.cancel.cancel(); } +} - /// Notifies a task when listener is closed so accept future can resolve. - pub fn wake_task(&mut self) { - if let Some(waker) = self.waker.as_ref() { - waker.wake(); - } - } +type UdpSocketResource = FullDuplexResource; - /// Stop tracking a task. - /// Happens when the task is done and thus no further tracking is needed. 
- pub fn untrack_task(&mut self) { - if self.waker.is_some() { - self.waker.take(); - } +impl Resource for UdpSocketResource { + fn name(&self) -> Cow { + "udpSocket".into() } -} -struct UdpSocketResource { - socket: UdpSocket, + fn close(self: Rc) { + self.cancel_read_ops() + } } #[derive(Deserialize)] @@ -463,13 +431,10 @@ fn listen_tcp( let listener = TcpListener::from_std(std_listener)?; let local_addr = listener.local_addr()?; let listener_resource = TcpListenerResource { - listener, - waker: None, - local_addr, + listener: AsyncRefCell::new(listener), + cancel: Default::default(), }; - let rid = state - .resource_table - .add("tcpListener", Box::new(listener_resource)); + let rid = state.resource_table.add(listener_resource); Ok((rid, local_addr)) } @@ -481,10 +446,8 @@ fn listen_udp( let std_socket = std::net::UdpSocket::bind(&addr)?; let socket = UdpSocket::from_std(std_socket)?; let local_addr = socket.local_addr()?; - let socket_resource = UdpSocketResource { socket }; - let rid = state - .resource_table - .add("udpSocket", Box::new(socket_resource)); + let socket_resource = UdpSocketResource::new(socket.split()); + let rid = state.resource_table.add(socket_resource); Ok((rid, local_addr)) } diff --git a/runtime/ops/net_unix.rs b/runtime/ops/net_unix.rs index 4c416a5a4e27a2..23981a7f1c8687 100644 --- a/runtime/ops/net_unix.rs +++ b/runtime/ops/net_unix.rs @@ -1,34 +1,59 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. use crate::ops::io::StreamResource; -use crate::ops::io::StreamResourceHolder; use crate::ops::net::AcceptArgs; use crate::ops::net::ReceiveArgs; use deno_core::error::bad_resource; +use deno_core::error::custom_error; use deno_core::error::AnyError; -use deno_core::futures::future::poll_fn; use deno_core::serde_json::json; use deno_core::serde_json::Value; +use deno_core::AsyncRefCell; use deno_core::BufVec; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; use serde::Deserialize; +use std::borrow::Cow; use std::cell::RefCell; use std::fs::remove_file; use std::os::unix; use std::path::Path; use std::rc::Rc; -use std::task::Poll; use tokio::net::UnixDatagram; use tokio::net::UnixListener; pub use tokio::net::UnixStream; struct UnixListenerResource { - listener: UnixListener, + listener: AsyncRefCell, + cancel: CancelHandle, +} + +impl Resource for UnixListenerResource { + fn name(&self) -> Cow { + "unixListener".into() + } + + fn close(self: Rc) { + self.cancel.cancel(); + } } pub struct UnixDatagramResource { - pub socket: UnixDatagram, - pub local_addr: unix::net::SocketAddr, + pub socket: AsyncRefCell, + pub cancel: CancelHandle, +} + +impl Resource for UnixDatagramResource { + fn name(&self) -> Cow { + "unixDatagram".into() + } + + fn close(self: Rc) { + self.cancel.cancel(); + } } #[derive(Deserialize)] @@ -43,38 +68,23 @@ pub(crate) async fn accept_unix( ) -> Result { let rid = args.rid as u32; - let accept_fut = poll_fn(|cx| { - let mut state = state.borrow_mut(); - let listener_resource = state - .resource_table - .get_mut::(rid) - .ok_or_else(|| bad_resource("Listener has been closed"))?; - let listener = &mut listener_resource.listener; - use deno_core::futures::StreamExt; - match listener.poll_next_unpin(cx) { - Poll::Ready(Some(stream)) => { - //listener_resource.untrack_task(); - Poll::Ready(stream) - } - Poll::Ready(None) => todo!(), - Poll::Pending => { - //listener_resource.track_task(cx)?; - Poll::Pending - } - } - 
.map_err(AnyError::from) - }); - let unix_stream = accept_fut.await?; + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(|| bad_resource("Listener has been closed"))?; + let mut listener = RcRef::map(&resource, |r| &r.listener) + .try_borrow_mut() + .ok_or_else(|| custom_error("Busy", "Listener already in use"))?; + let cancel = RcRef::map(resource, |r| &r.cancel); + let (unix_stream, _socket_addr) = + listener.accept().try_or_cancel(cancel).await?; let local_addr = unix_stream.local_addr()?; let remote_addr = unix_stream.peer_addr()?; + let resource = StreamResource::unix_stream(unix_stream); let mut state = state.borrow_mut(); - let rid = state.resource_table.add( - "unixStream", - Box::new(StreamResourceHolder::new(StreamResource::UnixStream( - unix_stream, - ))), - ); + let rid = state.resource_table.add(resource); Ok(json!({ "rid": rid, "localAddr": { @@ -98,12 +108,17 @@ pub(crate) async fn receive_unix_packet( let rid = args.rid as u32; let mut buf = bufs.into_iter().next().unwrap(); - let mut state = state.borrow_mut(); let resource = state + .borrow() .resource_table - .get_mut::(rid) + .get::(rid) .ok_or_else(|| bad_resource("Socket has been closed"))?; - let (size, remote_addr) = resource.socket.recv_from(&mut buf).await?; + let mut socket = RcRef::map(&resource, |r| &r.socket) + .try_borrow_mut() + .ok_or_else(|| custom_error("Busy", "Socket already in use"))?; + let cancel = RcRef::map(resource, |r| &r.cancel); + let (size, remote_addr) = + socket.recv_from(&mut buf).try_or_cancel(cancel).await?; Ok(json!({ "size": size, "remoteAddr": { @@ -122,10 +137,11 @@ pub fn listen_unix( } let listener = UnixListener::bind(&addr)?; let local_addr = listener.local_addr()?; - let listener_resource = UnixListenerResource { listener }; - let rid = state - .resource_table - .add("unixListener", Box::new(listener_resource)); + let listener_resource = UnixListenerResource { + listener: AsyncRefCell::new(listener), + cancel: Default::default(), + }; + let rid = state.resource_table.add(listener_resource); Ok((rid, local_addr)) } @@ -140,12 +156,10 @@ pub fn listen_unix_packet( let socket = UnixDatagram::bind(&addr)?; let local_addr = socket.local_addr()?; let datagram_resource = UnixDatagramResource { - socket, - local_addr: local_addr.clone(), + socket: AsyncRefCell::new(socket), + cancel: Default::default(), }; - let rid = state - .resource_table - .add("unixDatagram", Box::new(datagram_resource)); + let rid = state.resource_table.add(datagram_resource); Ok((rid, local_addr)) } diff --git a/runtime/ops/plugin.rs b/runtime/ops/plugin.rs index 1f3669b6f02a6e..953d6f7d22ebcd 100644 --- a/runtime/ops/plugin.rs +++ b/runtime/ops/plugin.rs @@ -14,9 +14,11 @@ use deno_core::Op; use deno_core::OpAsyncFuture; use deno_core::OpId; use deno_core::OpState; +use deno_core::Resource; use deno_core::ZeroCopyBuf; use dlopen::symbor::Library; use serde::Deserialize; +use std::borrow::Cow; use std::cell::RefCell; use std::path::PathBuf; use std::pin::Pin; @@ -53,9 +55,7 @@ pub fn op_open_plugin( let rid; let deno_plugin_init; { - rid = state - .resource_table - .add("plugin", Box::new(plugin_resource)); + rid = state.resource_table.add(plugin_resource); deno_plugin_init = *unsafe { state .resource_table @@ -77,6 +77,12 @@ struct PluginResource { lib: Rc, } +impl Resource for PluginResource { + fn name(&self) -> Cow { + "plugin".into() + } +} + impl PluginResource { fn new(lib: &Rc) -> Self { Self { lib: lib.clone() } diff --git a/runtime/ops/process.rs b/runtime/ops/process.rs 
index 67b3d076106db3..b46627e21d465e 100644 --- a/runtime/ops/process.rs +++ b/runtime/ops/process.rs @@ -1,19 +1,22 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -use super::io::{std_file_resource, StreamResource, StreamResourceHolder}; +use super::io::{std_file_resource, StreamResource}; use crate::permissions::Permissions; use deno_core::error::bad_resource_id; use deno_core::error::type_error; use deno_core::error::AnyError; -use deno_core::futures::future::poll_fn; -use deno_core::futures::future::FutureExt; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; +use deno_core::AsyncMutFuture; +use deno_core::AsyncRefCell; use deno_core::BufVec; use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; use deno_core::ZeroCopyBuf; use serde::Deserialize; +use std::borrow::Cow; use std::cell::RefCell; use std::rc::Rc; use tokio::process::Command; @@ -61,7 +64,19 @@ struct RunArgs { } struct ChildResource { - child: tokio::process::Child, + child: AsyncRefCell, +} + +impl Resource for ChildResource { + fn name(&self) -> Cow { + "child".into() + } +} + +impl ChildResource { + fn borrow_mut(self: Rc) -> AsyncMutFuture { + RcRef::map(self, |r| &r.child).borrow_mut() + } } fn op_run( @@ -117,12 +132,9 @@ fn op_run( let stdin_rid = match child.stdin.take() { Some(child_stdin) => { - let rid = state.resource_table.add( - "childStdin", - Box::new(StreamResourceHolder::new(StreamResource::ChildStdin( - child_stdin, - ))), - ); + let rid = state + .resource_table + .add(StreamResource::child_stdin(child_stdin)); Some(rid) } None => None, @@ -130,12 +142,9 @@ fn op_run( let stdout_rid = match child.stdout.take() { Some(child_stdout) => { - let rid = state.resource_table.add( - "childStdout", - Box::new(StreamResourceHolder::new(StreamResource::ChildStdout( - child_stdout, - ))), - ); + let rid = state + .resource_table + .add(StreamResource::child_stdout(child_stdout)); Some(rid) } None => None, @@ -143,19 +152,18 @@ fn op_run( let stderr_rid = match child.stderr.take() { Some(child_stderr) => { - let rid = state.resource_table.add( - "childStderr", - Box::new(StreamResourceHolder::new(StreamResource::ChildStderr( - child_stderr, - ))), - ); + let rid = state + .resource_table + .add(StreamResource::child_stderr(child_stderr)); Some(rid) } None => None, }; - let child_resource = ChildResource { child }; - let child_rid = state.resource_table.add("child", Box::new(child_resource)); + let child_resource = ChildResource { + child: AsyncRefCell::new(child), + }; + let child_rid = state.resource_table.add(child_resource); Ok(json!({ "rid": child_rid, @@ -185,17 +193,13 @@ async fn op_run_status( s.borrow::().check_run()?; } - let run_status = poll_fn(|cx| { - let mut state = state.borrow_mut(); - let child_resource = state - .resource_table - .get_mut::(rid) - .ok_or_else(bad_resource_id)?; - let child = &mut child_resource.child; - child.poll_unpin(cx).map_err(AnyError::from) - }) - .await?; - + let resource = state + .borrow_mut() + .resource_table + .get::(rid) + .ok_or_else(bad_resource_id)?; + let mut child = resource.borrow_mut().await; + let run_status = (&mut *child).await?; let code = run_status.code(); #[cfg(unix)] diff --git a/runtime/ops/signal.rs b/runtime/ops/signal.rs index be6bc0a35567ea..b3891792c1d904 100644 --- a/runtime/ops/signal.rs +++ b/runtime/ops/signal.rs @@ -11,15 +11,23 @@ use std::rc::Rc; #[cfg(unix)] use deno_core::error::bad_resource_id; #[cfg(unix)] -use 
deno_core::futures::future::poll_fn; -#[cfg(unix)] use deno_core::serde_json; #[cfg(unix)] use deno_core::serde_json::json; #[cfg(unix)] +use deno_core::AsyncRefCell; +#[cfg(unix)] +use deno_core::CancelFuture; +#[cfg(unix)] +use deno_core::CancelHandle; +#[cfg(unix)] +use deno_core::RcRef; +#[cfg(unix)] +use deno_core::Resource; +#[cfg(unix)] use serde::Deserialize; #[cfg(unix)] -use std::task::Waker; +use std::borrow::Cow; #[cfg(unix)] use tokio::signal::unix::{signal, Signal, SignalKind}; @@ -32,7 +40,21 @@ pub fn init(rt: &mut deno_core::JsRuntime) { #[cfg(unix)] /// The resource for signal stream. /// The second element is the waker of polling future. -pub struct SignalStreamResource(pub Signal, pub Option); +struct SignalStreamResource { + signal: AsyncRefCell, + cancel: CancelHandle, +} + +#[cfg(unix)] +impl Resource for SignalStreamResource { + fn name(&self) -> Cow { + "signal".into() + } + + fn close(self: Rc) { + self.cancel.cancel(); + } +} #[cfg(unix)] #[derive(Deserialize)] @@ -54,13 +76,13 @@ fn op_signal_bind( ) -> Result { super::check_unstable(state, "Deno.signal"); let args: BindSignalArgs = serde_json::from_value(args)?; - let rid = state.resource_table.add( - "signal", - Box::new(SignalStreamResource( + let resource = SignalStreamResource { + signal: AsyncRefCell::new( signal(SignalKind::from_raw(args.signo)).expect(""), - None, - )), - ); + ), + cancel: Default::default(), + }; + let rid = state.resource_table.add(resource); Ok(json!({ "rid": rid, })) @@ -76,18 +98,18 @@ async fn op_signal_poll( let args: SignalArgs = serde_json::from_value(args)?; let rid = args.rid as u32; - let future = poll_fn(move |cx| { - let mut state = state.borrow_mut(); - if let Some(mut signal) = - state.resource_table.get_mut::(rid) - { - signal.1 = Some(cx.waker().clone()); - return signal.0.poll_recv(cx); - } - std::task::Poll::Ready(None) - }); - let result = future.await; - Ok(json!({ "done": result.is_none() })) + let resource = state + .borrow_mut() + .resource_table + .get::(rid) + .ok_or_else(bad_resource_id)?; + let cancel = RcRef::map(&resource, |r| &r.cancel); + let mut signal = RcRef::map(&resource, |r| &r.signal).borrow_mut().await; + + match signal.recv().or_cancel(cancel).await { + Ok(result) => Ok(json!({ "done": result.is_none() })), + Err(_) => Ok(json!({ "done": true })), + } } #[cfg(unix)] @@ -99,14 +121,6 @@ pub fn op_signal_unbind( super::check_unstable(state, "Deno.signal"); let args: SignalArgs = serde_json::from_value(args)?; let rid = args.rid as u32; - let resource = state.resource_table.get_mut::(rid); - if let Some(signal) = resource { - if let Some(waker) = &signal.1 { - // Wakes up the pending poll if exists. - // This prevents the poll future from getting stuck forever. - waker.clone().wake(); - } - } state .resource_table .close(rid) diff --git a/runtime/ops/tls.rs b/runtime/ops/tls.rs index b59650ab0ef07a..0630747ed55818 100644 --- a/runtime/ops/tls.rs +++ b/runtime/ops/tls.rs @@ -1,6 +1,7 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
-use super::io::{StreamResource, StreamResourceHolder}; +use super::io::StreamResource; +use super::io::TcpStreamResource; use crate::permissions::Permissions; use crate::resolve_addr::resolve_addr; use crate::resolve_addr::resolve_addr_sync; @@ -8,25 +9,26 @@ use deno_core::error::bad_resource; use deno_core::error::bad_resource_id; use deno_core::error::custom_error; use deno_core::error::AnyError; -use deno_core::futures; -use deno_core::futures::future::poll_fn; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; +use deno_core::AsyncRefCell; use deno_core::BufVec; +use deno_core::CancelHandle; +use deno_core::CancelTryFuture; use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; use deno_core::ZeroCopyBuf; use serde::Deserialize; +use std::borrow::Cow; use std::cell::RefCell; use std::convert::From; use std::fs::File; use std::io::BufReader; -use std::net::SocketAddr; use std::path::Path; use std::rc::Rc; use std::sync::Arc; -use std::task::Context; -use std::task::Poll; use tokio::net::TcpListener; use tokio::net::TcpStream; use tokio_rustls::{rustls::ClientConfig, TlsConnector}; @@ -85,60 +87,53 @@ async fn op_start_tls( permissions.check_read(Path::new(&path))?; } } - let mut resource_holder = { - let mut state_ = state.borrow_mut(); - match state_.resource_table.remove::(rid) { - Some(resource) => *resource, - None => return Err(bad_resource_id()), - } - }; - if let StreamResource::TcpStream(ref mut tcp_stream) = - resource_holder.resource - { - let tcp_stream = tcp_stream.take().unwrap(); - let local_addr = tcp_stream.local_addr()?; - let remote_addr = tcp_stream.peer_addr()?; - let mut config = ClientConfig::new(); - config - .root_store - .add_server_trust_anchors(&webpki_roots::TLS_SERVER_ROOTS); - if let Some(path) = cert_file { - let key_file = File::open(path)?; - let reader = &mut BufReader::new(key_file); - config.root_store.add_pem_file(reader).unwrap(); - } + let resource_rc = state + .borrow_mut() + .resource_table + .take::(rid) + .ok_or_else(bad_resource_id)?; + let resource = Rc::try_unwrap(resource_rc) + .expect("Only a single use of this resource should happen"); + let (read_half, write_half) = resource.into_inner(); + let tcp_stream = read_half.reunite(write_half)?; - let tls_connector = TlsConnector::from(Arc::new(config)); - let dnsname = - DNSNameRef::try_from_ascii_str(&domain).expect("Invalid DNS lookup"); - let tls_stream = tls_connector.connect(dnsname, tcp_stream).await?; - - let rid = { - let mut state_ = state.borrow_mut(); - state_.resource_table.add( - "clientTlsStream", - Box::new(StreamResourceHolder::new(StreamResource::ClientTlsStream( - Box::new(tls_stream), - ))), - ) - }; - Ok(json!({ - "rid": rid, - "localAddr": { - "hostname": local_addr.ip().to_string(), - "port": local_addr.port(), - "transport": "tcp", - }, - "remoteAddr": { - "hostname": remote_addr.ip().to_string(), - "port": remote_addr.port(), - "transport": "tcp", - } - })) - } else { - Err(bad_resource_id()) + let local_addr = tcp_stream.local_addr()?; + let remote_addr = tcp_stream.peer_addr()?; + let mut config = ClientConfig::new(); + config + .root_store + .add_server_trust_anchors(&webpki_roots::TLS_SERVER_ROOTS); + if let Some(path) = cert_file { + let key_file = File::open(path)?; + let reader = &mut BufReader::new(key_file); + config.root_store.add_pem_file(reader).unwrap(); } + + let tls_connector = TlsConnector::from(Arc::new(config)); + let dnsname = + DNSNameRef::try_from_ascii_str(&domain).expect("Invalid DNS 
lookup"); + let tls_stream = tls_connector.connect(dnsname, tcp_stream).await?; + + let rid = { + let mut state_ = state.borrow_mut(); + state_ + .resource_table + .add(StreamResource::client_tls_stream(tls_stream)) + }; + Ok(json!({ + "rid": rid, + "localAddr": { + "hostname": local_addr.ip().to_string(), + "port": local_addr.port(), + "transport": "tcp", + }, + "remoteAddr": { + "hostname": remote_addr.ip().to_string(), + "port": remote_addr.port(), + "transport": "tcp", + } + })) } async fn op_connect_tls( @@ -180,12 +175,9 @@ async fn op_connect_tls( let tls_stream = tls_connector.connect(dnsname, tcp_stream).await?; let rid = { let mut state_ = state.borrow_mut(); - state_.resource_table.add( - "clientTlsStream", - Box::new(StreamResourceHolder::new(StreamResource::ClientTlsStream( - Box::new(tls_stream), - ))), - ) + state_ + .resource_table + .add(StreamResource::client_tls_stream(tls_stream)) }; Ok(json!({ "rid": rid, @@ -256,51 +248,19 @@ fn load_keys(path: &str) -> Result, AnyError> { Ok(keys) } -#[allow(dead_code)] pub struct TlsListenerResource { - listener: TcpListener, + listener: AsyncRefCell, tls_acceptor: TlsAcceptor, - waker: Option, - local_addr: SocketAddr, + cancel: CancelHandle, } -impl Drop for TlsListenerResource { - fn drop(&mut self) { - self.wake_task(); +impl Resource for TlsListenerResource { + fn name(&self) -> Cow { + "tlsListener".into() } -} - -impl TlsListenerResource { - /// Track the current task so future awaiting for connection - /// can be notified when listener is closed. - /// - /// Throws an error if another task is already tracked. - pub fn track_task(&mut self, cx: &Context) -> Result<(), AnyError> { - // Currently, we only allow tracking a single accept task for a listener. - // This might be changed in the future with multiple workers. - // Caveat: TcpListener by itself also only tracks an accept task at a time. - // See https://github.com/tokio-rs/tokio/issues/846#issuecomment-454208883 - if self.waker.is_some() { - return Err(custom_error("Busy", "Another accept task is ongoing")); - } - let waker = futures::task::AtomicWaker::new(); - waker.register(cx.waker()); - self.waker.replace(waker); - Ok(()) - } - - /// Notifies a task when listener is closed so accept future can resolve. - pub fn wake_task(&mut self) { - if let Some(waker) = self.waker.as_ref() { - waker.wake(); - } - } - - /// Stop tracking a task. - /// Happens when the task is done and thus no further tracking is needed. 
- pub fn untrack_task(&mut self) { - self.waker.take(); + fn close(self: Rc) { + self.cancel.cancel(); } } @@ -340,15 +300,12 @@ fn op_listen_tls( let listener = TcpListener::from_std(std_listener)?; let local_addr = listener.local_addr()?; let tls_listener_resource = TlsListenerResource { - listener, + listener: AsyncRefCell::new(listener), tls_acceptor, - waker: None, - local_addr, + cancel: Default::default(), }; - let rid = state - .resource_table - .add("tlsListener", Box::new(tls_listener_resource)); + let rid = state.resource_table.add(tls_listener_resource); Ok(json!({ "rid": rid, @@ -372,50 +329,46 @@ async fn op_accept_tls( ) -> Result { let args: AcceptTlsArgs = serde_json::from_value(args)?; let rid = args.rid as u32; - let accept_fut = poll_fn(|cx| { - let mut state = state.borrow_mut(); - let listener_resource = state - .resource_table - .get_mut::(rid) - .ok_or_else(|| bad_resource("Listener has been closed"))?; - let listener = &mut listener_resource.listener; - match listener.poll_accept(cx).map_err(AnyError::from) { - Poll::Ready(Ok((stream, addr))) => { - listener_resource.untrack_task(); - Poll::Ready(Ok((stream, addr))) - } - Poll::Pending => { - listener_resource.track_task(cx)?; - Poll::Pending - } - Poll::Ready(Err(e)) => { - listener_resource.untrack_task(); - Poll::Ready(Err(e)) + + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(|| bad_resource("Listener has been closed"))?; + let mut listener = RcRef::map(&resource, |r| &r.listener) + .try_borrow_mut() + .ok_or_else(|| custom_error("Busy", "Another accept task is ongoing"))?; + let cancel = RcRef::map(resource, |r| &r.cancel); + let (tcp_stream, _socket_addr) = + listener.accept().try_or_cancel(cancel).await.map_err(|e| { + // FIXME(bartlomieju): compatibility with current JS implementation + if let std::io::ErrorKind::Interrupted = e.kind() { + bad_resource("Listener has been closed") + } else { + e.into() } - } - }); - let (tcp_stream, _socket_addr) = accept_fut.await?; + })?; let local_addr = tcp_stream.local_addr()?; let remote_addr = tcp_stream.peer_addr()?; - let tls_acceptor = { - let state_ = state.borrow(); - let resource = state_ - .resource_table - .get::(rid) - .ok_or_else(bad_resource_id) - .expect("Can't find tls listener"); - resource.tls_acceptor.clone() - }; - let tls_stream = tls_acceptor.accept(tcp_stream).await?; + let resource = state + .borrow() + .resource_table + .get::(rid) + .ok_or_else(|| bad_resource("Listener has been closed"))?; + let cancel = RcRef::map(&resource, |r| &r.cancel); + let tls_acceptor = resource.tls_acceptor.clone(); + let tls_stream = tls_acceptor + .accept(tcp_stream) + .try_or_cancel(cancel) + .await?; + let rid = { let mut state_ = state.borrow_mut(); - state_.resource_table.add( - "serverTlsStream", - Box::new(StreamResourceHolder::new(StreamResource::ServerTlsStream( - Box::new(tls_stream), - ))), - ) + state_ + .resource_table + .add(StreamResource::server_tls_stream(tls_stream)) }; + Ok(json!({ "rid": rid, "localAddr": { diff --git a/runtime/ops/tty.rs b/runtime/ops/tty.rs index ad66bcf1a5ef16..05536b4299ba12 100644 --- a/runtime/ops/tty.rs +++ b/runtime/ops/tty.rs @@ -2,7 +2,6 @@ use super::io::std_file_resource; use super::io::StreamResource; -use super::io::StreamResourceHolder; use deno_core::error::bad_resource_id; use deno_core::error::not_supported; use deno_core::error::resource_unavailable; @@ -11,6 +10,7 @@ use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; use 
deno_core::OpState; +use deno_core::RcRef; use deno_core::ZeroCopyBuf; use serde::Deserialize; use serde::Serialize; @@ -88,48 +88,47 @@ fn op_set_raw( use winapi::shared::minwindef::FALSE; use winapi::um::{consoleapi, handleapi}; - let resource_holder = - state.resource_table.get_mut::(rid); - if resource_holder.is_none() { - return Err(bad_resource_id()); - } + let resource = state + .resource_table + .get::(rid) + .ok_or_else(bad_resource_id)?; + if cbreak { return Err(not_supported()); } - let resource_holder = resource_holder.unwrap(); - - // For now, only stdin. - let handle = match &mut resource_holder.resource { - StreamResource::FsFile(ref mut option_file_metadata) => { - if let Some((tokio_file, metadata)) = option_file_metadata.take() { - match tokio_file.try_into_std() { - Ok(std_file) => { - let raw_handle = std_file.as_raw_handle(); - // Turn the std_file handle back into a tokio file, put it back - // in the resource table. - let tokio_file = tokio::fs::File::from_std(std_file); - resource_holder.resource = - StreamResource::FsFile(Some((tokio_file, metadata))); - // return the result. - raw_handle - } - Err(tokio_file) => { - // This function will return an error containing the file if - // some operation is in-flight. - resource_holder.resource = - StreamResource::FsFile(Some((tokio_file, metadata))); - return Err(resource_unavailable()); - } - } - } else { - return Err(resource_unavailable()); + + if resource.fs_file.is_none() { + return Err(bad_resource_id()); + } + + let fs_file_resource = + RcRef::map(&resource, |r| r.fs_file.as_ref().unwrap()).try_borrow_mut(); + + let handle_result = if let Some(mut fs_file) = fs_file_resource { + let tokio_file = fs_file.0.take().unwrap(); + match tokio_file.try_into_std() { + Ok(std_file) => { + let raw_handle = std_file.as_raw_handle(); + // Turn the std_file handle back into a tokio file, put it back + // in the resource table. + let tokio_file = tokio::fs::File::from_std(std_file); + fs_file.0 = Some(tokio_file); + // return the result. + Ok(raw_handle) + } + Err(tokio_file) => { + // This function will return an error containing the file if + // some operation is in-flight. 
+ fs_file.0 = Some(tokio_file); + Err(resource_unavailable()) } } - _ => { - return Err(bad_resource_id()); - } + } else { + Err(resource_unavailable()) }; + let handle = handle_result?; + if handle == handleapi::INVALID_HANDLE_VALUE { return Err(Error::last_os_error().into()); } else if handle.is_null() { @@ -156,24 +155,31 @@ fn op_set_raw( { use std::os::unix::io::AsRawFd; - let resource_holder = - state.resource_table.get_mut::(rid); - if resource_holder.is_none() { - return Err(bad_resource_id()); + let resource = state + .resource_table + .get::(rid) + .ok_or_else(bad_resource_id)?; + + if resource.fs_file.is_none() { + return Err(not_supported()); } - if is_raw { - let (raw_fd, maybe_tty_mode) = - match &mut resource_holder.unwrap().resource { - StreamResource::FsFile(Some((f, ref mut metadata))) => { - (f.as_raw_fd(), &mut metadata.tty.mode) - } - StreamResource::FsFile(None) => return Err(resource_unavailable()), - _ => { - return Err(not_supported()); - } - }; + let maybe_fs_file_resource = + RcRef::map(&resource, |r| r.fs_file.as_ref().unwrap()).try_borrow_mut(); + + if maybe_fs_file_resource.is_none() { + return Err(resource_unavailable()); + } + let mut fs_file_resource = maybe_fs_file_resource.unwrap(); + if fs_file_resource.0.is_none() { + return Err(resource_unavailable()); + } + + let raw_fd = fs_file_resource.0.as_ref().unwrap().as_raw_fd(); + let maybe_tty_mode = &mut fs_file_resource.1.as_mut().unwrap().tty.mode; + + if is_raw { if maybe_tty_mode.is_none() { // Save original mode. let original_mode = termios::tcgetattr(raw_fd)?; @@ -199,28 +205,14 @@ fn op_set_raw( raw.control_chars[termios::SpecialCharacterIndices::VMIN as usize] = 1; raw.control_chars[termios::SpecialCharacterIndices::VTIME as usize] = 0; termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &raw)?; - Ok(json!({})) } else { // Try restore saved mode. - let (raw_fd, maybe_tty_mode) = - match &mut resource_holder.unwrap().resource { - StreamResource::FsFile(Some((f, ref mut metadata))) => { - (f.as_raw_fd(), &mut metadata.tty.mode) - } - StreamResource::FsFile(None) => { - return Err(resource_unavailable()); - } - _ => { - return Err(bad_resource_id()); - } - }; - if let Some(mode) = maybe_tty_mode.take() { termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &mode)?; } - - Ok(json!({})) } + + Ok(json!({})) } } @@ -255,7 +247,6 @@ fn op_isatty( Ok(unsafe { libc::isatty(raw_fd as libc::c_int) == 1 }) } } - Err(StreamResource::FsFile(_)) => unreachable!(), _ => Ok(false), })?; Ok(json!(isatty)) diff --git a/runtime/ops/websocket.rs b/runtime/ops/websocket.rs index a8c591a3322c27..d805f307beb8af 100644 --- a/runtime/ops/websocket.rs +++ b/runtime/ops/websocket.rs @@ -1,18 +1,23 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
use crate::permissions::Permissions; -use core::task::Poll; use deno_core::error::bad_resource_id; use deno_core::error::type_error; use deno_core::error::AnyError; -use deno_core::futures::future::poll_fn; +use deno_core::futures::stream::SplitSink; +use deno_core::futures::stream::SplitStream; +use deno_core::futures::SinkExt; use deno_core::futures::StreamExt; -use deno_core::futures::{ready, SinkExt}; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::url; +use deno_core::AsyncRefCell; use deno_core::BufVec; +use deno_core::CancelFuture; +use deno_core::CancelHandle; use deno_core::OpState; +use deno_core::RcRef; +use deno_core::Resource; use deno_core::{serde_json, ZeroCopyBuf}; use http::{Method, Request, Uri}; use serde::Deserialize; @@ -62,6 +67,22 @@ type MaybeTlsStream = StreamSwitcher>; type WsStream = WebSocketStream; +struct WsStreamResource { + tx: AsyncRefCell>, + rx: AsyncRefCell>, + // When a `WsStreamResource` resource is closed, all pending 'read' ops are + // canceled, while 'write' ops are allowed to complete. Therefore only + // 'read' futures are attached to this cancel handle. + cancel: CancelHandle, +} + +impl Resource for WsStreamResource { + fn name(&self) -> Cow { + "webSocketStream".into() + } +} + +impl WsStreamResource {} #[derive(Deserialize)] #[serde(rename_all = "camelCase")] @@ -165,10 +186,14 @@ pub async fn op_ws_create( )) })?; + let (ws_tx, ws_rx) = stream.split(); + let resource = WsStreamResource { + rx: AsyncRefCell::new(ws_rx), + tx: AsyncRefCell::new(ws_tx), + cancel: Default::default(), + }; let mut state = state.borrow_mut(); - let rid = state - .resource_table - .add("webSocketStream", Box::new(stream)); + let rid = state.resource_table.add(resource); let protocol = match response.headers().get("Sec-WebSocket-Protocol") { Some(header) => header.to_str().unwrap(), @@ -202,30 +227,21 @@ pub async fn op_ws_send( ) -> Result { let args: SendArgs = serde_json::from_value(args)?; - let mut maybe_msg = Some(match args.text { + let msg = match args.text { Some(text) => Message::Text(text), None => Message::Binary(bufs[0].to_vec()), - }); + }; let rid = args.rid; - poll_fn(move |cx| { - let mut state = state.borrow_mut(); - let stream = state - .resource_table - .get_mut::(rid) - .ok_or_else(bad_resource_id)?; - - // TODO(ry) Handle errors below instead of unwrap. - // Need to map `TungsteniteError` to `AnyError`. - ready!(stream.poll_ready_unpin(cx)).unwrap(); - if let Some(msg) = maybe_msg.take() { - stream.start_send_unpin(msg).unwrap(); - } - ready!(stream.poll_flush_unpin(cx)).unwrap(); - - Poll::Ready(Ok(json!({}))) - }) - .await + let resource = state + .borrow_mut() + .resource_table + .get::(rid) + .ok_or_else(bad_resource_id)?; + let mut tx = RcRef::map(&resource, |r| &r.tx).borrow_mut().await; + tx.send(msg).await?; + eprintln!("sent!"); + Ok(json!({})) } #[derive(Deserialize)] @@ -243,33 +259,22 @@ pub async fn op_ws_close( ) -> Result { let args: CloseArgs = serde_json::from_value(args)?; let rid = args.rid; - let mut maybe_msg = Some(Message::Close(args.code.map(|c| CloseFrame { + let msg = Message::Close(args.code.map(|c| CloseFrame { code: CloseCode::from(c), reason: match args.reason { Some(reason) => Cow::from(reason), None => Default::default(), }, - }))); - - poll_fn(move |cx| { - let mut state = state.borrow_mut(); - let stream = state - .resource_table - .get_mut::(rid) - .ok_or_else(bad_resource_id)?; - - // TODO(ry) Handle errors below instead of unwrap. 
- // Need to map `TungsteniteError` to `AnyError`. - ready!(stream.poll_ready_unpin(cx)).unwrap(); - if let Some(msg) = maybe_msg.take() { - stream.start_send_unpin(msg).unwrap(); - } - ready!(stream.poll_flush_unpin(cx)).unwrap(); - ready!(stream.poll_close_unpin(cx)).unwrap(); + })); - Poll::Ready(Ok(json!({}))) - }) - .await + let resource = state + .borrow_mut() + .resource_table + .get::(rid) + .ok_or_else(bad_resource_id)?; + let mut tx = RcRef::map(&resource, |r| &r.tx).borrow_mut().await; + tx.send(msg).await?; + Ok(json!({})) } #[derive(Deserialize)] @@ -284,43 +289,41 @@ pub async fn op_ws_next_event( _bufs: BufVec, ) -> Result { let args: NextEventArgs = serde_json::from_value(args)?; - poll_fn(move |cx| { - let mut state = state.borrow_mut(); - let stream = state - .resource_table - .get_mut::(args.rid) - .ok_or_else(bad_resource_id)?; - stream - .poll_next_unpin(cx) - .map(|val| { - match val { - Some(Ok(Message::Text(text))) => json!({ - "type": "string", - "data": text - }), - Some(Ok(Message::Binary(data))) => { - // TODO(ry): don't use json to send binary data. - json!({ - "type": "binary", - "data": data - }) - } - Some(Ok(Message::Close(Some(frame)))) => json!({ - "type": "close", - "code": u16::from(frame.code), - "reason": frame.reason.as_ref() - }), - Some(Ok(Message::Close(None))) => json!({ "type": "close" }), - Some(Ok(Message::Ping(_))) => json!({"type": "ping"}), - Some(Ok(Message::Pong(_))) => json!({"type": "pong"}), - Some(Err(_)) => json!({"type": "error"}), - None => { - state.resource_table.close(args.rid).unwrap(); - json!({"type": "closed"}) - } - } + + let resource = state + .borrow_mut() + .resource_table + .get::(args.rid) + .ok_or_else(bad_resource_id)?; + + let mut rx = RcRef::map(&resource, |r| &r.rx).borrow_mut().await; + let cancel = RcRef::map(resource, |r| &r.cancel); + let val = rx.next().or_cancel(cancel).await?; + let res = match val { + Some(Ok(Message::Text(text))) => json!({ + "type": "string", + "data": text + }), + Some(Ok(Message::Binary(data))) => { + // TODO(ry): don't use json to send binary data. + json!({ + "type": "binary", + "data": data }) - .map(Ok) - }) - .await + } + Some(Ok(Message::Close(Some(frame)))) => json!({ + "type": "close", + "code": u16::from(frame.code), + "reason": frame.reason.as_ref() + }), + Some(Ok(Message::Close(None))) => json!({ "type": "close" }), + Some(Ok(Message::Ping(_))) => json!({"type": "ping"}), + Some(Ok(Message::Pong(_))) => json!({"type": "pong"}), + Some(Err(_)) => json!({"type": "error"}), + None => { + state.borrow_mut().resource_table.close(args.rid).unwrap(); + json!({"type": "closed"}) + } + }; + Ok(res) } diff --git a/runtime/rt/30_net.js b/runtime/rt/30_net.js index 9a71f069357038..7009f6f8d35920 100644 --- a/runtime/rt/30_net.js +++ b/runtime/rt/30_net.js @@ -11,20 +11,16 @@ 0: "Read", 1: "Write", 2: "ReadWrite", - Read: 0, + Read: 0, // TODO: nonsense, remove me. 
Write: 1, ReadWrite: 2, // unused }; function shutdown(rid, how) { - core.jsonOpSync("op_shutdown", { rid, how }); - return Promise.resolve(); + return core.jsonOpAsync("op_shutdown", { rid, how }); } - function opAccept( - rid, - transport, - ) { + function opAccept(rid, transport) { return core.jsonOpAsync("op_accept", { rid, transport }); } @@ -36,11 +32,7 @@ return core.jsonOpAsync("op_connect", args); } - function opReceive( - rid, - transport, - zeroCopy, - ) { + function opReceive(rid, transport, zeroCopy) { return core.jsonOpAsync( "op_datagram_receive", { rid, transport }, @@ -56,11 +48,7 @@ #rid = 0; #remoteAddr = null; #localAddr = null; - constructor( - rid, - remoteAddr, - localAddr, - ) { + constructor(rid, remoteAddr, localAddr) { this.#rid = rid; this.#remoteAddr = remoteAddr; this.#localAddr = localAddr; @@ -149,11 +137,7 @@ #rid = 0; #addr = null; - constructor( - rid, - addr, - bufSize = 1024, - ) { + constructor(rid, addr, bufSize = 1024) { this.#rid = rid; this.#addr = addr; this.bufSize = bufSize; @@ -213,9 +197,7 @@ return new Listener(res.rid, res.localAddr); } - async function connect( - options, - ) { + async function connect(options) { let res; if (options.transport === "unix") { diff --git a/runtime/rt/40_fs_events.js b/runtime/rt/40_fs_events.js index a36adecba531d9..a179e8c1bbea3b 100644 --- a/runtime/rt/40_fs_events.js +++ b/runtime/rt/40_fs_events.js @@ -24,6 +24,8 @@ } catch (error) { if (error instanceof errors.BadResource) { return { value: undefined, done: true }; + } else if (error instanceof errors.Interrupted) { + return { value: undefined, done: true }; } throw error; } diff --git a/runtime/rt/40_signals.js b/runtime/rt/40_signals.js index 739c963fd983a7..091afd66a93d62 100644 --- a/runtime/rt/40_signals.js +++ b/runtime/rt/40_signals.js @@ -3,6 +3,7 @@ ((window) => { const core = window.Deno.core; const { build } = window.__bootstrap.build; + const { errors } = window.__bootstrap.errors; function bindSignal(signo) { return core.jsonOpSync("op_signal_bind", { signo }); @@ -212,7 +213,15 @@ } #pollSignal = async () => { - const res = await pollSignal(this.#rid); + let res; + try { + res = await pollSignal(this.#rid); + } catch (error) { + if (error instanceof errors.BadResource) { + return true; + } + throw error; + } return res.done; }; diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index db97e36048669f..c1713f815055eb 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -256,15 +256,16 @@ impl WebWorker { let op_state = js_runtime.op_state(); let mut op_state = op_state.borrow_mut(); + let t = &mut op_state.resource_table; let (stdin, stdout, stderr) = ops::io::get_stdio(); if let Some(stream) = stdin { - op_state.resource_table.add("stdin", Box::new(stream)); + t.add(stream); } if let Some(stream) = stdout { - op_state.resource_table.add("stdout", Box::new(stream)); + t.add(stream); } if let Some(stream) = stderr { - op_state.resource_table.add("stderr", Box::new(stream)); + t.add(stream); } } diff --git a/runtime/worker.rs b/runtime/worker.rs index a0e63afad00566..adb525c4c98603 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -152,13 +152,13 @@ impl MainWorker { let t = &mut op_state.resource_table; let (stdin, stdout, stderr) = ops::io::get_stdio(); if let Some(stream) = stdin { - t.add("stdin", Box::new(stream)); + t.add(stream); } if let Some(stream) = stdout { - t.add("stdout", Box::new(stream)); + t.add(stream); } if let Some(stream) = stderr { - t.add("stderr", Box::new(stream)); + t.add(stream); } } From 
63e0ab99a1757f5dbae0a5ec4ea11a037a47a29b Mon Sep 17 00:00:00 2001 From: Trivikram Kamat <16024985+trivikr@users.noreply.github.com> Date: Wed, 16 Dec 2020 08:46:32 -0800 Subject: [PATCH 079/135] upgrade TypeScript to 4.1.3 (#8785) --- cli/dts/lib.es5.d.ts | 2 +- cli/tsc/00_typescript.js | 16 +++++++++------- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/cli/dts/lib.es5.d.ts b/cli/dts/lib.es5.d.ts index a7c35dbcf770d8..31eb384cd6ce15 100644 --- a/cli/dts/lib.es5.d.ts +++ b/cli/dts/lib.es5.d.ts @@ -1396,7 +1396,7 @@ interface ArrayConstructor { (arrayLength?: number): any[]; (arrayLength: number): T[]; (...items: T[]): T[]; - isArray(arg: T | {}): arg is T extends readonly any[] ? (unknown extends T ? never : readonly any[]) : any[]; + isArray(arg: any): arg is any[]; readonly prototype: any[]; } diff --git a/cli/tsc/00_typescript.js b/cli/tsc/00_typescript.js index a3c7e072007b75..ea89c6ac4009fa 100644 --- a/cli/tsc/00_typescript.js +++ b/cli/tsc/00_typescript.js @@ -288,7 +288,7 @@ var ts; // If changing the text in this section, be sure to test `configurePrerelease` too. ts.versionMajorMinor = "4.1"; /** The version of the TypeScript compiler release */ - ts.version = "4.1.2"; + ts.version = "4.1.3"; /* @internal */ var Comparison; (function (Comparison) { @@ -45888,7 +45888,7 @@ var ts; var resolved = getExternalModuleMember(root, commonJSPropertyAccess || node, dontResolveAlias); var name = node.propertyName || node.name; if (commonJSPropertyAccess && resolved && ts.isIdentifier(name)) { - return getPropertyOfType(getTypeOfSymbol(resolved), name.escapedText); + return resolveSymbol(getPropertyOfType(getTypeOfSymbol(resolved), name.escapedText), dontResolveAlias); } markSymbolOfAliasDeclarationIfTypeOnly(node, /*immediateTarget*/ undefined, resolved, /*overwriteEmpty*/ false); return resolved; @@ -63317,6 +63317,7 @@ var ts; return errorType; } flowDepth++; + var sharedFlow; while (true) { var flags = flow.flags; if (flags & 4096 /* Shared */) { @@ -63329,6 +63330,7 @@ var ts; return sharedFlowTypes[i]; } } + sharedFlow = flow; } var type = void 0; if (flags & 16 /* Assignment */) { @@ -63392,9 +63394,9 @@ var ts; // simply return the non-auto declared type to reduce follow-on errors. type = convertAutoToAny(declaredType); } - if (flags & 4096 /* Shared */) { + if (sharedFlow) { // Record visited node and the associated type in the cache. - sharedFlowNodes[sharedFlowCount] = flow; + sharedFlowNodes[sharedFlowCount] = sharedFlow; sharedFlowTypes[sharedFlowCount] = type; sharedFlowCount++; } @@ -68561,9 +68563,9 @@ var ts; var oldCandidatesForArgumentError = candidatesForArgumentError; var oldCandidateForArgumentArityError = candidateForArgumentArityError; var oldCandidateForTypeArgumentError = candidateForTypeArgumentError; - var declCount = ts.length((_a = failed.declaration) === null || _a === void 0 ? void 0 : _a.symbol.declarations); - var isOverload = declCount > 1; - var implDecl = isOverload ? ts.find(((_b = failed.declaration) === null || _b === void 0 ? void 0 : _b.symbol.declarations) || ts.emptyArray, function (d) { return ts.isFunctionLikeDeclaration(d) && ts.nodeIsPresent(d.body); }) : undefined; + var failedSignatureDeclarations = ((_b = (_a = failed.declaration) === null || _a === void 0 ? void 0 : _a.symbol) === null || _b === void 0 ? void 0 : _b.declarations) || ts.emptyArray; + var isOverload = failedSignatureDeclarations.length > 1; + var implDecl = isOverload ? 
ts.find(failedSignatureDeclarations, function (d) { return ts.isFunctionLikeDeclaration(d) && ts.nodeIsPresent(d.body); }) : undefined; if (implDecl) { var candidate = getSignatureFromDeclaration(implDecl); var isSingleNonGenericCandidate_1 = !candidate.typeParameters; From 2e976080c77812008baa0c83e44a1032a97532f8 Mon Sep 17 00:00:00 2001 From: Steven Guerrero Date: Wed, 16 Dec 2020 22:40:21 -0500 Subject: [PATCH 080/135] docs(lsp): add Sublime Text integration documentation (#8797) --- .../getting_started/setup_your_environment.md | 67 ++++++++++++++++++- 1 file changed, 66 insertions(+), 1 deletion(-) diff --git a/docs/getting_started/setup_your_environment.md b/docs/getting_started/setup_your_environment.md index b987e4e63830b8..ed52f74ecdf7cf 100644 --- a/docs/getting_started/setup_your_environment.md +++ b/docs/getting_started/setup_your_environment.md @@ -147,7 +147,7 @@ project (`npm init -y` as necessary), then add the following block to your #### LSP clients Deno has builtin support for the -[Language server protocol](https://langserver.org). +[Language server protocol](https://langserver.org) as of version 1.6.0 or later. If your editor supports the LSP, you can use Deno as a language server for TypeScript and JavaScript. @@ -188,6 +188,71 @@ if executable("deno") endif ``` +##### Example for Sublime Text + +- Install the [Sublime LSP package](https://packagecontrol.io/packages/LSP) +- Install the + [TypeScript package](https://packagecontrol.io/packages/TypeScript) to get + syntax highlighting +- Add the following `.sublime-project` file to your project folder + +```json +{ + "settings": { + "LSP": { + "deno": { + "command": [ + "deno", + "lsp" + ], + "initializationOptions": { + // "config": "", // Sets the path for the config file in your project + "enable": true, + // "importMap": "", // Sets the path for the import-map in your project + "lint": true, + "unstable": false + }, + "enabled": true, + "languages": [ + { + "languageId": "javascript", + "scopes": ["source.js"], + "syntaxes": [ + "Packages/Babel/JavaScript (Babel).sublime-syntax", + "Packages/JavaScript/JavaScript.sublime-syntax" + ] + }, + { + "languageId": "javascriptreact", + "scopes": ["source.jsx"], + "syntaxes": [ + "Packages/Babel/JavaScript (Babel).sublime-syntax", + "Packages/JavaScript/JavaScript.sublime-syntax" + ] + }, + { + "languageId": "typescript", + "scopes": ["source.ts"], + "syntaxes": [ + "Packages/TypeScript-TmLanguage/TypeScript.tmLanguage", + "Packages/TypeScript Syntax/TypeScript.tmLanguage" + ] + }, + { + "languageId": "typescriptreact", + "scopes": ["source.tsx"], + "syntaxes": [ + "Packages/TypeScript-TmLanguage/TypeScriptReact.tmLanguage", + "Packages/TypeScript Syntax/TypeScriptReact.tmLanguage" + ] + } + ] + } + } + } +} +``` + If you don't see your favorite IDE on this list, maybe you can develop an extension. Our [community Discord group](https://discord.gg/deno) can give you some pointers on where to get started. 
From 825293737004cbf94638458f77fa13fd1b07aef2 Mon Sep 17 00:00:00 2001 From: Yusuke Tanaka Date: Thu, 17 Dec 2020 22:14:49 +0900 Subject: [PATCH 081/135] chore(runtime): fix typo (#8791) --- runtime/permissions.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/permissions.rs b/runtime/permissions.rs index 88f9c717940bed..df957aa0136f9c 100644 --- a/runtime/permissions.rs +++ b/runtime/permissions.rs @@ -241,7 +241,7 @@ impl Permissions { if parsed.host().is_none() { return Err(custom_error( "URIError", - "invalid urlormat: ://[:port][/subpath]", + "invalid url format: ://[:port][/subpath]", )); } Ok(self.query_net( From 55dc467b419b8e5897b1c832b04d63e383253d84 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Thu, 17 Dec 2020 22:01:47 +0800 Subject: [PATCH 082/135] test(cli): ensure await all on stdout does not deadlock (#8802) --- cli/tests/integration_tests.rs | 5 +++++ cli/tests/stdout_write_all.out | 3 +++ cli/tests/stdout_write_all.ts | 8 ++++++++ 3 files changed, 16 insertions(+) create mode 100644 cli/tests/stdout_write_all.out create mode 100644 cli/tests/stdout_write_all.ts diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index 2f23e2dee62247..302e808faf6128 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -2105,6 +2105,11 @@ fn deno_test_no_color() { assert!(out.contains("test result: FAILED. 1 passed; 1 failed; 1 ignored; 0 measured; 0 filtered out")); } +itest!(stdout_write_all { + args: "run --quiet stdout_write_all.ts", + output: "stdout_write_all.out", +}); + itest!(_001_hello { args: "run --reload 001_hello.js", output: "001_hello.js.out", diff --git a/cli/tests/stdout_write_all.out b/cli/tests/stdout_write_all.out new file mode 100644 index 00000000000000..49a6d64e94b97c --- /dev/null +++ b/cli/tests/stdout_write_all.out @@ -0,0 +1,3 @@ +done +done +complete diff --git a/cli/tests/stdout_write_all.ts b/cli/tests/stdout_write_all.ts new file mode 100644 index 00000000000000..c82a0ca7d13452 --- /dev/null +++ b/cli/tests/stdout_write_all.ts @@ -0,0 +1,8 @@ +const encoder = new TextEncoder(); +const pending = [ + Deno.stdout.write(encoder.encode("done\n")), + Deno.stdout.write(encoder.encode("done\n")), +]; + +await Promise.all(pending); +await Deno.stdout.write(encoder.encode("complete\n")); From ffb5f7a4e1d5d4ac488058ca3ec3c0805587fe44 Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Thu, 17 Dec 2020 16:37:57 +0000 Subject: [PATCH 083/135] refactor: Rename runtime/rt to runtime/js (#8806) --- cli/fmt_errors.rs | 4 ++-- cli/tsc/README.md | 11 ++--------- runtime/build.rs | 2 +- runtime/{rt => js}/00_bootstrap_namespace.js | 0 runtime/{rt => js}/01_build.js | 0 runtime/{rt => js}/01_colors.js | 0 runtime/{rt => js}/01_errors.js | 0 runtime/{rt => js}/01_internals.js | 0 runtime/{rt => js}/01_version.js | 0 runtime/{rt => js}/01_web_util.js | 0 runtime/{rt => js}/02_console.js | 0 runtime/{rt => js}/06_util.js | 0 runtime/{rt => js}/10_dispatch_minimal.js | 0 runtime/{rt => js}/11_timers.js | 0 runtime/{rt => js}/11_workers.js | 0 runtime/{rt => js}/12_io.js | 0 runtime/{rt => js}/13_buffer.js | 0 runtime/{rt => js}/27_websocket.js | 0 runtime/{rt => js}/30_files.js | 0 runtime/{rt => js}/30_fs.js | 0 runtime/{rt => js}/30_metrics.js | 0 runtime/{rt => js}/30_net.js | 0 runtime/{rt => js}/30_os.js | 0 runtime/{rt => js}/40_compiler_api.js | 0 runtime/{rt => js}/40_diagnostics.js | 0 runtime/{rt => js}/40_error_stack.js | 0 runtime/{rt => js}/40_fs_events.js | 0 runtime/{rt => 
js}/40_net_unstable.js | 0 runtime/{rt => js}/40_performance.js | 0 runtime/{rt => js}/40_permissions.js | 0 runtime/{rt => js}/40_plugins.js | 0 runtime/{rt => js}/40_process.js | 0 runtime/{rt => js}/40_read_file.js | 0 runtime/{rt => js}/40_signals.js | 0 runtime/{rt => js}/40_testing.js | 0 runtime/{rt => js}/40_tls.js | 0 runtime/{rt => js}/40_tty.js | 0 runtime/{rt => js}/40_write_file.js | 0 runtime/{rt => js}/41_prompt.js | 0 runtime/{rt => js}/90_deno_ns.js | 0 runtime/{rt => js}/99_main.js | 0 runtime/{rt => js}/README.md | 0 42 files changed, 5 insertions(+), 12 deletions(-) rename runtime/{rt => js}/00_bootstrap_namespace.js (100%) rename runtime/{rt => js}/01_build.js (100%) rename runtime/{rt => js}/01_colors.js (100%) rename runtime/{rt => js}/01_errors.js (100%) rename runtime/{rt => js}/01_internals.js (100%) rename runtime/{rt => js}/01_version.js (100%) rename runtime/{rt => js}/01_web_util.js (100%) rename runtime/{rt => js}/02_console.js (100%) rename runtime/{rt => js}/06_util.js (100%) rename runtime/{rt => js}/10_dispatch_minimal.js (100%) rename runtime/{rt => js}/11_timers.js (100%) rename runtime/{rt => js}/11_workers.js (100%) rename runtime/{rt => js}/12_io.js (100%) rename runtime/{rt => js}/13_buffer.js (100%) rename runtime/{rt => js}/27_websocket.js (100%) rename runtime/{rt => js}/30_files.js (100%) rename runtime/{rt => js}/30_fs.js (100%) rename runtime/{rt => js}/30_metrics.js (100%) rename runtime/{rt => js}/30_net.js (100%) rename runtime/{rt => js}/30_os.js (100%) rename runtime/{rt => js}/40_compiler_api.js (100%) rename runtime/{rt => js}/40_diagnostics.js (100%) rename runtime/{rt => js}/40_error_stack.js (100%) rename runtime/{rt => js}/40_fs_events.js (100%) rename runtime/{rt => js}/40_net_unstable.js (100%) rename runtime/{rt => js}/40_performance.js (100%) rename runtime/{rt => js}/40_permissions.js (100%) rename runtime/{rt => js}/40_plugins.js (100%) rename runtime/{rt => js}/40_process.js (100%) rename runtime/{rt => js}/40_read_file.js (100%) rename runtime/{rt => js}/40_signals.js (100%) rename runtime/{rt => js}/40_testing.js (100%) rename runtime/{rt => js}/40_tls.js (100%) rename runtime/{rt => js}/40_tty.js (100%) rename runtime/{rt => js}/40_write_file.js (100%) rename runtime/{rt => js}/41_prompt.js (100%) rename runtime/{rt => js}/90_deno_ns.js (100%) rename runtime/{rt => js}/99_main.js (100%) rename runtime/{rt => js}/README.md (100%) diff --git a/cli/fmt_errors.rs b/cli/fmt_errors.rs index b3c855db947efa..3e871e6f306b9a 100644 --- a/cli/fmt_errors.rs +++ b/cli/fmt_errors.rs @@ -40,7 +40,7 @@ fn italic_bold(s: &str, internal: bool) -> String { } } -// Keep in sync with `cli/rt/40_error_stack.js`. +// Keep in sync with `runtime/js/40_error_stack.js`. pub fn format_location(frame: &JsStackFrame) -> String { let internal = frame .file_name @@ -75,7 +75,7 @@ pub fn format_location(frame: &JsStackFrame) -> String { result } -// Keep in sync with `cli/rt/40_error_stack.js`. +// Keep in sync with `runtime/js/40_error_stack.js`. fn format_frame(frame: &JsStackFrame) -> String { let internal = frame .file_name diff --git a/cli/tsc/README.md b/cli/tsc/README.md index e680138857ae94..e8287f3c093260 100644 --- a/cli/tsc/README.md +++ b/cli/tsc/README.md @@ -1,11 +1,4 @@ # tsc -This directory contains the code for the typescript compiler snapshot - -There is currently A LOT of overlap between this code and the runtime snapshot -code in cli/rt. - -This is intentionally ugly because there should be no overlap. 
- -This directory ultimately should contain just typescript.js and a smallish -CompilerHost. +This directory contains the typescript compiler and a small compiler host for +the runtime snapshot. diff --git a/runtime/build.rs b/runtime/build.rs index 78f17f61fcf550..ca4a77c314028a 100644 --- a/runtime/build.rs +++ b/runtime/build.rs @@ -62,7 +62,7 @@ fn main() { // Main snapshot let runtime_snapshot_path = o.join("CLI_SNAPSHOT.bin"); - let js_files = get_js_files("rt"); + let js_files = get_js_files("js"); create_runtime_snapshot(&runtime_snapshot_path, js_files); } diff --git a/runtime/rt/00_bootstrap_namespace.js b/runtime/js/00_bootstrap_namespace.js similarity index 100% rename from runtime/rt/00_bootstrap_namespace.js rename to runtime/js/00_bootstrap_namespace.js diff --git a/runtime/rt/01_build.js b/runtime/js/01_build.js similarity index 100% rename from runtime/rt/01_build.js rename to runtime/js/01_build.js diff --git a/runtime/rt/01_colors.js b/runtime/js/01_colors.js similarity index 100% rename from runtime/rt/01_colors.js rename to runtime/js/01_colors.js diff --git a/runtime/rt/01_errors.js b/runtime/js/01_errors.js similarity index 100% rename from runtime/rt/01_errors.js rename to runtime/js/01_errors.js diff --git a/runtime/rt/01_internals.js b/runtime/js/01_internals.js similarity index 100% rename from runtime/rt/01_internals.js rename to runtime/js/01_internals.js diff --git a/runtime/rt/01_version.js b/runtime/js/01_version.js similarity index 100% rename from runtime/rt/01_version.js rename to runtime/js/01_version.js diff --git a/runtime/rt/01_web_util.js b/runtime/js/01_web_util.js similarity index 100% rename from runtime/rt/01_web_util.js rename to runtime/js/01_web_util.js diff --git a/runtime/rt/02_console.js b/runtime/js/02_console.js similarity index 100% rename from runtime/rt/02_console.js rename to runtime/js/02_console.js diff --git a/runtime/rt/06_util.js b/runtime/js/06_util.js similarity index 100% rename from runtime/rt/06_util.js rename to runtime/js/06_util.js diff --git a/runtime/rt/10_dispatch_minimal.js b/runtime/js/10_dispatch_minimal.js similarity index 100% rename from runtime/rt/10_dispatch_minimal.js rename to runtime/js/10_dispatch_minimal.js diff --git a/runtime/rt/11_timers.js b/runtime/js/11_timers.js similarity index 100% rename from runtime/rt/11_timers.js rename to runtime/js/11_timers.js diff --git a/runtime/rt/11_workers.js b/runtime/js/11_workers.js similarity index 100% rename from runtime/rt/11_workers.js rename to runtime/js/11_workers.js diff --git a/runtime/rt/12_io.js b/runtime/js/12_io.js similarity index 100% rename from runtime/rt/12_io.js rename to runtime/js/12_io.js diff --git a/runtime/rt/13_buffer.js b/runtime/js/13_buffer.js similarity index 100% rename from runtime/rt/13_buffer.js rename to runtime/js/13_buffer.js diff --git a/runtime/rt/27_websocket.js b/runtime/js/27_websocket.js similarity index 100% rename from runtime/rt/27_websocket.js rename to runtime/js/27_websocket.js diff --git a/runtime/rt/30_files.js b/runtime/js/30_files.js similarity index 100% rename from runtime/rt/30_files.js rename to runtime/js/30_files.js diff --git a/runtime/rt/30_fs.js b/runtime/js/30_fs.js similarity index 100% rename from runtime/rt/30_fs.js rename to runtime/js/30_fs.js diff --git a/runtime/rt/30_metrics.js b/runtime/js/30_metrics.js similarity index 100% rename from runtime/rt/30_metrics.js rename to runtime/js/30_metrics.js diff --git a/runtime/rt/30_net.js b/runtime/js/30_net.js similarity index 100% rename from 
runtime/rt/30_net.js rename to runtime/js/30_net.js diff --git a/runtime/rt/30_os.js b/runtime/js/30_os.js similarity index 100% rename from runtime/rt/30_os.js rename to runtime/js/30_os.js diff --git a/runtime/rt/40_compiler_api.js b/runtime/js/40_compiler_api.js similarity index 100% rename from runtime/rt/40_compiler_api.js rename to runtime/js/40_compiler_api.js diff --git a/runtime/rt/40_diagnostics.js b/runtime/js/40_diagnostics.js similarity index 100% rename from runtime/rt/40_diagnostics.js rename to runtime/js/40_diagnostics.js diff --git a/runtime/rt/40_error_stack.js b/runtime/js/40_error_stack.js similarity index 100% rename from runtime/rt/40_error_stack.js rename to runtime/js/40_error_stack.js diff --git a/runtime/rt/40_fs_events.js b/runtime/js/40_fs_events.js similarity index 100% rename from runtime/rt/40_fs_events.js rename to runtime/js/40_fs_events.js diff --git a/runtime/rt/40_net_unstable.js b/runtime/js/40_net_unstable.js similarity index 100% rename from runtime/rt/40_net_unstable.js rename to runtime/js/40_net_unstable.js diff --git a/runtime/rt/40_performance.js b/runtime/js/40_performance.js similarity index 100% rename from runtime/rt/40_performance.js rename to runtime/js/40_performance.js diff --git a/runtime/rt/40_permissions.js b/runtime/js/40_permissions.js similarity index 100% rename from runtime/rt/40_permissions.js rename to runtime/js/40_permissions.js diff --git a/runtime/rt/40_plugins.js b/runtime/js/40_plugins.js similarity index 100% rename from runtime/rt/40_plugins.js rename to runtime/js/40_plugins.js diff --git a/runtime/rt/40_process.js b/runtime/js/40_process.js similarity index 100% rename from runtime/rt/40_process.js rename to runtime/js/40_process.js diff --git a/runtime/rt/40_read_file.js b/runtime/js/40_read_file.js similarity index 100% rename from runtime/rt/40_read_file.js rename to runtime/js/40_read_file.js diff --git a/runtime/rt/40_signals.js b/runtime/js/40_signals.js similarity index 100% rename from runtime/rt/40_signals.js rename to runtime/js/40_signals.js diff --git a/runtime/rt/40_testing.js b/runtime/js/40_testing.js similarity index 100% rename from runtime/rt/40_testing.js rename to runtime/js/40_testing.js diff --git a/runtime/rt/40_tls.js b/runtime/js/40_tls.js similarity index 100% rename from runtime/rt/40_tls.js rename to runtime/js/40_tls.js diff --git a/runtime/rt/40_tty.js b/runtime/js/40_tty.js similarity index 100% rename from runtime/rt/40_tty.js rename to runtime/js/40_tty.js diff --git a/runtime/rt/40_write_file.js b/runtime/js/40_write_file.js similarity index 100% rename from runtime/rt/40_write_file.js rename to runtime/js/40_write_file.js diff --git a/runtime/rt/41_prompt.js b/runtime/js/41_prompt.js similarity index 100% rename from runtime/rt/41_prompt.js rename to runtime/js/41_prompt.js diff --git a/runtime/rt/90_deno_ns.js b/runtime/js/90_deno_ns.js similarity index 100% rename from runtime/rt/90_deno_ns.js rename to runtime/js/90_deno_ns.js diff --git a/runtime/rt/99_main.js b/runtime/js/99_main.js similarity index 100% rename from runtime/rt/99_main.js rename to runtime/js/99_main.js diff --git a/runtime/rt/README.md b/runtime/js/README.md similarity index 100% rename from runtime/rt/README.md rename to runtime/js/README.md From 37fd0836d01011640356d6ff83b29d39df83b03e Mon Sep 17 00:00:00 2001 From: crowlKats <13135287+crowlKats@users.noreply.github.com> Date: Fri, 18 Dec 2020 11:27:53 +0100 Subject: [PATCH 084/135] fix(runtime/websocket): remove eprintln (#8817) --- runtime/ops/websocket.rs | 
1 - 1 file changed, 1 deletion(-) diff --git a/runtime/ops/websocket.rs b/runtime/ops/websocket.rs index d805f307beb8af..a5681bc52718c5 100644 --- a/runtime/ops/websocket.rs +++ b/runtime/ops/websocket.rs @@ -240,7 +240,6 @@ pub async fn op_ws_send( .ok_or_else(bad_resource_id)?; let mut tx = RcRef::map(&resource, |r| &r.tx).borrow_mut().await; tx.send(msg).await?; - eprintln!("sent!"); Ok(json!({})) } From b9165e9482465293aad99aff66bee6b64f739eb6 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Fri, 18 Dec 2020 19:30:49 +0100 Subject: [PATCH 085/135] fix: atomically write files to $DENO_DIR (#8822) --- cli/disk_cache.rs | 2 +- cli/fs_util.rs | 16 ++++++++++++++++ cli/http_cache.rs | 4 ++-- 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/cli/disk_cache.rs b/cli/disk_cache.rs index 233990903bc731..81b86c0ae7c8c6 100644 --- a/cli/disk_cache.rs +++ b/cli/disk_cache.rs @@ -139,7 +139,7 @@ impl DiskCache { Some(ref parent) => self.ensure_dir_exists(parent), None => Ok(()), }?; - fs_util::write_file(&path, data, crate::http_cache::CACHE_PERM) + fs_util::atomic_write_file(&path, data, crate::http_cache::CACHE_PERM) .map_err(|e| with_io_context(&e, format!("{:#?}", &path))) } } diff --git a/cli/fs_util.rs b/cli/fs_util.rs index 217476c012e80e..f13558e36e07b6 100644 --- a/cli/fs_util.rs +++ b/cli/fs_util.rs @@ -2,12 +2,28 @@ use deno_core::error::AnyError; pub use deno_core::normalize_path; +use deno_runtime::deno_crypto::rand; use std::env::current_dir; use std::fs::OpenOptions; use std::io::{Error, Write}; use std::path::{Path, PathBuf}; use walkdir::WalkDir; +pub fn atomic_write_file>( + filename: &Path, + data: T, + mode: u32, +) -> std::io::Result<()> { + let rand: String = (0..4) + .map(|_| format!("{:02x}", rand::random::())) + .collect(); + let extension = format!("{}.tmp", rand); + let tmp_file = filename.with_extension(extension); + write_file(&tmp_file, data, mode)?; + std::fs::rename(tmp_file, filename)?; + Ok(()) +} + pub fn write_file>( filename: &Path, data: T, diff --git a/cli/http_cache.rs b/cli/http_cache.rs index dd5f4dc3fd8f9c..4677b44c9f9c93 100644 --- a/cli/http_cache.rs +++ b/cli/http_cache.rs @@ -87,7 +87,7 @@ impl Metadata { pub fn write(&self, cache_filename: &Path) -> Result<(), AnyError> { let metadata_filename = Self::filename(cache_filename); let json = serde_json::to_string_pretty(self)?; - fs_util::write_file(&metadata_filename, json, CACHE_PERM)?; + fs_util::atomic_write_file(&metadata_filename, json, CACHE_PERM)?; Ok(()) } @@ -161,7 +161,7 @@ impl HttpCache { .expect("Cache filename should have a parent dir"); self.ensure_dir_exists(parent_filename)?; // Cache content - fs_util::write_file(&cache_filename, content, CACHE_PERM)?; + fs_util::atomic_write_file(&cache_filename, content, CACHE_PERM)?; let metadata = Metadata { url: url.to_string(), From 4ab1aa8877a70ef99ca4091fc65def0aefc9c360 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sat, 19 Dec 2020 16:20:36 +0100 Subject: [PATCH 086/135] upgrade: rustyline 7.1.0 (#8829) --- Cargo.lock | 4 ++-- cli/Cargo.toml | 2 +- runtime/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4f12da908a597d..ada9a504b0b5d6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2246,9 +2246,9 @@ dependencies = [ [[package]] name = "rustyline" -version = "7.0.0" +version = "7.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a5f54deba50e65ee4cf786dbc37e8b3c63bdccccbcf9d3a8a9fd0c1bb7e1984" +checksum = 
"8227301bfc717136f0ecbd3d064ba8199e44497a0bdd46bb01ede4387cfd2cec" dependencies = [ "bitflags", "cfg-if 1.0.0", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index b0dc9e5093f24c..449353c2efb26b 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -58,7 +58,7 @@ notify = "5.0.0-pre.3" percent-encoding = "2.1.0" regex = "1.3.9" ring = "0.16.19" -rustyline = { version = "7.0.0", default-features = false } +rustyline = { version = "7.1.0", default-features = false } rustyline-derive = "0.4.0" semver-parser = "0.9.0" serde = { version = "1.0.116", features = ["derive"] } diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index b7bfc494e92ac1..1e5c9511fd4f34 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -47,7 +47,7 @@ notify = "5.0.0-pre.3" percent-encoding = "2.1.0" regex = "1.3.9" ring = "0.16.19" -rustyline = { version = "7.0.0", default-features = false } +rustyline = { version = "7.1.0", default-features = false } rustyline-derive = "0.4.0" serde = { version = "1.0.116", features = ["derive"] } shell-escape = "0.1.5" From afbd19ed9b9661a6648554e635ccdae5cbc0b0a3 Mon Sep 17 00:00:00 2001 From: yonatan ben avraham Date: Sun, 20 Dec 2020 00:13:48 +0200 Subject: [PATCH 087/135] feat(unstable): support in memory certificate data for Deno.createHttpClient (#8739) --- cli/dts/lib.deno.unstable.d.ts | 4 ++++ cli/tests/unit/fetch_test.ts | 41 +++++++++++++++++++++++++++++++++- op_crates/fetch/lib.rs | 16 ++++++++++--- 3 files changed, 57 insertions(+), 4 deletions(-) diff --git a/cli/dts/lib.deno.unstable.d.ts b/cli/dts/lib.deno.unstable.d.ts index f9ef1fb2fcb88c..e8d484ec3a922b 100644 --- a/cli/dts/lib.deno.unstable.d.ts +++ b/cli/dts/lib.deno.unstable.d.ts @@ -1266,6 +1266,10 @@ declare namespace Deno { * Requires `allow-read` permission. */ caFile?: string; + + /** A certificate authority to use when validating TLS certificates. Certificate data must be PEM encoded. + */ + caData?: string; } /** **UNSTABLE**: New API, yet to be vetted. 
diff --git a/cli/tests/unit/fetch_test.ts b/cli/tests/unit/fetch_test.ts index 6a5cff164f6cde..6f90a1847cc24d 100644 --- a/cli/tests/unit/fetch_test.ts +++ b/cli/tests/unit/fetch_test.ts @@ -959,7 +959,7 @@ unitTest(function fetchResponseEmptyConstructor(): void { unitTest( { perms: { net: true, read: true } }, - async function fetchCustomHttpClientSuccess(): Promise< + async function fetchCustomHttpClientFileCertificateSuccess(): Promise< void > { const client = Deno.createHttpClient( @@ -974,3 +974,42 @@ unitTest( client.close(); }, ); + +unitTest( + { perms: { net: true } }, + async function fetchCustomHttpClientParamCertificateSuccess(): Promise< + void + > { + const client = Deno.createHttpClient( + { + caData: `-----BEGIN CERTIFICATE----- +MIIDIzCCAgugAwIBAgIJAMKPPW4tsOymMA0GCSqGSIb3DQEBCwUAMCcxCzAJBgNV +BAYTAlVTMRgwFgYDVQQDDA9FeGFtcGxlLVJvb3QtQ0EwIBcNMTkxMDIxMTYyODIy +WhgPMjExODA5MjcxNjI4MjJaMCcxCzAJBgNVBAYTAlVTMRgwFgYDVQQDDA9FeGFt +cGxlLVJvb3QtQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDMH/IO +2qtHfyBKwANNPB4K0q5JVSg8XxZdRpTTlz0CwU0oRO3uHrI52raCCfVeiQutyZop +eFZTDWeXGudGAFA2B5m3orWt0s+touPi8MzjsG2TQ+WSI66QgbXTNDitDDBtTVcV +5G3Ic+3SppQAYiHSekLISnYWgXLl+k5CnEfTowg6cjqjVr0KjL03cTN3H7b+6+0S +ws4rYbW1j4ExR7K6BFNH6572yq5qR20E6GqlY+EcOZpw4CbCk9lS8/CWuXze/vMs +OfDcc6K+B625d27wyEGZHedBomT2vAD7sBjvO8hn/DP1Qb46a8uCHR6NSfnJ7bXO +G1igaIbgY1zXirNdAgMBAAGjUDBOMB0GA1UdDgQWBBTzut+pwwDfqmMYcI9KNWRD +hxcIpTAfBgNVHSMEGDAWgBTzut+pwwDfqmMYcI9KNWRDhxcIpTAMBgNVHRMEBTAD +AQH/MA0GCSqGSIb3DQEBCwUAA4IBAQB9AqSbZ+hEglAgSHxAMCqRFdhVu7MvaQM0 +P090mhGlOCt3yB7kdGfsIrUW6nQcTz7PPQFRaJMrFHPvFvPootkBUpTYR4hTkdce +H6RCRu2Jxl4Y9bY/uezd9YhGCYfUtfjA6/TH9FcuZfttmOOlxOt01XfNvVMIR6RM +z/AYhd+DeOXjr35F/VHeVpnk+55L0PYJsm1CdEbOs5Hy1ecR7ACuDkXnbM4fpz9I +kyIWJwk2zJReKcJMgi1aIinDM9ao/dca1G99PHOw8dnr4oyoTiv8ao6PWiSRHHMi +MNf4EgWfK+tZMnuqfpfO9740KzfcVoMNo4QJD4yn5YxroUOO/Azi +-----END CERTIFICATE----- +`, + }, + ); + const response = await fetch( + "https://localhost:5545/cli/tests/fixture.json", + { client }, + ); + const json = await response.json(); + assertEquals(json.name, "deno"); + client.close(); + }, +); diff --git a/op_crates/fetch/lib.rs b/op_crates/fetch/lib.rs index c2c08d2cff567e..4bc37b998cef4d 100644 --- a/op_crates/fetch/lib.rs +++ b/op_crates/fetch/lib.rs @@ -260,6 +260,7 @@ where #[serde(default)] struct CreateHttpClientOptions { ca_file: Option, + ca_data: Option, } let args: CreateHttpClientOptions = serde_json::from_value(args)?; @@ -269,7 +270,9 @@ where permissions.check_read(&PathBuf::from(ca_file))?; } - let client = create_http_client(args.ca_file.as_deref()).unwrap(); + let client = + create_http_client(args.ca_file.as_deref(), args.ca_data.as_deref()) + .unwrap(); let rid = state.resource_table.add(HttpClientResource::new(client)); Ok(json!(rid)) @@ -277,9 +280,16 @@ where /// Create new instance of async reqwest::Client. This client supports /// proxies and doesn't follow redirects. 
-fn create_http_client(ca_file: Option<&str>) -> Result { +fn create_http_client( + ca_file: Option<&str>, + ca_data: Option<&str>, +) -> Result { let mut builder = Client::builder().redirect(Policy::none()).use_rustls_tls(); - if let Some(ca_file) = ca_file { + if let Some(ca_data) = ca_data { + let ca_data_vec = ca_data.as_bytes().to_vec(); + let cert = reqwest::Certificate::from_pem(&ca_data_vec)?; + builder = builder.add_root_certificate(cert); + } else if let Some(ca_file) = ca_file { let mut buf = Vec::new(); File::open(ca_file)?.read_to_end(&mut buf)?; let cert = reqwest::Certificate::from_pem(&buf)?; From 660f75e066226a635375b70225df165bcf759077 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sun, 20 Dec 2020 00:30:53 +0100 Subject: [PATCH 088/135] upgrade: swc_ecmascript 0.15.1 (#8836) --- Cargo.lock | 17 +++++++++-------- cli/Cargo.toml | 2 +- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ada9a504b0b5d6..71ab5aec2d1c4f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2622,9 +2622,9 @@ dependencies = [ [[package]] name = "swc_ecma_codegen" -version = "0.41.1" +version = "0.41.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96e655391ee7b9e7e01fde2f64bfc1dbcf8b712510fc09edc6c00de52704e6a9" +checksum = "c50c98de260a5f33084211ce488c64acb052fc29851d917ee270fae498960128" dependencies = [ "bitflags", "num-bigint", @@ -2633,6 +2633,7 @@ dependencies = [ "swc_common", "swc_ecma_ast", "swc_ecma_codegen_macros", + "swc_ecma_parser", ] [[package]] @@ -2662,9 +2663,9 @@ dependencies = [ [[package]] name = "swc_ecma_parser" -version = "0.43.1" +version = "0.43.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d22c8f6cf8f45b8f2c1c09713f5d0d2e007394c1e8a9fe8ce20d8be5b57dc6a" +checksum = "ffaa010559302b877261b5d6d44c3386fe525f036c33711da490398a2fbc8798" dependencies = [ "either", "enum_kind", @@ -2696,9 +2697,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms" -version = "0.30.3" +version = "0.30.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49e8cfe7e5e05795c3647534e68036a1764bea326611db40451507529db68528" +checksum = "fab9f78e6850e956c2f40b14fd45ad5e22c7527fce4e64dae0b014b17d286ebe" dependencies = [ "Inflector", "arrayvec", @@ -2772,9 +2773,9 @@ dependencies = [ [[package]] name = "swc_ecmascript" -version = "0.15.0" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3110a752791f7c5f0cce84b6d0a088c79e08571cab6365c509fc5b244d66242e" +checksum = "498cdc72a4ef3d032c98552ea617829ff2c5f5583770369fb0e85edb70e6297a" dependencies = [ "swc_ecma_ast", "swc_ecma_codegen", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 449353c2efb26b..80eeceef81007b 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -66,7 +66,7 @@ shell-escape = "0.1.5" sourcemap = "6.0.1" swc_bundler = "0.17.6" swc_common = { version = "0.10.7", features = ["sourcemap"] } -swc_ecmascript = { version = "0.15.0", features = ["codegen", "dep_graph", "parser", "react", "transforms", "visit"] } +swc_ecmascript = { version = "0.15.1", features = ["codegen", "dep_graph", "parser", "react", "transforms", "visit"] } tempfile = "3.1.0" termcolor = "1.1.0" tokio = { version = "0.2.22", features = ["full"] } From e924bbdf3606e83ff9eef3a8ed640c4ecc34444f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sun, 20 Dec 2020 00:34:22 +0100 Subject: [PATCH 089/135] fix: TLA in web worker (#8809) Implementors of 
`deno_core::JsRuntime` might want to do additional actions during each turn of event loop, eg. `deno_runtime::Worker` polls inspector, `deno_runtime::WebWorker` receives/dispatches messages from/to worker host. Previously `JsRuntime::mod_evaluate` was implemented in such fashion that it only polled `JsRuntime`'s event loop. This behavior turned out to be wrong in the example of `WebWorker` which couldn't receive/dispatch messages because its implementation of event loop was never called. This commit rewrites "mod_evaluate" to return a handle to receiver that resolves when module's promise resolves. It is now implementors responsibility to poll event loop after calling `mod_evaluate`. --- cli/tests/worker_with_top_level_await.ts | 15 ++++++++++ cli/tests/workers_test.ts | 19 +++++++++++++ core/modules.rs | 14 +++++---- core/runtime.rs | 36 +++++++++--------------- runtime/web_worker.rs | 25 +++++++++++++++- runtime/worker.rs | 17 ++++++++++- 6 files changed, 95 insertions(+), 31 deletions(-) create mode 100644 cli/tests/worker_with_top_level_await.ts diff --git a/cli/tests/worker_with_top_level_await.ts b/cli/tests/worker_with_top_level_await.ts new file mode 100644 index 00000000000000..cf3418bf7fc953 --- /dev/null +++ b/cli/tests/worker_with_top_level_await.ts @@ -0,0 +1,15 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. + +import { serve } from "../../std/http/server.ts"; + +const server = serve({ port: 8080 }); + +self.onmessage = (e: MessageEvent) => { + console.log("TLA worker received message", e.data); +}; + +self.postMessage("hello"); + +for await (const _r of server) { + // pass +} diff --git a/cli/tests/workers_test.ts b/cli/tests/workers_test.ts index d907c97a9b52b1..4f7682be2780c1 100644 --- a/cli/tests/workers_test.ts +++ b/cli/tests/workers_test.ts @@ -357,3 +357,22 @@ Deno.test({ w.terminate(); }, }); + +Deno.test({ + name: "Worker with top-level-await", + fn: async function (): Promise { + const promise = deferred(); + const worker = new Worker( + new URL("./worker_with_top_level_await.ts", import.meta.url).href, + { deno: true, type: "module" }, + ); + worker.onmessage = (e): void => { + console.log("received from worker", e.data); + worker.postMessage("from main"); + promise.resolve(); + }; + + await promise; + worker.terminate(); + }, +}); diff --git a/core/modules.rs b/core/modules.rs index 546f2464f89400..8248eb32d8e5ee 100644 --- a/core/modules.rs +++ b/core/modules.rs @@ -718,8 +718,8 @@ mod tests { let spec = ModuleSpecifier::resolve_url("file:///a.js").unwrap(); let a_id_fut = runtime.load_module(&spec, None); let a_id = futures::executor::block_on(a_id_fut).expect("Failed to load"); - - futures::executor::block_on(runtime.mod_evaluate(a_id)).unwrap(); + runtime.mod_evaluate(a_id); + futures::executor::block_on(runtime.run_event_loop()).unwrap(); let l = loads.lock().unwrap(); assert_eq!( l.to_vec(), @@ -786,7 +786,8 @@ mod tests { let result = runtime.load_module(&spec, None).await; assert!(result.is_ok()); let circular1_id = result.unwrap(); - runtime.mod_evaluate(circular1_id).await.unwrap(); + runtime.mod_evaluate(circular1_id); + runtime.run_event_loop().await.unwrap(); let l = loads.lock().unwrap(); assert_eq!( @@ -863,7 +864,8 @@ mod tests { println!(">> result {:?}", result); assert!(result.is_ok()); let redirect1_id = result.unwrap(); - runtime.mod_evaluate(redirect1_id).await.unwrap(); + runtime.mod_evaluate(redirect1_id); + runtime.run_event_loop().await.unwrap(); let l = loads.lock().unwrap(); assert_eq!( l.to_vec(), @@ 
-1012,8 +1014,8 @@ mod tests { .boxed_local(); let main_id = futures::executor::block_on(main_id_fut).expect("Failed to load"); - - futures::executor::block_on(runtime.mod_evaluate(main_id)).unwrap(); + runtime.mod_evaluate(main_id); + futures::executor::block_on(runtime.run_event_loop()).unwrap(); let l = loads.lock().unwrap(); assert_eq!( diff --git a/core/runtime.rs b/core/runtime.rs index 24bdf4dc29ae83..e9949bc8e26369 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -825,12 +825,17 @@ impl JsRuntime { Ok(()) } + // TODO(bartlomieju): make it return `ModuleEvaluationFuture`? /// Evaluates an already instantiated ES module. /// + /// Returns a receiver handle that resolves when module promise resolves. + /// Implementors must manually call `run_event_loop()` to drive module + /// evaluation future. + /// /// `AnyError` can be downcast to a type that exposes additional information /// about the V8 exception. By default this type is `JsError`, however it may /// be a different type if `RuntimeOptions::js_error_create_fn` has been set. - fn mod_evaluate_inner( + pub fn mod_evaluate( &mut self, id: ModuleId, ) -> mpsc::Receiver> { @@ -902,24 +907,6 @@ impl JsRuntime { receiver } - pub async fn mod_evaluate(&mut self, id: ModuleId) -> Result<(), AnyError> { - let mut receiver = self.mod_evaluate_inner(id); - - poll_fn(|cx| { - if let Poll::Ready(maybe_result) = receiver.poll_next_unpin(cx) { - debug!("received module evaluate {:#?}", maybe_result); - // If `None` is returned it means that runtime was destroyed before - // evaluation was complete. This can happen in Web Worker when `self.close()` - // is called at top level. - let result = maybe_result.unwrap_or(Ok(())); - return Poll::Ready(result); - } - let _r = self.poll_event_loop(cx)?; - Poll::Pending - }) - .await - } - fn dyn_import_error(&mut self, id: ModuleLoadId, err: AnyError) { let state_rc = Self::state(self.v8_isolate()); let context = self.global_context(); @@ -1110,7 +1097,8 @@ impl JsRuntime { v8::PromiseState::Fulfilled => { state.pending_mod_evaluate.take(); scope.perform_microtask_checkpoint(); - sender.try_send(Ok(())).unwrap(); + // Receiver end might have been already dropped, ignore the result + let _ = sender.try_send(Ok(())); } v8::PromiseState::Rejected => { let exception = promise.result(scope); @@ -1120,7 +1108,8 @@ impl JsRuntime { let err1 = exception_to_err_result::<()>(scope, exception, false) .map_err(|err| attach_handle_to_error(scope, err, exception)) .unwrap_err(); - sender.try_send(Err(err1)).unwrap(); + // Receiver end might have been already dropped, ignore the result + let _ = sender.try_send(Err(err1)); } } } @@ -2259,7 +2248,7 @@ pub mod tests { runtime.mod_instantiate(mod_a).unwrap(); assert_eq!(dispatch_count.load(Ordering::Relaxed), 0); - runtime.mod_evaluate_inner(mod_a); + runtime.mod_evaluate(mod_a); assert_eq!(dispatch_count.load(Ordering::Relaxed), 1); } @@ -2502,7 +2491,8 @@ pub mod tests { ) .unwrap(); - futures::executor::block_on(runtime.mod_evaluate(module_id)).unwrap(); + runtime.mod_evaluate(module_id); + futures::executor::block_on(runtime.run_event_loop()).unwrap(); let _snapshot = runtime.snapshot(); } diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index c1713f815055eb..235cb2c7ef5ff4 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -315,7 +315,28 @@ impl WebWorker { module_specifier: &ModuleSpecifier, ) -> Result<(), AnyError> { let id = self.js_runtime.load_module(module_specifier, None).await?; - self.js_runtime.mod_evaluate(id).await + + let 
mut receiver = self.js_runtime.mod_evaluate(id); + tokio::select! { + maybe_result = receiver.next() => { + debug!("received worker module evaluate {:#?}", maybe_result); + // If `None` is returned it means that runtime was destroyed before + // evaluation was complete. This can happen in Web Worker when `self.close()` + // is called at top level. + let result = maybe_result.unwrap_or(Ok(())); + return result; + } + + event_loop_result = self.run_event_loop() => { + if self.has_been_terminated() { + return Ok(()); + } + event_loop_result?; + let maybe_result = receiver.next().await; + let result = maybe_result.unwrap_or(Ok(())); + return result; + } + } } /// Returns a way to communicate with the Worker from other threads. @@ -374,6 +395,8 @@ impl WebWorker { let msg = String::from_utf8(msg.to_vec()).unwrap(); let script = format!("workerMessageRecvCallback({})", msg); + // TODO(bartlomieju): set proper script name like "deno:runtime/web_worker.js" + // so it's dimmed in stack trace instead of using "__anonymous__" if let Err(e) = self.execute(&script) { // If execution was terminated during message callback then // just ignore it diff --git a/runtime/worker.rs b/runtime/worker.rs index adb525c4c98603..b01da45533418c 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -10,6 +10,7 @@ use crate::permissions::Permissions; use deno_core::error::AnyError; use deno_core::futures::future::poll_fn; use deno_core::futures::future::FutureExt; +use deno_core::futures::stream::StreamExt; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::url::Url; @@ -211,7 +212,21 @@ impl MainWorker { ) -> Result<(), AnyError> { let id = self.preload_module(module_specifier).await?; self.wait_for_inspector_session(); - self.js_runtime.mod_evaluate(id).await + let mut receiver = self.js_runtime.mod_evaluate(id); + tokio::select! { + maybe_result = receiver.next() => { + debug!("received module evaluate {:#?}", maybe_result); + let result = maybe_result.expect("Module evaluation result not provided."); + return result; + } + + event_loop_result = self.run_event_loop() => { + event_loop_result?; + let maybe_result = receiver.next().await; + let result = maybe_result.expect("Module evaluation result not provided."); + return result; + } + } } fn wait_for_inspector_session(&mut self) { From 3eec73ff904e54b6e90879d93b09be4330c63712 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Sun, 20 Dec 2020 15:14:19 +0100 Subject: [PATCH 090/135] Revert "fix: TLA in web worker (#8809)" (#8839) This reverts commit e924bbdf3606e83ff9eef3a8ed640c4ecc34444f. --- cli/tests/worker_with_top_level_await.ts | 15 ---------- cli/tests/workers_test.ts | 19 ------------- core/modules.rs | 14 ++++----- core/runtime.rs | 36 +++++++++++++++--------- runtime/web_worker.rs | 25 +--------------- runtime/worker.rs | 17 +---------- 6 files changed, 31 insertions(+), 95 deletions(-) delete mode 100644 cli/tests/worker_with_top_level_await.ts diff --git a/cli/tests/worker_with_top_level_await.ts b/cli/tests/worker_with_top_level_await.ts deleted file mode 100644 index cf3418bf7fc953..00000000000000 --- a/cli/tests/worker_with_top_level_await.ts +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
- -import { serve } from "../../std/http/server.ts"; - -const server = serve({ port: 8080 }); - -self.onmessage = (e: MessageEvent) => { - console.log("TLA worker received message", e.data); -}; - -self.postMessage("hello"); - -for await (const _r of server) { - // pass -} diff --git a/cli/tests/workers_test.ts b/cli/tests/workers_test.ts index 4f7682be2780c1..d907c97a9b52b1 100644 --- a/cli/tests/workers_test.ts +++ b/cli/tests/workers_test.ts @@ -357,22 +357,3 @@ Deno.test({ w.terminate(); }, }); - -Deno.test({ - name: "Worker with top-level-await", - fn: async function (): Promise { - const promise = deferred(); - const worker = new Worker( - new URL("./worker_with_top_level_await.ts", import.meta.url).href, - { deno: true, type: "module" }, - ); - worker.onmessage = (e): void => { - console.log("received from worker", e.data); - worker.postMessage("from main"); - promise.resolve(); - }; - - await promise; - worker.terminate(); - }, -}); diff --git a/core/modules.rs b/core/modules.rs index 8248eb32d8e5ee..546f2464f89400 100644 --- a/core/modules.rs +++ b/core/modules.rs @@ -718,8 +718,8 @@ mod tests { let spec = ModuleSpecifier::resolve_url("file:///a.js").unwrap(); let a_id_fut = runtime.load_module(&spec, None); let a_id = futures::executor::block_on(a_id_fut).expect("Failed to load"); - runtime.mod_evaluate(a_id); - futures::executor::block_on(runtime.run_event_loop()).unwrap(); + + futures::executor::block_on(runtime.mod_evaluate(a_id)).unwrap(); let l = loads.lock().unwrap(); assert_eq!( l.to_vec(), @@ -786,8 +786,7 @@ mod tests { let result = runtime.load_module(&spec, None).await; assert!(result.is_ok()); let circular1_id = result.unwrap(); - runtime.mod_evaluate(circular1_id); - runtime.run_event_loop().await.unwrap(); + runtime.mod_evaluate(circular1_id).await.unwrap(); let l = loads.lock().unwrap(); assert_eq!( @@ -864,8 +863,7 @@ mod tests { println!(">> result {:?}", result); assert!(result.is_ok()); let redirect1_id = result.unwrap(); - runtime.mod_evaluate(redirect1_id); - runtime.run_event_loop().await.unwrap(); + runtime.mod_evaluate(redirect1_id).await.unwrap(); let l = loads.lock().unwrap(); assert_eq!( l.to_vec(), @@ -1014,8 +1012,8 @@ mod tests { .boxed_local(); let main_id = futures::executor::block_on(main_id_fut).expect("Failed to load"); - runtime.mod_evaluate(main_id); - futures::executor::block_on(runtime.run_event_loop()).unwrap(); + + futures::executor::block_on(runtime.mod_evaluate(main_id)).unwrap(); let l = loads.lock().unwrap(); assert_eq!( diff --git a/core/runtime.rs b/core/runtime.rs index e9949bc8e26369..24bdf4dc29ae83 100644 --- a/core/runtime.rs +++ b/core/runtime.rs @@ -825,17 +825,12 @@ impl JsRuntime { Ok(()) } - // TODO(bartlomieju): make it return `ModuleEvaluationFuture`? /// Evaluates an already instantiated ES module. /// - /// Returns a receiver handle that resolves when module promise resolves. - /// Implementors must manually call `run_event_loop()` to drive module - /// evaluation future. - /// /// `AnyError` can be downcast to a type that exposes additional information /// about the V8 exception. By default this type is `JsError`, however it may /// be a different type if `RuntimeOptions::js_error_create_fn` has been set. 
- pub fn mod_evaluate( + fn mod_evaluate_inner( &mut self, id: ModuleId, ) -> mpsc::Receiver> { @@ -907,6 +902,24 @@ impl JsRuntime { receiver } + pub async fn mod_evaluate(&mut self, id: ModuleId) -> Result<(), AnyError> { + let mut receiver = self.mod_evaluate_inner(id); + + poll_fn(|cx| { + if let Poll::Ready(maybe_result) = receiver.poll_next_unpin(cx) { + debug!("received module evaluate {:#?}", maybe_result); + // If `None` is returned it means that runtime was destroyed before + // evaluation was complete. This can happen in Web Worker when `self.close()` + // is called at top level. + let result = maybe_result.unwrap_or(Ok(())); + return Poll::Ready(result); + } + let _r = self.poll_event_loop(cx)?; + Poll::Pending + }) + .await + } + fn dyn_import_error(&mut self, id: ModuleLoadId, err: AnyError) { let state_rc = Self::state(self.v8_isolate()); let context = self.global_context(); @@ -1097,8 +1110,7 @@ impl JsRuntime { v8::PromiseState::Fulfilled => { state.pending_mod_evaluate.take(); scope.perform_microtask_checkpoint(); - // Receiver end might have been already dropped, ignore the result - let _ = sender.try_send(Ok(())); + sender.try_send(Ok(())).unwrap(); } v8::PromiseState::Rejected => { let exception = promise.result(scope); @@ -1108,8 +1120,7 @@ impl JsRuntime { let err1 = exception_to_err_result::<()>(scope, exception, false) .map_err(|err| attach_handle_to_error(scope, err, exception)) .unwrap_err(); - // Receiver end might have been already dropped, ignore the result - let _ = sender.try_send(Err(err1)); + sender.try_send(Err(err1)).unwrap(); } } } @@ -2248,7 +2259,7 @@ pub mod tests { runtime.mod_instantiate(mod_a).unwrap(); assert_eq!(dispatch_count.load(Ordering::Relaxed), 0); - runtime.mod_evaluate(mod_a); + runtime.mod_evaluate_inner(mod_a); assert_eq!(dispatch_count.load(Ordering::Relaxed), 1); } @@ -2491,8 +2502,7 @@ pub mod tests { ) .unwrap(); - runtime.mod_evaluate(module_id); - futures::executor::block_on(runtime.run_event_loop()).unwrap(); + futures::executor::block_on(runtime.mod_evaluate(module_id)).unwrap(); let _snapshot = runtime.snapshot(); } diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index 235cb2c7ef5ff4..c1713f815055eb 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -315,28 +315,7 @@ impl WebWorker { module_specifier: &ModuleSpecifier, ) -> Result<(), AnyError> { let id = self.js_runtime.load_module(module_specifier, None).await?; - - let mut receiver = self.js_runtime.mod_evaluate(id); - tokio::select! { - maybe_result = receiver.next() => { - debug!("received worker module evaluate {:#?}", maybe_result); - // If `None` is returned it means that runtime was destroyed before - // evaluation was complete. This can happen in Web Worker when `self.close()` - // is called at top level. - let result = maybe_result.unwrap_or(Ok(())); - return result; - } - - event_loop_result = self.run_event_loop() => { - if self.has_been_terminated() { - return Ok(()); - } - event_loop_result?; - let maybe_result = receiver.next().await; - let result = maybe_result.unwrap_or(Ok(())); - return result; - } - } + self.js_runtime.mod_evaluate(id).await } /// Returns a way to communicate with the Worker from other threads. 
@@ -395,8 +374,6 @@ impl WebWorker { let msg = String::from_utf8(msg.to_vec()).unwrap(); let script = format!("workerMessageRecvCallback({})", msg); - // TODO(bartlomieju): set proper script name like "deno:runtime/web_worker.js" - // so it's dimmed in stack trace instead of using "__anonymous__" if let Err(e) = self.execute(&script) { // If execution was terminated during message callback then // just ignore it diff --git a/runtime/worker.rs b/runtime/worker.rs index b01da45533418c..adb525c4c98603 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -10,7 +10,6 @@ use crate::permissions::Permissions; use deno_core::error::AnyError; use deno_core::futures::future::poll_fn; use deno_core::futures::future::FutureExt; -use deno_core::futures::stream::StreamExt; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::url::Url; @@ -212,21 +211,7 @@ impl MainWorker { ) -> Result<(), AnyError> { let id = self.preload_module(module_specifier).await?; self.wait_for_inspector_session(); - let mut receiver = self.js_runtime.mod_evaluate(id); - tokio::select! { - maybe_result = receiver.next() => { - debug!("received module evaluate {:#?}", maybe_result); - let result = maybe_result.expect("Module evaluation result not provided."); - return result; - } - - event_loop_result = self.run_event_loop() => { - event_loop_result?; - let maybe_result = receiver.next().await; - let result = maybe_result.expect("Module evaluation result not provided."); - return result; - } - } + self.js_runtime.mod_evaluate(id).await } fn wait_for_inspector_session(&mut self) { From d5ee168468a431140d2903e0a03ea0eeea491874 Mon Sep 17 00:00:00 2001 From: Liam Murphy <43807659+Liamolucko@users.noreply.github.com> Date: Mon, 21 Dec 2020 23:13:09 +1100 Subject: [PATCH 091/135] refactor: rewrite process_test.ts to use deno instead of python (#8841) Rewrites all the subprocess python scripts to be Deno scripts. 
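Each rewritten test applies the same substitution: a `python -c '<script>'` child process becomes a `deno eval '<script>'` child process spawned through `Deno.execPath()`, which is also why the tests below additionally request the `read` permission. A minimal sketch of the pattern (illustrative only; the inline script is a placeholder and the snippet needs `--allow-run` plus `--allow-read`):

  // Spawn the current deno binary and evaluate a small inline script.
  const p = Deno.run({
    cmd: [Deno.execPath(), "eval", "console.log('hello world')"],
    stdout: "piped",
  });
  // Collect the child's stdout, then release the process resources.
  const output = new TextDecoder().decode(await p.output());
  console.log(output.trim()); // -> "hello world"
  p.close();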
--- cli/tests/unit/process_test.ts | 566 ++++++++++++++++++--------------- 1 file changed, 310 insertions(+), 256 deletions(-) diff --git a/cli/tests/unit/process_test.ts b/cli/tests/unit/process_test.ts index 43d848916ac38a..1e13427297b86e 100644 --- a/cli/tests/unit/process_test.ts +++ b/cli/tests/unit/process_test.ts @@ -7,94 +7,102 @@ import { unitTest, } from "./test_util.ts"; -unitTest(function runPermissions(): void { +unitTest({ perms: { read: true } }, function runPermissions(): void { assertThrows(() => { - Deno.run({ cmd: ["python", "-c", "print('hello world')"] }); + Deno.run({ cmd: [Deno.execPath(), "eval", "console.log('hello world')"] }); }, Deno.errors.PermissionDenied); }); -unitTest({ perms: { run: true } }, async function runSuccess(): Promise { - const p = Deno.run({ - cmd: ["python", "-c", "print('hello world')"], - stdout: "piped", - stderr: "null", - }); - const status = await p.status(); - assertEquals(status.success, true); - assertEquals(status.code, 0); - assertEquals(status.signal, undefined); - p.stdout.close(); - p.close(); -}); - -unitTest({ perms: { run: true } }, async function runUrl(): Promise { - const q = Deno.run({ - cmd: ["python", "-c", "import sys; print sys.executable"], - stdout: "piped", - }); - await q.status(); - const pythonPath = new TextDecoder().decode(await q.output()).trim(); - q.close(); +unitTest( + { perms: { run: true, read: true } }, + async function runSuccess(): Promise { + const p = Deno.run({ + cmd: [Deno.execPath(), "eval", "console.log('hello world')"], + stdout: "piped", + stderr: "null", + }); + const status = await p.status(); + assertEquals(status.success, true); + assertEquals(status.code, 0); + assertEquals(status.signal, undefined); + p.stdout.close(); + p.close(); + }, +); - const p = Deno.run({ - cmd: [new URL(`file:///${pythonPath}`), "-c", "print('hello world')"], - stdout: "piped", - stderr: "null", - }); - const status = await p.status(); - assertEquals(status.success, true); - assertEquals(status.code, 0); - assertEquals(status.signal, undefined); - p.stdout.close(); - p.close(); -}); +unitTest( + { perms: { run: true, read: true } }, + async function runUrl(): Promise { + const p = Deno.run({ + cmd: [ + new URL(`file:///${Deno.execPath()}`), + "eval", + "console.log('hello world')", + ], + stdout: "piped", + stderr: "null", + }); + const status = await p.status(); + assertEquals(status.success, true); + assertEquals(status.code, 0); + assertEquals(status.signal, undefined); + p.stdout.close(); + p.close(); + }, +); -unitTest({ perms: { run: true } }, async function runStdinRid0(): Promise< - void -> { - const p = Deno.run({ - cmd: ["python", "-c", "print('hello world')"], - stdin: 0, - stdout: "piped", - stderr: "null", - }); - const status = await p.status(); - assertEquals(status.success, true); - assertEquals(status.code, 0); - assertEquals(status.signal, undefined); - p.stdout.close(); - p.close(); -}); +unitTest( + { perms: { run: true, read: true } }, + async function runStdinRid0(): Promise< + void + > { + const p = Deno.run({ + cmd: [Deno.execPath(), "eval", "console.log('hello world')"], + stdin: 0, + stdout: "piped", + stderr: "null", + }); + const status = await p.status(); + assertEquals(status.success, true); + assertEquals(status.code, 0); + assertEquals(status.signal, undefined); + p.stdout.close(); + p.close(); + }, +); -unitTest({ perms: { run: true } }, function runInvalidStdio(): void { - assertThrows(() => - Deno.run({ - cmd: ["python", "-c", "print('hello world')"], - // @ts-expect-error 
because Deno.run should throw on invalid stdin. - stdin: "a", - }) - ); - assertThrows(() => - Deno.run({ - cmd: ["python", "-c", "print('hello world')"], - // @ts-expect-error because Deno.run should throw on invalid stdout. - stdout: "b", - }) - ); - assertThrows(() => - Deno.run({ - cmd: ["python", "-c", "print('hello world')"], - // @ts-expect-error because Deno.run should throw on invalid stderr. - stderr: "c", - }) - ); -}); +unitTest( + { perms: { run: true, read: true } }, + function runInvalidStdio(): void { + assertThrows(() => + Deno.run({ + cmd: [Deno.execPath(), "eval", "console.log('hello world')"], + // @ts-expect-error because Deno.run should throw on invalid stdin. + stdin: "a", + }) + ); + assertThrows(() => + Deno.run({ + cmd: [Deno.execPath(), "eval", "console.log('hello world')"], + // @ts-expect-error because Deno.run should throw on invalid stdout. + stdout: "b", + }) + ); + assertThrows(() => + Deno.run({ + cmd: [Deno.execPath(), "eval", "console.log('hello world')"], + // @ts-expect-error because Deno.run should throw on invalid stderr. + stderr: "c", + }) + ); + }, +); unitTest( - { perms: { run: true } }, + { perms: { run: true, read: true } }, async function runCommandFailedWithCode(): Promise { const p = Deno.run({ - cmd: ["python", "-c", "import sys;sys.exit(41 + 1)"], + cmd: [Deno.execPath(), "eval", "Deno.exit(41 + 1)"], }); const status = await p.status(); assertEquals(status.success, false); @@ -108,11 +116,11 @@ unitTest( { // No signals on windows. ignore: Deno.build.os === "windows", - perms: { run: true }, + perms: { run: true, read: true }, }, async function runCommandFailedWithSignal(): Promise { const p = Deno.run({ - cmd: ["python", "-c", "import os;os.kill(os.getpid(), 9)"], + cmd: [Deno.execPath(), "eval", "--unstable", "Deno.kill(Deno.pid, 9)"], }); const status = await p.status(); assertEquals(status.success, false); @@ -134,36 +142,34 @@ unitTest({ perms: { run: true } }, function runNotFound(): void { }); unitTest( - { perms: { write: true, run: true } }, + { perms: { write: true, run: true, read: true } }, async function runWithCwdIsAsync(): Promise { const enc = new TextEncoder(); const cwd = await Deno.makeTempDir({ prefix: "deno_command_test" }); const exitCodeFile = "deno_was_here"; - const pyProgramFile = "poll_exit.py"; - const pyProgram = ` -from sys import exit -from time import sleep - -while True: - try: - with open("${exitCodeFile}", "r") as f: - line = f.readline() - code = int(line) - exit(code) - except IOError: - # Retry if we got here before deno wrote the file. - sleep(0.01) - pass + const programFile = "poll_exit.ts"; + const program = ` +async function tryExit() { + try { + const code = parseInt(await Deno.readTextFile("${exitCodeFile}")); + Deno.exit(code); + } catch { + // Retry if we got here before deno wrote the file. + setTimeout(tryExit, 0.01); + } +} + +tryExit(); `; - Deno.writeFileSync(`${cwd}/${pyProgramFile}.py`, enc.encode(pyProgram)); + Deno.writeFileSync(`${cwd}/${programFile}`, enc.encode(program)); const p = Deno.run({ cwd, - cmd: ["python", `${pyProgramFile}.py`], + cmd: [Deno.execPath(), "run", "--allow-read", programFile], }); - // Write the expected exit code *after* starting python. + // Write the expected exit code *after* starting deno. // This is how we verify that `run()` is actually asynchronous. 
const code = 84; Deno.writeFileSync(`${cwd}/${exitCodeFile}`, enc.encode(`${code}`)); @@ -176,111 +182,146 @@ while True: }, ); -unitTest({ perms: { run: true } }, async function runStdinPiped(): Promise< - void -> { - const p = Deno.run({ - cmd: ["python", "-c", "import sys; assert 'hello' == sys.stdin.read();"], - stdin: "piped", - }); - assert(p.stdin); - assert(!p.stdout); - assert(!p.stderr); +unitTest( + { perms: { run: true, read: true } }, + async function runStdinPiped(): Promise< + void + > { + const p = Deno.run({ + cmd: [ + Deno.execPath(), + "eval", + "if (new TextDecoder().decode(await Deno.readAll(Deno.stdin)) !== 'hello') throw new Error('Expected \\'hello\\'')", + ], + stdin: "piped", + }); + assert(p.stdin); + assert(!p.stdout); + assert(!p.stderr); - const msg = new TextEncoder().encode("hello"); - const n = await p.stdin.write(msg); - assertEquals(n, msg.byteLength); + const msg = new TextEncoder().encode("hello"); + const n = await p.stdin.write(msg); + assertEquals(n, msg.byteLength); - p.stdin.close(); + p.stdin.close(); - const status = await p.status(); - assertEquals(status.success, true); - assertEquals(status.code, 0); - assertEquals(status.signal, undefined); - p.close(); -}); + const status = await p.status(); + assertEquals(status.success, true); + assertEquals(status.code, 0); + assertEquals(status.signal, undefined); + p.close(); + }, +); -unitTest({ perms: { run: true } }, async function runStdoutPiped(): Promise< - void -> { - const p = Deno.run({ - cmd: ["python", "-c", "import sys; sys.stdout.write('hello')"], - stdout: "piped", - }); - assert(!p.stdin); - assert(!p.stderr); +unitTest( + { perms: { run: true, read: true } }, + async function runStdoutPiped(): Promise< + void + > { + const p = Deno.run({ + cmd: [ + Deno.execPath(), + "eval", + "await Deno.stdout.write(new TextEncoder().encode('hello'))", + ], + stdout: "piped", + }); + assert(!p.stdin); + assert(!p.stderr); + + const data = new Uint8Array(10); + let r = await p.stdout.read(data); + if (r === null) { + throw new Error("p.stdout.read(...) should not be null"); + } + assertEquals(r, 5); + const s = new TextDecoder().decode(data.subarray(0, r)); + assertEquals(s, "hello"); + r = await p.stdout.read(data); + assertEquals(r, null); + p.stdout.close(); - const data = new Uint8Array(10); - let r = await p.stdout.read(data); - if (r === null) { - throw new Error("p.stdout.read(...) 
should not be null"); - } - assertEquals(r, 5); - const s = new TextDecoder().decode(data.subarray(0, r)); - assertEquals(s, "hello"); - r = await p.stdout.read(data); - assertEquals(r, null); - p.stdout.close(); - - const status = await p.status(); - assertEquals(status.success, true); - assertEquals(status.code, 0); - assertEquals(status.signal, undefined); - p.close(); -}); + const status = await p.status(); + assertEquals(status.success, true); + assertEquals(status.code, 0); + assertEquals(status.signal, undefined); + p.close(); + }, +); -unitTest({ perms: { run: true } }, async function runStderrPiped(): Promise< - void -> { - const p = Deno.run({ - cmd: ["python", "-c", "import sys; sys.stderr.write('hello')"], - stderr: "piped", - }); - assert(!p.stdin); - assert(!p.stdout); +unitTest( + { perms: { run: true, read: true } }, + async function runStderrPiped(): Promise< + void + > { + const p = Deno.run({ + cmd: [ + Deno.execPath(), + "eval", + "await Deno.stderr.write(new TextEncoder().encode('hello'))", + ], + stderr: "piped", + }); + assert(!p.stdin); + assert(!p.stdout); + + const data = new Uint8Array(10); + let r = await p.stderr.read(data); + if (r === null) { + throw new Error("p.stderr.read should not return null here"); + } + assertEquals(r, 5); + const s = new TextDecoder().decode(data.subarray(0, r)); + assertEquals(s, "hello"); + r = await p.stderr.read(data); + assertEquals(r, null); + p.stderr!.close(); - const data = new Uint8Array(10); - let r = await p.stderr.read(data); - if (r === null) { - throw new Error("p.stderr.read should not return null here"); - } - assertEquals(r, 5); - const s = new TextDecoder().decode(data.subarray(0, r)); - assertEquals(s, "hello"); - r = await p.stderr.read(data); - assertEquals(r, null); - p.stderr!.close(); - - const status = await p.status(); - assertEquals(status.success, true); - assertEquals(status.code, 0); - assertEquals(status.signal, undefined); - p.close(); -}); + const status = await p.status(); + assertEquals(status.success, true); + assertEquals(status.code, 0); + assertEquals(status.signal, undefined); + p.close(); + }, +); -unitTest({ perms: { run: true } }, async function runOutput(): Promise { - const p = Deno.run({ - cmd: ["python", "-c", "import sys; sys.stdout.write('hello')"], - stdout: "piped", - }); - const output = await p.output(); - const s = new TextDecoder().decode(output); - assertEquals(s, "hello"); - p.close(); -}); +unitTest( + { perms: { run: true, read: true } }, + async function runOutput(): Promise { + const p = Deno.run({ + cmd: [ + Deno.execPath(), + "eval", + "await Deno.stdout.write(new TextEncoder().encode('hello'))", + ], + stdout: "piped", + }); + const output = await p.output(); + const s = new TextDecoder().decode(output); + assertEquals(s, "hello"); + p.close(); + }, +); -unitTest({ perms: { run: true } }, async function runStderrOutput(): Promise< - void -> { - const p = Deno.run({ - cmd: ["python", "-c", "import sys; sys.stderr.write('error')"], - stderr: "piped", - }); - const error = await p.stderrOutput(); - const s = new TextDecoder().decode(error); - assertEquals(s, "error"); - p.close(); -}); +unitTest( + { perms: { run: true, read: true } }, + async function runStderrOutput(): Promise< + void + > { + const p = Deno.run({ + cmd: [ + Deno.execPath(), + "eval", + "await Deno.stderr.write(new TextEncoder().encode('error'))", + ], + stderr: "piped", + }); + const error = await p.stderrOutput(); + const s = new TextDecoder().decode(error); + assertEquals(s, "error"); + p.close(); + }, 
+); unitTest( { perms: { run: true, write: true, read: true } }, @@ -294,9 +335,9 @@ unitTest( const p = Deno.run({ cmd: [ - "python", - "-c", - "import sys; sys.stderr.write('error\\n'); sys.stdout.write('output\\n');", + Deno.execPath(), + "eval", + "Deno.stderr.write(new TextEncoder().encode('error\\n')); Deno.stdout.write(new TextEncoder().encode('output\\n'));", ], stdout: file.rid, stderr: file.rid, @@ -325,7 +366,11 @@ unitTest( const file = await Deno.open(fileName); const p = Deno.run({ - cmd: ["python", "-c", "import sys; assert 'hello' == sys.stdin.read();"], + cmd: [ + Deno.execPath(), + "eval", + "if (new TextDecoder().decode(await Deno.readAll(Deno.stdin)) !== 'hello') throw new Error('Expected \\'hello\\'')", + ], stdin: file.rid, }); @@ -336,50 +381,56 @@ unitTest( }, ); -unitTest({ perms: { run: true } }, async function runEnv(): Promise { - const p = Deno.run({ - cmd: [ - "python", - "-c", - "import os, sys; sys.stdout.write(os.environ.get('FOO', '') + os.environ.get('BAR', ''))", - ], - env: { - FOO: "0123", - BAR: "4567", - }, - stdout: "piped", - }); - const output = await p.output(); - const s = new TextDecoder().decode(output); - assertEquals(s, "01234567"); - p.close(); -}); +unitTest( + { perms: { run: true, read: true } }, + async function runEnv(): Promise { + const p = Deno.run({ + cmd: [ + Deno.execPath(), + "eval", + "Deno.stdout.write(new TextEncoder().encode(Deno.env.get('FOO') + Deno.env.get('BAR')))", + ], + env: { + FOO: "0123", + BAR: "4567", + }, + stdout: "piped", + }); + const output = await p.output(); + const s = new TextDecoder().decode(output); + assertEquals(s, "01234567"); + p.close(); + }, +); -unitTest({ perms: { run: true } }, async function runClose(): Promise { - const p = Deno.run({ - cmd: [ - "python", - "-c", - "from time import sleep; import sys; sleep(10000); sys.stderr.write('error')", - ], - stderr: "piped", - }); - assert(!p.stdin); - assert(!p.stdout); +unitTest( + { perms: { run: true, read: true } }, + async function runClose(): Promise { + const p = Deno.run({ + cmd: [ + Deno.execPath(), + "eval", + "setTimeout(() => Deno.stdout.write(new TextEncoder().encode('error')), 10000)", + ], + stderr: "piped", + }); + assert(!p.stdin); + assert(!p.stdout); - p.close(); + p.close(); - const data = new Uint8Array(10); - const r = await p.stderr.read(data); - assertEquals(r, null); - p.stderr.close(); -}); + const data = new Uint8Array(10); + const r = await p.stderr.read(data); + assertEquals(r, null); + p.stderr.close(); + }, +); unitTest( - { perms: { run: true } }, + { perms: { run: true, read: true } }, async function runKillAfterStatus(): Promise { const p = Deno.run({ - cmd: ["python", "-c", 'print("hello")'], + cmd: [Deno.execPath(), "eval", 'console.log("hello")'], }); await p.status(); @@ -417,31 +468,34 @@ unitTest(function killPermissions(): void { }, Deno.errors.PermissionDenied); }); -unitTest({ perms: { run: true } }, async function killSuccess(): Promise { - const p = Deno.run({ - cmd: ["python", "-c", "from time import sleep; sleep(10000)"], - }); +unitTest( + { perms: { run: true, read: true } }, + async function killSuccess(): Promise { + const p = Deno.run({ + cmd: [Deno.execPath(), "eval", "setTimeout(() => {}, 10000)"], + }); - assertEquals(Deno.Signal.SIGINT, 2); - Deno.kill(p.pid, Deno.Signal.SIGINT); - const status = await p.status(); + assertEquals(Deno.Signal.SIGINT, 2); + Deno.kill(p.pid, Deno.Signal.SIGINT); + const status = await p.status(); - assertEquals(status.success, false); - try { - 
assertEquals(status.code, 128 + Deno.Signal.SIGINT); - assertEquals(status.signal, Deno.Signal.SIGINT); - } catch { - // TODO(nayeemrmn): On Windows sometimes the following values are given - // instead. Investigate and remove this catch when fixed. - assertEquals(status.code, 1); - assertEquals(status.signal, undefined); - } - p.close(); -}); + assertEquals(status.success, false); + try { + assertEquals(status.code, 128 + Deno.Signal.SIGINT); + assertEquals(status.signal, Deno.Signal.SIGINT); + } catch { + // TODO(nayeemrmn): On Windows sometimes the following values are given + // instead. Investigate and remove this catch when fixed. + assertEquals(status.code, 1); + assertEquals(status.signal, undefined); + } + p.close(); + }, +); -unitTest({ perms: { run: true } }, function killFailed(): void { +unitTest({ perms: { run: true, read: true } }, function killFailed(): void { const p = Deno.run({ - cmd: ["python", "-c", "from time import sleep; sleep(10000)"], + cmd: [Deno.execPath(), "eval", "setTimeout(() => {}, 10000)"], }); assert(!p.stdin); assert(!p.stdout); From 3078fcf55a8aa04d26316ab353d84f2c9512bd47 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Mon, 21 Dec 2020 21:04:25 +0800 Subject: [PATCH 092/135] feat(unstable): record raw coverage into a directory (#8642) --- cli/flags.rs | 32 +++++-- cli/main.rs | 51 ++++++----- cli/program_state.rs | 7 ++ cli/tests/integration_tests.rs | 12 +++ cli/tests/test_coverage.out | 8 +- cli/tests/test_run_test_coverage.out | 32 +++++++ cli/tests/test_run_test_coverage.ts | 14 +++ cli/tools/coverage.rs | 132 ++++++++++++++++++++------- runtime/worker.rs | 1 + 9 files changed, 221 insertions(+), 68 deletions(-) create mode 100644 cli/tests/test_run_test_coverage.out create mode 100644 cli/tests/test_run_test_coverage.ts diff --git a/cli/flags.rs b/cli/flags.rs index d3fd18c32a7a06..bc08d2ec7ae613 100644 --- a/cli/flags.rs +++ b/cli/flags.rs @@ -10,6 +10,7 @@ use log::Level; use std::net::SocketAddr; use std::path::PathBuf; use std::str::FromStr; +use tempfile::TempDir; #[derive(Clone, Debug, PartialEq)] pub enum DenoSubcommand { @@ -108,7 +109,7 @@ pub struct Flags { pub ca_file: Option, pub cached_only: bool, pub config_path: Option, - pub coverage: bool, + pub coverage_dir: Option, pub ignore: Vec, pub import_map_path: Option, pub inspect: Option, @@ -614,11 +615,23 @@ fn test_parse(flags: &mut Flags, matches: &clap::ArgMatches) { let allow_none = matches.is_present("allow-none"); let quiet = matches.is_present("quiet"); let filter = matches.value_of("filter").map(String::from); - let coverage = matches.is_present("coverage"); - if coverage { - flags.coverage = true; - } + flags.coverage_dir = if matches.is_present("coverage") { + if let Some(coverage_dir) = matches.value_of("coverage") { + Some(coverage_dir.to_string()) + } else { + Some( + TempDir::new() + .unwrap() + .into_path() + .to_str() + .unwrap() + .to_string(), + ) + } + } else { + None + }; if matches.is_present("script_arg") { let script_arg: Vec = matches @@ -1282,7 +1295,10 @@ fn test_subcommand<'a, 'b>() -> App<'a, 'b> { .arg( Arg::with_name("coverage") .long("coverage") - .takes_value(false) + .min_values(0) + .max_values(1) + .require_equals(true) + .takes_value(true) .requires("unstable") .conflicts_with("inspect") .conflicts_with("inspect-brk") @@ -3050,7 +3066,7 @@ mod tests { #[test] fn test_with_flags() { #[rustfmt::skip] - let r = flags_from_vec_safe(svec!["deno", "test", "--unstable", "--no-run", "--filter", "- foo", "--coverage", "--allow-net", "--allow-none", 
"dir1/", "dir2/", "--", "arg1", "arg2"]); + let r = flags_from_vec_safe(svec!["deno", "test", "--unstable", "--no-run", "--filter", "- foo", "--coverage=cov", "--allow-net", "--allow-none", "dir1/", "dir2/", "--", "arg1", "arg2"]); assert_eq!( r.unwrap(), Flags { @@ -3063,7 +3079,7 @@ mod tests { include: Some(svec!["dir1/", "dir2/"]), }, unstable: true, - coverage: true, + coverage_dir: Some("cov".to_string()), allow_net: true, argv: svec!["arg1", "arg2"], ..Flags::default() diff --git a/cli/main.rs b/cli/main.rs index b6b6b295b8b245..cd682498e859a9 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -116,7 +116,7 @@ fn create_web_worker_callback( }); let attach_inspector = program_state.maybe_inspector_server.is_some() - || program_state.flags.coverage; + || program_state.coverage_dir.is_some(); let maybe_inspector_server = program_state.maybe_inspector_server.clone(); let module_loader = CliModuleLoader::new_for_worker(program_state.clone()); @@ -192,7 +192,7 @@ pub fn create_main_worker( let attach_inspector = program_state.maybe_inspector_server.is_some() || program_state.flags.repl - || program_state.flags.coverage; + || program_state.coverage_dir.is_some(); let maybe_inspector_server = program_state.maybe_inspector_server.clone(); let should_break_on_first_statement = program_state.flags.inspect_brk.is_some(); @@ -1018,16 +1018,23 @@ async fn test_command( let mut worker = create_main_worker(&program_state, main_module.clone(), permissions); - let mut maybe_coverage_collector = if flags.coverage { - let session = worker.create_inspector_session(); - let mut coverage_collector = - tools::coverage::CoverageCollector::new(session); - coverage_collector.start_collecting().await?; + if let Some(ref coverage_dir) = flags.coverage_dir { + env::set_var("DENO_UNSTABLE_COVERAGE_DIR", coverage_dir); + } - Some(coverage_collector) - } else { - None - }; + let mut maybe_coverage_collector = + if let Some(ref coverage_dir) = program_state.coverage_dir { + let session = worker.create_inspector_session(); + + let coverage_dir = PathBuf::from(coverage_dir); + let mut coverage_collector = + tools::coverage::CoverageCollector::new(coverage_dir, session); + coverage_collector.start_collecting().await?; + + Some(coverage_collector) + } else { + None + }; let execute_result = worker.execute_module(&main_module).await; execute_result?; @@ -1037,19 +1044,19 @@ async fn test_command( worker.run_event_loop().await?; if let Some(coverage_collector) = maybe_coverage_collector.as_mut() { - let coverages = coverage_collector.collect().await?; coverage_collector.stop_collecting().await?; - let filtered_coverages = tools::coverage::filter_script_coverages( - coverages, - main_module.as_url().clone(), - test_modules, - ); - - let mut coverage_reporter = - tools::coverage::PrettyCoverageReporter::new(quiet); - for coverage in filtered_coverages { - coverage_reporter.visit_coverage(&coverage); + // TODO(caspervonb) extract reporting into it's own subcommand. + // For now, we'll only report for the command that passed --coverage as a flag. 
+ if flags.coverage_dir.is_some() { + let mut exclude = test_modules.clone(); + let main_module_url = main_module.as_url().to_owned(); + exclude.push(main_module_url); + tools::coverage::report_coverages( + &coverage_collector.dir, + quiet, + exclude, + )?; } } diff --git a/cli/program_state.rs b/cli/program_state.rs index 008244b5f6c666..afae8c12558932 100644 --- a/cli/program_state.rs +++ b/cli/program_state.rs @@ -45,6 +45,7 @@ pub struct ProgramState { /// Flags parsed from `argv` contents. pub flags: flags::Flags, pub dir: deno_dir::DenoDir, + pub coverage_dir: Option, pub file_fetcher: FileFetcher, pub modules: Arc>>>, @@ -105,8 +106,14 @@ impl ProgramState { None => None, }; + let coverage_dir = flags + .coverage_dir + .clone() + .or_else(|| env::var("DENO_UNSTABLE_COVERAGE_DIR").ok()); + let program_state = ProgramState { dir, + coverage_dir, flags, file_fetcher, modules: Default::default(), diff --git a/cli/tests/integration_tests.rs b/cli/tests/integration_tests.rs index 302e808faf6128..5696f1f5bc7445 100644 --- a/cli/tests/integration_tests.rs +++ b/cli/tests/integration_tests.rs @@ -3310,6 +3310,18 @@ itest!(deno_test_coverage { exit_code: 0, }); +itest!(deno_test_coverage_explicit { + args: "test --coverage=.test_coverage --unstable test_coverage.ts", + output: "test_coverage.out", + exit_code: 0, +}); + +itest!(deno_test_run_test_coverage { + args: "test --allow-all --coverage --unstable test_run_test_coverage.ts", + output: "test_run_test_coverage.out", + exit_code: 0, +}); + itest!(deno_lint { args: "lint --unstable lint/file1.js lint/file2.ts lint/ignored_file.ts", output: "lint/expected.out", diff --git a/cli/tests/test_coverage.out b/cli/tests/test_coverage.out index a4b37e4de48cf4..b8423e7fd73a25 100644 --- a/cli/tests/test_coverage.out +++ b/cli/tests/test_coverage.out @@ -15,11 +15,11 @@ cover [WILDCARD]/cli/tests/subdir/mod1.ts ... 35.714% (5/14) 11 | export function throwsError() { 12 | throw Error("exception from mod1"); 13 | } -cover [WILDCARD]/cli/tests/subdir/subdir2/mod2.ts ... 62.500% (5/8) - 5 | export function printHello2() { - 6 | printHello(); - 7 | } cover [WILDCARD]/cli/tests/subdir/print_hello.ts ... 25.000% (1/4) 1 | export function printHello() { 2 | console.log("Hello"); 3 | } +cover [WILDCARD]/cli/tests/subdir/subdir2/mod2.ts ... 62.500% (5/8) + 5 | export function printHello2() { + 6 | printHello(); + 7 | } diff --git a/cli/tests/test_run_test_coverage.out b/cli/tests/test_run_test_coverage.out new file mode 100644 index 00000000000000..bcaae01a12f33d --- /dev/null +++ b/cli/tests/test_run_test_coverage.out @@ -0,0 +1,32 @@ +Check [WILDCARD]/$deno$test.ts +running 1 tests +test spawn test ... Check [WILDCARD]/$deno$test.ts +running 1 tests +test returnsFooSuccess ... ok ([WILDCARD]) + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out ([WILDCARD]) + +ok ([WILDCARD]) + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out ([WILDCARD]) + +cover [WILDCARD]/subdir/mod1.ts ... 35.714% (5/14) + 2 | export function returnsHi() { + 3 | return "Hi"; + 4 | } +-----|----- + 8 | export function printHello3() { + 9 | printHello2(); + 10 | } + 11 | export function throwsError() { + 12 | throw Error("exception from mod1"); + 13 | } +cover [WILDCARD]/subdir/print_hello.ts ... 25.000% (1/4) + 1 | export function printHello() { + 2 | console.log("Hello"); + 3 | } +cover [WILDCARD]/subdir/subdir2/mod2.ts ... 
62.500% (5/8) + 5 | export function printHello2() { + 6 | printHello(); + 7 | } +cover [WILDCARD]/test_coverage.ts ... 100.000% (5/5) diff --git a/cli/tests/test_run_test_coverage.ts b/cli/tests/test_run_test_coverage.ts new file mode 100644 index 00000000000000..e3f0e47ce9464b --- /dev/null +++ b/cli/tests/test_run_test_coverage.ts @@ -0,0 +1,14 @@ +Deno.test("spawn test", async function () { + const process = Deno.run({ + cmd: [ + Deno.execPath(), + "test", + "--allow-all", + "--unstable", + "test_coverage.ts", + ], + }); + + await process.status(); + process.close(); +}); diff --git a/cli/tools/coverage.rs b/cli/tools/coverage.rs index 229cb8020f4b15..53385b482b3f00 100644 --- a/cli/tools/coverage.rs +++ b/cli/tools/coverage.rs @@ -7,14 +7,19 @@ use deno_core::serde_json::json; use deno_core::url::Url; use deno_runtime::inspector::InspectorSession; use serde::Deserialize; +use serde::Serialize; +use std::fs; +use std::path::PathBuf; +use uuid::Uuid; pub struct CoverageCollector { + pub dir: PathBuf, session: Box, } impl CoverageCollector { - pub fn new(session: Box) -> Self { - Self { session } + pub fn new(dir: PathBuf, session: Box) -> Self { + Self { dir, session } } pub async fn start_collecting(&mut self) -> Result<(), AnyError> { @@ -33,7 +38,7 @@ impl CoverageCollector { Ok(()) } - pub async fn collect(&mut self) -> Result, AnyError> { + pub async fn stop_collecting(&mut self) -> Result<(), AnyError> { let result = self .session .post_message("Profiler.takePreciseCoverage", None) @@ -42,9 +47,11 @@ impl CoverageCollector { let take_coverage_result: TakePreciseCoverageResult = serde_json::from_value(result)?; - let mut coverages: Vec = Vec::new(); - for script_coverage in take_coverage_result.result { - let result = self + fs::create_dir_all(&self.dir)?; + + let script_coverages = take_coverage_result.result; + for script_coverage in script_coverages { + let get_script_source_value = self .session .post_message( "Debugger.getScriptSource", @@ -55,22 +62,28 @@ impl CoverageCollector { .await?; let get_script_source_result: GetScriptSourceResult = - serde_json::from_value(result)?; + serde_json::from_value(get_script_source_value)?; - coverages.push(Coverage { + let script_source = get_script_source_result.script_source.clone(); + + let coverage = Coverage { script_coverage, - script_source: get_script_source_result.script_source, - }) + script_source, + }; + + // TODO(caspervonb) Would be much better to look up the source during the reporting stage + // instead of storing it here. + // Long term, that's what we should be doing. + let filename = format!("{}.json", Uuid::new_v4()); + let json = serde_json::to_string(&coverage)?; + fs::write(self.dir.join(filename), &json)?; } - Ok(coverages) - } - - pub async fn stop_collecting(&mut self) -> Result<(), AnyError> { self .session .post_message("Profiler.stopPreciseCoverage", None) .await?; + self.session.post_message("Profiler.disable", None).await?; self.session.post_message("Debugger.disable", None).await?; @@ -78,7 +91,9 @@ impl CoverageCollector { } } -#[derive(Debug, Deserialize)] +// TODO(caspervonb) all of these structs can and should be made private, possibly moved to +// inspector::protocol. 
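To make the recorded format concrete: a hedged sketch of what one <uuid>.json file in the coverage directory holds once the structs below are serialized, that is, one script entry from Profiler.takePreciseCoverage plus the source fetched via Debugger.getScriptSource. Only the nesting and the camelCase field names come from those structs; the values here are invented.

use deno_core::serde_json::json;
use deno_core::serde_json::Value;

// Invented example values; the shape and field names mirror the
// Coverage/ScriptCoverage/FunctionCoverage/CoverageRange structs defined below.
fn example_recorded_coverage_file() -> Value {
  json!({
    "scriptCoverage": {
      "scriptId": "21",
      "url": "file:///example/subdir/mod1.ts",
      "functions": [{
        "functionName": "returnsHi",
        "ranges": [{ "startOffset": 25, "endOffset": 64, "count": 1 }],
        "isBlockCoverage": true
      }]
    },
    "scriptSource": "export function returnsHi() {\n  return \"Hi\";\n}\n"
  })
}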
+#[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct CoverageRange { pub start_offset: usize, @@ -86,7 +101,7 @@ pub struct CoverageRange { pub count: usize, } -#[derive(Debug, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct FunctionCoverage { pub function_name: String, @@ -94,7 +109,7 @@ pub struct FunctionCoverage { pub is_block_coverage: bool, } -#[derive(Debug, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ScriptCoverage { pub script_id: String, @@ -102,7 +117,7 @@ pub struct ScriptCoverage { pub functions: Vec, } -#[derive(Debug, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Coverage { pub script_coverage: ScriptCoverage, @@ -132,8 +147,12 @@ impl PrettyCoverageReporter { PrettyCoverageReporter { quiet } } - pub fn visit_coverage(&mut self, coverage: &Coverage) { - let lines = coverage.script_source.lines().collect::>(); + pub fn visit_coverage( + &mut self, + script_coverage: &ScriptCoverage, + script_source: &str, + ) { + let lines = script_source.lines().collect::>(); let mut covered_lines: Vec = Vec::new(); let mut uncovered_lines: Vec = Vec::new(); @@ -143,7 +162,7 @@ impl PrettyCoverageReporter { let line_end_offset = line_start_offset + line.len(); let mut count = 0; - for function in &coverage.script_coverage.functions { + for function in &script_coverage.functions { for range in &function.ranges { if range.start_offset <= line_start_offset && range.end_offset >= line_end_offset @@ -167,7 +186,7 @@ impl PrettyCoverageReporter { } if !self.quiet { - print!("cover {} ... ", coverage.script_coverage.url); + print!("cover {} ... ", script_coverage.url); let line_coverage_ratio = covered_lines.len() as f32 / lines.len() as f32; let line_coverage = format!( @@ -212,10 +231,41 @@ impl PrettyCoverageReporter { } } -pub fn filter_script_coverages( +fn collect_coverages(dir: &PathBuf) -> Result, AnyError> { + let mut coverages: Vec = Vec::new(); + + let entries = fs::read_dir(dir)?; + for entry in entries { + let json = fs::read_to_string(entry.unwrap().path())?; + let coverage: Coverage = serde_json::from_str(&json)?; + + coverages.push(coverage); + } + + // TODO(caspervonb) drain_filter would make this cleaner, its nightly at the moment. 
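The sort-and-merge that follows can be read as: group the recorded files by script URL (for example when several test processes write coverage for the same module into one directory) and concatenate their function lists. A rough map-based sketch of the same idea, assuming the illustrative name merge_by_url and the Coverage type defined above:

use std::collections::btree_map::Entry;
use std::collections::BTreeMap;

// Illustrative only, similar in spirit to the sort-and-scan below: fold
// coverages that share a script URL into one entry by appending their
// FunctionCoverage lists.
fn merge_by_url(coverages: Vec<Coverage>) -> Vec<Coverage> {
  let mut merged: BTreeMap<String, Coverage> = BTreeMap::new();
  for coverage in coverages {
    match merged.entry(coverage.script_coverage.url.clone()) {
      Entry::Occupied(mut occupied) => occupied
        .get_mut()
        .script_coverage
        .functions
        .extend(coverage.script_coverage.functions),
      Entry::Vacant(vacant) => {
        vacant.insert(coverage);
      }
    }
  }
  merged.into_iter().map(|(_url, coverage)| coverage).collect()
}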
+ if coverages.len() > 1 { + coverages.sort_by_key(|k| k.script_coverage.url.clone()); + + for i in (1..coverages.len() - 1).rev() { + if coverages[i].script_coverage.url + == coverages[i - 1].script_coverage.url + { + let current = coverages.remove(i); + let previous = &mut coverages[i - 1]; + + for function in current.script_coverage.functions { + previous.script_coverage.functions.push(function); + } + } + } + } + + Ok(coverages) +} + +fn filter_coverages( coverages: Vec, - test_file_url: Url, - test_modules: Vec, + exclude: Vec, ) -> Vec { coverages .into_iter() @@ -225,20 +275,16 @@ pub fn filter_script_coverages( return false; } - if url == test_file_url { - return false; - } - - for test_module_url in &test_modules { - if &url == test_module_url { + for module_url in &exclude { + if &url == module_url { return false; } } if let Ok(path) = url.to_file_path() { - for test_module_url in &test_modules { - if let Ok(test_module_path) = test_module_url.to_file_path() { - if path.starts_with(test_module_path.parent().unwrap()) { + for module_url in &exclude { + if let Ok(module_path) = module_url.to_file_path() { + if path.starts_with(module_path.parent().unwrap()) { return true; } } @@ -250,3 +296,21 @@ pub fn filter_script_coverages( }) .collect::>() } + +pub fn report_coverages( + dir: &PathBuf, + quiet: bool, + exclude: Vec, +) -> Result<(), AnyError> { + let coverages = collect_coverages(dir)?; + let coverages = filter_coverages(coverages, exclude); + + let mut coverage_reporter = PrettyCoverageReporter::new(quiet); + for coverage in coverages { + let script_coverage = coverage.script_coverage; + let script_source = coverage.script_source; + coverage_reporter.visit_coverage(&script_coverage, &script_source); + } + + Ok(()) +} diff --git a/runtime/worker.rs b/runtime/worker.rs index adb525c4c98603..58a35cc950e260 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -87,6 +87,7 @@ impl MainWorker { } else { None }; + let should_break_on_first_statement = inspector.is_some() && options.should_break_on_first_statement; From bd85d0ed420b792eebdd81f88fca503e028c9565 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Mon, 21 Dec 2020 14:44:26 +0100 Subject: [PATCH 093/135] refactor: rewrite lsp to be async (#8727) Co-authored-by: Luca Casonato --- Cargo.lock | 163 ++++-- cli/Cargo.toml | 5 +- cli/lsp/README.md | 17 +- cli/lsp/analysis.rs | 31 +- cli/lsp/capabilities.rs | 26 +- cli/lsp/config.rs | 9 +- cli/lsp/diagnostics.rs | 201 ++++---- cli/lsp/dispatch.rs | 185 ------- cli/lsp/handlers.rs | 304 ----------- cli/lsp/language_server.rs | 981 ++++++++++++++++++++++++++++++++++++ cli/lsp/lsp_extensions.rs | 26 - cli/lsp/memory_cache.rs | 5 - cli/lsp/mod.rs | 469 +---------------- cli/lsp/sources.rs | 10 +- cli/lsp/state.rs | 395 --------------- cli/lsp/text.rs | 3 +- cli/lsp/tsc.rs | 227 +++++---- cli/lsp/utils.rs | 62 --- cli/main.rs | 2 +- cli/tests/lsp_tests.rs | 88 ---- cli/tsc/99_main_compiler.js | 5 +- 21 files changed, 1397 insertions(+), 1817 deletions(-) delete mode 100644 cli/lsp/dispatch.rs delete mode 100644 cli/lsp/handlers.rs create mode 100644 cli/lsp/language_server.rs delete mode 100644 cli/lsp/lsp_extensions.rs delete mode 100644 cli/lsp/state.rs delete mode 100644 cli/tests/lsp_tests.rs diff --git a/Cargo.lock b/Cargo.lock index 71ab5aec2d1c4f..c760e19a852a16 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -110,6 +110,17 @@ dependencies = [ "pin-project-lite 0.1.7", ] +[[package]] +name = "async-trait" +version = "0.1.42" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d3a45e77e34375a7923b1e8febb049bb011f064714a8e17a1a616fef01da13d" +dependencies = [ + "proc-macro2 1.0.24", + "quote 1.0.7", + "syn 1.0.48", +] + [[package]] name = "atty" version = "0.2.14" @@ -121,6 +132,18 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "auto_impl" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42cbf586c80ada5e5ccdecae80d3ef0854f224e2dd74435f8d87e6831b8d0a38" +dependencies = [ + "proc-macro-error", + "proc-macro2 1.0.24", + "quote 1.0.7", + "syn 1.0.48", +] + [[package]] name = "autocfg" version = "0.1.7" @@ -366,20 +389,10 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" dependencies = [ - "crossbeam-utils 0.7.2", + "crossbeam-utils", "maybe-uninit", ] -[[package]] -name = "crossbeam-channel" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775" -dependencies = [ - "cfg-if 1.0.0", - "crossbeam-utils 0.8.1", -] - [[package]] name = "crossbeam-utils" version = "0.7.2" @@ -391,17 +404,6 @@ dependencies = [ "lazy_static", ] -[[package]] -name = "crossbeam-utils" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d" -dependencies = [ - "autocfg 1.0.1", - "cfg-if 1.0.0", - "lazy_static", -] - [[package]] name = "darling" version = "0.10.2" @@ -457,7 +459,6 @@ dependencies = [ "byteorder", "chrono", "clap", - "crossbeam-channel 0.5.0", "deno_core", "deno_doc", "deno_fetch", @@ -477,8 +478,7 @@ dependencies = [ "lazy_static", "libc", "log", - "lsp-server", - "lsp-types", + "lspower", "nix", "notify", "os_pipe", @@ -500,6 +500,7 @@ dependencies = [ "tokio 0.2.22", "tokio-rustls", "tokio-tungstenite", + "tower-test", "uuid", "walkdir", "winapi 0.3.9", @@ -1100,6 +1101,15 @@ dependencies = [ "http", ] +[[package]] +name = "heck" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "hermit-abi" version = "0.1.15" @@ -1377,29 +1387,51 @@ dependencies = [ ] [[package]] -name = "lsp-server" -version = "0.5.0" +name = "lsp-types" +version = "0.85.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69b18dfe0e4a380b872aa79d8e0ee6c3d7a9682466e84b83ad807c88b3545f79" +checksum = "857650f3e83fb62f89d15410414e0ed7d0735445020da398d37f65d20a5423b9" dependencies = [ - "crossbeam-channel 0.5.0", - "log", + "base64 0.12.3", + "bitflags", "serde", "serde_json", + "serde_repr", + "url", ] [[package]] -name = "lsp-types" -version = "0.84.0" +name = "lspower" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b95be71fe205e44de754185bcf86447b65813ce1ceb298f8d3793ade5fff08d" +checksum = "64106b17ca8f6f73cc21a3d1f39684ff65293a291aa96026aee85eaae02339a5" dependencies = [ - "base64 0.12.3", - "bitflags", + "async-trait", + "auto_impl", + "bytes 0.5.6", + "dashmap", + "futures", + "log", + "lsp-types", + "lspower-macros", + "nom", "serde", "serde_json", - "serde_repr", - "url", + "tokio 0.2.22", + "tokio-util", + "tower-service", +] + +[[package]] +name = "lspower-macros" +version = "0.1.0" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10b77a3b4fcd1a014a7a7a1043a5c3646068abfc75b46a9f2c4ab813d53f7c3c" +dependencies = [ + "heck", + "proc-macro2 1.0.24", + "quote 1.0.7", + "syn 1.0.48", ] [[package]] @@ -1601,7 +1633,7 @@ checksum = "77d03607cf88b4b160ba0e9ed425fff3cee3b55ac813f0c685b3a3772da37d0e" dependencies = [ "anymap", "bitflags", - "crossbeam-channel 0.4.4", + "crossbeam-channel", "filetime", "fsevent", "fsevent-sys", @@ -1876,6 +1908,30 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2 1.0.24", + "quote 1.0.7", + "syn 1.0.48", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2 1.0.24", + "quote 1.0.7", + "version_check", +] + [[package]] name = "proc-macro-hack" version = "0.5.19" @@ -3036,6 +3092,17 @@ dependencies = [ "webpki", ] +[[package]] +name = "tokio-test" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed0049c119b6d505c4447f5c64873636c7af6c75ab0d45fd9f618d82acb8016d" +dependencies = [ + "bytes 0.5.6", + "futures-core", + "tokio 0.2.22", +] + [[package]] name = "tokio-tungstenite" version = "0.11.0" @@ -3072,12 +3139,32 @@ dependencies = [ "serde", ] +[[package]] +name = "tower-layer" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a35d656f2638b288b33495d1053ea74c40dc05ec0b92084dd71ca5566c4ed1dc" + [[package]] name = "tower-service" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e987b6bf443f4b5b3b6f38704195592cca41c5bb7aedd3c3693c7081f8289860" +[[package]] +name = "tower-test" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ba4bbc2c1e4a8543c30d4c13a4c8314ed72d6e07581910f665aa13fde0153c8" +dependencies = [ + "futures-util", + "pin-project 0.4.23", + "tokio 0.2.22", + "tokio-test", + "tower-layer", + "tower-service", +] + [[package]] name = "tracing" version = "0.1.19" diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 80eeceef81007b..65f455215ffa18 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -40,7 +40,6 @@ atty = "0.2.14" base64 = "0.12.3" byteorder = "1.3.4" clap = "2.33.3" -crossbeam-channel = "0.5.0" dissimilar = "1.0.2" dprint-plugin-typescript = "0.35.1" encoding_rs = "0.8.24" @@ -52,8 +51,7 @@ jsonc-parser = "0.14.0" lazy_static = "1.4.0" libc = "0.2.77" log = "0.4.11" -lsp-server = "0.5.0" -lsp-types = { version = "0.84.0", features = ["proposed"] } +lspower = "0.1.0" notify = "5.0.0-pre.3" percent-encoding = "2.1.0" regex = "1.3.9" @@ -87,6 +85,7 @@ chrono = "0.4.15" os_pipe = "0.9.2" test_util = { path = "../test_util" } tokio-tungstenite = "0.11.0" +tower-test = "0.3.0" [target.'cfg(unix)'.dev-dependencies] exec = "0.3.1" # Used in test_raw_tty diff --git a/cli/lsp/README.md b/cli/lsp/README.md index dcc9532733f4bf..87a662fc3050cc 100644 --- a/cli/lsp/README.md +++ b/cli/lsp/README.md @@ -6,18 +6,11 @@ which is specifically tailored to provide 
a _Deno_ view of code. It is integrated into the command line and can be started via the `lsp` sub-command. > :warning: The Language Server is highly experimental and far from feature -> complete. - -This document gives an overview of the structure of the language server. - -## Acknowledgement - -The structure of the language server was heavily influenced and adapted from -[`rust-analyzer`](https://rust-analyzer.github.io/). +> complete. This document gives an overview of the structure of the language +> server. ## Structure -When the language server is started, a `ServerState` instance is created which -holds all the state of the language server, as well as provides the -infrastructure for receiving and sending notifications and requests from a -language server client. +When the language server is started, a `LanguageServer` instance is created +which holds all of the state of the language server. It also defines all of the +methods that the client calls via the Language Server RPC protocol. diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 95e21ed9a3f7fa..7cf6aca371fae6 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -11,12 +11,11 @@ use crate::tools::lint::create_linter; use deno_core::error::AnyError; use deno_core::ModuleSpecifier; use deno_lint::rules; -use lsp_types::Position; -use lsp_types::Range; +use lspower::lsp_types; +use lspower::lsp_types::Position; +use lspower::lsp_types::Range; use std::collections::HashMap; use std::rc::Rc; -use std::sync::Arc; -use std::sync::RwLock; /// Category of self-generated diagnostic messages (those not coming from) /// TypeScript. @@ -114,13 +113,11 @@ pub enum ResolvedImport { pub fn resolve_import( specifier: &str, referrer: &ModuleSpecifier, - maybe_import_map: Option>>, + maybe_import_map: &Option, ) -> ResolvedImport { let maybe_mapped = if let Some(import_map) = maybe_import_map { - if let Ok(maybe_specifier) = import_map - .read() - .unwrap() - .resolve(specifier, referrer.as_str()) + if let Ok(maybe_specifier) = + import_map.resolve(specifier, referrer.as_str()) { maybe_specifier } else { @@ -162,7 +159,7 @@ pub fn analyze_dependencies( specifier: &ModuleSpecifier, source: &str, media_type: &MediaType, - maybe_import_map: Option>>, + maybe_import_map: &Option, ) -> Option<(HashMap, Option)> { let specifier_str = specifier.to_string(); let source_map = Rc::new(swc_common::SourceMap::default()); @@ -179,12 +176,12 @@ pub fn analyze_dependencies( TypeScriptReference::Path(import) => { let dep = dependencies.entry(import.clone()).or_default(); let resolved_import = - resolve_import(&import, specifier, maybe_import_map.clone()); + resolve_import(&import, specifier, maybe_import_map); dep.maybe_code = Some(resolved_import); } TypeScriptReference::Types(import) => { let resolved_import = - resolve_import(&import, specifier, maybe_import_map.clone()); + resolve_import(&import, specifier, maybe_import_map); if media_type == &MediaType::JavaScript || media_type == &MediaType::JSX { @@ -204,17 +201,13 @@ pub fn analyze_dependencies( desc.kind != swc_ecmascript::dep_graph::DependencyKind::Require }) { let resolved_import = - resolve_import(&desc.specifier, specifier, maybe_import_map.clone()); + resolve_import(&desc.specifier, specifier, maybe_import_map); // Check for `@deno-types` pragmas that effect the import let maybe_resolved_type_import = if let Some(comment) = desc.leading_comments.last() { if let Some(deno_types) = parse_deno_types(&comment.text).as_ref() { - Some(resolve_import( - deno_types, - specifier, - 
maybe_import_map.clone(), - )) + Some(resolve_import(deno_types, specifier, maybe_import_map)) } else { None } @@ -291,7 +284,7 @@ mod tests { import * as React from "https://cdn.skypack.dev/react"; "#; let actual = - analyze_dependencies(&specifier, source, &MediaType::TypeScript, None); + analyze_dependencies(&specifier, source, &MediaType::TypeScript, &None); assert!(actual.is_some()); let (actual, maybe_type) = actual.unwrap(); assert!(maybe_type.is_none()); diff --git a/cli/lsp/capabilities.rs b/cli/lsp/capabilities.rs index 954baaf51b5f09..e43e6a7e278006 100644 --- a/cli/lsp/capabilities.rs +++ b/cli/lsp/capabilities.rs @@ -5,16 +5,16 @@ ///! language server, which helps determine what messages are sent from the ///! client. ///! -use lsp_types::ClientCapabilities; -use lsp_types::CompletionOptions; -use lsp_types::HoverProviderCapability; -use lsp_types::OneOf; -use lsp_types::SaveOptions; -use lsp_types::ServerCapabilities; -use lsp_types::TextDocumentSyncCapability; -use lsp_types::TextDocumentSyncKind; -use lsp_types::TextDocumentSyncOptions; -use lsp_types::WorkDoneProgressOptions; +use lspower::lsp_types::ClientCapabilities; +use lspower::lsp_types::CompletionOptions; +use lspower::lsp_types::HoverProviderCapability; +use lspower::lsp_types::OneOf; +use lspower::lsp_types::SaveOptions; +use lspower::lsp_types::ServerCapabilities; +use lspower::lsp_types::TextDocumentSyncCapability; +use lspower::lsp_types::TextDocumentSyncKind; +use lspower::lsp_types::TextDocumentSyncOptions; +use lspower::lsp_types::WorkDoneProgressOptions; pub fn server_capabilities( _client_capabilities: &ClientCapabilities, @@ -61,16 +61,16 @@ pub fn server_capabilities( document_range_formatting_provider: None, document_on_type_formatting_provider: None, selection_range_provider: None, - semantic_highlighting: None, folding_range_provider: None, rename_provider: None, document_link_provider: None, color_provider: None, execute_command_provider: None, - workspace: None, call_hierarchy_provider: None, - semantic_tokens_provider: None, on_type_rename_provider: None, + semantic_highlighting: None, + semantic_tokens_provider: None, + workspace: None, experimental: None, } } diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index fc3f030c9c4f06..b689275ef9b3fc 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -1,10 +1,12 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
-use deno_core::error::AnyError; use deno_core::serde::Deserialize; use deno_core::serde_json; use deno_core::serde_json::Value; use deno_core::url::Url; +use lspower::jsonrpc::Error as LSPError; +use lspower::jsonrpc::Result as LSPResult; +use lspower::lsp_types; #[derive(Debug, Clone, Default)] pub struct ClientCapabilities { @@ -29,8 +31,9 @@ pub struct Config { } impl Config { - pub fn update(&mut self, value: Value) -> Result<(), AnyError> { - let settings: WorkspaceSettings = serde_json::from_value(value)?; + pub fn update(&mut self, value: Value) -> LSPResult<()> { + let settings: WorkspaceSettings = serde_json::from_value(value) + .map_err(|err| LSPError::invalid_params(err.to_string()))?; self.settings = settings; Ok(()) } diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index a7f027c1bbd200..1d0a1fac99ceea 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -2,8 +2,8 @@ use super::analysis::get_lint_references; use super::analysis::references_to_diagnostics; +use super::language_server::StateSnapshot; use super::memory_cache::FileId; -use super::state::ServerStateSnapshot; use super::tsc; use crate::diagnostics; @@ -12,52 +12,11 @@ use crate::media_type::MediaType; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::serde_json::Value; -use deno_core::url::Url; -use deno_core::JsRuntime; +use lspower::lsp_types; use std::collections::HashMap; use std::collections::HashSet; use std::mem; -impl<'a> From<&'a diagnostics::DiagnosticCategory> - for lsp_types::DiagnosticSeverity -{ - fn from(category: &'a diagnostics::DiagnosticCategory) -> Self { - match category { - diagnostics::DiagnosticCategory::Error => { - lsp_types::DiagnosticSeverity::Error - } - diagnostics::DiagnosticCategory::Warning => { - lsp_types::DiagnosticSeverity::Warning - } - diagnostics::DiagnosticCategory::Suggestion => { - lsp_types::DiagnosticSeverity::Hint - } - diagnostics::DiagnosticCategory::Message => { - lsp_types::DiagnosticSeverity::Information - } - } - } -} - -impl<'a> From<&'a diagnostics::Position> for lsp_types::Position { - fn from(pos: &'a diagnostics::Position) -> Self { - Self { - line: pos.line as u32, - character: pos.character as u32, - } - } -} - -fn to_lsp_range( - start: &diagnostics::Position, - end: &diagnostics::Position, -) -> lsp_types::Range { - lsp_types::Range { - start: start.into(), - end: end.into(), - } -} - #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum DiagnosticSource { Lint, @@ -108,41 +67,84 @@ impl DiagnosticCollection { pub type DiagnosticVec = Vec<(FileId, Option, Vec)>; -pub fn generate_linting_diagnostics( - state: &ServerStateSnapshot, +pub async fn generate_lint_diagnostics( + state_snapshot: StateSnapshot, + diagnostic_collection: DiagnosticCollection, ) -> DiagnosticVec { - if !state.config.settings.lint { - return Vec::new(); - } - let mut diagnostics = Vec::new(); - let file_cache = state.file_cache.read().unwrap(); - for (specifier, doc_data) in state.doc_data.iter() { - let file_id = file_cache.lookup(specifier).unwrap(); - let version = doc_data.version; - let current_version = state.diagnostics.get_version(&file_id); - if version != current_version { - let media_type = MediaType::from(specifier); - if let Ok(source_code) = file_cache.get_contents(file_id) { - if let Ok(references) = - get_lint_references(specifier, &media_type, &source_code) - { - if !references.is_empty() { - diagnostics.push(( - file_id, - version, - references_to_diagnostics(references), - )); - } else { - diagnostics.push((file_id, 
version, Vec::new())); + tokio::task::spawn_blocking(move || { + let mut diagnostic_list = Vec::new(); + + let file_cache = state_snapshot.file_cache.read().unwrap(); + for (specifier, doc_data) in state_snapshot.doc_data.iter() { + let file_id = file_cache.lookup(specifier).unwrap(); + let version = doc_data.version; + let current_version = diagnostic_collection.get_version(&file_id); + if version != current_version { + let media_type = MediaType::from(specifier); + if let Ok(source_code) = file_cache.get_contents(file_id) { + if let Ok(references) = + get_lint_references(specifier, &media_type, &source_code) + { + if !references.is_empty() { + diagnostic_list.push(( + file_id, + version, + references_to_diagnostics(references), + )); + } else { + diagnostic_list.push((file_id, version, Vec::new())); + } } + } else { + error!("Missing file contents for: {}", specifier); } - } else { - error!("Missing file contents for: {}", specifier); } } + + diagnostic_list + }) + .await + .unwrap() +} + +impl<'a> From<&'a diagnostics::DiagnosticCategory> + for lsp_types::DiagnosticSeverity +{ + fn from(category: &'a diagnostics::DiagnosticCategory) -> Self { + match category { + diagnostics::DiagnosticCategory::Error => { + lsp_types::DiagnosticSeverity::Error + } + diagnostics::DiagnosticCategory::Warning => { + lsp_types::DiagnosticSeverity::Warning + } + diagnostics::DiagnosticCategory::Suggestion => { + lsp_types::DiagnosticSeverity::Hint + } + diagnostics::DiagnosticCategory::Message => { + lsp_types::DiagnosticSeverity::Information + } + } + } +} + +impl<'a> From<&'a diagnostics::Position> for lsp_types::Position { + fn from(pos: &'a diagnostics::Position) -> Self { + Self { + line: pos.line as u32, + character: pos.character as u32, + } } +} - diagnostics +fn to_lsp_range( + start: &diagnostics::Position, + end: &diagnostics::Position, +) -> lsp_types::Range { + lsp_types::Range { + start: start.into(), + end: end.into(), + } } type TsDiagnostics = Vec; @@ -168,7 +170,7 @@ fn to_lsp_related_information( if let (Some(source), Some(start), Some(end)) = (&ri.source, &ri.start, &ri.end) { - let uri = Url::parse(&source).unwrap(); + let uri = lsp_types::Url::parse(&source).unwrap(); Some(lsp_types::DiagnosticRelatedInformation { location: lsp_types::Location { uri, @@ -223,43 +225,36 @@ fn ts_json_to_diagnostics( ) } -pub fn generate_ts_diagnostics( - state: &ServerStateSnapshot, - runtime: &mut JsRuntime, +pub async fn generate_ts_diagnostics( + ts_server: &tsc::TsServer, + diagnostic_collection: &DiagnosticCollection, + state_snapshot: StateSnapshot, ) -> Result { - if !state.config.settings.enable { - return Ok(Vec::new()); - } let mut diagnostics = Vec::new(); - let file_cache = state.file_cache.read().unwrap(); - for (specifier, doc_data) in state.doc_data.iter() { - let file_id = file_cache.lookup(specifier).unwrap(); + let state_snapshot_ = state_snapshot.clone(); + for (specifier, doc_data) in state_snapshot_.doc_data.iter() { + let file_id = { + // TODO(lucacasonato): this is highly inefficient + let file_cache = state_snapshot_.file_cache.read().unwrap(); + file_cache.lookup(specifier).unwrap() + }; let version = doc_data.version; - let current_version = state.diagnostics.get_version(&file_id); + let current_version = diagnostic_collection.get_version(&file_id); if version != current_version { // TODO(@kitsonk): consider refactoring to get all diagnostics in one shot // for a file. 
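As a purely illustrative reading of the three calls that follow (and one possible stepping stone toward the one-shot refactor the TODO above asks for), using the async TsServer handle and RequestMethod variants introduced in this patch; the helper name get_all_ts_diagnostics is hypothetical, not part of the change, and assumes the surrounding file's imports (ModuleSpecifier, AnyError, lsp_types) are in scope.

// Hypothetical helper, not in the patch: issue the three per-file diagnostic
// requests sequentially and concatenate the converted results.
async fn get_all_ts_diagnostics(
  ts_server: &tsc::TsServer,
  state_snapshot: StateSnapshot,
  specifier: ModuleSpecifier,
) -> Result<Vec<lsp_types::Diagnostic>, AnyError> {
  let requests = vec![
    tsc::RequestMethod::GetSemanticDiagnostics(specifier.clone()),
    tsc::RequestMethod::GetSuggestionDiagnostics(specifier.clone()),
    tsc::RequestMethod::GetSyntacticDiagnostics(specifier),
  ];
  let mut diagnostics = Vec::new();
  for req in requests {
    let res = ts_server.request(state_snapshot.clone(), req).await?;
    diagnostics.extend(ts_json_to_diagnostics(res)?);
  }
  Ok(diagnostics)
}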
- let request_semantic_diagnostics = - tsc::RequestMethod::GetSemanticDiagnostics(specifier.clone()); - let mut ts_diagnostics = ts_json_to_diagnostics(tsc::request( - runtime, - state, - request_semantic_diagnostics, - )?)?; - let request_suggestion_diagnostics = - tsc::RequestMethod::GetSuggestionDiagnostics(specifier.clone()); - ts_diagnostics.append(&mut ts_json_to_diagnostics(tsc::request( - runtime, - state, - request_suggestion_diagnostics, - )?)?); - let request_syntactic_diagnostics = - tsc::RequestMethod::GetSyntacticDiagnostics(specifier.clone()); - ts_diagnostics.append(&mut ts_json_to_diagnostics(tsc::request( - runtime, - state, - request_syntactic_diagnostics, - )?)?); + let req = tsc::RequestMethod::GetSemanticDiagnostics(specifier.clone()); + let mut ts_diagnostics = ts_json_to_diagnostics( + ts_server.request(state_snapshot.clone(), req).await?, + )?; + let req = tsc::RequestMethod::GetSuggestionDiagnostics(specifier.clone()); + ts_diagnostics.append(&mut ts_json_to_diagnostics( + ts_server.request(state_snapshot.clone(), req).await?, + )?); + let req = tsc::RequestMethod::GetSyntacticDiagnostics(specifier.clone()); + ts_diagnostics.append(&mut ts_json_to_diagnostics( + ts_server.request(state_snapshot.clone(), req).await?, + )?); diagnostics.push((file_id, version, ts_diagnostics)); } } diff --git a/cli/lsp/dispatch.rs b/cli/lsp/dispatch.rs deleted file mode 100644 index 774bdcef9bbb09..00000000000000 --- a/cli/lsp/dispatch.rs +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. - -use super::state::ServerState; -use super::state::ServerStateSnapshot; -use super::state::Task; -use super::utils::from_json; -use super::utils::is_canceled; - -use deno_core::error::custom_error; -use deno_core::error::AnyError; -use lsp_server::ErrorCode; -use lsp_server::Notification; -use lsp_server::Request; -use lsp_server::RequestId; -use lsp_server::Response; -use serde::de::DeserializeOwned; -use serde::Serialize; -use std::fmt; -use std::panic; - -pub struct NotificationDispatcher<'a> { - pub notification: Option, - pub server_state: &'a mut ServerState, -} - -impl<'a> NotificationDispatcher<'a> { - pub fn on( - &mut self, - f: fn(&mut ServerState, N::Params) -> Result<(), AnyError>, - ) -> Result<&mut Self, AnyError> - where - N: lsp_types::notification::Notification + 'static, - N::Params: DeserializeOwned + Send + 'static, - { - let notification = match self.notification.take() { - Some(it) => it, - None => return Ok(self), - }; - let params = match notification.extract::(N::METHOD) { - Ok(it) => it, - Err(notification) => { - self.notification = Some(notification); - return Ok(self); - } - }; - f(self.server_state, params)?; - Ok(self) - } - - pub fn finish(&mut self) { - if let Some(notification) = &self.notification { - if !notification.method.starts_with("$/") { - error!("unhandled notification: {:?}", notification); - } - } - } -} - -fn result_to_response( - id: RequestId, - result: Result, -) -> Response -where - R: lsp_types::request::Request + 'static, - R::Params: DeserializeOwned + 'static, - R::Result: Serialize + 'static, -{ - match result { - Ok(response) => Response::new_ok(id, &response), - Err(err) => { - if is_canceled(&*err) { - Response::new_err( - id, - ErrorCode::ContentModified as i32, - "content modified".to_string(), - ) - } else { - Response::new_err(id, ErrorCode::InternalError as i32, err.to_string()) - } - } - } -} - -pub struct RequestDispatcher<'a> { - pub request: Option, - pub server_state: &'a 
mut ServerState, -} - -impl<'a> RequestDispatcher<'a> { - pub fn finish(&mut self) { - if let Some(request) = self.request.take() { - error!("unknown request: {:?}", request); - let response = Response::new_err( - request.id, - ErrorCode::MethodNotFound as i32, - "unknown request".to_string(), - ); - self.server_state.respond(response); - } - } - - /// Handle a request which will respond to the LSP client asynchronously via - /// a spawned thread. - pub fn on( - &mut self, - f: fn(ServerStateSnapshot, R::Params) -> Result, - ) -> &mut Self - where - R: lsp_types::request::Request + 'static, - R::Params: DeserializeOwned + Send + fmt::Debug + 'static, - R::Result: Serialize + 'static, - { - let (id, params) = match self.parse::() { - Some(it) => it, - None => return self, - }; - self.server_state.spawn({ - let state = self.server_state.snapshot(); - move || { - let result = f(state, params); - Task::Response(result_to_response::(id, result)) - } - }); - - self - } - - /// Handle a request which will respond synchronously, returning a result if - /// the request cannot be handled or has issues. - pub fn on_sync( - &mut self, - f: fn(&mut ServerState, R::Params) -> Result, - ) -> Result<&mut Self, AnyError> - where - R: lsp_types::request::Request + 'static, - R::Params: DeserializeOwned + panic::UnwindSafe + fmt::Debug + 'static, - R::Result: Serialize + 'static, - { - let (id, params) = match self.parse::() { - Some(it) => it, - None => return Ok(self), - }; - let state = panic::AssertUnwindSafe(&mut *self.server_state); - - let response = panic::catch_unwind(move || { - let result = f(state.0, params); - result_to_response::(id, result) - }) - .map_err(|_err| { - custom_error( - "SyncTaskPanic", - format!("sync task {:?} panicked", R::METHOD), - ) - })?; - self.server_state.respond(response); - Ok(self) - } - - fn parse(&mut self) -> Option<(RequestId, R::Params)> - where - R: lsp_types::request::Request + 'static, - R::Params: DeserializeOwned + 'static, - { - let request = match &self.request { - Some(request) if request.method == R::METHOD => { - self.request.take().unwrap() - } - _ => return None, - }; - - let response = from_json(R::METHOD, request.params); - match response { - Ok(params) => Some((request.id, params)), - Err(err) => { - let response = Response::new_err( - request.id, - ErrorCode::InvalidParams as i32, - err.to_string(), - ); - self.server_state.respond(response); - None - } - } - } -} diff --git a/cli/lsp/handlers.rs b/cli/lsp/handlers.rs deleted file mode 100644 index 69cdd8041f1912..00000000000000 --- a/cli/lsp/handlers.rs +++ /dev/null @@ -1,304 +0,0 @@ -// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
- -use super::lsp_extensions; -use super::state::ServerState; -use super::state::ServerStateSnapshot; -use super::text; -use super::tsc; -use super::utils; - -use deno_core::error::custom_error; -use deno_core::error::AnyError; -use deno_core::serde_json; -use deno_core::ModuleSpecifier; -use dprint_plugin_typescript as dprint; -use lsp_types::CompletionParams; -use lsp_types::CompletionResponse; -use lsp_types::DocumentFormattingParams; -use lsp_types::DocumentHighlight; -use lsp_types::DocumentHighlightParams; -use lsp_types::GotoDefinitionParams; -use lsp_types::GotoDefinitionResponse; -use lsp_types::Hover; -use lsp_types::HoverParams; -use lsp_types::Location; -use lsp_types::ReferenceParams; -use lsp_types::TextEdit; -use std::path::PathBuf; - -fn get_line_index( - state: &mut ServerState, - specifier: &ModuleSpecifier, -) -> Result, AnyError> { - let line_index = if specifier.as_url().scheme() == "asset" { - let server_state = state.snapshot(); - if let Some(source) = - tsc::get_asset(specifier, &mut state.ts_runtime, &server_state)? - { - text::index_lines(&source) - } else { - return Err(custom_error( - "NotFound", - format!("asset source missing: {}", specifier), - )); - } - } else { - let file_cache = state.file_cache.read().unwrap(); - if let Some(file_id) = file_cache.lookup(specifier) { - let file_text = file_cache.get_contents(file_id)?; - text::index_lines(&file_text) - } else { - let mut sources = state.sources.write().unwrap(); - if let Some(line_index) = sources.get_line_index(specifier) { - line_index - } else { - return Err(custom_error( - "NotFound", - format!("source for specifier not found: {}", specifier), - )); - } - } - }; - Ok(line_index) -} - -pub fn handle_formatting( - state: ServerStateSnapshot, - params: DocumentFormattingParams, -) -> Result>, AnyError> { - let specifier = utils::normalize_url(params.text_document.uri.clone()); - let file_cache = state.file_cache.read().unwrap(); - let file_id = file_cache.lookup(&specifier).unwrap(); - let file_text = file_cache.get_contents(file_id)?; - - let file_path = if let Ok(file_path) = params.text_document.uri.to_file_path() - { - file_path - } else { - PathBuf::from(params.text_document.uri.path()) - }; - let config = dprint::configuration::ConfigurationBuilder::new() - .deno() - .build(); - - // TODO(@kitsonk) this could be handled better in `cli/tools/fmt.rs` in the - // future. 
- let new_text = dprint::format_text(&file_path, &file_text, &config) - .map_err(|e| custom_error("FormatError", e))?; - - let text_edits = text::get_edits(&file_text, &new_text); - if text_edits.is_empty() { - Ok(None) - } else { - Ok(Some(text_edits)) - } -} - -pub fn handle_document_highlight( - state: &mut ServerState, - params: DocumentHighlightParams, -) -> Result>, AnyError> { - let specifier = utils::normalize_url( - params.text_document_position_params.text_document.uri, - ); - let line_index = get_line_index(state, &specifier)?; - let server_state = state.snapshot(); - let files_to_search = vec![specifier.clone()]; - let maybe_document_highlights: Option> = - serde_json::from_value(tsc::request( - &mut state.ts_runtime, - &server_state, - tsc::RequestMethod::GetDocumentHighlights(( - specifier, - text::to_char_pos( - &line_index, - params.text_document_position_params.position, - ), - files_to_search, - )), - )?)?; - - if let Some(document_highlights) = maybe_document_highlights { - Ok(Some( - document_highlights - .into_iter() - .map(|dh| dh.to_highlight(&line_index)) - .flatten() - .collect(), - )) - } else { - Ok(None) - } -} - -pub fn handle_goto_definition( - state: &mut ServerState, - params: GotoDefinitionParams, -) -> Result, AnyError> { - let specifier = utils::normalize_url( - params.text_document_position_params.text_document.uri, - ); - let line_index = get_line_index(state, &specifier)?; - let server_state = state.snapshot(); - let maybe_definition: Option = - serde_json::from_value(tsc::request( - &mut state.ts_runtime, - &server_state, - tsc::RequestMethod::GetDefinition(( - specifier, - text::to_char_pos( - &line_index, - params.text_document_position_params.position, - ), - )), - )?)?; - - if let Some(definition) = maybe_definition { - Ok( - definition - .to_definition(&line_index, |s| get_line_index(state, &s).unwrap()), - ) - } else { - Ok(None) - } -} - -pub fn handle_hover( - state: &mut ServerState, - params: HoverParams, -) -> Result, AnyError> { - let specifier = utils::normalize_url( - params.text_document_position_params.text_document.uri, - ); - let line_index = get_line_index(state, &specifier)?; - let server_state = state.snapshot(); - let maybe_quick_info: Option = - serde_json::from_value(tsc::request( - &mut state.ts_runtime, - &server_state, - tsc::RequestMethod::GetQuickInfo(( - specifier, - text::to_char_pos( - &line_index, - params.text_document_position_params.position, - ), - )), - )?)?; - - if let Some(quick_info) = maybe_quick_info { - Ok(Some(quick_info.to_hover(&line_index))) - } else { - Ok(None) - } -} - -pub fn handle_completion( - state: &mut ServerState, - params: CompletionParams, -) -> Result, AnyError> { - let specifier = - utils::normalize_url(params.text_document_position.text_document.uri); - let line_index = get_line_index(state, &specifier)?; - let server_state = state.snapshot(); - let maybe_completion_info: Option = - serde_json::from_value(tsc::request( - &mut state.ts_runtime, - &server_state, - tsc::RequestMethod::GetCompletions(( - specifier, - text::to_char_pos(&line_index, params.text_document_position.position), - tsc::UserPreferences { - // TODO(lucacasonato): enable this. 
see https://github.com/denoland/deno/pull/8651 - include_completions_with_insert_text: Some(false), - ..Default::default() - }, - )), - )?)?; - - if let Some(completions) = maybe_completion_info { - Ok(Some(completions.into_completion_response(&line_index))) - } else { - Ok(None) - } -} - -pub fn handle_references( - state: &mut ServerState, - params: ReferenceParams, -) -> Result>, AnyError> { - let specifier = - utils::normalize_url(params.text_document_position.text_document.uri); - let line_index = get_line_index(state, &specifier)?; - let server_state = state.snapshot(); - let maybe_references: Option> = - serde_json::from_value(tsc::request( - &mut state.ts_runtime, - &server_state, - tsc::RequestMethod::GetReferences(( - specifier, - text::to_char_pos(&line_index, params.text_document_position.position), - )), - )?)?; - - if let Some(references) = maybe_references { - let mut results = Vec::new(); - for reference in references { - if !params.context.include_declaration && reference.is_definition { - continue; - } - let reference_specifier = - ModuleSpecifier::resolve_url(&reference.file_name).unwrap(); - let line_index = get_line_index(state, &reference_specifier)?; - results.push(reference.to_location(&line_index)); - } - - Ok(Some(results)) - } else { - Ok(None) - } -} - -pub fn handle_virtual_text_document( - state: &mut ServerState, - params: lsp_extensions::VirtualTextDocumentParams, -) -> Result { - let specifier = utils::normalize_url(params.text_document.uri); - let url = specifier.as_url(); - let contents = if url.as_str() == "deno:///status.md" { - let file_cache = state.file_cache.read().unwrap(); - format!( - r#"# Deno Language Server Status - -- Documents in memory: {} - -"#, - file_cache.len() - ) - } else { - match url.scheme() { - "asset" => { - let server_state = state.snapshot(); - if let Some(text) = - tsc::get_asset(&specifier, &mut state.ts_runtime, &server_state)? - { - text - } else { - error!("Missing asset: {}", specifier); - "".to_string() - } - } - _ => { - let mut sources = state.sources.write().unwrap(); - if let Some(text) = sources.get_text(&specifier) { - text - } else { - return Err(custom_error( - "NotFound", - format!("The cached sources was not found: {}", specifier), - )); - } - } - } - }; - Ok(contents) -} diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs new file mode 100644 index 00000000000000..c1e3ac8d5e3d4b --- /dev/null +++ b/cli/lsp/language_server.rs @@ -0,0 +1,981 @@ +// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
+ +use deno_core::error::anyhow; +use deno_core::error::AnyError; +use deno_core::serde::Deserialize; +use deno_core::serde::Serialize; +use deno_core::serde_json; +use deno_core::serde_json::json; +use deno_core::serde_json::Value; +use deno_core::ModuleSpecifier; +use dprint_plugin_typescript as dprint; +use lspower::jsonrpc::Error as LSPError; +use lspower::jsonrpc::ErrorCode as LSPErrorCode; +use lspower::jsonrpc::Result as LSPResult; +use lspower::lsp_types::*; +use lspower::Client; +use std::collections::HashMap; +use std::env; +use std::path::PathBuf; +use std::sync::Arc; +use std::sync::RwLock; +use tokio::fs; + +use crate::deno_dir; +use crate::import_map::ImportMap; +use crate::media_type::MediaType; +use crate::tsc_config::TsConfig; + +use super::analysis; +use super::capabilities; +use super::config::Config; +use super::diagnostics; +use super::diagnostics::DiagnosticCollection; +use super::diagnostics::DiagnosticSource; +use super::memory_cache::MemoryCache; +use super::sources::Sources; +use super::text; +use super::text::apply_content_changes; +use super::tsc; +use super::tsc::TsServer; +use super::utils; + +#[derive(Debug, Clone)] +pub struct LanguageServer { + assets: Arc>>>, + client: Client, + ts_server: TsServer, + config: Arc>, + doc_data: Arc>>, + file_cache: Arc>, + sources: Arc>, + diagnostics: Arc>, + maybe_import_map: Arc>>, + maybe_import_map_uri: Arc>>, +} + +#[derive(Debug, Clone, Default)] +pub struct StateSnapshot { + pub assets: Arc>>>, + pub doc_data: HashMap, + pub file_cache: Arc>, + pub sources: Arc>, +} + +impl LanguageServer { + pub fn new(client: Client) -> Self { + let maybe_custom_root = env::var("DENO_DIR").map(String::into).ok(); + let dir = deno_dir::DenoDir::new(maybe_custom_root) + .expect("could not access DENO_DIR"); + let location = dir.root.join("deps"); + let sources = Arc::new(RwLock::new(Sources::new(&location))); + + LanguageServer { + assets: Default::default(), + client, + ts_server: TsServer::new(), + config: Default::default(), + doc_data: Default::default(), + file_cache: Default::default(), + sources, + diagnostics: Default::default(), + maybe_import_map: Default::default(), + maybe_import_map_uri: Default::default(), + } + } + + pub async fn update_import_map(&self) -> Result<(), AnyError> { + let (maybe_import_map, maybe_root_uri) = { + let config = self.config.read().unwrap(); + (config.settings.import_map.clone(), config.root_uri.clone()) + }; + if let Some(import_map_str) = &maybe_import_map { + info!("update import map"); + let import_map_url = if let Ok(url) = Url::from_file_path(import_map_str) + { + Ok(url) + } else if let Some(root_uri) = &maybe_root_uri { + let root_path = root_uri + .to_file_path() + .map_err(|_| anyhow!("Bad root_uri: {}", root_uri))?; + let import_map_path = root_path.join(import_map_str); + Url::from_file_path(import_map_path).map_err(|_| { + anyhow!("Bad file path for import map: {:?}", import_map_str) + }) + } else { + Err(anyhow!( + "The path to the import map (\"{}\") is not resolvable.", + import_map_str + )) + }?; + let import_map_path = import_map_url + .to_file_path() + .map_err(|_| anyhow!("Bad file path."))?; + let import_map_json = + fs::read_to_string(import_map_path).await.map_err(|err| { + anyhow!( + "Failed to load the import map at: {}. 
[{}]", + import_map_url, + err + ) + })?; + let import_map = + ImportMap::from_json(&import_map_url.to_string(), &import_map_json)?; + *self.maybe_import_map_uri.write().unwrap() = Some(import_map_url); + *self.maybe_import_map.write().unwrap() = Some(import_map); + } else { + *self.maybe_import_map.write().unwrap() = None; + } + Ok(()) + } + + async fn prepare_diagnostics(&self) -> Result<(), AnyError> { + let (enabled, lint_enabled) = { + let config = self.config.read().unwrap(); + (config.settings.enable, config.settings.lint) + }; + + let lint = async { + if lint_enabled { + let diagnostic_collection = self.diagnostics.read().unwrap().clone(); + let diagnostics = diagnostics::generate_lint_diagnostics( + self.snapshot(), + diagnostic_collection, + ) + .await; + { + let mut diagnostics_collection = self.diagnostics.write().unwrap(); + for (file_id, version, diagnostics) in diagnostics { + diagnostics_collection.set( + file_id, + DiagnosticSource::Lint, + version, + diagnostics, + ); + } + } + self.publish_diagnostics().await? + }; + + Ok::<(), AnyError>(()) + }; + + let ts = async { + if enabled { + let diagnostics = { + let diagnostic_collection = self.diagnostics.read().unwrap().clone(); + diagnostics::generate_ts_diagnostics( + &self.ts_server, + &diagnostic_collection, + self.snapshot(), + ) + .await? + }; + { + let mut diagnostics_collection = self.diagnostics.write().unwrap(); + for (file_id, version, diagnostics) in diagnostics { + diagnostics_collection.set( + file_id, + DiagnosticSource::TypeScript, + version, + diagnostics, + ); + } + }; + self.publish_diagnostics().await? + } + + Ok::<(), AnyError>(()) + }; + + let (lint_res, ts_res) = tokio::join!(lint, ts); + lint_res?; + ts_res?; + + Ok(()) + } + + async fn publish_diagnostics(&self) -> Result<(), AnyError> { + let (maybe_changes, diagnostics_collection) = { + let mut diagnostics_collection = self.diagnostics.write().unwrap(); + let maybe_changes = diagnostics_collection.take_changes(); + (maybe_changes, diagnostics_collection.clone()) + }; + if let Some(diagnostic_changes) = maybe_changes { + let settings = self.config.read().unwrap().settings.clone(); + for file_id in diagnostic_changes { + // TODO(@kitsonk) not totally happy with the way we collect and store + // different types of diagnostics and offer them up to the client, we + // do need to send "empty" vectors though when a particular feature is + // disabled, otherwise the client will not clear down previous + // diagnostics + let mut diagnostics: Vec = if settings.lint { + diagnostics_collection + .diagnostics_for(file_id, DiagnosticSource::Lint) + .cloned() + .collect() + } else { + vec![] + }; + if settings.enable { + diagnostics.extend( + diagnostics_collection + .diagnostics_for(file_id, DiagnosticSource::TypeScript) + .cloned(), + ); + } + let specifier = { + let file_cache = self.file_cache.read().unwrap(); + file_cache.get_specifier(file_id).clone() + }; + let uri = specifier.as_url().clone(); + let version = if let Some(doc_data) = + self.doc_data.read().unwrap().get(&specifier) + { + doc_data.version + } else { + None + }; + self + .client + .publish_diagnostics(uri, diagnostics, version) + .await; + } + } + + Ok(()) + } + + pub fn snapshot(&self) -> StateSnapshot { + StateSnapshot { + assets: self.assets.clone(), + doc_data: self.doc_data.read().unwrap().clone(), + file_cache: self.file_cache.clone(), + sources: self.sources.clone(), + } + } + + pub async fn get_line_index( + &self, + specifier: ModuleSpecifier, + ) -> Result, AnyError> { + let 
line_index = if specifier.as_url().scheme() == "asset" { + let state_snapshot = self.snapshot(); + if let Some(source) = + tsc::get_asset(&specifier, &self.ts_server, &state_snapshot).await? + { + text::index_lines(&source) + } else { + return Err(anyhow!("asset source missing: {}", specifier)); + } + } else { + let file_cache = self.file_cache.read().unwrap(); + if let Some(file_id) = file_cache.lookup(&specifier) { + let file_text = file_cache.get_contents(file_id)?; + text::index_lines(&file_text) + } else { + let mut sources = self.sources.write().unwrap(); + if let Some(line_index) = sources.get_line_index(&specifier) { + line_index + } else { + return Err(anyhow!("source for specifier not found: {}", specifier)); + } + } + }; + Ok(line_index) + } +} + +#[lspower::async_trait] +impl lspower::LanguageServer for LanguageServer { + async fn initialize( + &self, + params: InitializeParams, + ) -> LSPResult { + info!("Starting Deno language server..."); + + let capabilities = capabilities::server_capabilities(¶ms.capabilities); + + let version = format!( + "{} ({}, {})", + crate::version::deno(), + env!("PROFILE"), + env!("TARGET") + ); + info!(" version: {}", version); + + let server_info = ServerInfo { + name: "deno-language-server".to_string(), + version: Some(version), + }; + + if let Some(client_info) = params.client_info { + info!( + "Connected to \"{}\" {}", + client_info.name, + client_info.version.unwrap_or_default(), + ); + } + + { + let mut config = self.config.write().unwrap(); + config.root_uri = params.root_uri; + if let Some(value) = params.initialization_options { + config.update(value)?; + } + config.update_capabilities(¶ms.capabilities); + } + + // TODO(@kitsonk) need to make this configurable, respect unstable + let ts_config = TsConfig::new(json!({ + "allowJs": true, + "experimentalDecorators": true, + "isolatedModules": true, + "lib": ["deno.ns", "deno.window"], + "module": "esnext", + "noEmit": true, + "strict": true, + "target": "esnext", + })); + // TODO(lucacasonato): handle error correctly + self + .ts_server + .request(self.snapshot(), tsc::RequestMethod::Configure(ts_config)) + .await + .unwrap(); + + Ok(InitializeResult { + capabilities, + server_info: Some(server_info), + }) + } + + async fn initialized(&self, _: InitializedParams) { + // Check to see if we need to setup the import map + if let Err(err) = self.update_import_map().await { + self + .client + .show_message(MessageType::Warning, err.to_string()) + .await; + } + + // we are going to watch all the JSON files in the workspace, and the + // notification handler will pick up any of the changes of those files we + // are interested in. 
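For reference, the registration assembled just below reaches the editor as a client/registerCapability request; a hedged sketch of the params it produces, where the id, method and glob pattern are the ones used below and the surrounding field names follow the LSP specification ("kind": 2 being the numeric encoding of WatchKind::Change):

use deno_core::serde_json::json;
use deno_core::serde_json::Value;

// Approximate wire shape only; not code from the patch.
fn example_register_capability_params() -> Value {
  json!({
    "registrations": [{
      "id": "workspace/didChangeWatchedFiles",
      "method": "workspace/didChangeWatchedFiles",
      "registerOptions": {
        "watchers": [{ "globPattern": "**/*.json", "kind": 2 }]
      }
    }]
  })
}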
+ let watch_registration_options = DidChangeWatchedFilesRegistrationOptions { + watchers: vec![FileSystemWatcher { + glob_pattern: "**/*.json".to_string(), + kind: Some(WatchKind::Change), + }], + }; + let registration = Registration { + id: "workspace/didChangeWatchedFiles".to_string(), + method: "workspace/didChangeWatchedFiles".to_string(), + register_options: Some( + serde_json::to_value(watch_registration_options).unwrap(), + ), + }; + if let Err(err) = self.client.register_capability(vec![registration]).await + { + warn!("Client errored on capabilities.\n{}", err); + } + + info!("Server ready."); + } + + async fn shutdown(&self) -> LSPResult<()> { + Ok(()) + } + + async fn did_open(&self, params: DidOpenTextDocumentParams) { + if params.text_document.uri.scheme() == "deno" { + // we can ignore virtual text documents opening, as they don't need to + // be tracked in memory, as they are static assets that won't change + // already managed by the language service + return; + } + let specifier = utils::normalize_url(params.text_document.uri); + let maybe_import_map = self.maybe_import_map.read().unwrap().clone(); + if self + .doc_data + .write() + .unwrap() + .insert( + specifier.clone(), + DocumentData::new( + specifier.clone(), + params.text_document.version, + ¶ms.text_document.text, + maybe_import_map, + ), + ) + .is_some() + { + error!("duplicate DidOpenTextDocument: {}", specifier); + } + + self + .file_cache + .write() + .unwrap() + .set_contents(specifier, Some(params.text_document.text.into_bytes())); + // TODO(@lucacasonato): error handling + self.prepare_diagnostics().await.unwrap(); + } + + async fn did_change(&self, params: DidChangeTextDocumentParams) { + let specifier = utils::normalize_url(params.text_document.uri); + let mut content = { + let file_cache = self.file_cache.read().unwrap(); + let file_id = file_cache.lookup(&specifier).unwrap(); + file_cache.get_contents(file_id).unwrap() + }; + apply_content_changes(&mut content, params.content_changes); + { + let mut doc_data = self.doc_data.write().unwrap(); + let doc_data = doc_data.get_mut(&specifier).unwrap(); + let maybe_import_map = self.maybe_import_map.read().unwrap(); + doc_data.update( + params.text_document.version, + &content, + &maybe_import_map, + ); + } + + self + .file_cache + .write() + .unwrap() + .set_contents(specifier, Some(content.into_bytes())); + + // TODO(@lucacasonato): error handling + self.prepare_diagnostics().await.unwrap(); + } + + async fn did_close(&self, params: DidCloseTextDocumentParams) { + if params.text_document.uri.scheme() == "deno" { + // we can ignore virtual text documents opening, as they don't need to + // be tracked in memory, as they are static assets that won't change + // already managed by the language service + return; + } + let specifier = utils::normalize_url(params.text_document.uri); + if self.doc_data.write().unwrap().remove(&specifier).is_none() { + error!("orphaned document: {}", specifier); + } + // TODO(@kitsonk) should we do garbage collection on the diagnostics? + // TODO(@lucacasonato): error handling + self.prepare_diagnostics().await.unwrap(); + } + + async fn did_save(&self, _params: DidSaveTextDocumentParams) { + // nothing to do yet... cleanup things? 
+ } + + async fn did_change_configuration( + &self, + _params: DidChangeConfigurationParams, + ) { + let res = self + .client + .configuration(vec![ConfigurationItem { + scope_uri: None, + section: Some("deno".to_string()), + }]) + .await + .map(|vec| vec.get(0).cloned()); + + match res { + Err(err) => error!("failed to fetch the extension settings {:?}", err), + Ok(Some(config)) => { + if let Err(err) = self.config.write().unwrap().update(config) { + error!("failed to update settings: {}", err); + } + if let Err(err) = self.update_import_map().await { + self + .client + .show_message(MessageType::Warning, err.to_string()) + .await; + } + } + _ => error!("received empty extension settings from the client"), + } + } + + async fn did_change_watched_files( + &self, + params: DidChangeWatchedFilesParams, + ) { + // if the current import map has changed, we need to reload it + let maybe_import_map_uri = + self.maybe_import_map_uri.read().unwrap().clone(); + if let Some(import_map_uri) = maybe_import_map_uri { + if params.changes.iter().any(|fe| import_map_uri == fe.uri) { + if let Err(err) = self.update_import_map().await { + self + .client + .show_message(MessageType::Warning, err.to_string()) + .await; + } + } + } + } + + async fn formatting( + &self, + params: DocumentFormattingParams, + ) -> LSPResult>> { + let specifier = utils::normalize_url(params.text_document.uri.clone()); + let file_text = { + let file_cache = self.file_cache.read().unwrap(); + let file_id = file_cache.lookup(&specifier).unwrap(); + // TODO(lucacasonato): handle error properly + file_cache.get_contents(file_id).unwrap() + }; + + let file_path = + if let Ok(file_path) = params.text_document.uri.to_file_path() { + file_path + } else { + PathBuf::from(params.text_document.uri.path()) + }; + + // TODO(lucacasonato): handle error properly + let text_edits = tokio::task::spawn_blocking(move || { + let config = dprint::configuration::ConfigurationBuilder::new() + .deno() + .build(); + // TODO(@kitsonk) this could be handled better in `cli/tools/fmt.rs` in the + // future. 
+ match dprint::format_text(&file_path, &file_text, &config) { + Ok(new_text) => Some(text::get_edits(&file_text, &new_text)), + Err(err) => { + warn!("Format error: {}", err); + None + } + } + }) + .await + .unwrap(); + + if let Some(text_edits) = text_edits { + if text_edits.is_empty() { + Ok(None) + } else { + Ok(Some(text_edits)) + } + } else { + Ok(None) + } + } + + async fn hover(&self, params: HoverParams) -> LSPResult> { + let specifier = utils::normalize_url( + params.text_document_position_params.text_document.uri, + ); + // TODO(lucacasonato): handle error correctly + let line_index = self.get_line_index(specifier.clone()).await.unwrap(); + let req = tsc::RequestMethod::GetQuickInfo(( + specifier, + text::to_char_pos( + &line_index, + params.text_document_position_params.position, + ), + )); + // TODO(lucacasonato): handle error correctly + let res = self.ts_server.request(self.snapshot(), req).await.unwrap(); + // TODO(lucacasonato): handle error correctly + let maybe_quick_info: Option = + serde_json::from_value(res).unwrap(); + if let Some(quick_info) = maybe_quick_info { + Ok(Some(quick_info.to_hover(&line_index))) + } else { + Ok(None) + } + } + + async fn document_highlight( + &self, + params: DocumentHighlightParams, + ) -> LSPResult>> { + let specifier = utils::normalize_url( + params.text_document_position_params.text_document.uri, + ); + // TODO(lucacasonato): handle error correctly + let line_index = self.get_line_index(specifier.clone()).await.unwrap(); + let files_to_search = vec![specifier.clone()]; + let req = tsc::RequestMethod::GetDocumentHighlights(( + specifier, + text::to_char_pos( + &line_index, + params.text_document_position_params.position, + ), + files_to_search, + )); + // TODO(lucacasonato): handle error correctly + let res = self.ts_server.request(self.snapshot(), req).await.unwrap(); + // TODO(lucacasonato): handle error correctly + let maybe_document_highlights: Option> = + serde_json::from_value(res).unwrap(); + + if let Some(document_highlights) = maybe_document_highlights { + Ok(Some( + document_highlights + .into_iter() + .map(|dh| dh.to_highlight(&line_index)) + .flatten() + .collect(), + )) + } else { + Ok(None) + } + } + + async fn references( + &self, + params: ReferenceParams, + ) -> LSPResult>> { + let specifier = + utils::normalize_url(params.text_document_position.text_document.uri); + // TODO(lucacasonato): handle error correctly + let line_index = self.get_line_index(specifier.clone()).await.unwrap(); + let req = tsc::RequestMethod::GetReferences(( + specifier, + text::to_char_pos(&line_index, params.text_document_position.position), + )); + // TODO(lucacasonato): handle error correctly + let res = self.ts_server.request(self.snapshot(), req).await.unwrap(); + // TODO(lucacasonato): handle error correctly + let maybe_references: Option> = + serde_json::from_value(res).unwrap(); + + if let Some(references) = maybe_references { + let mut results = Vec::new(); + for reference in references { + if !params.context.include_declaration && reference.is_definition { + continue; + } + let reference_specifier = + ModuleSpecifier::resolve_url(&reference.file_name).unwrap(); + // TODO(lucacasonato): handle error correctly + let line_index = + self.get_line_index(reference_specifier).await.unwrap(); + results.push(reference.to_location(&line_index)); + } + + Ok(Some(results)) + } else { + Ok(None) + } + } + + async fn goto_definition( + &self, + params: GotoDefinitionParams, + ) -> LSPResult> { + let specifier = utils::normalize_url( + 
params.text_document_position_params.text_document.uri, + ); + // TODO(lucacasonato): handle error correctly + let line_index = self.get_line_index(specifier.clone()).await.unwrap(); + let req = tsc::RequestMethod::GetDefinition(( + specifier, + text::to_char_pos( + &line_index, + params.text_document_position_params.position, + ), + )); + // TODO(lucacasonato): handle error correctly + let res = self.ts_server.request(self.snapshot(), req).await.unwrap(); + // TODO(lucacasonato): handle error correctly + let maybe_definition: Option = + serde_json::from_value(res).unwrap(); + + if let Some(definition) = maybe_definition { + Ok( + definition + .to_definition(&line_index, |s| self.get_line_index(s)) + .await, + ) + } else { + Ok(None) + } + } + + async fn completion( + &self, + params: CompletionParams, + ) -> LSPResult> { + let specifier = + utils::normalize_url(params.text_document_position.text_document.uri); + // TODO(lucacasonato): handle error correctly + let line_index = self.get_line_index(specifier.clone()).await.unwrap(); + let req = tsc::RequestMethod::GetCompletions(( + specifier, + text::to_char_pos(&line_index, params.text_document_position.position), + tsc::UserPreferences { + // TODO(lucacasonato): enable this. see https://github.com/denoland/deno/pull/8651 + include_completions_with_insert_text: Some(false), + ..Default::default() + }, + )); + // TODO(lucacasonato): handle error correctly + let res = self.ts_server.request(self.snapshot(), req).await.unwrap(); + // TODO(lucacasonato): handle error correctly + let maybe_completion_info: Option = + serde_json::from_value(res).unwrap(); + + if let Some(completions) = maybe_completion_info { + Ok(Some(completions.into_completion_response(&line_index))) + } else { + Ok(None) + } + } + + async fn request_else( + &self, + method: &str, + params: Option, + ) -> LSPResult> { + match method { + "deno/virtualTextDocument" => match params.map(serde_json::from_value) { + Some(Ok(params)) => Ok(Some( + serde_json::to_value(self.virtual_text_document(params).await?) + .map_err(|err| { + error!( + "Failed to serialize virtual_text_document response: {:#?}", + err + ); + LSPError::internal_error() + })?, + )), + Some(Err(err)) => Err(LSPError::invalid_params(err.to_string())), + None => Err(LSPError::invalid_params("Missing parameters")), + }, + _ => { + error!("Got a {} request, but no handler is defined", method); + Err(LSPError::method_not_found()) + } + } + } +} + +impl LanguageServer { + async fn virtual_text_document( + &self, + params: VirtualTextDocumentParams, + ) -> LSPResult> { + let specifier = utils::normalize_url(params.text_document.uri); + let url = specifier.as_url(); + let contents = if url.as_str() == "deno:/status.md" { + let file_cache = self.file_cache.read().unwrap(); + Some(format!( + r#"# Deno Language Server Status + + - Documents in memory: {} + + "#, + file_cache.len() + )) + } else { + match url.scheme() { + "asset" => { + let state_snapshot = self.snapshot(); + if let Some(text) = + tsc::get_asset(&specifier, &self.ts_server, &state_snapshot) + .await + .map_err(|_| LSPError::new(LSPErrorCode::InternalError))? 
+ { + Some(text) + } else { + error!("Missing asset: {}", specifier); + None + } + } + _ => { + let mut sources = self.sources.write().unwrap(); + if let Some(text) = sources.get_text(&specifier) { + Some(text) + } else { + error!("The cached sources was not found: {}", specifier); + None + } + } + } + }; + Ok(contents) + } +} + +#[derive(Debug, Clone)] +pub struct DocumentData { + pub dependencies: Option>, + pub version: Option, + specifier: ModuleSpecifier, +} + +impl DocumentData { + pub fn new( + specifier: ModuleSpecifier, + version: i32, + source: &str, + maybe_import_map: Option, + ) -> Self { + let dependencies = if let Some((dependencies, _)) = + analysis::analyze_dependencies( + &specifier, + source, + &MediaType::from(&specifier), + &maybe_import_map, + ) { + Some(dependencies) + } else { + None + }; + Self { + dependencies, + version: Some(version), + specifier, + } + } + + pub fn update( + &mut self, + version: i32, + source: &str, + maybe_import_map: &Option, + ) { + self.dependencies = if let Some((dependencies, _)) = + analysis::analyze_dependencies( + &self.specifier, + source, + &MediaType::from(&self.specifier), + maybe_import_map, + ) { + Some(dependencies) + } else { + None + }; + self.version = Some(version) + } +} + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct VirtualTextDocumentParams { + pub text_document: TextDocumentIdentifier, +} + +#[cfg(test)] +mod tests { + use super::*; + use lspower::jsonrpc; + use lspower::ExitedError; + use lspower::LspService; + use std::fs; + use std::task::Poll; + use tower_test::mock::Spawn; + + enum LspResponse { + None, + RequestAny, + Request(u64, Value), + } + + struct LspTestHarness { + requests: Vec<(&'static str, LspResponse)>, + service: Spawn, + } + + impl LspTestHarness { + pub fn new(requests: Vec<(&'static str, LspResponse)>) -> Self { + let (service, _) = LspService::new(LanguageServer::new); + let service = Spawn::new(service); + Self { requests, service } + } + + async fn run(&mut self) { + for (req_path_str, expected) in self.requests.iter() { + assert_eq!(self.service.poll_ready(), Poll::Ready(Ok(()))); + let fixtures_path = test_util::root_path().join("cli/tests/lsp"); + assert!(fixtures_path.is_dir()); + let req_path = fixtures_path.join(req_path_str); + let req_str = fs::read_to_string(req_path).unwrap(); + let req: jsonrpc::Incoming = serde_json::from_str(&req_str).unwrap(); + let response: Result, ExitedError> = + self.service.call(req).await; + match response { + Ok(result) => match expected { + LspResponse::None => assert_eq!(result, None), + LspResponse::RequestAny => match result { + Some(jsonrpc::Outgoing::Response(_)) => (), + _ => panic!("unexpected result: {:?}", result), + }, + LspResponse::Request(id, value) => match result { + Some(jsonrpc::Outgoing::Response(resp)) => assert_eq!( + resp, + jsonrpc::Response::ok(jsonrpc::Id::Number(*id), value.clone()) + ), + _ => panic!("unexpected result: {:?}", result), + }, + }, + Err(err) => panic!("Error result: {}", err), + } + } + } + } + + #[tokio::test] + async fn test_startup_shutdown() { + let mut harness = LspTestHarness::new(vec![ + ("initialize_request.json", LspResponse::RequestAny), + ("initialized_notification.json", LspResponse::None), + ( + "shutdown_request.json", + LspResponse::Request(3, json!(null)), + ), + ("exit_notification.json", LspResponse::None), + ]); + harness.run().await; + } + + #[tokio::test] + async fn test_hover() { + let mut harness = LspTestHarness::new(vec![ + 
("initialize_request.json", LspResponse::RequestAny), + ("initialized_notification.json", LspResponse::None), + ("did_open_notification.json", LspResponse::None), + ( + "hover_request.json", + LspResponse::Request( + 2, + json!({ + "contents": [ + { + "language": "typescript", + "value": "const Deno.args: string[]" + }, + "Returns the script arguments to the program. If for example we run a\nprogram:\n\ndeno run --allow-read https://deno.land/std/examples/cat.ts /etc/passwd\n\nThen `Deno.args` will contain:\n\n[ \"/etc/passwd\" ]" + ], + "range": { + "start": { + "line": 0, + "character": 17 + }, + "end": { + "line": 0, + "character": 21 + } + } + }), + ), + ), + ( + "shutdown_request.json", + LspResponse::Request(3, json!(null)), + ), + ("exit_notification.json", LspResponse::None), + ]); + harness.run().await; + } +} diff --git a/cli/lsp/lsp_extensions.rs b/cli/lsp/lsp_extensions.rs deleted file mode 100644 index eb0a62464d4977..00000000000000 --- a/cli/lsp/lsp_extensions.rs +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. - -///! -///! Extensions to the language service protocol that are specific to Deno. -///! -use deno_core::serde::Deserialize; -use deno_core::serde::Serialize; -use lsp_types::request::Request; -use lsp_types::TextDocumentIdentifier; - -#[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct VirtualTextDocumentParams { - pub text_document: TextDocumentIdentifier, -} - -/// Request a _virtual_ text document from the server. Used for example to -/// provide a status document of the language server which can be viewed in the -/// IDE. -pub enum VirtualTextDocument {} - -impl Request for VirtualTextDocument { - type Params = VirtualTextDocumentParams; - type Result = String; - const METHOD: &'static str = "deno/virtualTextDocument"; -} diff --git a/cli/lsp/memory_cache.rs b/cli/lsp/memory_cache.rs index 75c5bdb2511139..cfba1ecabb6af5 100644 --- a/cli/lsp/memory_cache.rs +++ b/cli/lsp/memory_cache.rs @@ -4,7 +4,6 @@ use deno_core::error::AnyError; use deno_core::ModuleSpecifier; use std::collections::HashMap; use std::fmt; -use std::mem; #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct FileId(pub u32); @@ -111,10 +110,6 @@ impl MemoryCache { change_kind, }) } - - pub fn take_changes(&mut self) -> Vec { - mem::take(&mut self.changes) - } } impl fmt::Debug for MemoryCache { diff --git a/cli/lsp/mod.rs b/cli/lsp/mod.rs index 0f83e4ab2c178c..912a8c684a062f 100644 --- a/cli/lsp/mod.rs +++ b/cli/lsp/mod.rs @@ -1,472 +1,29 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
+use deno_core::error::AnyError; +use lspower::LspService; +use lspower::Server; mod analysis; mod capabilities; mod config; mod diagnostics; -mod dispatch; -mod handlers; -mod lsp_extensions; +mod language_server; mod memory_cache; mod sources; -mod state; mod text; mod tsc; mod utils; -use config::Config; -use diagnostics::DiagnosticSource; -use dispatch::NotificationDispatcher; -use dispatch::RequestDispatcher; -use state::update_import_map; -use state::DocumentData; -use state::Event; -use state::ServerState; -use state::Status; -use state::Task; -use text::apply_content_changes; - -use crate::tsc_config::TsConfig; - -use crossbeam_channel::Receiver; -use deno_core::error::custom_error; -use deno_core::error::AnyError; -use deno_core::serde_json; -use deno_core::serde_json::json; -use lsp_server::Connection; -use lsp_server::ErrorCode; -use lsp_server::Message; -use lsp_server::Notification; -use lsp_server::Request; -use lsp_server::RequestId; -use lsp_server::Response; -use lsp_types::notification::Notification as _; -use lsp_types::Diagnostic; -use lsp_types::InitializeParams; -use lsp_types::InitializeResult; -use lsp_types::ServerInfo; -use std::env; -use std::time::Instant; - -pub fn start() -> Result<(), AnyError> { - info!("Starting Deno language server..."); - - let (connection, io_threads) = Connection::stdio(); - let (initialize_id, initialize_params) = connection.initialize_start()?; - let initialize_params: InitializeParams = - serde_json::from_value(initialize_params)?; - - let capabilities = - capabilities::server_capabilities(&initialize_params.capabilities); - - let version = format!( - "{} ({}, {})", - crate::version::deno(), - env!("PROFILE"), - env!("TARGET") - ); - - info!(" version: {}", version); - - let initialize_result = InitializeResult { - capabilities, - server_info: Some(ServerInfo { - name: "deno-language-server".to_string(), - version: Some(version), - }), - }; - let initialize_result = serde_json::to_value(initialize_result)?; +pub async fn start() -> Result<(), AnyError> { + let stdin = tokio::io::stdin(); + let stdout = tokio::io::stdout(); - connection.initialize_finish(initialize_id, initialize_result)?; + let (service, messages) = + LspService::new(language_server::LanguageServer::new); + Server::new(stdin, stdout) + .interleave(messages) + .serve(service) + .await; - if let Some(client_info) = initialize_params.client_info { - info!( - "Connected to \"{}\" {}", - client_info.name, - client_info.version.unwrap_or_default() - ); - } - - let mut config = Config::default(); - config.root_uri = initialize_params.root_uri.clone(); - if let Some(value) = initialize_params.initialization_options { - config.update(value)?; - } - config.update_capabilities(&initialize_params.capabilities); - - let mut server_state = state::ServerState::new(connection.sender, config); - - // TODO(@kitsonk) need to make this configurable, respect unstable - let ts_config = TsConfig::new(json!({ - "allowJs": true, - "experimentalDecorators": true, - "isolatedModules": true, - "lib": ["deno.ns", "deno.window"], - "module": "esnext", - "noEmit": true, - "strict": true, - "target": "esnext", - })); - let state = server_state.snapshot(); - tsc::request( - &mut server_state.ts_runtime, - &state, - tsc::RequestMethod::Configure(ts_config), - )?; - - // listen for events and run the main loop - server_state.run(connection.receiver)?; - - io_threads.join()?; - info!("Stop language server"); Ok(()) } - -impl ServerState { - fn handle_event(&mut self, event: Event) -> Result<(), 
AnyError> { - let received = Instant::now(); - debug!("handle_event({:?})", event); - - match event { - Event::Message(message) => match message { - Message::Request(request) => self.on_request(request, received)?, - Message::Notification(notification) => { - self.on_notification(notification)? - } - Message::Response(response) => self.complete_request(response), - }, - Event::Task(mut task) => loop { - match task { - Task::Response(response) => self.respond(response), - Task::Diagnostics((source, diagnostics_per_file)) => { - for (file_id, version, diagnostics) in diagnostics_per_file { - self.diagnostics.set( - file_id, - source.clone(), - version, - diagnostics, - ); - } - } - } - - task = match self.task_receiver.try_recv() { - Ok(task) => task, - Err(_) => break, - }; - }, - } - - // process server sent notifications, like diagnostics - // TODO(@kitsonk) currently all of these refresh all open documents, though - // in a lot of cases, like linting, we would only care about the files - // themselves that have changed - if self.process_changes() { - debug!("process changes"); - let state = self.snapshot(); - self.spawn(move || { - let diagnostics = diagnostics::generate_linting_diagnostics(&state); - Task::Diagnostics((DiagnosticSource::Lint, diagnostics)) - }); - // TODO(@kitsonk) isolates do not have Send to be safely sent between - // threads, so I am not sure this is the best way to handle queuing up of - // getting the diagnostics from the isolate. - let state = self.snapshot(); - let diagnostics = - diagnostics::generate_ts_diagnostics(&state, &mut self.ts_runtime)?; - self.spawn(move || { - Task::Diagnostics((DiagnosticSource::TypeScript, diagnostics)) - }); - } - - // process any changes to the diagnostics - if let Some(diagnostic_changes) = self.diagnostics.take_changes() { - debug!("diagnostics have changed"); - let state = self.snapshot(); - for file_id in diagnostic_changes { - let file_cache = state.file_cache.read().unwrap(); - // TODO(@kitsonk) not totally happy with the way we collect and store - // different types of diagnostics and offer them up to the client, we - // do need to send "empty" vectors though when a particular feature is - // disabled, otherwise the client will not clear down previous - // diagnostics - let mut diagnostics: Vec = if state.config.settings.lint { - self - .diagnostics - .diagnostics_for(file_id, DiagnosticSource::Lint) - .cloned() - .collect() - } else { - vec![] - }; - if state.config.settings.enable { - diagnostics.extend( - self - .diagnostics - .diagnostics_for(file_id, DiagnosticSource::TypeScript) - .cloned(), - ); - } - let specifier = file_cache.get_specifier(file_id); - let uri = specifier.as_url().clone(); - let version = if let Some(doc_data) = self.doc_data.get(specifier) { - doc_data.version - } else { - None - }; - self.send_notification::( - lsp_types::PublishDiagnosticsParams { - uri, - diagnostics, - version, - }, - ); - } - } - - Ok(()) - } - - fn on_notification( - &mut self, - notification: Notification, - ) -> Result<(), AnyError> { - NotificationDispatcher { - notification: Some(notification), - server_state: self, - } - // TODO(@kitsonk) this is just stubbed out and we don't currently actually - // cancel in progress work, though most of our work isn't long running - .on::(|state, params| { - let id: RequestId = match params.id { - lsp_types::NumberOrString::Number(id) => id.into(), - lsp_types::NumberOrString::String(id) => id.into(), - }; - state.cancel(id); - Ok(()) - })? 
- .on::(|state, params| { - if params.text_document.uri.scheme() == "deno" { - // we can ignore virtual text documents opening, as they don't need to - // be tracked in memory, as they are static assets that won't change - // already managed by the language service - return Ok(()); - } - let specifier = utils::normalize_url(params.text_document.uri); - if state - .doc_data - .insert( - specifier.clone(), - DocumentData::new( - specifier.clone(), - params.text_document.version, - ¶ms.text_document.text, - state.maybe_import_map.clone(), - ), - ) - .is_some() - { - error!("duplicate DidOpenTextDocument: {}", specifier); - } - state - .file_cache - .write() - .unwrap() - .set_contents(specifier, Some(params.text_document.text.into_bytes())); - - Ok(()) - })? - .on::(|state, params| { - let specifier = utils::normalize_url(params.text_document.uri); - let mut file_cache = state.file_cache.write().unwrap(); - let file_id = file_cache.lookup(&specifier).unwrap(); - let mut content = file_cache.get_contents(file_id)?; - apply_content_changes(&mut content, params.content_changes); - let doc_data = state.doc_data.get_mut(&specifier).unwrap(); - doc_data.update( - params.text_document.version, - &content, - state.maybe_import_map.clone(), - ); - file_cache.set_contents(specifier, Some(content.into_bytes())); - - Ok(()) - })? - .on::(|state, params| { - if params.text_document.uri.scheme() == "deno" { - // we can ignore virtual text documents opening, as they don't need to - // be tracked in memory, as they are static assets that won't change - // already managed by the language service - return Ok(()); - } - let specifier = utils::normalize_url(params.text_document.uri); - if state.doc_data.remove(&specifier).is_none() { - error!("orphaned document: {}", specifier); - } - // TODO(@kitsonk) should we do garbage collection on the diagnostics? - - Ok(()) - })? - .on::(|_state, _params| { - // nothing to do yet... cleanup things? - - Ok(()) - })? - .on::(|state, _params| { - state.send_request::( - lsp_types::ConfigurationParams { - items: vec![lsp_types::ConfigurationItem { - scope_uri: None, - section: Some("deno".to_string()), - }], - }, - |state, response| { - let Response { error, result, .. } = response; - - match (error, result) { - (Some(err), _) => { - error!("failed to fetch the extension settings: {:?}", err); - } - (None, Some(config)) => { - if let Some(config) = config.get(0) { - if let Err(err) = state.config.update(config.clone()) { - error!("failed to update settings: {}", err); - } - if let Err(err) = update_import_map(state) { - state - .send_notification::( - lsp_types::ShowMessageParams { - typ: lsp_types::MessageType::Warning, - message: err.to_string(), - }, - ); - } - } - } - (None, None) => { - error!("received empty extension settings from the client"); - } - } - }, - ); - - Ok(()) - })? - .on::(|state, params| { - // if the current import map has changed, we need to reload it - if let Some(import_map_uri) = &state.maybe_import_map_uri { - if params.changes.iter().any(|fe| import_map_uri == &fe.uri) { - update_import_map(state)?; - } - } - Ok(()) - })? 
- .finish(); - - Ok(()) - } - - fn on_request( - &mut self, - request: Request, - received: Instant, - ) -> Result<(), AnyError> { - self.register_request(&request, received); - - if self.shutdown_requested { - self.respond(Response::new_err( - request.id, - ErrorCode::InvalidRequest as i32, - "Shutdown already requested".to_string(), - )); - return Ok(()); - } - - if self.status == Status::Loading && request.method != "shutdown" { - self.respond(Response::new_err( - request.id, - ErrorCode::ContentModified as i32, - "Deno Language Server is still loading...".to_string(), - )); - return Ok(()); - } - - RequestDispatcher { - request: Some(request), - server_state: self, - } - .on_sync::(|s, ()| { - s.shutdown_requested = true; - Ok(()) - })? - .on_sync::( - handlers::handle_document_highlight, - )? - .on_sync::( - handlers::handle_goto_definition, - )? - .on_sync::(handlers::handle_hover)? - .on_sync::(handlers::handle_completion)? - .on_sync::(handlers::handle_references)? - .on_sync::( - handlers::handle_virtual_text_document, - )? - .on::(handlers::handle_formatting) - .finish(); - - Ok(()) - } - - /// Start consuming events from the provided receiver channel. - pub fn run(mut self, inbox: Receiver) -> Result<(), AnyError> { - // Check to see if we need to setup the import map - if let Err(err) = update_import_map(&mut self) { - self.send_notification::( - lsp_types::ShowMessageParams { - typ: lsp_types::MessageType::Warning, - message: err.to_string(), - }, - ); - } - - // we are going to watch all the JSON files in the workspace, and the - // notification handler will pick up any of the changes of those files we - // are interested in. - let watch_registration_options = - lsp_types::DidChangeWatchedFilesRegistrationOptions { - watchers: vec![lsp_types::FileSystemWatcher { - glob_pattern: "**/*.json".to_string(), - kind: Some(lsp_types::WatchKind::Change), - }], - }; - let registration = lsp_types::Registration { - id: "workspace/didChangeWatchedFiles".to_string(), - method: "workspace/didChangeWatchedFiles".to_string(), - register_options: Some( - serde_json::to_value(watch_registration_options).unwrap(), - ), - }; - self.send_request::( - lsp_types::RegistrationParams { - registrations: vec![registration], - }, - |_, _| (), - ); - - self.transition(Status::Ready); - - while let Some(event) = self.next_event(&inbox) { - if let Event::Message(Message::Notification(notification)) = &event { - if notification.method == lsp_types::notification::Exit::METHOD { - return Ok(()); - } - } - self.handle_event(event)? 
- } - - Err(custom_error( - "ClientError", - "Client exited without proper shutdown sequence.", - )) - } -} diff --git a/cli/lsp/sources.rs b/cli/lsp/sources.rs index 09b0a4cc8bac74..63b4ebd9946027 100644 --- a/cli/lsp/sources.rs +++ b/cli/lsp/sources.rs @@ -18,8 +18,6 @@ use std::collections::HashMap; use std::fs; use std::path::Path; use std::path::PathBuf; -use std::sync::Arc; -use std::sync::RwLock; use std::time::SystemTime; #[derive(Debug, Clone, Default)] @@ -34,7 +32,7 @@ struct Metadata { #[derive(Debug, Clone, Default)] pub struct Sources { http_cache: HttpCache, - maybe_import_map: Option>>, + maybe_import_map: Option, metadata: HashMap, redirects: HashMap, remotes: HashMap, @@ -102,7 +100,7 @@ impl Sources { &specifier, &source, &media_type, - None, + &None, ) { maybe_types = mt; Some(dependencies) @@ -132,7 +130,7 @@ impl Sources { Some(analysis::resolve_import( types, &specifier, - self.maybe_import_map.clone(), + &self.maybe_import_map, )) } else { None @@ -142,7 +140,7 @@ impl Sources { &specifier, &source, &media_type, - None, + &None, ) { if maybe_types.is_none() { maybe_types = mt; diff --git a/cli/lsp/state.rs b/cli/lsp/state.rs deleted file mode 100644 index ceb4325a193eb7..00000000000000 --- a/cli/lsp/state.rs +++ /dev/null @@ -1,395 +0,0 @@ -// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. - -use super::analysis; -use super::config::Config; -use super::diagnostics::DiagnosticCollection; -use super::diagnostics::DiagnosticSource; -use super::diagnostics::DiagnosticVec; -use super::memory_cache::MemoryCache; -use super::sources::Sources; -use super::tsc; -use super::utils::notification_is; - -use crate::deno_dir; -use crate::import_map::ImportMap; -use crate::media_type::MediaType; - -use crossbeam_channel::select; -use crossbeam_channel::unbounded; -use crossbeam_channel::Receiver; -use crossbeam_channel::Sender; -use deno_core::error::anyhow; -use deno_core::error::AnyError; -use deno_core::url::Url; -use deno_core::JsRuntime; -use deno_core::ModuleSpecifier; -use lsp_server::Message; -use lsp_server::Notification; -use lsp_server::Request; -use lsp_server::RequestId; -use lsp_server::Response; -use std::collections::HashMap; -use std::env; -use std::fmt; -use std::fs; -use std::sync::Arc; -use std::sync::RwLock; -use std::time::Instant; - -type ReqHandler = fn(&mut ServerState, Response); -type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>; - -pub fn update_import_map(state: &mut ServerState) -> Result<(), AnyError> { - if let Some(import_map_str) = &state.config.settings.import_map { - let import_map_url = if let Ok(url) = Url::from_file_path(import_map_str) { - Ok(url) - } else if let Some(root_uri) = &state.config.root_uri { - let root_path = root_uri - .to_file_path() - .map_err(|_| anyhow!("Bad root_uri: {}", root_uri))?; - let import_map_path = root_path.join(import_map_str); - Url::from_file_path(import_map_path).map_err(|_| { - anyhow!("Bad file path for import map: {:?}", import_map_str) - }) - } else { - Err(anyhow!( - "The path to the import map (\"{}\") is not resolvable.", - import_map_str - )) - }?; - let import_map_path = import_map_url - .to_file_path() - .map_err(|_| anyhow!("Bad file path."))?; - let import_map_json = - fs::read_to_string(import_map_path).map_err(|err| { - anyhow!( - "Failed to load the import map at: {}. 
[{}]", - import_map_url, - err - ) - })?; - let import_map = - ImportMap::from_json(&import_map_url.to_string(), &import_map_json)?; - state.maybe_import_map_uri = Some(import_map_url); - state.maybe_import_map = Some(Arc::new(RwLock::new(import_map))); - } else { - state.maybe_import_map = None; - } - Ok(()) -} - -pub enum Event { - Message(Message), - Task(Task), -} - -impl fmt::Debug for Event { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let debug_verbose_not = - |notification: &Notification, f: &mut fmt::Formatter| { - f.debug_struct("Notification") - .field("method", ¬ification.method) - .finish() - }; - - match self { - Event::Message(Message::Notification(notification)) => { - if notification_is::( - notification, - ) || notification_is::( - notification, - ) { - return debug_verbose_not(notification, f); - } - } - Event::Task(Task::Response(response)) => { - return f - .debug_struct("Response") - .field("id", &response.id) - .field("error", &response.error) - .finish(); - } - _ => (), - } - match self { - Event::Message(it) => fmt::Debug::fmt(it, f), - Event::Task(it) => fmt::Debug::fmt(it, f), - } - } -} - -#[derive(Eq, PartialEq, Copy, Clone)] -pub enum Status { - Loading, - Ready, -} - -impl Default for Status { - fn default() -> Self { - Status::Loading - } -} - -#[derive(Debug)] -pub enum Task { - Diagnostics((DiagnosticSource, DiagnosticVec)), - Response(Response), -} - -#[derive(Debug, Clone)] -pub struct DocumentData { - pub dependencies: Option>, - pub version: Option, - specifier: ModuleSpecifier, -} - -impl DocumentData { - pub fn new( - specifier: ModuleSpecifier, - version: i32, - source: &str, - maybe_import_map: Option>>, - ) -> Self { - let dependencies = if let Some((dependencies, _)) = - analysis::analyze_dependencies( - &specifier, - source, - &MediaType::from(&specifier), - maybe_import_map, - ) { - Some(dependencies) - } else { - None - }; - Self { - dependencies, - version: Some(version), - specifier, - } - } - - pub fn update( - &mut self, - version: i32, - source: &str, - maybe_import_map: Option>>, - ) { - self.dependencies = if let Some((dependencies, _)) = - analysis::analyze_dependencies( - &self.specifier, - source, - &MediaType::from(&self.specifier), - maybe_import_map, - ) { - Some(dependencies) - } else { - None - }; - self.version = Some(version) - } -} - -/// An immutable snapshot of the server state at a point in time. 
-#[derive(Debug, Clone, Default)] -pub struct ServerStateSnapshot { - pub assets: Arc>>>, - pub config: Config, - pub diagnostics: DiagnosticCollection, - pub doc_data: HashMap, - pub file_cache: Arc>, - pub sources: Arc>, -} - -pub struct ServerState { - pub assets: Arc>>>, - pub config: Config, - pub diagnostics: DiagnosticCollection, - pub doc_data: HashMap, - pub file_cache: Arc>, - pub maybe_import_map: Option>>, - pub maybe_import_map_uri: Option, - req_queue: ReqQueue, - sender: Sender, - pub sources: Arc>, - pub shutdown_requested: bool, - pub status: Status, - task_sender: Sender, - pub task_receiver: Receiver, - pub ts_runtime: JsRuntime, -} - -impl ServerState { - pub fn new(sender: Sender, config: Config) -> Self { - let (task_sender, task_receiver) = unbounded(); - let custom_root = env::var("DENO_DIR").map(String::into).ok(); - let dir = - deno_dir::DenoDir::new(custom_root).expect("could not access DENO_DIR"); - let location = dir.root.join("deps"); - let sources = Sources::new(&location); - // TODO(@kitsonk) we need to allow displaying diagnostics here, but the - // current compiler snapshot sends them to stdio which would totally break - // the language server... - let ts_runtime = tsc::start(false).expect("could not start tsc"); - - Self { - assets: Default::default(), - config, - diagnostics: Default::default(), - doc_data: Default::default(), - file_cache: Arc::new(RwLock::new(Default::default())), - maybe_import_map: None, - maybe_import_map_uri: None, - req_queue: Default::default(), - sender, - sources: Arc::new(RwLock::new(sources)), - shutdown_requested: false, - status: Default::default(), - task_receiver, - task_sender, - ts_runtime, - } - } - - pub fn cancel(&mut self, request_id: RequestId) { - if let Some(response) = self.req_queue.incoming.cancel(request_id) { - self.send(response.into()); - } - } - - pub fn complete_request(&mut self, response: Response) { - let handler = self.req_queue.outgoing.complete(response.id.clone()); - handler(self, response) - } - - pub fn next_event(&self, inbox: &Receiver) -> Option { - select! { - recv(inbox) -> msg => msg.ok().map(Event::Message), - recv(self.task_receiver) -> task => Some(Event::Task(task.unwrap())), - } - } - - /// Handle any changes and return a `bool` that indicates if there were - /// important changes to the state. 
- pub fn process_changes(&mut self) -> bool { - let mut file_cache = self.file_cache.write().unwrap(); - let changed_files = file_cache.take_changes(); - // other processing of changed files should be done here as needed - !changed_files.is_empty() - } - - pub fn register_request(&mut self, request: &Request, received: Instant) { - self - .req_queue - .incoming - .register(request.id.clone(), (request.method.clone(), received)); - } - - pub fn respond(&mut self, response: Response) { - if let Some((_, _)) = self.req_queue.incoming.complete(response.id.clone()) - { - self.send(response.into()); - } - } - - fn send(&mut self, message: Message) { - self.sender.send(message).unwrap() - } - - pub fn send_notification( - &mut self, - params: N::Params, - ) { - let notification = Notification::new(N::METHOD.to_string(), params); - self.send(notification.into()); - } - - pub fn send_request( - &mut self, - params: R::Params, - handler: ReqHandler, - ) { - let request = - self - .req_queue - .outgoing - .register(R::METHOD.to_string(), params, handler); - self.send(request.into()); - } - - pub fn snapshot(&self) -> ServerStateSnapshot { - ServerStateSnapshot { - assets: Arc::clone(&self.assets), - config: self.config.clone(), - diagnostics: self.diagnostics.clone(), - doc_data: self.doc_data.clone(), - file_cache: Arc::clone(&self.file_cache), - sources: Arc::clone(&self.sources), - } - } - - pub fn spawn(&mut self, task: F) - where - F: FnOnce() -> Task + Send + 'static, - { - let sender = self.task_sender.clone(); - tokio::task::spawn_blocking(move || sender.send(task()).unwrap()); - } - - pub fn transition(&mut self, new_status: Status) { - self.status = new_status; - } -} - -#[cfg(test)] -mod tests { - use super::*; - use deno_core::serde_json::json; - use deno_core::serde_json::Value; - use lsp_server::Connection; - use tempfile::TempDir; - - #[test] - fn test_update_import_map() { - let temp_dir = TempDir::new().expect("could not create temp dir"); - let import_map_path = temp_dir.path().join("import_map.json"); - let import_map_str = &import_map_path.to_string_lossy(); - fs::write( - import_map_path.clone(), - r#"{ - "imports": { - "denoland/": "https://deno.land/x/" - } - }"#, - ) - .expect("could not write file"); - let mut config = Config::default(); - config - .update(json!({ - "enable": false, - "config": Value::Null, - "lint": false, - "importMap": import_map_str, - "unstable": true, - })) - .expect("could not update config"); - let (connection, _) = Connection::memory(); - let mut state = ServerState::new(connection.sender, config); - let result = update_import_map(&mut state); - assert!(result.is_ok()); - assert!(state.maybe_import_map.is_some()); - let expected = - Url::from_file_path(import_map_path).expect("could not parse url"); - assert_eq!(state.maybe_import_map_uri, Some(expected)); - let import_map = state.maybe_import_map.unwrap(); - let import_map = import_map.read().unwrap(); - assert_eq!( - import_map - .resolve("denoland/mod.ts", "https://example.com/index.js") - .expect("bad response"), - Some( - ModuleSpecifier::resolve_url("https://deno.land/x/mod.ts") - .expect("could not create URL") - ) - ); - } -} diff --git a/cli/lsp/text.rs b/cli/lsp/text.rs index 5bca534c1bc7d1..a0bcb08d34ce27 100644 --- a/cli/lsp/text.rs +++ b/cli/lsp/text.rs @@ -4,7 +4,8 @@ use deno_core::serde_json::json; use deno_core::serde_json::Value; use dissimilar::diff; use dissimilar::Chunk; -use lsp_types::TextEdit; +use lspower::lsp_types; +use lspower::lsp_types::TextEdit; use std::ops::Bound; use 
std::ops::Range; use std::ops::RangeBounds; diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 5cbf1ecc56f9ff..4cd13f70d2ac0d 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -1,18 +1,21 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. use super::analysis::ResolvedImport; -use super::state::ServerStateSnapshot; +use super::language_server::StateSnapshot; use super::text; use super::utils; use crate::js; use crate::media_type::MediaType; +use crate::tokio_util::create_basic_runtime; use crate::tsc; use crate::tsc::ResolveArgs; use crate::tsc_config::TsConfig; +use deno_core::error::anyhow; use deno_core::error::custom_error; use deno_core::error::AnyError; +use deno_core::futures::Future; use deno_core::json_op_sync; use deno_core::serde::Deserialize; use deno_core::serde::Serialize; @@ -23,31 +26,89 @@ use deno_core::JsRuntime; use deno_core::ModuleSpecifier; use deno_core::OpFn; use deno_core::RuntimeOptions; +use lspower::lsp_types; use regex::Captures; use regex::Regex; use std::borrow::Cow; use std::collections::HashMap; +use std::thread; +use tokio::sync::mpsc; +use tokio::sync::oneshot; + +type Request = ( + RequestMethod, + StateSnapshot, + oneshot::Sender>, +); + +#[derive(Clone, Debug)] +pub struct TsServer(mpsc::UnboundedSender); + +impl TsServer { + pub fn new() -> Self { + let (tx, mut rx) = mpsc::unbounded_channel::(); + let _join_handle = thread::spawn(move || { + // TODO(@kitsonk) we need to allow displaying diagnostics here, but the + // current compiler snapshot sends them to stdio which would totally break + // the language server... + let mut ts_runtime = start(false).expect("could not start tsc"); + + let mut runtime = create_basic_runtime(); + runtime.block_on(async { + while let Some((req, state_snapshot, tx)) = rx.recv().await { + let value = request(&mut ts_runtime, state_snapshot, req); + if tx.send(value).is_err() { + warn!("Unable to send result to client."); + } + } + }) + }); + + Self(tx) + } + + pub async fn request( + &self, + snapshot: StateSnapshot, + req: RequestMethod, + ) -> Result { + let (tx, rx) = oneshot::channel::>(); + if self.0.send((req, snapshot, tx)).is_err() { + return Err(anyhow!("failed to send request to tsc thread")); + } + rx.await? + } +} /// Optionally returns an internal asset, first checking for any static assets /// in Rust, then checking any previously retrieved static assets from the /// isolate, and then finally, the tsc isolate itself. 
-pub fn get_asset( +pub async fn get_asset( specifier: &ModuleSpecifier, - runtime: &mut JsRuntime, - server_state: &ServerStateSnapshot, + ts_server: &TsServer, + state_snapshot: &StateSnapshot, ) -> Result, AnyError> { let specifier_str = specifier.to_string().replace("asset:///", ""); if let Some(asset_text) = tsc::get_asset(&specifier_str) { Ok(Some(asset_text.to_string())) } else { - let mut assets = server_state.assets.write().unwrap(); - if let Some(asset) = assets.get(specifier) { - Ok(asset.clone()) - } else { - let asset = request_asset(specifier, runtime, server_state)?; - assets.insert(specifier.clone(), asset.clone()); - Ok(asset) + { + let assets = state_snapshot.assets.read().unwrap(); + if let Some(asset) = assets.get(specifier) { + return Ok(asset.clone()); + } } + let asset: Option = serde_json::from_value( + ts_server + .request( + state_snapshot.clone(), + RequestMethod::GetAsset(specifier.clone()), + ) + .await?, + )?; + let mut assets = state_snapshot.assets.write().unwrap(); + assets.insert(specifier.clone(), asset.clone()); + Ok(asset) } } @@ -235,7 +296,7 @@ pub enum ScriptElementKind { impl From for lsp_types::CompletionItemKind { fn from(kind: ScriptElementKind) -> Self { - use lsp_types::CompletionItemKind; + use lspower::lsp_types::CompletionItemKind; match kind { ScriptElementKind::PrimitiveType | ScriptElementKind::Keyword => { @@ -395,21 +456,21 @@ pub struct DefinitionInfoAndBoundSpan { } impl DefinitionInfoAndBoundSpan { - pub fn to_definition( + pub async fn to_definition( &self, line_index: &[u32], - mut index_provider: F, + index_provider: F, ) -> Option where - F: FnMut(ModuleSpecifier) -> Vec, + F: Fn(ModuleSpecifier) -> Fut, + Fut: Future, AnyError>>, { if let Some(definitions) = &self.definitions { - let location_links = definitions - .iter() - .map(|di| { - let target_specifier = - ModuleSpecifier::resolve_url(&di.file_name).unwrap(); - let target_line_index = index_provider(target_specifier); + let mut location_links = Vec::::new(); + for di in definitions { + let target_specifier = + ModuleSpecifier::resolve_url(&di.file_name).unwrap(); + if let Ok(target_line_index) = index_provider(target_specifier).await { let target_uri = utils::normalize_file_name(&di.file_name).unwrap(); let (target_range, target_selection_range) = if let Some(context_span) = &di.context_span { @@ -423,15 +484,14 @@ impl DefinitionInfoAndBoundSpan { di.text_span.to_range(&target_line_index), ) }; - lsp_types::LocationLink { + location_links.push(lsp_types::LocationLink { origin_selection_range: Some(self.text_span.to_range(line_index)), target_uri, target_range, target_selection_range, - } - }) - .collect(); - + }); + } + } Some(lsp_types::GotoDefinitionResponse::Link(location_links)) } else { None @@ -599,17 +659,17 @@ struct State<'a> { asset: Option, last_id: usize, response: Option, - server_state: ServerStateSnapshot, + state_snapshot: StateSnapshot, snapshots: HashMap<(Cow<'a, str>, Cow<'a, str>), String>, } impl<'a> State<'a> { - fn new(server_state: ServerStateSnapshot) -> Self { + fn new(state_snapshot: StateSnapshot) -> Self { Self { asset: None, last_id: 1, response: None, - server_state, + state_snapshot, snapshots: Default::default(), } } @@ -626,9 +686,11 @@ fn cache_snapshot( .contains_key(&(specifier.clone().into(), version.clone().into())) { let s = ModuleSpecifier::resolve_url(&specifier)?; - let file_cache = state.server_state.file_cache.read().unwrap(); - let file_id = file_cache.lookup(&s).unwrap(); - let content = file_cache.get_contents(file_id)?; + 
let content = { + let file_cache = state.state_snapshot.file_cache.read().unwrap(); + let file_id = file_cache.lookup(&s).unwrap(); + file_cache.get_contents(file_id)? + }; state .snapshots .insert((specifier.into(), version.into()), content); @@ -713,7 +775,7 @@ fn get_change_range(state: &mut State, args: Value) -> Result { fn get_length(state: &mut State, args: Value) -> Result { let v: SourceSnapshotArgs = serde_json::from_value(args)?; let specifier = ModuleSpecifier::resolve_url(&v.specifier)?; - if state.server_state.doc_data.contains_key(&specifier) { + if state.state_snapshot.doc_data.contains_key(&specifier) { cache_snapshot(state, v.specifier.clone(), v.version.clone())?; let content = state .snapshots @@ -721,7 +783,7 @@ fn get_length(state: &mut State, args: Value) -> Result { .unwrap(); Ok(json!(content.chars().count())) } else { - let mut sources = state.server_state.sources.write().unwrap(); + let mut sources = state.state_snapshot.sources.write().unwrap(); Ok(json!(sources.get_length(&specifier).unwrap())) } } @@ -738,7 +800,7 @@ struct GetTextArgs { fn get_text(state: &mut State, args: Value) -> Result { let v: GetTextArgs = serde_json::from_value(args)?; let specifier = ModuleSpecifier::resolve_url(&v.specifier)?; - let content = if state.server_state.doc_data.contains_key(&specifier) { + let content = if state.state_snapshot.doc_data.contains_key(&specifier) { cache_snapshot(state, v.specifier.clone(), v.version.clone())?; state .snapshots @@ -746,7 +808,7 @@ fn get_text(state: &mut State, args: Value) -> Result { .unwrap() .clone() } else { - let mut sources = state.server_state.sources.write().unwrap(); + let mut sources = state.state_snapshot.sources.write().unwrap(); sources.get_text(&specifier).unwrap() }; Ok(json!(text::slice(&content, v.start..v.end))) @@ -756,13 +818,13 @@ fn resolve(state: &mut State, args: Value) -> Result { let v: ResolveArgs = serde_json::from_value(args)?; let mut resolved = Vec::>::new(); let referrer = ModuleSpecifier::resolve_url(&v.base)?; - let mut sources = if let Ok(sources) = state.server_state.sources.write() { + let mut sources = if let Ok(sources) = state.state_snapshot.sources.write() { sources } else { return Err(custom_error("Deadlock", "deadlock locking sources")); }; - if let Some(doc_data) = state.server_state.doc_data.get(&referrer) { + if let Some(doc_data) = state.state_snapshot.doc_data.get(&referrer) { if let Some(dependencies) = &doc_data.dependencies { for specifier in &v.specifiers { if specifier.starts_with("asset:///") { @@ -782,7 +844,7 @@ fn resolve(state: &mut State, args: Value) -> Result { if let ResolvedImport::Resolved(resolved_specifier) = resolved_import { if state - .server_state + .state_snapshot .doc_data .contains_key(&resolved_specifier) || sources.contains(&resolved_specifier) @@ -837,7 +899,7 @@ fn respond(state: &mut State, args: Value) -> Result { fn script_names(state: &mut State, _args: Value) -> Result { let script_names: Vec<&ModuleSpecifier> = - state.server_state.doc_data.keys().collect(); + state.state_snapshot.doc_data.keys().collect(); Ok(json!(script_names)) } @@ -850,13 +912,13 @@ struct ScriptVersionArgs { fn script_version(state: &mut State, args: Value) -> Result { let v: ScriptVersionArgs = serde_json::from_value(args)?; let specifier = ModuleSpecifier::resolve_url(&v.specifier)?; - let maybe_doc_data = state.server_state.doc_data.get(&specifier); + let maybe_doc_data = state.state_snapshot.doc_data.get(&specifier); if let Some(doc_data) = maybe_doc_data { if let Some(version) = 
doc_data.version { return Ok(json!(version.to_string())); } } else { - let mut sources = state.server_state.sources.write().unwrap(); + let mut sources = state.state_snapshot.sources.write().unwrap(); if let Some(version) = sources.get_script_version(&specifier) { return Ok(json!(version)); } @@ -889,7 +951,7 @@ pub fn start(debug: bool) -> Result { { let op_state = runtime.op_state(); let mut op_state = op_state.borrow_mut(); - op_state.put(State::new(ServerStateSnapshot::default())); + op_state.put(State::new(StateSnapshot::default())); } runtime.register_op("op_dispose", op(dispose)); @@ -1071,14 +1133,14 @@ impl RequestMethod { /// Send a request into a runtime and return the JSON value of the response. pub fn request( runtime: &mut JsRuntime, - server_state: &ServerStateSnapshot, + state_snapshot: StateSnapshot, method: RequestMethod, ) -> Result { let id = { let op_state = runtime.op_state(); let mut op_state = op_state.borrow_mut(); let state = op_state.borrow_mut::(); - state.server_state = server_state.clone(); + state.state_snapshot = state_snapshot; state.last_id += 1; state.last_id }; @@ -1101,40 +1163,16 @@ pub fn request( } } -fn request_asset( - specifier: &ModuleSpecifier, - runtime: &mut JsRuntime, - server_state: &ServerStateSnapshot, -) -> Result, AnyError> { - let id = { - let op_state = runtime.op_state(); - let mut op_state = op_state.borrow_mut(); - let state = op_state.borrow_mut::(); - state.server_state = server_state.clone(); - state.last_id += 1; - state.last_id - }; - let request_params = RequestMethod::GetAsset(specifier.clone()).to_value(id); - let request_src = format!("globalThis.serverRequest({});", request_params); - runtime.execute("[native_code]", &request_src)?; - - let op_state = runtime.op_state(); - let mut op_state = op_state.borrow_mut(); - let state = op_state.borrow_mut::(); - - Ok(state.asset.clone()) -} - #[cfg(test)] mod tests { use super::super::memory_cache::MemoryCache; - use super::super::state::DocumentData; use super::*; + use crate::lsp::language_server::DocumentData; use std::collections::HashMap; use std::sync::Arc; use std::sync::RwLock; - fn mock_server_state(sources: Vec<(&str, &str, i32)>) -> ServerStateSnapshot { + fn mock_state_snapshot(sources: Vec<(&str, &str, i32)>) -> StateSnapshot { let mut doc_data = HashMap::new(); let mut file_cache = MemoryCache::default(); for (specifier, content, version) in sources { @@ -1147,10 +1185,8 @@ mod tests { file_cache.set_contents(specifier, Some(content.as_bytes().to_vec())); } let file_cache = Arc::new(RwLock::new(file_cache)); - ServerStateSnapshot { + StateSnapshot { assets: Default::default(), - config: Default::default(), - diagnostics: Default::default(), doc_data, file_cache, sources: Default::default(), @@ -1161,20 +1197,20 @@ mod tests { debug: bool, config: Value, sources: Vec<(&str, &str, i32)>, - ) -> (JsRuntime, ServerStateSnapshot) { - let server_state = mock_server_state(sources.clone()); + ) -> (JsRuntime, StateSnapshot) { + let state_snapshot = mock_state_snapshot(sources.clone()); let mut runtime = start(debug).expect("could not start server"); let ts_config = TsConfig::new(config); assert_eq!( request( &mut runtime, - &server_state, + state_snapshot.clone(), RequestMethod::Configure(ts_config) ) .expect("failed request"), json!(true) ); - (runtime, server_state) + (runtime, state_snapshot) } #[test] @@ -1207,7 +1243,7 @@ mod tests { #[test] fn test_project_reconfigure() { - let (mut runtime, server_state) = setup( + let (mut runtime, state_snapshot) = setup( false, 
json!({ "target": "esnext", @@ -1224,7 +1260,7 @@ mod tests { })); let result = request( &mut runtime, - &server_state, + state_snapshot, RequestMethod::Configure(ts_config), ); assert!(result.is_ok()); @@ -1234,7 +1270,7 @@ mod tests { #[test] fn test_get_semantic_diagnostics() { - let (mut runtime, server_state) = setup( + let (mut runtime, state_snapshot) = setup( false, json!({ "target": "esnext", @@ -1247,7 +1283,7 @@ mod tests { .expect("could not resolve url"); let result = request( &mut runtime, - &server_state, + state_snapshot, RequestMethod::GetSemanticDiagnostics(specifier), ); assert!(result.is_ok()); @@ -1276,7 +1312,7 @@ mod tests { #[test] fn test_module_resolution() { - let (mut runtime, server_state) = setup( + let (mut runtime, state_snapshot) = setup( false, json!({ "target": "esnext", @@ -1300,7 +1336,7 @@ mod tests { .expect("could not resolve url"); let result = request( &mut runtime, - &server_state, + state_snapshot, RequestMethod::GetSemanticDiagnostics(specifier), ); assert!(result.is_ok()); @@ -1310,7 +1346,7 @@ mod tests { #[test] fn test_bad_module_specifiers() { - let (mut runtime, server_state) = setup( + let (mut runtime, state_snapshot) = setup( false, json!({ "target": "esnext", @@ -1330,7 +1366,7 @@ mod tests { .expect("could not resolve url"); let result = request( &mut runtime, - &server_state, + state_snapshot, RequestMethod::GetSyntacticDiagnostics(specifier), ); assert!(result.is_ok()); @@ -1340,7 +1376,7 @@ mod tests { #[test] fn test_remote_modules() { - let (mut runtime, server_state) = setup( + let (mut runtime, state_snapshot) = setup( false, json!({ "target": "esnext", @@ -1364,7 +1400,7 @@ mod tests { .expect("could not resolve url"); let result = request( &mut runtime, - &server_state, + state_snapshot, RequestMethod::GetSyntacticDiagnostics(specifier), ); assert!(result.is_ok()); @@ -1374,7 +1410,7 @@ mod tests { #[test] fn test_partial_modules() { - let (mut runtime, server_state) = setup( + let (mut runtime, state_snapshot) = setup( false, json!({ "target": "esnext", @@ -1401,7 +1437,7 @@ mod tests { .expect("could not resolve url"); let result = request( &mut runtime, - &server_state, + state_snapshot, RequestMethod::GetSyntacticDiagnostics(specifier), ); assert!(result.is_ok()); @@ -1428,7 +1464,7 @@ mod tests { #[test] fn test_request_asset() { - let (mut runtime, server_state) = setup( + let (mut runtime, state_snapshot) = setup( false, json!({ "target": "esnext", @@ -1440,9 +1476,14 @@ mod tests { ); let specifier = ModuleSpecifier::resolve_url("asset:///lib.esnext.d.ts") .expect("could not resolve url"); - let result = request_asset(&specifier, &mut runtime, &server_state); + let result = request( + &mut runtime, + state_snapshot, + RequestMethod::GetAsset(specifier), + ); assert!(result.is_ok()); - let response = result.unwrap(); + let response: Option = + serde_json::from_value(result.unwrap()).unwrap(); assert!(response.is_some()); } } diff --git a/cli/lsp/utils.rs b/cli/lsp/utils.rs index 0c3d5a635c9810..3bdd00875acdc1 100644 --- a/cli/lsp/utils.rs +++ b/cli/lsp/utils.rs @@ -1,71 +1,9 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
-use deno_core::error::custom_error; use deno_core::error::AnyError; -use deno_core::serde_json::Value; use deno_core::url::Position; use deno_core::url::Url; use deno_core::ModuleSpecifier; -use lsp_server::Notification; -use serde::de::DeserializeOwned; -use std::error::Error; -use std::fmt; - -// TODO(@kitsonk) support actually supporting cancellation requests from the -// client. - -pub struct Canceled { - _private: (), -} - -impl Canceled { - #[allow(unused)] - pub fn new() -> Self { - Self { _private: () } - } - - #[allow(unused)] - pub fn throw() -> ! { - std::panic::resume_unwind(Box::new(Canceled::new())) - } -} - -impl fmt::Display for Canceled { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "cancelled") - } -} - -impl fmt::Debug for Canceled { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "Canceled") - } -} - -impl Error for Canceled {} - -pub fn from_json( - what: &'static str, - json: Value, -) -> Result { - let response = T::deserialize(&json).map_err(|err| { - custom_error( - "DeserializeFailed", - format!("Failed to deserialize {}: {}; {}", what, err, json), - ) - })?; - Ok(response) -} - -pub fn is_canceled(e: &(dyn Error + 'static)) -> bool { - e.downcast_ref::().is_some() -} - -pub fn notification_is( - notification: &Notification, -) -> bool { - notification.method == N::METHOD -} /// Normalizes a file name returned from the TypeScript compiler into a URI that /// should be sent by the language server to the client. diff --git a/cli/main.rs b/cli/main.rs index cd682498e859a9..55f1ac9ce0c17d 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -416,7 +416,7 @@ async fn install_command( } async fn language_server_command() -> Result<(), AnyError> { - lsp::start() + lsp::start().await } async fn lint_command( diff --git a/cli/tests/lsp_tests.rs b/cli/tests/lsp_tests.rs deleted file mode 100644 index 7de655ac80fe05..00000000000000 --- a/cli/tests/lsp_tests.rs +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. - -///! -///! Integration test for the Deno Language Server (`deno lsp`) -///! 
-use std::fs; -use std::io::Read; -use std::io::Write; -use std::process::Stdio; - -struct LspIntegrationTest { - pub fixtures: Vec<&'static str>, -} - -impl LspIntegrationTest { - pub fn run(&self) -> (String, String) { - let root_path = test_util::root_path(); - let deno_exe = test_util::deno_exe_path(); - let tests_dir = root_path.join("cli/tests/lsp"); - println!("tests_dir: {:?} deno_exe: {:?}", tests_dir, deno_exe); - let mut command = test_util::deno_cmd(); - command - .arg("lsp") - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()); - - let process = command.spawn().expect("failed to execute deno"); - - for fixture in &self.fixtures { - let mut stdin = process.stdin.as_ref().unwrap(); - let fixture_path = tests_dir.join(fixture); - let content = - fs::read_to_string(&fixture_path).expect("could not read fixture"); - let content_length = content.chars().count(); - write!( - stdin, - "Content-Length: {}\r\n\r\n{}", - content_length, content - ) - .unwrap(); - } - - let mut so = String::new(); - process.stdout.unwrap().read_to_string(&mut so).unwrap(); - - let mut se = String::new(); - process.stderr.unwrap().read_to_string(&mut se).unwrap(); - - (so, se) - } -} - -#[test] -fn test_lsp_startup_shutdown() { - let test = LspIntegrationTest { - fixtures: vec![ - "initialize_request.json", - "initialized_notification.json", - "shutdown_request.json", - "exit_notification.json", - ], - }; - let (response, out) = test.run(); - assert!(response.contains("deno-language-server")); - assert!(out.contains("Connected to \"test-harness\" 1.0.0")); -} - -#[test] -fn test_lsp_hover() { - // a straight forward integration tests starts up the lsp, opens a document - // which logs `Deno.args` to the console, and hovers over the `args` property - // to get the intellisense about it, which is a total end-to-end test that - // includes sending information in and out of the TypeScript compiler. - let test = LspIntegrationTest { - fixtures: vec![ - "initialize_request.json", - "initialized_notification.json", - "did_open_notification.json", - "hover_request.json", - "shutdown_request.json", - "exit_notification.json", - ], - }; - let (response, out) = test.run(); - assert!(response.contains("const Deno.args: string[]")); - assert!(out.contains("Connected to \"test-harness\" 1.0.0")); -} diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index 0be0fdc2c7f3a9..9b08dee93c1fad 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -492,10 +492,7 @@ delete Object.prototype.__proto__; request.specifier, ts.ScriptTarget.ESNext, ); - return core.jsonOpSync( - "op_set_asset", - { text: sourceFile && sourceFile.text }, - ); + return respond(id, sourceFile && sourceFile.text); } case "getSemanticDiagnostics": { const diagnostics = languageService.getSemanticDiagnostics( From b1c2d219353edafa2bdc95ac2b4dbab5d4b7f459 Mon Sep 17 00:00:00 2001 From: Liam Perlaki Date: Mon, 21 Dec 2020 16:29:50 +0100 Subject: [PATCH 094/135] refactor(runtime): remove warp dependency (#8813) This commit replaces the "warp" web server in the "deno_runtime" crate with a "hyper" server and a "tokio-tungstenite" websocket implementation. 
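
For context, a minimal sketch of the core pattern this patch introduces — driving hyper with an executor that spawns onto the current thread, so non-Send connection futures can run on a single-threaded runtime. It assumes hyper 0.13 and tokio 0.2 (the versions pinned in runtime/Cargo.toml below); the address and the trivial handler are illustrative, not part of the patch:

use std::convert::Infallible;
use std::future::Future;
use std::net::SocketAddr;

// hyper normally spawns each connection with tokio::spawn, which requires
// Send futures; this executor uses spawn_local so !Send futures are allowed.
#[derive(Clone)]
struct LocalExecutor;

impl<Fut> hyper::rt::Executor<Fut> for LocalExecutor
where
  Fut: Future + 'static,
  Fut::Output: 'static,
{
  fn execute(&self, fut: Fut) {
    tokio::task::spawn_local(fut);
  }
}

// Must be polled from inside a tokio::task::LocalSet (the patch does this by
// block_on-ing a LocalSet on the dedicated inspector thread).
async fn serve(addr: SocketAddr) {
  let make_svc = hyper::service::make_service_fn(|_| async {
    Ok::<_, Infallible>(hyper::service::service_fn(|_req| async {
      Ok::<_, Infallible>(hyper::Response::new(hyper::Body::from("ok")))
    }))
  });

  // Build the server manually so it uses LocalExecutor instead of tokio::spawn.
  hyper::server::Builder::new(
    hyper::server::conn::AddrIncoming::bind(&addr).unwrap(),
    hyper::server::conn::Http::new().with_executor(LocalExecutor),
  )
  .serve(make_svc)
  .await
  .unwrap();
}
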
--- Cargo.lock | 16 ++- runtime/Cargo.toml | 6 +- runtime/inspector.rs | 228 ++++++++++++++++++++++++++++--------------- 3 files changed, 164 insertions(+), 86 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c760e19a852a16..367ec806e9e31f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -592,6 +592,7 @@ dependencies = [ "filetime", "fwdansi", "http", + "hyper", "indexmap", "lazy_static", "libc", @@ -612,7 +613,6 @@ dependencies = [ "tokio-rustls", "tokio-tungstenite", "uuid", - "warp", "webpki", "webpki-roots", "winapi 0.3.9", @@ -1146,6 +1146,12 @@ version = "1.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd179ae861f0c2e53da70d892f5f3029f9594be0c41dc5269cd371691b1dc2f9" +[[package]] +name = "httpdate" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47" + [[package]] name = "humantime" version = "1.3.0" @@ -1157,9 +1163,9 @@ dependencies = [ [[package]] name = "hyper" -version = "0.13.7" +version = "0.13.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e68a8dd9716185d9e64ea473ea6ef63529252e3e27623295a0378a19665d5eb" +checksum = "f6ad767baac13b44d4529fcf58ba2cd0995e36e7b435bc5b039de6f47e880dbf" dependencies = [ "bytes 0.5.6", "futures-channel", @@ -1169,10 +1175,10 @@ dependencies = [ "http", "http-body", "httparse", + "httpdate", "itoa", - "pin-project 0.4.23", + "pin-project 1.0.2", "socket2", - "time", "tokio 0.2.22", "tower-service", "tracing", diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 1e5c9511fd4f34..c62bce9947215e 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -55,13 +55,9 @@ sys-info = "0.7.0" termcolor = "1.1.0" tokio = { version = "0.2.22", features = ["full"] } tokio-rustls = "0.14.1" -# Keep in-sync with warp. tokio-tungstenite = "0.11.0" uuid = { version = "0.8.1", features = ["v4"] } -# TODO(bartlomieju): remove dependency on warp, it's only used -# for a WebSocket server in inspector.rs -# Keep in-sync with tokio-tungestenite. -warp = { version = "0.2.5", features = ["tls"] } +hyper = "0.13.9" webpki = "0.21.3" webpki-roots = "=0.19.0" # Pinned to v0.19.0 to match 'reqwest'. 
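
The inspector.rs diff below also replaces warp's `ws` filter with a hand-rolled upgrade. A condensed sketch of that flow, assuming hyper 0.13 and tokio-tungstenite 0.11; error handling and the inspector message pump are elided:

use tokio_tungstenite::tungstenite;

// Accept a WebSocket connection without warp's `ws` filter: answer the HTTP
// upgrade handshake by hand, then wrap the upgraded connection ourselves.
fn accept_ws(
  req: http::Request<hyper::Body>,
) -> http::Response<hyper::Body> {
  let (parts, body) = req.into_parts();
  let req = http::Request::from_parts(parts, ());

  // Build the "101 Switching Protocols" response for the handshake.
  let resp = tungstenite::handshake::server::create_response(&req)
    .map(|resp| resp.map(|_| hyper::Body::empty()))
    .unwrap();

  // Once hyper hands over the raw connection, speak the WebSocket protocol.
  tokio::task::spawn_local(async move {
    let upgraded = body.on_upgrade().await.unwrap();
    let _websocket = tokio_tungstenite::WebSocketStream::from_raw_socket(
      upgraded,
      tungstenite::protocol::Role::Server,
      None,
    )
    .await;
    // ...pump messages between the socket and the inspector session here.
  });

  resp
}
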
diff --git a/runtime/inspector.rs b/runtime/inspector.rs index fc0e793d9d4375..58df05c54a847c 100644 --- a/runtime/inspector.rs +++ b/runtime/inspector.rs @@ -11,7 +11,7 @@ use deno_core::futures::channel::mpsc; use deno_core::futures::channel::mpsc::UnboundedReceiver; use deno_core::futures::channel::mpsc::UnboundedSender; use deno_core::futures::channel::oneshot; -use deno_core::futures::future::Future; +use deno_core::futures::future::{self, Future}; use deno_core::futures::pin_mut; use deno_core::futures::prelude::*; use deno_core::futures::select; @@ -23,8 +23,6 @@ use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::v8; -use std::cell::BorrowMutError; -use std::cell::RefCell; use std::collections::HashMap; use std::ffi::c_void; use std::mem::replace; @@ -40,9 +38,10 @@ use std::ptr::NonNull; use std::sync::Arc; use std::sync::Mutex; use std::thread; +use std::{cell::BorrowMutError, convert::Infallible}; +use std::{cell::RefCell, rc::Rc}; +use tokio_tungstenite::tungstenite; use uuid::Uuid; -use warp::filters::ws; -use warp::Filter; pub struct InspectorServer { pub host: SocketAddr, @@ -59,12 +58,12 @@ impl InspectorServer { let (shutdown_server_tx, shutdown_server_rx) = oneshot::channel(); let thread_handle = thread::spawn(move || { - crate::tokio_util::run_basic(server( - host, - register_inspector_rx, - shutdown_server_rx, - name, - )) + let mut rt = crate::tokio_util::create_basic_runtime(); + let local = tokio::task::LocalSet::new(); + local.block_on( + &mut rt, + server(host, register_inspector_rx, shutdown_server_rx, name), + ) }); Self { @@ -142,95 +141,172 @@ impl InspectorInfo { } } +// Needed so hyper can use non Send futures +#[derive(Clone)] +struct LocalExecutor; + +impl hyper::rt::Executor for LocalExecutor +where + Fut: Future + 'static, + Fut::Output: 'static, +{ + fn execute(&self, fut: Fut) { + tokio::task::spawn_local(fut); + } +} + +fn handle_ws_request( + req: http::Request, + inspector_map: Rc>>, +) -> http::Result> { + let (parts, body) = req.into_parts(); + let req = http::Request::from_parts(parts, ()); + + if let Some(new_websocket_tx) = req + .uri() + .path() + .strip_prefix("/ws/") + .and_then(|s| Uuid::parse_str(s).ok()) + .and_then(|uuid| { + inspector_map + .borrow() + .get(&uuid) + .map(|info| info.new_websocket_tx.clone()) + }) + { + let resp = tungstenite::handshake::server::create_response(&req) + .map(|resp| resp.map(|_| hyper::Body::empty())) + .or_else(|e| match e { + tungstenite::error::Error::HttpFormat(http_error) => Err(http_error), + _ => http::Response::builder() + .status(http::StatusCode::BAD_REQUEST) + .body("Not a valid Websocket Request".into()), + }); + tokio::task::spawn_local(async move { + let upgraded = body.on_upgrade().await.unwrap(); + let websocket = tokio_tungstenite::WebSocketStream::from_raw_socket( + upgraded, + tungstenite::protocol::Role::Server, + None, + ) + .await; + let (proxy, pump) = create_websocket_proxy(websocket); + + let _ = new_websocket_tx.unbounded_send(proxy); + pump.await; + }); + + resp + } else { + http::Response::builder() + .status(http::StatusCode::NOT_FOUND) + .body("No Valid inspector".into()) + } +} + +fn handle_json_request( + inspector_map: Rc>>, +) -> http::Result> { + let data = inspector_map + .borrow() + .values() + .map(|info| info.get_json_metadata()) + .collect::>(); + http::Response::builder() + .status(http::StatusCode::OK) + .header(http::header::CONTENT_TYPE, "application/json") + 
.body(serde_json::to_string(&data).unwrap().into()) +} + +fn handle_json_version_request( + version_response: Value, +) -> http::Result> { + http::Response::builder() + .status(http::StatusCode::OK) + .header(http::header::CONTENT_TYPE, "application/json") + .body(serde_json::to_string(&version_response).unwrap().into()) +} + async fn server( host: SocketAddr, register_inspector_rx: UnboundedReceiver, shutdown_server_rx: oneshot::Receiver<()>, name: String, ) { - // TODO: put the `inspector_map` in an `Rc>` instead. This is - // currently not possible because warp requires all filters to implement - // `Send`, which should not be necessary because we are using the - // single-threaded Tokio runtime. - let inspector_map = HashMap::::new(); - let inspector_map = Arc::new(Mutex::new(inspector_map)); - - let inspector_map_ = inspector_map.clone(); + let inspector_map_ = + Rc::new(RefCell::new(HashMap::::new())); + + let inspector_map = Rc::clone(&inspector_map_); let register_inspector_handler = register_inspector_rx .map(|info| { eprintln!( "Debugger listening on {}", info.get_websocket_debugger_url() ); - let mut g = inspector_map_.lock().unwrap(); - if g.insert(info.uuid, info).is_some() { + if inspector_map.borrow_mut().insert(info.uuid, info).is_some() { panic!("Inspector UUID already in map"); } }) .collect::<()>(); - let inspector_map_ = inspector_map_.clone(); + let inspector_map = Rc::clone(&inspector_map_); let deregister_inspector_handler = future::poll_fn(|cx| { - let mut g = inspector_map_.lock().unwrap(); - g.retain(|_, info| info.canary_rx.poll_unpin(cx) == Poll::Pending); + inspector_map + .borrow_mut() + .retain(|_, info| info.canary_rx.poll_unpin(cx) == Poll::Pending); Poll::::Pending }) .fuse(); - let inspector_map_ = inspector_map.clone(); - let websocket_route = warp::path("ws") - .and(warp::path::param()) - .and(warp::ws()) - .and_then(move |uuid: String, ws: warp::ws::Ws| { - future::ready( - Uuid::parse_str(&uuid) - .ok() - .and_then(|uuid| { - let g = inspector_map_.lock().unwrap(); - g.get(&uuid).map(|info| info.new_websocket_tx.clone()).map( - |new_websocket_tx| { - ws.on_upgrade(move |websocket| async move { - let (proxy, pump) = create_websocket_proxy(websocket); - let _ = new_websocket_tx.unbounded_send(proxy); - pump.await; - }) - }, - ) - }) - .ok_or_else(warp::reject::not_found), - ) - }); - let json_version_response = json!({ "Browser": name, "Protocol-Version": "1.3", "V8-Version": deno_core::v8_version(), }); - let json_version_route = warp::path!("json" / "version") - .map(move || warp::reply::json(&json_version_response)); - - let inspector_map_ = inspector_map.clone(); - let json_list_route = warp::path("json").map(move || { - let g = inspector_map_.lock().unwrap(); - let json_values = g - .values() - .map(|info| info.get_json_metadata()) - .collect::>(); - warp::reply::json(&json!(json_values)) + + let make_svc = hyper::service::make_service_fn(|_| { + let inspector_map = Rc::clone(&inspector_map_); + let json_version_response = json_version_response.clone(); + + future::ok::<_, Infallible>(hyper::service::service_fn( + move |req: http::Request| { + future::ready({ + match (req.method(), req.uri().path()) { + (&http::Method::GET, path) if path.starts_with("/ws/") => { + handle_ws_request(req, inspector_map.clone()) + } + (&http::Method::GET, "/json") => { + handle_json_request(inspector_map.clone()) + } + (&http::Method::GET, "/json/version") => { + handle_json_version_request(json_version_response.clone()) + } + _ => http::Response::builder() + 
.status(http::StatusCode::NOT_FOUND) + .body("Not Found".into()), + } + }) + }, + )) }); - let server_routes = - websocket_route.or(json_version_route).or(json_list_route); - let server_handler = warp::serve(server_routes) - .try_bind_with_graceful_shutdown(host, async { - shutdown_server_rx.await.ok(); - }) - .map(|(_, fut)| fut) - .unwrap_or_else(|err| { - eprintln!("Cannot start inspector server: {}.", err); + // Create the server manually so it can use the Local Executor + let server_handler = hyper::server::Builder::new( + hyper::server::conn::AddrIncoming::bind(&host).unwrap_or_else(|e| { + eprintln!("Cannot start inspector server: {}.", e); process::exit(1); - }) - .fuse(); + }), + hyper::server::conn::Http::new().with_executor(LocalExecutor), + ) + .serve(make_svc) + .with_graceful_shutdown(async { + shutdown_server_rx.await.ok(); + }) + .unwrap_or_else(|err| { + eprintln!("Cannot start inspector server: {}.", err); + process::exit(1); + }) + .fuse(); pin_mut!(register_inspector_handler); pin_mut!(deregister_inspector_handler); @@ -243,9 +319,9 @@ async fn server( } } -type WebSocketProxySender = UnboundedSender; +type WebSocketProxySender = UnboundedSender; type WebSocketProxyReceiver = - UnboundedReceiver>; + UnboundedReceiver>; /// Encapsulates an UnboundedSender/UnboundedReceiver pair that together form /// a duplex channel for sending/receiving websocket messages. @@ -273,7 +349,7 @@ impl WebSocketProxy { /// be used to send/receive messages on the websocket, and the second element /// is a future that does the forwarding. fn create_websocket_proxy( - websocket: ws::WebSocket, + websocket: tokio_tungstenite::WebSocketStream, ) -> (WebSocketProxy, impl Future + Send) { // The 'outbound' channel carries messages sent to the websocket. let (outbound_tx, outbound_rx) = mpsc::unbounded(); @@ -759,8 +835,8 @@ impl DenoInspectorSession { let result = websocket_rx .map_ok(move |msg| { - let msg = msg.as_bytes(); - let msg = v8::inspector::StringView::from(msg); + let msg = msg.into_data(); + let msg = v8::inspector::StringView::from(msg.as_slice()); unsafe { &mut *self_ptr }.dispatch_protocol_message(msg); }) .try_collect::<()>() @@ -776,7 +852,7 @@ impl DenoInspectorSession { fn send_to_websocket(&self, msg: v8::UniquePtr) { let msg = msg.unwrap().string().to_string(); - let msg = ws::Message::text(msg); + let msg = tungstenite::Message::text(msg); let _ = self.websocket_tx.unbounded_send(msg); } From 4033b390367e025e73c851a6ce7021631b27e3c0 Mon Sep 17 00:00:00 2001 From: Liam Murphy <43807659+Liamolucko@users.noreply.github.com> Date: Tue, 22 Dec 2020 02:30:59 +1100 Subject: [PATCH 095/135] refactor: rewrite chown_test.ts not to depend on python (#8843) This commit rewrites "chown_test.ts" to use the GNU "id" command instead of python. This won't work on Windows, but these tests aren't currently run on Windows anyway. 
--- cli/tests/unit/chown_test.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cli/tests/unit/chown_test.ts b/cli/tests/unit/chown_test.ts index acdb736cb9242f..d85f5d3b2c9959 100644 --- a/cli/tests/unit/chown_test.ts +++ b/cli/tests/unit/chown_test.ts @@ -12,11 +12,11 @@ async function getUidAndGid(): Promise<{ uid: number; gid: number }> { // get the user ID and group ID of the current process const uidProc = Deno.run({ stdout: "piped", - cmd: ["python", "-c", "import os; print(os.getuid())"], + cmd: ["id", "-u"], }); const gidProc = Deno.run({ stdout: "piped", - cmd: ["python", "-c", "import os; print(os.getgid())"], + cmd: ["id", "-g"], }); assertEquals((await uidProc.status()).code, 0); From 1e144ec022df4b824a6a1d8061e1420e57baccea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 22 Dec 2020 00:57:03 +0100 Subject: [PATCH 096/135] upgrade: deno_doc, deno_lint, dprint, swc (#8849) --- Cargo.lock | 52 +++++++++++++++++++++++++------------------------- cli/Cargo.toml | 10 +++++----- cli/ast.rs | 1 + 3 files changed, 32 insertions(+), 31 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 367ec806e9e31f..3f6dd2d259303c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -536,9 +536,9 @@ dependencies = [ [[package]] name = "deno_doc" -version = "0.1.18" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a347f4b721c3f4a3459f3510826b7c54855a8309857e57a63ec10b87c5dcbe9" +checksum = "68b5a8d931f83afd08af0ed58a3a95d23d6ed5122934db9fa0625bc15a8bed02" dependencies = [ "futures", "lazy_static", @@ -561,9 +561,9 @@ dependencies = [ [[package]] name = "deno_lint" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5945fdac793b4f374e2368e5009131cb4c2beca017911ac3ac0de1a87c85bbdb" +checksum = "e5fb9a8a07a58414172fdf46eaa88551f238575f52a2fd39bebbfa00626be608" dependencies = [ "anyhow", "derive_more", @@ -698,9 +698,9 @@ dependencies = [ [[package]] name = "dprint-plugin-typescript" -version = "0.35.1" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6f3888ce8b87a27670cbe4669dd3ae769c0a95a8b50600b76afac3e022da43" +checksum = "8344f63ab5f12f9f8cffa78c90e0aad001361cdddfa12bb733e2d5f0424c36d6" dependencies = [ "dprint-core", "serde", @@ -1317,9 +1317,9 @@ dependencies = [ [[package]] name = "jsdoc" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18e220ced55f8e93f0a3fb6b6f7c5829235299d818dac283535760a4182b5f0a" +checksum = "8dd6d090c0140363cb94f672def08187d731ecdf28b84a1fc8e55386a2ca2c77" dependencies = [ "nom", "serde", @@ -2622,9 +2622,9 @@ dependencies = [ [[package]] name = "swc_bundler" -version = "0.17.6" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b09f66f03e88c271bdd4d1f7c92b86ba2521597e1f0f2245c9f0cba3e857127" +checksum = "65fac08904af807442e8f08c50f46a95d346a09db69a94f83670f5ae7e5945ec" dependencies = [ "anyhow", "crc", @@ -2669,9 +2669,9 @@ dependencies = [ [[package]] name = "swc_ecma_ast" -version = "0.35.0" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39d14f5b7769eb53a98276475ca72d188f5fa0fbf1819d3e9241b9e295ae0542" +checksum = "b65d3a2852e7c496d3e1dfc38719a31e90ae03a58ce891faf48ccb9c1ddff7ee" dependencies = [ "enum_kind", "is-macro", @@ -2684,9 +2684,9 @@ dependencies = [ [[package]] name = "swc_ecma_codegen" 
-version = "0.41.4" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c50c98de260a5f33084211ce488c64acb052fc29851d917ee270fae498960128" +checksum = "19ae0f17cc128399269387d00e9aa6731f7a5397d6e86fd4262a941dc219b5aa" dependencies = [ "bitflags", "num-bigint", @@ -2713,9 +2713,9 @@ dependencies = [ [[package]] name = "swc_ecma_dep_graph" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "195f056055fd028a3824f5b74011252cead88a1915213ab574f50c6a7be33a1c" +checksum = "da3afd18873ffd61f995b182be524d20cfd051014ac8a4641c7b10ec4b1621bf" dependencies = [ "swc_atoms", "swc_common", @@ -2725,9 +2725,9 @@ dependencies = [ [[package]] name = "swc_ecma_parser" -version = "0.43.5" +version = "0.44.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffaa010559302b877261b5d6d44c3386fe525f036c33711da490398a2fbc8798" +checksum = "e2ec412ebca8caa62ae98f90d5829d3f85d4db4b31c7339cdae72bbc063c42ea" dependencies = [ "either", "enum_kind", @@ -2759,9 +2759,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms" -version = "0.30.6" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fab9f78e6850e956c2f40b14fd45ad5e22c7527fce4e64dae0b014b17d286ebe" +checksum = "fc476391a584d3a4f94038f8f09d37686727ff1706ac1247de6de28d677bda61" dependencies = [ "Inflector", "arrayvec", @@ -2807,9 +2807,9 @@ dependencies = [ [[package]] name = "swc_ecma_utils" -version = "0.25.0" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64fd3d84ee287900f37fd06a1856f57cd0d65b37bb0557abde2c91d8a9b28155" +checksum = "a37310926374783577b62865546ddccbe6c5f1700586151a8a6d953ef45b5bcb" dependencies = [ "once_cell", "scoped-tls", @@ -2822,9 +2822,9 @@ dependencies = [ [[package]] name = "swc_ecma_visit" -version = "0.21.0" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee22eeb8c6987f9e740e1867292bd45785408f62ded95e1dc57b9c6c26e85a96" +checksum = "88671e165a87ae2c7d9194b1715c9502e0c1c82eb559951e95ef76586a195cd6" dependencies = [ "num-bigint", "swc_atoms", @@ -2835,9 +2835,9 @@ dependencies = [ [[package]] name = "swc_ecmascript" -version = "0.15.1" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "498cdc72a4ef3d032c98552ea617829ff2c5f5583770369fb0e85edb70e6297a" +checksum = "22a63164105d73e729bc426f6982571d800f3e14770c848a7ee0f6e8a8d821a8" dependencies = [ "swc_ecma_ast", "swc_ecma_codegen", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 65f455215ffa18..16dc7d9d5772fa 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -32,8 +32,8 @@ winapi = "0.3.9" [dependencies] deno_core = { path = "../core", version = "0.73.0" } -deno_doc = "0.1.18" -deno_lint = "0.2.13" +deno_doc = "0.1.19" +deno_lint = "0.2.14" deno_runtime = { path = "../runtime", version = "0.3.0" } atty = "0.2.14" @@ -41,7 +41,7 @@ base64 = "0.12.3" byteorder = "1.3.4" clap = "2.33.3" dissimilar = "1.0.2" -dprint-plugin-typescript = "0.35.1" +dprint-plugin-typescript = "0.36.0" encoding_rs = "0.8.24" env_logger = "0.7.1" filetime = "0.2.12" @@ -62,9 +62,9 @@ semver-parser = "0.9.0" serde = { version = "1.0.116", features = ["derive"] } shell-escape = "0.1.5" sourcemap = "6.0.1" -swc_bundler = "0.17.6" +swc_bundler = "0.18.0" swc_common = { version = "0.10.7", features = ["sourcemap"] } -swc_ecmascript = { version = "0.15.1", features = ["codegen", 
"dep_graph", "parser", "react", "transforms", "visit"] } +swc_ecmascript = { version = "0.16.0", features = ["codegen", "dep_graph", "parser", "react", "transforms", "visit"] } tempfile = "3.1.0" termcolor = "1.1.0" tokio = { version = "0.2.22", features = ["full"] } diff --git a/cli/ast.rs b/cli/ast.rs index 255155e7b36a52..d4837bb6f0d0e2 100644 --- a/cli/ast.rs +++ b/cli/ast.rs @@ -556,6 +556,7 @@ impl swc_bundler::Hook for BundleHook { value: Box::new(ast::Expr::Lit(ast::Lit::Str(ast::Str { span, value: value.into(), + kind: ast::StrKind::Synthesized, has_escape: false, }))), }, From b091c6c8c9cf33cc27b25560feaeea1eb23dd345 Mon Sep 17 00:00:00 2001 From: Kitson Kelly Date: Tue, 22 Dec 2020 16:42:32 +1100 Subject: [PATCH 097/135] fix(lsp): respect enable flag for requests (#8850) --- cli/lsp/language_server.rs | 38 ++++++++++++++++++- cli/tests/lsp/initialize_request.json | 6 +++ .../lsp/initialize_request_disabled.json | 29 ++++++++++++++ 3 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 cli/tests/lsp/initialize_request_disabled.json diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index c1e3ac8d5e3d4b..0a9d81bf396021 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -84,6 +84,11 @@ impl LanguageServer { } } + fn enabled(&self) -> bool { + let config = self.config.read().unwrap(); + config.settings.enable + } + pub async fn update_import_map(&self) -> Result<(), AnyError> { let (maybe_import_map, maybe_root_uri) = { let config = self.config.read().unwrap(); @@ -217,7 +222,7 @@ impl LanguageServer { } else { vec![] }; - if settings.enable { + if self.enabled() { diagnostics.extend( diagnostics_collection .diagnostics_for(file_id, DiagnosticSource::TypeScript) @@ -570,6 +575,9 @@ impl lspower::LanguageServer for LanguageServer { } async fn hover(&self, params: HoverParams) -> LSPResult> { + if !self.enabled() { + return Ok(None); + } let specifier = utils::normalize_url( params.text_document_position_params.text_document.uri, ); @@ -598,6 +606,9 @@ impl lspower::LanguageServer for LanguageServer { &self, params: DocumentHighlightParams, ) -> LSPResult>> { + if !self.enabled() { + return Ok(None); + } let specifier = utils::normalize_url( params.text_document_position_params.text_document.uri, ); @@ -635,6 +646,9 @@ impl lspower::LanguageServer for LanguageServer { &self, params: ReferenceParams, ) -> LSPResult>> { + if !self.enabled() { + return Ok(None); + } let specifier = utils::normalize_url(params.text_document_position.text_document.uri); // TODO(lucacasonato): handle error correctly @@ -673,6 +687,9 @@ impl lspower::LanguageServer for LanguageServer { &self, params: GotoDefinitionParams, ) -> LSPResult> { + if !self.enabled() { + return Ok(None); + } let specifier = utils::normalize_url( params.text_document_position_params.text_document.uri, ); @@ -706,6 +723,9 @@ impl lspower::LanguageServer for LanguageServer { &self, params: CompletionParams, ) -> LSPResult> { + if !self.enabled() { + return Ok(None); + } let specifier = utils::normalize_url(params.text_document_position.text_document.uri); // TODO(lucacasonato): handle error correctly @@ -978,4 +998,20 @@ mod tests { ]); harness.run().await; } + + #[tokio::test] + async fn test_hover_disabled() { + let mut harness = LspTestHarness::new(vec![ + ("initialize_request_disabled.json", LspResponse::RequestAny), + ("initialized_notification.json", LspResponse::None), + ("did_open_notification.json", LspResponse::None), + ("hover_request.json", LspResponse::Request(2, 
json!(null))), + ( + "shutdown_request.json", + LspResponse::Request(3, json!(null)), + ), + ("exit_notification.json", LspResponse::None), + ]); + harness.run().await; + } } diff --git a/cli/tests/lsp/initialize_request.json b/cli/tests/lsp/initialize_request.json index 960420bfd376ec..722a3c783f2394 100644 --- a/cli/tests/lsp/initialize_request.json +++ b/cli/tests/lsp/initialize_request.json @@ -9,6 +9,12 @@ "version": "1.0.0" }, "rootUri": null, + "initializationOptions": { + "enable": true, + "lint": true, + "importMap": null, + "unstable": false + }, "capabilities": { "textDocument": { "synchronization": { diff --git a/cli/tests/lsp/initialize_request_disabled.json b/cli/tests/lsp/initialize_request_disabled.json new file mode 100644 index 00000000000000..f763375f88d8e5 --- /dev/null +++ b/cli/tests/lsp/initialize_request_disabled.json @@ -0,0 +1,29 @@ +{ + "jsonrpc": "2.0", + "id": 1, + "method": "initialize", + "params": { + "processId": 0, + "clientInfo": { + "name": "test-harness", + "version": "1.0.0" + }, + "rootUri": null, + "initializationOptions": { + "enable": false, + "lint": true, + "importMap": null, + "unstable": false + }, + "capabilities": { + "textDocument": { + "synchronization": { + "dynamicRegistration": true, + "willSave": true, + "willSaveWaitUntil": true, + "didSave": true + } + } + } + } +} From 097c3379ba8a5dce5d9a73771693205d8178792d Mon Sep 17 00:00:00 2001 From: Kitson Kelly Date: Tue, 22 Dec 2020 21:21:18 +1100 Subject: [PATCH 098/135] feat(lsp): support the unstable setting (#8851) --- cli/lsp/language_server.rs | 260 ++++++++++++------ .../lsp/did_open_notification_unstable.json | 12 + .../lsp/initialize_request_unstable.json | 30 ++ 3 files changed, 221 insertions(+), 81 deletions(-) create mode 100644 cli/tests/lsp/did_open_notification_unstable.json create mode 100644 cli/tests/lsp/initialize_request_unstable.json diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 0a9d81bf396021..7834cab7fd1674 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -89,49 +89,34 @@ impl LanguageServer { config.settings.enable } - pub async fn update_import_map(&self) -> Result<(), AnyError> { - let (maybe_import_map, maybe_root_uri) = { - let config = self.config.read().unwrap(); - (config.settings.import_map.clone(), config.root_uri.clone()) - }; - if let Some(import_map_str) = &maybe_import_map { - info!("update import map"); - let import_map_url = if let Ok(url) = Url::from_file_path(import_map_str) + pub async fn get_line_index( + &self, + specifier: ModuleSpecifier, + ) -> Result, AnyError> { + let line_index = if specifier.as_url().scheme() == "asset" { + let state_snapshot = self.snapshot(); + if let Some(source) = + tsc::get_asset(&specifier, &self.ts_server, &state_snapshot).await? { - Ok(url) - } else if let Some(root_uri) = &maybe_root_uri { - let root_path = root_uri - .to_file_path() - .map_err(|_| anyhow!("Bad root_uri: {}", root_uri))?; - let import_map_path = root_path.join(import_map_str); - Url::from_file_path(import_map_path).map_err(|_| { - anyhow!("Bad file path for import map: {:?}", import_map_str) - }) + text::index_lines(&source) } else { - Err(anyhow!( - "The path to the import map (\"{}\") is not resolvable.", - import_map_str - )) - }?; - let import_map_path = import_map_url - .to_file_path() - .map_err(|_| anyhow!("Bad file path."))?; - let import_map_json = - fs::read_to_string(import_map_path).await.map_err(|err| { - anyhow!( - "Failed to load the import map at: {}. 
[{}]", - import_map_url, - err - ) - })?; - let import_map = - ImportMap::from_json(&import_map_url.to_string(), &import_map_json)?; - *self.maybe_import_map_uri.write().unwrap() = Some(import_map_url); - *self.maybe_import_map.write().unwrap() = Some(import_map); + return Err(anyhow!("asset source missing: {}", specifier)); + } } else { - *self.maybe_import_map.write().unwrap() = None; - } - Ok(()) + let file_cache = self.file_cache.read().unwrap(); + if let Some(file_id) = file_cache.lookup(&specifier) { + let file_text = file_cache.get_contents(file_id)?; + text::index_lines(&file_text) + } else { + let mut sources = self.sources.write().unwrap(); + if let Some(line_index) = sources.get_line_index(&specifier) { + line_index + } else { + return Err(anyhow!("source for specifier not found: {}", specifier)); + } + } + }; + Ok(line_index) } async fn prepare_diagnostics(&self) -> Result<(), AnyError> { @@ -260,34 +245,76 @@ impl LanguageServer { } } - pub async fn get_line_index( - &self, - specifier: ModuleSpecifier, - ) -> Result, AnyError> { - let line_index = if specifier.as_url().scheme() == "asset" { - let state_snapshot = self.snapshot(); - if let Some(source) = - tsc::get_asset(&specifier, &self.ts_server, &state_snapshot).await? + pub async fn update_import_map(&self) -> Result<(), AnyError> { + let (maybe_import_map, maybe_root_uri) = { + let config = self.config.read().unwrap(); + (config.settings.import_map.clone(), config.root_uri.clone()) + }; + if let Some(import_map_str) = &maybe_import_map { + info!("update import map"); + let import_map_url = if let Ok(url) = Url::from_file_path(import_map_str) { - text::index_lines(&source) + Ok(url) + } else if let Some(root_uri) = &maybe_root_uri { + let root_path = root_uri + .to_file_path() + .map_err(|_| anyhow!("Bad root_uri: {}", root_uri))?; + let import_map_path = root_path.join(import_map_str); + Url::from_file_path(import_map_path).map_err(|_| { + anyhow!("Bad file path for import map: {:?}", import_map_str) + }) } else { - return Err(anyhow!("asset source missing: {}", specifier)); - } + Err(anyhow!( + "The path to the import map (\"{}\") is not resolvable.", + import_map_str + )) + }?; + let import_map_path = import_map_url + .to_file_path() + .map_err(|_| anyhow!("Bad file path."))?; + let import_map_json = + fs::read_to_string(import_map_path).await.map_err(|err| { + anyhow!( + "Failed to load the import map at: {}. 
[{}]", + import_map_url, + err + ) + })?; + let import_map = + ImportMap::from_json(&import_map_url.to_string(), &import_map_json)?; + *self.maybe_import_map_uri.write().unwrap() = Some(import_map_url); + *self.maybe_import_map.write().unwrap() = Some(import_map); } else { - let file_cache = self.file_cache.read().unwrap(); - if let Some(file_id) = file_cache.lookup(&specifier) { - let file_text = file_cache.get_contents(file_id)?; - text::index_lines(&file_text) - } else { - let mut sources = self.sources.write().unwrap(); - if let Some(line_index) = sources.get_line_index(&specifier) { - line_index - } else { - return Err(anyhow!("source for specifier not found: {}", specifier)); - } + *self.maybe_import_map.write().unwrap() = None; + } + Ok(()) + } + + async fn update_tsconfig(&self) -> Result<(), AnyError> { + let mut tsconfig = TsConfig::new(json!({ + "allowJs": true, + "experimentalDecorators": true, + "isolatedModules": true, + "lib": ["deno.ns", "deno.window"], + "module": "esnext", + "noEmit": true, + "strict": true, + "target": "esnext", + })); + { + let config = self.config.read().unwrap(); + if config.settings.unstable { + let unstable_libs = json!({ + "lib": ["deno.ns", "deno.window", "deno.unstable"] + }); + tsconfig.merge(&unstable_libs); } - }; - Ok(line_index) + } + self + .ts_server + .request(self.snapshot(), tsc::RequestMethod::Configure(tsconfig)) + .await?; + Ok(()) } } @@ -331,23 +358,9 @@ impl lspower::LanguageServer for LanguageServer { config.update_capabilities(¶ms.capabilities); } - // TODO(@kitsonk) need to make this configurable, respect unstable - let ts_config = TsConfig::new(json!({ - "allowJs": true, - "experimentalDecorators": true, - "isolatedModules": true, - "lib": ["deno.ns", "deno.window"], - "module": "esnext", - "noEmit": true, - "strict": true, - "target": "esnext", - })); - // TODO(lucacasonato): handle error correctly - self - .ts_server - .request(self.snapshot(), tsc::RequestMethod::Configure(ts_config)) - .await - .unwrap(); + if let Err(err) = self.update_tsconfig().await { + warn!("Updating tsconfig has errored: {}", err); + } Ok(InitializeResult { capabilities, @@ -502,6 +515,12 @@ impl lspower::LanguageServer for LanguageServer { .show_message(MessageType::Warning, err.to_string()) .await; } + if let Err(err) = self.update_tsconfig().await { + self + .client + .show_message(MessageType::Warning, err.to_string()) + .await; + } } _ => error!("received empty extension settings from the client"), } @@ -1014,4 +1033,83 @@ mod tests { ]); harness.run().await; } + + #[tokio::test] + async fn test_hover_unstable_disabled() { + let mut harness = LspTestHarness::new(vec![ + ("initialize_request.json", LspResponse::RequestAny), + ("initialized_notification.json", LspResponse::None), + ("did_open_notification_unstable.json", LspResponse::None), + ( + "hover_request.json", + LspResponse::Request( + 2, + json!({ + "contents": [ + { + "language": "typescript", + "value": "any" + } + ], + "range": { + "start": { + "line": 0, + "character": 17 + }, + "end": { + "line": 0, + "character": 28 + } + } + }), + ), + ), + ( + "shutdown_request.json", + LspResponse::Request(3, json!(null)), + ), + ("exit_notification.json", LspResponse::None), + ]); + harness.run().await; + } + + #[tokio::test] + async fn test_hover_unstable_enabled() { + let mut harness = LspTestHarness::new(vec![ + ("initialize_request_unstable.json", LspResponse::RequestAny), + ("initialized_notification.json", LspResponse::None), + ("did_open_notification_unstable.json", LspResponse::None), + 
( + "hover_request.json", + LspResponse::Request( + 2, + json!({ + "contents": [ + { + "language": "typescript", + "value": "const Deno.permissions: Deno.Permissions" + }, + "**UNSTABLE**: Under consideration to move to `navigator.permissions` to\nmatch web API. It could look like `navigator.permissions.query({ name: Deno.symbols.read })`." + ], + "range": { + "start": { + "line": 0, + "character": 17 + }, + "end": { + "line": 0, + "character": 28 + } + } + }), + ), + ), + ( + "shutdown_request.json", + LspResponse::Request(3, json!(null)), + ), + ("exit_notification.json", LspResponse::None), + ]); + harness.run().await; + } } diff --git a/cli/tests/lsp/did_open_notification_unstable.json b/cli/tests/lsp/did_open_notification_unstable.json new file mode 100644 index 00000000000000..583eafdefb5ae3 --- /dev/null +++ b/cli/tests/lsp/did_open_notification_unstable.json @@ -0,0 +1,12 @@ +{ + "jsonrpc": "2.0", + "method": "textDocument/didOpen", + "params": { + "textDocument": { + "uri": "file:///a/file.ts", + "languageId": "typescript", + "version": 1, + "text": "console.log(Deno.permissions);\n" + } + } +} diff --git a/cli/tests/lsp/initialize_request_unstable.json b/cli/tests/lsp/initialize_request_unstable.json new file mode 100644 index 00000000000000..8c086c838b7b9f --- /dev/null +++ b/cli/tests/lsp/initialize_request_unstable.json @@ -0,0 +1,30 @@ +{ + "jsonrpc": "2.0", + "id": 1, + "method": "initialize", + "params": { + "processId": 0, + "clientInfo": { + "name": "test-harness", + "version": "1.0.0" + }, + "rootUri": null, + "initializationOptions": { + "enable": true, + "lint": true, + "unstable": true, + "config": null, + "importMap": null + }, + "capabilities": { + "textDocument": { + "synchronization": { + "dynamicRegistration": true, + "willSave": true, + "willSaveWaitUntil": true, + "didSave": true + } + } + } + } +} From ddda669a02fa394627dda2ac3d7ea0ed8830b920 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Tue, 22 Dec 2020 14:14:23 +0100 Subject: [PATCH 099/135] fix: implement ReadableStream fetch body handling (#8855) --- cli/tests/unit/fetch_test.ts | 40 ++++++++++++++++++++++++++++++++++++ op_crates/fetch/26_fetch.js | 8 ++++++-- 2 files changed, 46 insertions(+), 2 deletions(-) diff --git a/cli/tests/unit/fetch_test.ts b/cli/tests/unit/fetch_test.ts index 6f90a1847cc24d..359a24e9529990 100644 --- a/cli/tests/unit/fetch_test.ts +++ b/cli/tests/unit/fetch_test.ts @@ -1013,3 +1013,43 @@ MNf4EgWfK+tZMnuqfpfO9740KzfcVoMNo4QJD4yn5YxroUOO/Azi client.close(); }, ); + +unitTest( + { + perms: { net: true }, + }, + async function fetchPostBodyReadableStream(): Promise { + const addr = "127.0.0.1:4502"; + const buf = bufferServer(addr); + const stream = new TransformStream(); + const writer = stream.writable.getWriter(); + await writer.write(new TextEncoder().encode("hello ")); + await writer.write(new TextEncoder().encode("world")); + await writer.close(); + const response = await fetch(`http://${addr}/blah`, { + method: "POST", + headers: [ + ["Hello", "World"], + ["Foo", "Bar"], + ], + body: stream.readable, + }); + await response.arrayBuffer(); + assertEquals(response.status, 404); + assertEquals(response.headers.get("Content-Length"), "2"); + + const actual = new TextDecoder().decode(buf.bytes()); + const expected = [ + "POST /blah HTTP/1.1\r\n", + "hello: World\r\n", + "foo: Bar\r\n", + "accept: */*\r\n", + `user-agent: Deno/${Deno.version.deno}\r\n`, + "accept-encoding: gzip, br\r\n", + `host: ${addr}\r\n`, + `content-length: 11\r\n\r\n`, + "hello world", + ].join(""); + 
assertEquals(actual, expected); + }, +); diff --git a/op_crates/fetch/26_fetch.js b/op_crates/fetch/26_fetch.js index 95ee9681240fe6..0835e12a154b24 100644 --- a/op_crates/fetch/26_fetch.js +++ b/op_crates/fetch/26_fetch.js @@ -1246,8 +1246,12 @@ body = multipartBuilder.getBody(); contentType = multipartBuilder.getContentType(); } else { - // TODO: ReadableStream - throw new Error("Not implemented"); + // TODO(lucacasonato): do this in a streaming fashion once we support it + const buf = new Buffer(); + for await (const chunk of init.body) { + buf.write(chunk); + } + body = buf.bytes(); } if (contentType && !headers.has("content-type")) { headers.set("content-type", contentType); From 6ce310fa2729ec8ab81a5db6410d024b3f2585c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 22 Dec 2020 14:50:13 +0100 Subject: [PATCH 100/135] chore: release crates (#8854) --- Cargo.lock | 10 +++++----- cli/Cargo.toml | 10 +++++----- core/Cargo.toml | 2 +- op_crates/crypto/Cargo.toml | 4 ++-- op_crates/fetch/Cargo.toml | 4 ++-- op_crates/web/Cargo.toml | 4 ++-- runtime/Cargo.toml | 18 +++++++++--------- 7 files changed, 26 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3f6dd2d259303c..ed4c95b2e47fc6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -509,7 +509,7 @@ dependencies = [ [[package]] name = "deno_core" -version = "0.73.0" +version = "0.74.0" dependencies = [ "anyhow", "futures", @@ -528,7 +528,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.7.0" +version = "0.8.0" dependencies = [ "deno_core", "rand 0.7.3", @@ -552,7 +552,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.16.0" +version = "0.17.0" dependencies = [ "deno_core", "reqwest", @@ -579,7 +579,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.3.0" +version = "0.4.0" dependencies = [ "atty", "deno_core", @@ -621,7 +621,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.24.0" +version = "0.25.0" dependencies = [ "deno_core", "futures", diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 16dc7d9d5772fa..a8decf67861e69 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -20,9 +20,9 @@ harness = false path = "./bench/main.rs" [build-dependencies] -deno_core = { path = "../core", version = "0.73.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.16.0" } -deno_web = { path = "../op_crates/web", version = "0.24.0" } +deno_core = { path = "../core", version = "0.74.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.17.0" } +deno_web = { path = "../op_crates/web", version = "0.25.0" } regex = "1.3.9" serde = { version = "1.0.116", features = ["derive"] } @@ -31,10 +31,10 @@ winres = "0.1.11" winapi = "0.3.9" [dependencies] -deno_core = { path = "../core", version = "0.73.0" } +deno_core = { path = "../core", version = "0.74.0" } deno_doc = "0.1.19" deno_lint = "0.2.14" -deno_runtime = { path = "../runtime", version = "0.3.0" } +deno_runtime = { path = "../runtime", version = "0.4.0" } atty = "0.2.14" base64 = "0.12.3" diff --git a/core/Cargo.toml b/core/Cargo.toml index f9ba05192da526..4e81f935ce869b 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -1,7 +1,7 @@ # Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. 
[package] name = "deno_core" -version = "0.73.0" +version = "0.74.0" edition = "2018" description = "A secure JavaScript/TypeScript runtime built with V8, Rust, and Tokio" authors = ["the Deno authors"] diff --git a/op_crates/crypto/Cargo.toml b/op_crates/crypto/Cargo.toml index d94913ca92b74b..a18b3a566cfef3 100644 --- a/op_crates/crypto/Cargo.toml +++ b/op_crates/crypto/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_crypto" -version = "0.7.0" +version = "0.8.0" edition = "2018" description = "Collection of WebCrypto APIs" authors = ["the Deno authors"] @@ -14,6 +14,6 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.73.0", path = "../../core" } +deno_core = { version = "0.74.0", path = "../../core" } rand = "0.7.3" diff --git a/op_crates/fetch/Cargo.toml b/op_crates/fetch/Cargo.toml index 778513ed5acaeb..ecef2f5fc36116 100644 --- a/op_crates/fetch/Cargo.toml +++ b/op_crates/fetch/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_fetch" -version = "0.16.0" +version = "0.17.0" edition = "2018" description = "provides fetch Web API to deno_core" authors = ["the Deno authors"] @@ -14,6 +14,6 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.73.0", path = "../../core" } +deno_core = { version = "0.74.0", path = "../../core" } reqwest = { version = "0.10.8", default-features = false, features = ["rustls-tls", "stream", "gzip", "brotli"] } serde = { version = "1.0.116", features = ["derive"] } diff --git a/op_crates/web/Cargo.toml b/op_crates/web/Cargo.toml index b8f21cbc989193..f12d7d0ffc8497 100644 --- a/op_crates/web/Cargo.toml +++ b/op_crates/web/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_web" -version = "0.24.0" +version = "0.25.0" edition = "2018" description = "Collection of Web APIs" authors = ["the Deno authors"] @@ -14,7 +14,7 @@ repository = "https://github.com/denoland/deno" path = "lib.rs" [dependencies] -deno_core = { version = "0.73.0", path = "../../core" } +deno_core = { version = "0.74.0", path = "../../core" } idna = "0.2.0" serde = { version = "1.0.116", features = ["derive"] } diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index c62bce9947215e..6fe84c477c17fb 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_runtime" -version = "0.3.0" +version = "0.4.0" license = "MIT" authors = ["the Deno authors"] edition = "2018" @@ -18,20 +18,20 @@ name = "hello_runtime" path = "examples/hello_runtime.rs" [build-dependencies] -deno_core = { path = "../core", version = "0.73.0" } -deno_crypto = { path = "../op_crates/crypto", version = "0.7.0" } -deno_web = { path = "../op_crates/web", version = "0.24.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.16.0" } +deno_core = { path = "../core", version = "0.74.0" } +deno_crypto = { path = "../op_crates/crypto", version = "0.8.0" } +deno_fetch = { path = "../op_crates/fetch", version = "0.17.0" } +deno_web = { path = "../op_crates/web", version = "0.25.0" } [target.'cfg(windows)'.build-dependencies] winres = "0.1.11" winapi = "0.3.9" [dependencies] -deno_core = { path = "../core", version = "0.73.0" } -deno_crypto = { path = "../op_crates/crypto", version = "0.7.0" } -deno_fetch = { path = "../op_crates/fetch", version = "0.16.0" } -deno_web = { path = "../op_crates/web", version = "0.24.0" } +deno_core = { path = "../core", version = "0.74.0" } +deno_crypto = { path = "../op_crates/crypto", version = "0.8.0" } +deno_fetch = { path = 
"../op_crates/fetch", version = "0.17.0" } +deno_web = { path = "../op_crates/web", version = "0.25.0" } atty = "0.2.14" dlopen = "0.1.8" From d199e45ad5dd180a6969abcd528023e173a1e1ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bartek=20Iwa=C5=84czuk?= Date: Tue, 22 Dec 2020 17:00:35 +0100 Subject: [PATCH 101/135] v1.6.2 --- Cargo.lock | 2 +- Releases.md | 21 +++++++++++++++++++++ cli/Cargo.toml | 2 +- std/version.ts | 2 +- 4 files changed, 24 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ed4c95b2e47fc6..1ffadabe9608d3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -452,7 +452,7 @@ dependencies = [ [[package]] name = "deno" -version = "1.6.1" +version = "1.6.2" dependencies = [ "atty", "base64 0.12.3", diff --git a/Releases.md b/Releases.md index 7127d03718cd39..166c5162de216b 100644 --- a/Releases.md +++ b/Releases.md @@ -6,6 +6,27 @@ https://github.com/denoland/deno/releases We also have one-line install commands at: https://github.com/denoland/deno_install +### 1.6.2 / 2020.12.22 + +- feat(lsp): support the unstable setting (#8851) +- feat(unstable): record raw coverage into a directory (#8642) +- feat(unstable): support in memory certificate data for Deno.createHttpClient + (#8739) +- fix: atomically write files to $DENO_DIR (#8822) +- fix: implement ReadableStream fetch body handling (#8855) +- fix: make DNS resolution async (#8743) +- fix: make dynamic import errors catchable (#8750) +- fix: respect enable flag for requests in lsp (#8850) +- refactor: rename runtime/rt to runtime/js (#8806) +- refactor: rewrite lsp to be async (#8727) +- refactor: rewrite ops to use ResourceTable2 (#8512) +- refactor: optimise static assets in lsp (#8771) +- upgrade TypeScript to 4.1.3 (#8785) + +Changes in std version 0.82.0: + +- feat(std/node): Added os.type (#8591) + ### 1.6.1 / 2020.12.14 - feat(lsp): support import maps (#8683) diff --git a/cli/Cargo.toml b/cli/Cargo.toml index a8decf67861e69..0e56c61265d147 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno" -version = "1.6.1" +version = "1.6.2" license = "MIT" authors = ["the Deno authors"] edition = "2018" diff --git a/std/version.ts b/std/version.ts index 05603ffc5b0cb6..c2e1243e5d7287 100644 --- a/std/version.ts +++ b/std/version.ts @@ -5,4 +5,4 @@ * the cli's API is stable. In the future when std becomes stable, likely we * will match versions with cli as we have in the past. */ -export const VERSION = "0.81.0"; +export const VERSION = "0.82.0"; From d77452c79ffe46074f151f6b9554e6c0e0bd2954 Mon Sep 17 00:00:00 2001 From: Ben Noordhuis Date: Tue, 22 Dec 2020 18:01:07 +0100 Subject: [PATCH 102/135] core: fix http_bench_json_ops, register Error (#8860) Fixes the following runtime error for me when benchmarking: thread 'main' panicked at 'called `Result::unwrap()` on an `Err` value: Error: Unregistered error class: "Error" Connection reset by peer (os error 104) Classes of errors returned from ops should be registered via Deno.core.registerErrorClass(). 
at processResponse (deno:core/core.js:219:13) at Object.jsonOpAsync (deno:core/core.js:240:12) at async read (http_bench_json_ops.js:29:21) at async serve (http_bench_json_ops.js:45:19)', core/examples/http_bench_json_ops.rs:260:28 --- core/examples/http_bench_json_ops.js | 1 + 1 file changed, 1 insertion(+) diff --git a/core/examples/http_bench_json_ops.js b/core/examples/http_bench_json_ops.js index 3a5dd05c5a5899..aba7da432b9caf 100644 --- a/core/examples/http_bench_json_ops.js +++ b/core/examples/http_bench_json_ops.js @@ -57,6 +57,7 @@ async function serve(rid) { async function main() { Deno.core.ops(); + Deno.core.registerErrorClass("Error", Error); const listenerRid = listen(); Deno.core.print(`http_bench_json_ops listening on http://127.0.0.1:4544/\n`); From 06fa5eb7f332e1a32bb6f13f438bb326413b733c Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Wed, 23 Dec 2020 02:01:29 +0800 Subject: [PATCH 103/135] fix(build): add generated coverage files to gitignore (#8862) --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index e82a62a84bb33c..00516bb41ff0e4 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,9 @@ gclient_config.py_entries # compiled wasm files std/wasi/testdata/snapshot_preview1/ +# generated v8 coverage files +cli/tests/.test_coverage/ + # MacOS generated files .DS_Store .DS_Store? From a4d557126e49108db4c0dc42561ae032d2418b04 Mon Sep 17 00:00:00 2001 From: Kitson Kelly Date: Thu, 24 Dec 2020 21:53:03 +1100 Subject: [PATCH 104/135] fix(lsp): provide diagnostics for unresolved modules (#8872) --- cli/lsp/analysis.rs | 62 +++++++++++++++++++++++------- cli/lsp/diagnostics.rs | 76 +++++++++++++++++++++++++++++++++++++ cli/lsp/language_server.rs | 33 +++++++++++++++- cli/lsp/sources.rs | 6 +-- cli/lsp/tsc.rs | 7 ++-- cli/tsc/99_main_compiler.js | 3 ++ 6 files changed, 166 insertions(+), 21 deletions(-) diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 7cf6aca371fae6..26f38ef3807ad3 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -86,7 +86,6 @@ pub fn references_to_diagnostics( severity: Some(lsp_types::DiagnosticSeverity::Warning), code: Some(lsp_types::NumberOrString::String(code)), code_description: None, - // TODO(@kitsonk) this won't make sense for every diagnostic source: Some("deno-lint".to_string()), message, related_information: None, @@ -100,12 +99,13 @@ pub fn references_to_diagnostics( #[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct Dependency { pub is_dynamic: bool, - pub maybe_code: Option, - pub maybe_type: Option, + pub maybe_code: Option, + pub maybe_code_specifier_range: Option, + pub maybe_type: Option, } #[derive(Debug, Clone, PartialEq, Eq)] -pub enum ResolvedImport { +pub enum ResolvedDependency { Resolved(ModuleSpecifier), Err(String), } @@ -114,7 +114,7 @@ pub fn resolve_import( specifier: &str, referrer: &ModuleSpecifier, maybe_import_map: &Option, -) -> ResolvedImport { +) -> ResolvedDependency { let maybe_mapped = if let Some(import_map) = maybe_import_map { if let Ok(maybe_specifier) = import_map.resolve(specifier, referrer.as_str()) @@ -132,13 +132,13 @@ pub fn resolve_import( } else { match ModuleSpecifier::resolve_import(specifier, referrer.as_str()) { Ok(resolved) => resolved, - Err(err) => return ResolvedImport::Err(err.to_string()), + Err(err) => return ResolvedDependency::Err(err.to_string()), } }; let referrer_scheme = referrer.as_url().scheme(); let specifier_scheme = specifier.as_url().scheme(); if referrer_scheme == "https" && specifier_scheme == "http" { - 
return ResolvedImport::Err( + return ResolvedDependency::Err( "Modules imported via https are not allowed to import http modules." .to_string(), ); @@ -147,10 +147,10 @@ pub fn resolve_import( && !(specifier_scheme == "https" || specifier_scheme == "http") && !remapped { - return ResolvedImport::Err("Remote modules are not allowed to import local modules. Consider using a dynamic import instead.".to_string()); + return ResolvedDependency::Err("Remote modules are not allowed to import local modules. Consider using a dynamic import instead.".to_string()); } - ResolvedImport::Resolved(specifier) + ResolvedDependency::Resolved(specifier) } // TODO(@kitsonk) a lot of this logic is duplicated in module_graph.rs in @@ -160,7 +160,7 @@ pub fn analyze_dependencies( source: &str, media_type: &MediaType, maybe_import_map: &Option, -) -> Option<(HashMap, Option)> { +) -> Option<(HashMap, Option)> { let specifier_str = specifier.to_string(); let source_map = Rc::new(swc_common::SourceMap::default()); let mut maybe_type = None; @@ -222,7 +222,21 @@ pub fn analyze_dependencies( | swc_ecmascript::dep_graph::DependencyKind::ImportType => { dep.maybe_type = Some(resolved_import) } - _ => dep.maybe_code = Some(resolved_import), + _ => { + dep.maybe_code_specifier_range = Some(Range { + start: Position { + line: (desc.specifier_line - 1) as u32, + character: desc.specifier_col as u32, + }, + end: Position { + line: (desc.specifier_line - 1) as u32, + character: (desc.specifier_col + + desc.specifier.chars().count() + + 2) as u32, + }, + }); + dep.maybe_code = Some(resolved_import); + } } if maybe_resolved_type_import.is_some() && dep.maybe_type.is_none() { dep.maybe_type = maybe_resolved_type_import; @@ -293,27 +307,47 @@ mod tests { actual.get("https://cdn.skypack.dev/react").cloned(), Some(Dependency { is_dynamic: false, - maybe_code: Some(ResolvedImport::Resolved( + maybe_code: Some(ResolvedDependency::Resolved( ModuleSpecifier::resolve_url("https://cdn.skypack.dev/react") .unwrap() )), - maybe_type: Some(ResolvedImport::Resolved( + maybe_type: Some(ResolvedDependency::Resolved( ModuleSpecifier::resolve_url( "https://deno.land/x/types/react/index.d.ts" ) .unwrap() )), + maybe_code_specifier_range: Some(Range { + start: Position { + line: 8, + character: 27, + }, + end: Position { + line: 8, + character: 58, + } + }), }) ); assert_eq!( actual.get("https://deno.land/x/oak@v6.3.2/mod.ts").cloned(), Some(Dependency { is_dynamic: false, - maybe_code: Some(ResolvedImport::Resolved( + maybe_code: Some(ResolvedDependency::Resolved( ModuleSpecifier::resolve_url("https://deno.land/x/oak@v6.3.2/mod.ts") .unwrap() )), maybe_type: None, + maybe_code_specifier_range: Some(Range { + start: Position { + line: 5, + character: 11, + }, + end: Position { + line: 5, + character: 50, + } + }), }) ); } diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 1d0a1fac99ceea..c468fb0fac6f7d 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -2,6 +2,7 @@ use super::analysis::get_lint_references; use super::analysis::references_to_diagnostics; +use super::analysis::ResolvedDependency; use super::language_server::StateSnapshot; use super::memory_cache::FileId; use super::tsc; @@ -9,6 +10,7 @@ use super::tsc; use crate::diagnostics; use crate::media_type::MediaType; +use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::serde_json::Value; @@ -19,6 +21,7 @@ use std::mem; #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum DiagnosticSource { + Deno, 
Lint, TypeScript, } @@ -261,3 +264,76 @@ pub async fn generate_ts_diagnostics( Ok(diagnostics) } + +pub async fn generate_dependency_diagnostics( + state_snapshot: StateSnapshot, + diagnostic_collection: DiagnosticCollection, +) -> Result { + tokio::task::spawn_blocking(move || { + let mut diagnostics = Vec::new(); + + let file_cache = state_snapshot.file_cache.read().unwrap(); + let mut sources = if let Ok(sources) = state_snapshot.sources.write() { + sources + } else { + return Err(custom_error("Deadlock", "deadlock locking sources")); + }; + for (specifier, doc_data) in state_snapshot.doc_data.iter() { + let file_id = file_cache.lookup(specifier).unwrap(); + let version = doc_data.version; + let current_version = diagnostic_collection.get_version(&file_id); + if version != current_version { + let mut diagnostic_list = Vec::new(); + if let Some(dependencies) = &doc_data.dependencies { + for (_, dependency) in dependencies.iter() { + if let (Some(code), Some(range)) = ( + &dependency.maybe_code, + &dependency.maybe_code_specifier_range, + ) { + match code.clone() { + ResolvedDependency::Err(message) => { + diagnostic_list.push(lsp_types::Diagnostic { + range: *range, + severity: Some(lsp_types::DiagnosticSeverity::Error), + code: None, + code_description: None, + source: Some("deno".to_string()), + message, + related_information: None, + tags: None, + data: None, + }) + } + ResolvedDependency::Resolved(specifier) => { + if !(state_snapshot.doc_data.contains_key(&specifier) || sources.contains(&specifier)) { + let is_local = specifier.as_url().scheme() == "file"; + diagnostic_list.push(lsp_types::Diagnostic { + range: *range, + severity: Some(lsp_types::DiagnosticSeverity::Error), + code: None, + code_description: None, + source: Some("deno".to_string()), + message: if is_local { + format!("Unable to load a local module: \"{}\".\n Please check the file path.", specifier) + } else { + format!("Unable to load the module: \"{}\".\n If the module exists, running `deno cache {}` should resolve this error.", specifier, specifier) + }, + related_information: None, + tags: None, + data: None, + }) + } + }, + } + } + } + } + diagnostics.push((file_id, version, diagnostic_list)) + } + } + + Ok(diagnostics) + }) + .await + .unwrap() +} diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 7834cab7fd1674..72d1b1ad386768 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -178,9 +178,35 @@ impl LanguageServer { Ok::<(), AnyError>(()) }; - let (lint_res, ts_res) = tokio::join!(lint, ts); + let deps = async { + if enabled { + let diagnostics_collection = self.diagnostics.read().unwrap().clone(); + let diagnostics = diagnostics::generate_dependency_diagnostics( + self.snapshot(), + diagnostics_collection, + ) + .await?; + { + let mut diagnostics_collection = self.diagnostics.write().unwrap(); + for (file_id, version, diagnostics) in diagnostics { + diagnostics_collection.set( + file_id, + DiagnosticSource::Deno, + version, + diagnostics, + ); + } + } + self.publish_diagnostics().await? 
+ }; + + Ok::<(), AnyError>(()) + }; + + let (lint_res, ts_res, deps_res) = tokio::join!(lint, ts, deps); lint_res?; ts_res?; + deps_res?; Ok(()) } @@ -213,6 +239,11 @@ impl LanguageServer { .diagnostics_for(file_id, DiagnosticSource::TypeScript) .cloned(), ); + diagnostics.extend( + diagnostics_collection + .diagnostics_for(file_id, DiagnosticSource::Deno) + .cloned(), + ); } let specifier = { let file_cache = self.file_cache.read().unwrap(); diff --git a/cli/lsp/sources.rs b/cli/lsp/sources.rs index 63b4ebd9946027..c6ab87f218af6e 100644 --- a/cli/lsp/sources.rs +++ b/cli/lsp/sources.rs @@ -23,7 +23,7 @@ use std::time::SystemTime; #[derive(Debug, Clone, Default)] struct Metadata { dependencies: Option>, - maybe_types: Option, + maybe_types: Option, media_type: MediaType, source: String, version: String, @@ -255,7 +255,7 @@ impl Sources { let dependencies = &metadata.dependencies?; let dependency = dependencies.get(specifier)?; if let Some(type_dependency) = &dependency.maybe_type { - if let analysis::ResolvedImport::Resolved(resolved_specifier) = + if let analysis::ResolvedDependency::Resolved(resolved_specifier) = type_dependency { self.resolution_result(resolved_specifier) @@ -264,7 +264,7 @@ impl Sources { } } else { let code_dependency = &dependency.maybe_code.clone()?; - if let analysis::ResolvedImport::Resolved(resolved_specifier) = + if let analysis::ResolvedDependency::Resolved(resolved_specifier) = code_dependency { self.resolution_result(resolved_specifier) diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 4cd13f70d2ac0d..2a0f7d76cf20fc 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -1,6 +1,6 @@ // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. -use super::analysis::ResolvedImport; +use super::analysis::ResolvedDependency; use super::language_server::StateSnapshot; use super::text; use super::utils; @@ -839,9 +839,10 @@ fn resolve(state: &mut State, args: Value) -> Result { } else if let Some(resolved_import) = &dependency.maybe_code { resolved_import.clone() } else { - ResolvedImport::Err("missing dependency".to_string()) + ResolvedDependency::Err("missing dependency".to_string()) }; - if let ResolvedImport::Resolved(resolved_specifier) = resolved_import + if let ResolvedDependency::Resolved(resolved_specifier) = + resolved_import { if state .state_snapshot diff --git a/cli/tsc/99_main_compiler.js b/cli/tsc/99_main_compiler.js index 9b08dee93c1fad..de9e74d2ef440f 100644 --- a/cli/tsc/99_main_compiler.js +++ b/cli/tsc/99_main_compiler.js @@ -128,6 +128,9 @@ delete Object.prototype.__proto__; // TS2691: An import path cannot end with a '.ts' extension. Consider // importing 'bad-module' instead. 2691, + // TS2792: Cannot find module. Did you mean to set the 'moduleResolution' + // option to 'node', or to add aliases to the 'paths' option? + 2792, // TS5009: Cannot find the common subdirectory path for the input files. 5009, // TS5055: Cannot write file From b15539587e7cf6b67c2ae7d4dc39901634bf6bd5 Mon Sep 17 00:00:00 2001 From: Yosi Pramajaya Date: Thu, 24 Dec 2020 20:11:32 +0700 Subject: [PATCH 105/135] refactor(test_util): replace "warp" with "hyper" (#8846) This commit rewrites "test_server" to use "hyper" instead of "warp" in an effort to reduce number of dependencies. 
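An aside on the specifier-range arithmetic in the language-server change earlier in this series (the maybe_code_specifier_range addition above): swc reports a 1-based line and the column where the quoted specifier begins, so the stored range shifts the line to the LSP's 0-based convention and widens the end column by the specifier length plus two for the surrounding quote characters. A minimal sketch of that conversion follows; the helper name is illustrative, and local stand-ins are used for the lsp_types Position/Range structs the patch itself relies on.

  // Stand-ins for the lsp_types Position/Range used by the patch; only the
  // fields the conversion touches are modelled here.
  #[derive(Debug, PartialEq)]
  struct Position {
    line: u32,
    character: u32,
  }

  #[derive(Debug, PartialEq)]
  struct Range {
    start: Position,
    end: Position,
  }

  // swc gives a 1-based line and the column of the opening quote; the LSP range
  // is 0-based and spans the quoted specifier, hence the `- 1` and the `+ 2`.
  fn specifier_range(
    specifier_line: usize,
    specifier_col: usize,
    specifier: &str,
  ) -> Range {
    Range {
      start: Position {
        line: (specifier_line - 1) as u32,
        character: specifier_col as u32,
      },
      end: Position {
        line: (specifier_line - 1) as u32,
        character: (specifier_col + specifier.chars().count() + 2) as u32,
      },
    }
  }

  fn main() {
    // Matches the expectation in the analysis test: line 8, characters 27..58
    // for the 29-character specifier "https://cdn.skypack.dev/react".
    let range = specifier_range(9, 27, "https://cdn.skypack.dev/react");
    assert_eq!(range.start, Position { line: 8, character: 27 });
    assert_eq!(range.end, Position { line: 8, character: 58 });
  }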
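For the warp-to-hyper rewrite described above, the replacement drops warp's filter combinators in favour of plain hyper services: one make_service_fn/service_fn pair per listener, with routing done by matching on the request method and path. The following is a minimal, self-contained sketch of that shape against hyper 0.13 and tokio 0.2 (the versions this series pins); the routes and port are illustrative, not endpoints the test server actually registers.

  use std::convert::Infallible;
  use std::net::SocketAddr;

  use hyper::service::{make_service_fn, service_fn};
  use hyper::{Body, Method, Request, Response, Server, StatusCode};

  // One handler routes every request by (method, path), mirroring the
  // match-based main_server in the rewritten test_util.
  async fn route(req: Request<Body>) -> Result<Response<Body>, Infallible> {
    match (req.method(), req.uri().path()) {
      (&Method::GET, "/hello") => Ok(Response::new(Body::from("Hello World!"))),
      (&Method::POST, "/echo") => Ok(Response::new(req.into_body())),
      _ => {
        let mut not_found = Response::new(Body::empty());
        *not_found.status_mut() = StatusCode::NOT_FOUND;
        Ok(not_found)
      }
    }
  }

  #[tokio::main]
  async fn main() {
    let addr = SocketAddr::from(([127, 0, 0, 1], 4545));
    let make_svc =
      make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(route)) });
    if let Err(e) = Server::bind(&addr).serve(make_svc).await {
      eprintln!("server error: {}", e);
    }
  }

The trade-off is explicit routing code in exchange for dropping warp and its transitive dependencies (headers, multipart, tracing-futures, and the older rand 0.6 tree removed from Cargo.lock above).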
--- Cargo.lock | 374 ++-------------- cli/tests/websocket_test.ts | 4 +- test_util/Cargo.toml | 4 +- test_util/src/lib.rs | 847 ++++++++++++++++++++++++------------ 4 files changed, 599 insertions(+), 630 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1ffadabe9608d3..0f8f997735ad54 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -144,12 +144,6 @@ dependencies = [ "syn 1.0.48", ] -[[package]] -name = "autocfg" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" - [[package]] name = "autocfg" version = "1.0.1" @@ -174,34 +168,13 @@ version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" -[[package]] -name = "block-buffer" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" -dependencies = [ - "block-padding", - "byte-tools", - "byteorder", - "generic-array 0.12.3", -] - [[package]] name = "block-buffer" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "generic-array 0.14.4", -] - -[[package]] -name = "block-padding" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" -dependencies = [ - "byte-tools", + "generic-array", ] [[package]] @@ -225,16 +198,6 @@ dependencies = [ "alloc-stdlib", ] -[[package]] -name = "buf_redux" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b953a6887648bb07a535631f2bc00fbdb2a2216f135552cb3f534ed136b9c07f" -dependencies = [ - "memchr", - "safemem", -] - [[package]] name = "build_const" version = "0.2.1" @@ -247,12 +210,6 @@ version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e8c087f005730276d1096a652e92a8bacee2e2472bcc9715a74d2bec38b5820" -[[package]] -name = "byte-tools" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" - [[package]] name = "byteorder" version = "1.3.4" @@ -321,15 +278,6 @@ dependencies = [ "vec_map", ] -[[package]] -name = "cloudabi" -version = "0.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" -dependencies = [ - "bitflags", -] - [[package]] name = "cloudabi" version = "0.1.0" @@ -399,7 +347,7 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" dependencies = [ - "autocfg 1.0.1", + "autocfg", "cfg-if 0.1.10", "lazy_static", ] @@ -531,7 +479,7 @@ name = "deno_crypto" version = "0.8.0" dependencies = [ "deno_core", - "rand 0.7.3", + "rand", ] [[package]] @@ -640,22 +588,13 @@ dependencies = [ "syn 1.0.48", ] -[[package]] -name = "digest" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" -dependencies = [ - "generic-array 0.12.3", -] - [[package]] name = "digest" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array 0.14.4", + "generic-array", ] [[package]] @@ -796,12 +735,6 @@ dependencies = [ "libc", ] -[[package]] -name = "fake-simd" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" - [[package]] name = "filetime" version = "0.2.12" @@ -879,12 +812,6 @@ dependencies = [ "libc", ] -[[package]] -name = "fuchsia-cprng" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" - [[package]] name = "fuchsia-zircon" version = "0.3.3" @@ -1021,15 +948,6 @@ version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" -[[package]] -name = "generic-array" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec" -dependencies = [ - "typenum", -] - [[package]] name = "generic-array" version = "0.14.4" @@ -1076,31 +994,6 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00d63df3d41950fb462ed38308eea019113ad1508da725bbedcd0fa5a85ef5f7" -[[package]] -name = "headers" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed18eb2459bf1a09ad2d6b1547840c3e5e62882fa09b9a6a20b1de8e3228848f" -dependencies = [ - "base64 0.12.3", - "bitflags", - "bytes 0.5.6", - "headers-core", - "http", - "mime", - "sha-1 0.8.2", - "time", -] - -[[package]] -name = "headers-core" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" -dependencies = [ - "http", -] - [[package]] name = "heck" version = "0.3.1" @@ -1230,7 +1123,7 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55e2e4c765aa53a0424761bf9f41aa7a6ac1efa87238f59560640e27fca028f2" dependencies = [ - "autocfg 1.0.1", + "autocfg", "hashbrown", ] @@ -1481,7 +1374,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c60c0dfe32c10b43a144bad8fc83538c52f58302c92300ea7ec7bf7b38d5a7b9" dependencies = [ "adler", - "autocfg 1.0.1", + "autocfg", ] [[package]] @@ -1573,24 +1466,6 @@ dependencies = [ "winapi 0.3.9", ] -[[package]] -name = "multipart" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8209c33c951f07387a8497841122fc6f712165e3f9bda3e6be4645b58188f676" -dependencies = [ - "buf_redux", - "httparse", - "log", - "mime", - "mime_guess", - "quick-error", - "rand 0.6.5", - "safemem", - "tempfile", - "twoway", -] - [[package]] name = "net2" version = "0.2.35" @@ -1666,7 +1541,7 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" dependencies = [ - "autocfg 1.0.1", + "autocfg", "num-integer", "num-traits", "serde", @@ -1678,7 +1553,7 @@ version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d59457e662d541ba17869cf51cf177c0b5f0cbf476c66bdc90bf1edac4f875b" dependencies = [ - "autocfg 1.0.1", + "autocfg", "num-traits", ] @@ -1688,7 +1563,7 @@ version = "0.2.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ac267bcc07f48ee5f8935ab0d24f316fb722d7a1292e2913f0cc196b29ffd611" dependencies = [ - "autocfg 1.0.1", + "autocfg", ] [[package]] @@ -1707,12 +1582,6 @@ version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13bd41f508810a131401606d54ac32a467c97172d74ba7662562ebba5ad07fa0" -[[package]] -name = "opaque-debug" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" - [[package]] name = "opaque-debug" version = "0.3.0" @@ -1765,7 +1634,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c361aa727dd08437f2f1447be8b59a33b0edd15e0fcee698f935613d9efbca9b" dependencies = [ "cfg-if 0.1.10", - "cloudabi 0.1.0", + "cloudabi", "instant", "libc", "redox_syscall", @@ -1807,7 +1676,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526" dependencies = [ "phf_shared", - "rand 0.7.3", + "rand", ] [[package]] @@ -2008,25 +1877,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce082a9940a7ace2ad4a8b7d0b1eac6aa378895f18be598230c5f2284ac05426" -[[package]] -name = "rand" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" -dependencies = [ - "autocfg 0.1.7", - "libc", - "rand_chacha 0.1.1", - "rand_core 0.4.2", - "rand_hc 0.1.0", - "rand_isaac", - "rand_jitter", - "rand_os", - "rand_pcg 0.1.2", - "rand_xorshift", - "winapi 0.3.9", -] - [[package]] name = "rand" version = "0.7.3" @@ -2035,20 +1885,10 @@ checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" dependencies = [ "getrandom", "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc 0.2.0", - "rand_pcg 0.2.1", -] - -[[package]] -name = "rand_chacha" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" -dependencies = [ - "autocfg 0.1.7", - "rand_core 0.3.1", + "rand_chacha", + "rand_core", + "rand_hc", + "rand_pcg", ] [[package]] @@ -2058,24 +1898,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" dependencies = [ "ppv-lite86", - "rand_core 0.5.1", -] - -[[package]] -name = "rand_core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -dependencies = [ - "rand_core 0.4.2", + "rand_core", ] -[[package]] -name = "rand_core" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" - [[package]] name = "rand_core" version = "0.5.1" @@ -2085,66 +1910,13 @@ dependencies = [ "getrandom", ] -[[package]] -name = "rand_hc" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" -dependencies = [ - "rand_core 0.3.1", -] - [[package]] name = "rand_hc" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" 
dependencies = [ - "rand_core 0.5.1", -] - -[[package]] -name = "rand_isaac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" -dependencies = [ - "rand_core 0.3.1", -] - -[[package]] -name = "rand_jitter" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" -dependencies = [ - "libc", - "rand_core 0.4.2", - "winapi 0.3.9", -] - -[[package]] -name = "rand_os" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" -dependencies = [ - "cloudabi 0.0.3", - "fuchsia-cprng", - "libc", - "rand_core 0.4.2", - "rdrand", - "winapi 0.3.9", -] - -[[package]] -name = "rand_pcg" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" -dependencies = [ - "autocfg 0.1.7", - "rand_core 0.4.2", + "rand_core", ] [[package]] @@ -2153,25 +1925,7 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" dependencies = [ - "rand_core 0.5.1", -] - -[[package]] -name = "rand_xorshift" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" -dependencies = [ - "rand_core 0.3.1", -] - -[[package]] -name = "rdrand" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" -dependencies = [ - "rand_core 0.3.1", + "rand_core", ] [[package]] @@ -2342,12 +2096,6 @@ version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" -[[package]] -name = "safemem" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" - [[package]] name = "same-file" version = "1.0.6" @@ -2455,29 +2203,17 @@ dependencies = [ "url", ] -[[package]] -name = "sha-1" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df" -dependencies = [ - "block-buffer 0.7.3", - "digest 0.8.1", - "fake-simd", - "opaque-debug 0.2.3", -] - [[package]] name = "sha-1" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "170a36ea86c864a3f16dd2687712dd6646f7019f301e57537c7f4dc9f5916770" dependencies = [ - "block-buffer 0.9.0", + "block-buffer", "cfg-if 0.1.10", "cpuid-bool", - "digest 0.9.0", - "opaque-debug 0.3.0", + "digest", + "opaque-debug", ] [[package]] @@ -2924,7 +2660,7 @@ checksum = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" dependencies = [ "cfg-if 0.1.10", "libc", - "rand 0.7.3", + "rand", "redox_syscall", "remove_dir_all", "winapi 0.3.9", @@ -2954,13 +2690,15 @@ version = "0.1.0" dependencies = [ "bytes 0.5.6", "futures", + "hyper", "lazy_static", "os_pipe", "pty", "regex", "tempfile", "tokio 0.2.22", - "warp", + "tokio-rustls", + "tokio-tungstenite", ] [[package]] @@ -3048,7 +2786,7 @@ version = "0.3.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "a12a3eb39ee2c231be64487f1fcbe726c8f2514876a55480a5ab8559fc374252" dependencies = [ - "autocfg 1.0.1", + "autocfg", "bytes 0.6.0", "futures-core", "lazy_static", @@ -3191,16 +2929,6 @@ dependencies = [ "lazy_static", ] -[[package]] -name = "tracing-futures" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab7bb6f14721aa00656086e9335d363c5c8747bae02ebe32ea2c7dece5689b4c" -dependencies = [ - "pin-project 0.4.23", - "tracing", -] - [[package]] name = "try-lock" version = "0.2.3" @@ -3220,21 +2948,12 @@ dependencies = [ "httparse", "input_buffer", "log", - "rand 0.7.3", - "sha-1 0.9.1", + "rand", + "sha-1", "url", "utf-8", ] -[[package]] -name = "twoway" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59b11b2b5241ba34be09c3cc85a36e56e48f9888862e19cedf23336d35316ed1" -dependencies = [ - "memchr", -] - [[package]] name = "typenum" version = "1.12.0" @@ -3310,12 +3029,6 @@ dependencies = [ "serde", ] -[[package]] -name = "urlencoding" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9232eb53352b4442e40d7900465dfc534e8cb2dc8f18656fcb2ac16112b5593" - [[package]] name = "utf-8" version = "0.7.5" @@ -3334,7 +3047,7 @@ version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fde2f6a4bea1d6e007c4ad38c6839fa71cbb63b6dbf5b595aa38dc9b1093c11" dependencies = [ - "rand 0.7.3", + "rand", ] [[package]] @@ -3370,35 +3083,6 @@ dependencies = [ "try-lock", ] -[[package]] -name = "warp" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f41be6df54c97904af01aa23e613d4521eed7ab23537cede692d4058f6449407" -dependencies = [ - "bytes 0.5.6", - "futures", - "headers", - "http", - "hyper", - "log", - "mime", - "mime_guess", - "multipart", - "pin-project 0.4.23", - "scoped-tls", - "serde", - "serde_json", - "serde_urlencoded", - "tokio 0.2.22", - "tokio-rustls", - "tokio-tungstenite", - "tower-service", - "tracing", - "tracing-futures", - "urlencoding", -] - [[package]] name = "wasi" version = "0.9.0+wasi-snapshot-preview1" diff --git a/cli/tests/websocket_test.ts b/cli/tests/websocket_test.ts index 51876dc59222ea..a220b37ea5b224 100644 --- a/cli/tests/websocket_test.ts +++ b/cli/tests/websocket_test.ts @@ -159,7 +159,9 @@ Deno.test("websocket error", async () => { ws.onopen = () => fail(); ws.onerror = (err): void => { assert(err instanceof ErrorEvent); - assertEquals(err.message, "InvalidData: received corrupt message"); + + // Error message got changed because we don't use warp in test_util + assertEquals(err.message, "UnexpectedEof: tls handshake eof"); promise1.resolve(); }; await promise1; diff --git a/test_util/Cargo.toml b/test_util/Cargo.toml index fefe7f3d8ebaf1..6eee12ebce5af7 100644 --- a/test_util/Cargo.toml +++ b/test_util/Cargo.toml @@ -19,7 +19,9 @@ lazy_static = "1.4.0" os_pipe = "0.9.2" regex = "1.3.9" tempfile = "3.1.0" -warp = { version = "0.2.5", features = ["tls"] } +hyper = "0.13" +tokio-tungstenite = "0.11" +tokio-rustls = "0.14" [target.'cfg(unix)'.dependencies] pty = "0.2.2" diff --git a/test_util/src/lib.rs b/test_util/src/lib.rs index 8a47eb139cbbae..dbb184fed056d7 100644 --- a/test_util/src/lib.rs +++ b/test_util/src/lib.rs @@ -5,32 +5,47 @@ #[macro_use] extern crate lazy_static; -use futures::future::{self, FutureExt}; +use futures::FutureExt; +use futures::Stream; +use futures::StreamExt; +use 
futures::TryStreamExt; +use hyper::header::HeaderValue; +use hyper::service::make_service_fn; +use hyper::service::service_fn; +use hyper::Body; +use hyper::Request; +use hyper::Response; +use hyper::Server; +use hyper::StatusCode; use os_pipe::pipe; #[cfg(unix)] pub use pty; use regex::Regex; use std::collections::HashMap; use std::env; +use std::io; use std::io::Read; use std::io::Write; use std::mem::replace; +use std::net::SocketAddr; use std::path::PathBuf; +use std::pin::Pin; use std::process::Child; use std::process::Command; use std::process::Output; use std::process::Stdio; +use std::result::Result; +use std::sync::Arc; use std::sync::Mutex; use std::sync::MutexGuard; +use std::task::Context; +use std::task::Poll; use tempfile::TempDir; -use warp::http::HeaderValue; -use warp::http::Response; -use warp::http::StatusCode; -use warp::http::Uri; -use warp::hyper::Body; -use warp::reply::with_header; -use warp::reply::Reply; -use warp::Filter; +use tokio::net::TcpListener; +use tokio::net::TcpStream; +use tokio_rustls::rustls; +use tokio_rustls::TlsAcceptor; +use tokio_tungstenite::accept_async; const PORT: u16 = 4545; const REDIRECT_PORT: u16 = 4546; @@ -117,132 +132,225 @@ pub fn test_server_path() -> PathBuf { /// Benchmark server that just serves "hello world" responses. async fn hyper_hello(port: u16) { println!("hyper hello"); - let route = warp::any().map(|| "Hello World!"); - warp::serve(route).bind(([127, 0, 0, 1], port)).await; + let addr = SocketAddr::from(([127, 0, 0, 1], port)); + let hello_svc = make_service_fn(|_| async move { + Ok::<_, hyper::error::Error>(service_fn( + move |_: Request| async move { + Ok::<_, hyper::error::Error>(Response::new(Body::from("Hello World!"))) + }, + )) + }); + + let server = Server::bind(&addr).serve(hello_svc); + if let Err(e) = server.await { + eprintln!("server error: {}", e); + } } -#[tokio::main] -pub async fn run_all_servers() { - if let Some(port) = env::args().nth(1) { - return hyper_hello(port.parse::().unwrap()).await; +fn redirect_resp(url: String) -> Response { + let mut redirect_resp = Response::new(Body::empty()); + *redirect_resp.status_mut() = StatusCode::MOVED_PERMANENTLY; + redirect_resp.headers_mut().insert( + hyper::header::LOCATION, + HeaderValue::from_str(&url[..]).unwrap(), + ); + + redirect_resp +} + +async fn redirect(req: Request) -> hyper::Result> { + let p = req.uri().path(); + assert_eq!(&p[0..1], "/"); + let url = format!("http://localhost:{}{}", PORT, p); + + Ok(redirect_resp(url)) +} + +async fn double_redirects(req: Request) -> hyper::Result> { + let p = req.uri().path(); + assert_eq!(&p[0..1], "/"); + let url = format!("http://localhost:{}{}", REDIRECT_PORT, p); + + Ok(redirect_resp(url)) +} + +async fn inf_redirects(req: Request) -> hyper::Result> { + let p = req.uri().path(); + assert_eq!(&p[0..1], "/"); + let url = format!("http://localhost:{}{}", INF_REDIRECTS_PORT, p); + + Ok(redirect_resp(url)) +} + +async fn another_redirect(req: Request) -> hyper::Result> { + let p = req.uri().path(); + assert_eq!(&p[0..1], "/"); + let url = format!("http://localhost:{}/cli/tests/subdir{}", PORT, p); + + Ok(redirect_resp(url)) +} + +async fn run_ws_server(addr: &SocketAddr) { + let mut listener = TcpListener::bind(addr).await.unwrap(); + while let Ok((stream, _addr)) = listener.accept().await { + tokio::spawn(async move { + let ws_stream_fut = accept_async(stream); + + let ws_stream = ws_stream_fut.await; + if let Ok(ws_stream) = ws_stream { + let (tx, rx) = ws_stream.split(); + rx.forward(tx) + .map(|result| { + 
if let Err(e) = result { + println!("websocket server error: {:?}", e); + } + }) + .await; + } + }); } +} - let routes = warp::path::full().map(|path: warp::path::FullPath| { - let p = path.as_str(); - assert_eq!(&p[0..1], "/"); - let url = format!("http://localhost:{}{}", PORT, p); - let u = url.parse::().unwrap(); - warp::redirect(u) - }); - let redirect_server_fut = - warp::serve(routes).bind(([127, 0, 0, 1], REDIRECT_PORT)); - - let websocket_route = warp::ws().map(|ws: warp::ws::Ws| { - ws.on_upgrade(|websocket| { - use futures::stream::StreamExt; - let (tx, rx) = websocket.split(); - rx.forward(tx).map(|result| { - if let Err(e) = result { - println!("websocket server error: {:?}", e); - } - }) - }) - }); - let ws_server_fut = - warp::serve(websocket_route).bind(([127, 0, 0, 1], WS_PORT)); - let wss_server_fut = warp::serve(websocket_route) - .tls() - .cert_path("std/http/testdata/tls/localhost.crt") - .key_path("std/http/testdata/tls/localhost.key") - .bind(([127, 0, 0, 1], WSS_PORT)); - - let routes = warp::path::full().map(|path: warp::path::FullPath| { - let p = path.as_str(); - assert_eq!(&p[0..1], "/"); - let url = format!("http://localhost:{}/cli/tests/subdir{}", PORT, p); - let u = url.parse::().unwrap(); - warp::redirect(u) - }); - let another_redirect_server_fut = - warp::serve(routes).bind(([127, 0, 0, 1], ANOTHER_REDIRECT_PORT)); - - let routes = warp::path::full().map(|path: warp::path::FullPath| { - let p = path.as_str(); - assert_eq!(&p[0..1], "/"); - let url = format!("http://localhost:{}{}", REDIRECT_PORT, p); - let u = url.parse::().unwrap(); - warp::redirect(u) - }); - let double_redirect_server_fut = - warp::serve(routes).bind(([127, 0, 0, 1], DOUBLE_REDIRECTS_PORT)); - - let routes = warp::path::full().map(|path: warp::path::FullPath| { - let p = path.as_str(); - assert_eq!(&p[0..1], "/"); - let url = format!("http://localhost:{}{}", INF_REDIRECTS_PORT, p); - let u = url.parse::().unwrap(); - warp::redirect(u) - }); - let inf_redirect_server_fut = - warp::serve(routes).bind(([127, 0, 0, 1], INF_REDIRECTS_PORT)); - - // redirect server that redirect to absolute paths under same host - // redirects /REDIRECT/file_name to /file_name - let routes = warp::path("REDIRECT") - .and(warp::path::peek()) - .map(|path: warp::path::Peek| { - let p = path.as_str(); - let url = format!("/{}", p); - let u = url.parse::().unwrap(); - warp::redirect(u) - }) - .or( - warp::path!("a" / "b" / "c") - .and(warp::header::("x-location")) - .map(|token: String| { - let uri: Uri = token.parse().unwrap(); - warp::redirect(uri) - }), - ) - .or( - warp::any() - .and(warp::path::peek()) - .and(warp::fs::dir(root_path())) - .map(custom_headers), - ); - let absolute_redirect_server_fut = - warp::serve(routes).bind(([127, 0, 0, 1], REDIRECT_ABSOLUTE_PORT)); - - let echo_server = warp::path("echo_server") - .and(warp::post()) - .and(warp::body::bytes()) - .and(warp::header::optional::("x-status")) - .and(warp::header::optional::("content-type")) - .and(warp::header::optional::("user-agent")) - .map( - |bytes: bytes::Bytes, - status: Option, - content_type: Option, - user_agent: Option| - -> Box { - let mut res = Response::new(Body::from(bytes)); - if let Some(v) = status { - *res.status_mut() = StatusCode::from_bytes(v.as_bytes()).unwrap(); - } - let h = res.headers_mut(); - if let Some(v) = content_type { - h.insert("content-type", HeaderValue::from_str(&v).unwrap()); +async fn get_tls_config( + cert: &str, + key: &str, +) -> io::Result> { + let mut cert_path = root_path(); + let mut key_path = 
root_path(); + cert_path.push(cert); + key_path.push(key); + + let cert_file = std::fs::File::open(cert_path)?; + let key_file = std::fs::File::open(key_path)?; + + let mut cert_reader = io::BufReader::new(cert_file); + let cert = rustls::internal::pemfile::certs(&mut cert_reader) + .expect("Cannot load certificate"); + let mut key_reader = io::BufReader::new(key_file); + let key = { + let pkcs8_key = + rustls::internal::pemfile::pkcs8_private_keys(&mut key_reader) + .expect("Cannot load key file"); + let rsa_key = rustls::internal::pemfile::rsa_private_keys(&mut key_reader) + .expect("Cannot load key file"); + if !pkcs8_key.is_empty() { + Some(pkcs8_key[0].clone()) + } else if !rsa_key.is_empty() { + Some(rsa_key[0].clone()) + } else { + None + } + }; + + match key { + Some(key) => { + let mut config = rustls::ServerConfig::new(rustls::NoClientAuth::new()); + config + .set_single_cert(cert, key) + .map_err(|e| { + eprintln!("Error setting cert: {:?}", e); + }) + .unwrap(); + + return Ok(Arc::new(config)); + } + None => { + return Err(io::Error::new(io::ErrorKind::Other, "Cannot find key")); + } + } +} + +async fn run_wss_server(addr: &SocketAddr) { + let cert_file = "std/http/testdata/tls/localhost.crt"; + let key_file = "std/http/testdata/tls/localhost.key"; + + let tls_config = get_tls_config(cert_file, key_file).await.unwrap(); + let tls_acceptor = TlsAcceptor::from(tls_config); + let mut listener = TcpListener::bind(addr).await.unwrap(); + + while let Ok((stream, _addr)) = listener.accept().await { + let acceptor = tls_acceptor.clone(); + tokio::spawn(async move { + match acceptor.accept(stream).await { + Ok(tls_stream) => { + let ws_stream_fut = accept_async(tls_stream); + let ws_stream = ws_stream_fut.await; + if let Ok(ws_stream) = ws_stream { + let (tx, rx) = ws_stream.split(); + rx.forward(tx) + .map(|result| { + if let Err(e) = result { + println!("Websocket server error: {:?}", e); + } + }) + .await; + } } - if let Some(v) = user_agent { - h.insert("user-agent", HeaderValue::from_str(&v).unwrap()); + Err(e) => { + eprintln!("TLS accept error: {:?}", e); } - Box::new(res) - }, - ); - let echo_multipart_file = warp::path("echo_multipart_file") - .and(warp::post()) - .and(warp::body::bytes()) - .map(|bytes: bytes::Bytes| -> Box { + } + }); + } +} + +async fn absolute_redirect( + req: Request, +) -> hyper::Result> { + let path = req.uri().path(); + + if path.starts_with("/REDIRECT") { + let url = &req.uri().path()[9..]; + println!("URL: {:?}", url); + let redirect = redirect_resp(url.to_string()); + return Ok(redirect); + } + + if path.starts_with("/a/b/c") { + if let Some(x_loc) = req.headers().get("x-location") { + let loc = x_loc.to_str().unwrap(); + return Ok(redirect_resp(loc.to_string())); + } + } + + let mut file_path = root_path(); + file_path.push(&req.uri().path()[1..]); + if file_path.is_dir() || !file_path.exists() { + let mut not_found_resp = Response::new(Body::empty()); + *not_found_resp.status_mut() = StatusCode::NOT_FOUND; + return Ok(not_found_resp); + } + + let file = tokio::fs::read(file_path).await.unwrap(); + let file_resp = custom_headers(req.uri().path(), file); + return Ok(file_resp); +} + +async fn main_server(req: Request) -> hyper::Result> { + return match (req.method(), req.uri().path()) { + (&hyper::Method::POST, "/echo_server") => { + let (parts, body) = req.into_parts(); + let mut response = Response::new(body); + + if let Some(status) = parts.headers.get("x-status") { + *response.status_mut() = + StatusCode::from_bytes(status.as_bytes()).unwrap(); + 
} + if let Some(content_type) = parts.headers.get("content-type") { + response + .headers_mut() + .insert("content-type", content_type.clone()); + } + if let Some(user_agent) = parts.headers.get("user-agent") { + response + .headers_mut() + .insert("user-agent", user_agent.clone()); + } + Ok(response) + } + (&hyper::Method::POST, "/echo_multipart_file") => { + let body = req.into_body(); + let bytes = &hyper::body::to_bytes(body).await.unwrap()[0..]; let start = b"--boundary\t \r\n\ Content-Disposition: form-data; name=\"field_1\"\r\n\ \r\n\ @@ -253,236 +361,383 @@ pub async fn run_all_servers() { Content-Type: application/octet-stream\r\n\ \r\n"; let end = b"\r\n--boundary--\r\n"; - let b = [start as &[u8], &bytes, end].concat(); + let b = [start as &[u8], bytes, end].concat(); - let mut res = Response::new(Body::from(b)); - let h = res.headers_mut(); - h.insert( + let mut response = Response::new(Body::from(b)); + response.headers_mut().insert( "content-type", HeaderValue::from_static("multipart/form-data;boundary=boundary"), ); - Box::new(res) - }); - let multipart_form_data = - warp::path("multipart_form_data.txt").map(|| -> Box { + Ok(response) + } + (_, "/multipart_form_data.txt") => { let b = "Preamble\r\n\ - --boundary\t \r\n\ - Content-Disposition: form-data; name=\"field_1\"\r\n\ - \r\n\ - value_1 \r\n\ - \r\n--boundary\r\n\ - Content-Disposition: form-data; name=\"field_2\";\ - filename=\"file.js\"\r\n\ - Content-Type: text/javascript\r\n\ - \r\n\ - console.log(\"Hi\")\ - \r\n--boundary--\r\n\ - Epilogue"; + --boundary\t \r\n\ + Content-Disposition: form-data; name=\"field_1\"\r\n\ + \r\n\ + value_1 \r\n\ + \r\n--boundary\r\n\ + Content-Disposition: form-data; name=\"field_2\";\ + filename=\"file.js\"\r\n\ + Content-Type: text/javascript\r\n\ + \r\n\ + console.log(\"Hi\")\ + \r\n--boundary--\r\n\ + Epilogue"; let mut res = Response::new(Body::from(b)); res.headers_mut().insert( "content-type", HeaderValue::from_static("multipart/form-data;boundary=boundary"), ); - Box::new(res) - }); - let bad_redirect = warp::path("bad_redirect").map(|| -> Box { - let mut res = Response::new(Body::empty()); - *res.status_mut() = StatusCode::FOUND; - Box::new(res) - }); - let non_ascii_redirect = - warp::path("non_ascii_redirect").map(|| -> Box { + Ok(res) + } + (_, "/bad_redirect") => { + let mut res = Response::new(Body::empty()); + *res.status_mut() = StatusCode::FOUND; + Ok(res) + } + (_, "/non_ascii_redirect") => { let mut res = Response::new(Body::empty()); *res.status_mut() = StatusCode::MOVED_PERMANENTLY; res.headers_mut().insert( "location", HeaderValue::from_bytes(b"/redirect\xae").unwrap(), ); - Box::new(res) - }); + Ok(res) + } + (_, "/etag_script.ts") => { + let if_none_match = req.headers().get("if-none-match"); + if if_none_match == Some(&HeaderValue::from_static("33a64df551425fcc55e")) + { + let mut resp = Response::new(Body::empty()); + *resp.status_mut() = StatusCode::NOT_MODIFIED; + resp.headers_mut().insert( + "Content-type", + HeaderValue::from_static("application/typescript"), + ); + resp + .headers_mut() + .insert("ETag", HeaderValue::from_static("33a64df551425fcc55e")); - let etag_script = warp::path!("etag_script.ts") - .and(warp::header::optional::("if-none-match")) - .map(|if_none_match| -> Box { - if if_none_match == Some("33a64df551425fcc55e".to_string()) { - let r = - warp::reply::with_status(warp::reply(), StatusCode::NOT_MODIFIED); - let r = with_header(r, "Content-type", "application/typescript"); - let r = with_header(r, "ETag", "33a64df551425fcc55e"); - 
Box::new(r) + Ok(resp) } else { - let mut res = Response::new(Body::from("console.log('etag')")); - let h = res.headers_mut(); - h.insert( + let mut resp = Response::new(Body::from("console.log('etag')")); + resp.headers_mut().insert( "Content-type", HeaderValue::from_static("application/typescript"), ); - h.insert("ETag", HeaderValue::from_static("33a64df551425fcc55e")); - Box::new(res) + resp + .headers_mut() + .insert("ETag", HeaderValue::from_static("33a64df551425fcc55e")); + Ok(resp) } - }); - let xtypescripttypes = warp::path!("xTypeScriptTypes.js") - .map(|| { + } + (_, "/xTypeScriptTypes.js") => { let mut res = Response::new(Body::from("export const foo = 'foo';")); - let h = res.headers_mut(); - h.insert( + res.headers_mut().insert( "Content-type", HeaderValue::from_static("application/javascript"), ); - h.insert( + res.headers_mut().insert( "X-TypeScript-Types", HeaderValue::from_static("./xTypeScriptTypes.d.ts"), ); - res - }) - .or(warp::path!("xTypeScriptTypes.d.ts").map(|| { + Ok(res) + } + (_, "/xTypeScriptTypes.d.ts") => { let mut res = Response::new(Body::from("export const foo: 'foo';")); res.headers_mut().insert( "Content-type", HeaderValue::from_static("application/typescript"), ); - res - })) - .or(warp::path!("type_directives_redirect.js").map(|| { + Ok(res) + } + (_, "/type_directives_redirect.js") => { let mut res = Response::new(Body::from("export const foo = 'foo';")); - let h = res.headers_mut(); - h.insert( + res.headers_mut().insert( "Content-type", HeaderValue::from_static("application/javascript"), ); - h.insert( + res.headers_mut().insert( "X-TypeScript-Types", HeaderValue::from_static( "http://localhost:4547/xTypeScriptTypesRedirect.d.ts", ), ); - res - })) - .or(warp::path!("type_headers_deno_types.foo.js").map(|| { - let mut res = Response::new(Body::from("export function foo(text) { console.log(text); }")); - let h = res.headers_mut(); - h.insert( + Ok(res) + } + (_, "/type_headers_deno_types.foo.js") => { + let mut res = Response::new(Body::from( + "export function foo(text) { console.log(text); }", + )); + res.headers_mut().insert( "Content-type", HeaderValue::from_static("application/javascript"), ); - h.insert( + res.headers_mut().insert( "X-TypeScript-Types", HeaderValue::from_static( "http://localhost:4545/type_headers_deno_types.d.ts", ), ); - res - })) - .or(warp::path!("type_headers_deno_types.d.ts").map(|| { - let mut res = Response::new(Body::from("export function foo(text: number): void;")); - let h = res.headers_mut(); - h.insert( + Ok(res) + } + (_, "/type_headers_deno_types.d.ts") => { + let mut res = + Response::new(Body::from("export function foo(text: number): void;")); + res.headers_mut().insert( "Content-type", HeaderValue::from_static("application/typescript"), ); - res - })) - .or(warp::path!("type_headers_deno_types.foo.d.ts").map(|| { - let mut res = Response::new(Body::from("export function foo(text: string): void;")); - let h = res.headers_mut(); - h.insert( + Ok(res) + } + (_, "/type_headers_deno_types.foo.d.ts") => { + let mut res = + Response::new(Body::from("export function foo(text: string): void;")); + res.headers_mut().insert( "Content-type", HeaderValue::from_static("application/typescript"), ); - res - })) - .or(warp::path!("cli"/"tests"/"subdir"/"xTypeScriptTypesRedirect.d.ts").map(|| { + Ok(res) + } + (_, "/cli/tests/subdir/xTypeScriptTypesRedirect.d.ts") => { let mut res = Response::new(Body::from( "import './xTypeScriptTypesRedirected.d.ts';", )); - let h = res.headers_mut(); - h.insert( + res.headers_mut().insert( 
"Content-type", HeaderValue::from_static("application/typescript"), ); - res - })) - .or(warp::path!("cli"/"tests"/"subdir"/"xTypeScriptTypesRedirected.d.ts").map(|| { + Ok(res) + } + (_, "/cli/tests/subdir/xTypeScriptTypesRedirected.d.ts") => { let mut res = Response::new(Body::from("export const foo: 'foo';")); - let h = res.headers_mut(); - h.insert( + res.headers_mut().insert( "Content-type", HeaderValue::from_static("application/typescript"), ); - res - })) - .or(warp::path!("referenceTypes.js").map(|| { + Ok(res) + } + (_, "/referenceTypes.js") => { let mut res = Response::new(Body::from("/// \r\nexport const foo = \"foo\";\r\n")); - let h = res.headers_mut(); - h.insert( + res.headers_mut().insert( "Content-type", HeaderValue::from_static("application/javascript"), ); - res - })) - .or(warp::path!("cli"/"tests"/"subdir"/"file_with_:_in_name.ts").map(|| { + Ok(res) + } + (_, "/cli/tests/subdir/file_with_:_in_name.ts") => { let mut res = Response::new(Body::from( "console.log('Hello from file_with_:_in_name.ts');", )); - let h = res.headers_mut(); - h.insert( + res.headers_mut().insert( "Content-type", HeaderValue::from_static("application/typescript"), ); - res - })) - .or(warp::path!("cli"/"tests"/"subdir"/"no_js_ext@1.0.0").map(|| { + Ok(res) + } + (_, "/cli/tests/subdir/no_js_ext@1.0.0") => { let mut res = Response::new(Body::from( r#"import { printHello } from "./mod2.ts"; printHello(); "#, )); - let h = res.headers_mut(); - h.insert( + res.headers_mut().insert( "Content-type", HeaderValue::from_static("application/javascript"), ); - res - })); - - let content_type_handler = warp::any() - .and(warp::path::peek()) - .and(warp::fs::dir(root_path())) - .map(custom_headers) - .or(etag_script) - .or(xtypescripttypes) - .or(echo_server) - .or(echo_multipart_file) - .or(multipart_form_data) - .or(bad_redirect) - .or(non_ascii_redirect); - - let http_fut = - warp::serve(content_type_handler.clone()).bind(([127, 0, 0, 1], PORT)); - - let https_fut = warp::serve(content_type_handler.clone()) - .tls() - .cert_path("std/http/testdata/tls/localhost.crt") - .key_path("std/http/testdata/tls/localhost.key") - .bind(([127, 0, 0, 1], HTTPS_PORT)); + Ok(res) + } + _ => { + let mut file_path = root_path(); + file_path.push(&req.uri().path()[1..]); + if let Ok(file) = tokio::fs::read(file_path).await { + let file_resp = custom_headers(&req.uri().path()[1..], file); + return Ok(file_resp); + } + + return Ok(Response::new(Body::empty())); + } + }; +} + +/// Taken from example in https://github.com/ctz/hyper-rustls/blob/a02ef72a227dcdf102f86e905baa7415c992e8b3/examples/server.rs +struct HyperAcceptor<'a> { + acceptor: Pin< + Box< + dyn Stream>> + + 'a, + >, + >, +} + +impl hyper::server::accept::Accept for HyperAcceptor<'_> { + type Conn = tokio_rustls::server::TlsStream; + type Error = io::Error; + + fn poll_accept( + mut self: Pin<&mut Self>, + cx: &mut Context, + ) -> Poll>> { + Pin::new(&mut self.acceptor).poll_next(cx) + } +} + +unsafe impl std::marker::Send for HyperAcceptor<'_> {} + +async fn wrap_redirect_server() { + let redirect_svc = + make_service_fn(|_| async { Ok::<_, hyper::Error>(service_fn(redirect)) }); + let redirect_addr = SocketAddr::from(([127, 0, 0, 1], REDIRECT_PORT)); + let redirect_server = Server::bind(&redirect_addr).serve(redirect_svc); + if let Err(e) = redirect_server.await { + eprintln!("Redirect error: {:?}", e); + } +} + +async fn wrap_double_redirect_server() { + let double_redirects_svc = make_service_fn(|_| async { + Ok::<_, 
hyper::Error>(service_fn(double_redirects)) + }); + let double_redirects_addr = + SocketAddr::from(([127, 0, 0, 1], DOUBLE_REDIRECTS_PORT)); + let double_redirects_server = + Server::bind(&double_redirects_addr).serve(double_redirects_svc); + if let Err(e) = double_redirects_server.await { + eprintln!("Double redirect error: {:?}", e); + } +} + +async fn wrap_inf_redirect_server() { + let inf_redirects_svc = make_service_fn(|_| async { + Ok::<_, hyper::Error>(service_fn(inf_redirects)) + }); + let inf_redirects_addr = + SocketAddr::from(([127, 0, 0, 1], INF_REDIRECTS_PORT)); + let inf_redirects_server = + Server::bind(&inf_redirects_addr).serve(inf_redirects_svc); + if let Err(e) = inf_redirects_server.await { + eprintln!("Inf redirect error: {:?}", e); + } +} + +async fn wrap_another_redirect_server() { + let another_redirect_svc = make_service_fn(|_| async { + Ok::<_, hyper::Error>(service_fn(another_redirect)) + }); + let another_redirect_addr = + SocketAddr::from(([127, 0, 0, 1], ANOTHER_REDIRECT_PORT)); + let another_redirect_server = + Server::bind(&another_redirect_addr).serve(another_redirect_svc); + if let Err(e) = another_redirect_server.await { + eprintln!("Another redirect error: {:?}", e); + } +} + +async fn wrap_abs_redirect_server() { + let abs_redirect_svc = make_service_fn(|_| async { + Ok::<_, hyper::Error>(service_fn(absolute_redirect)) + }); + let abs_redirect_addr = + SocketAddr::from(([127, 0, 0, 1], REDIRECT_ABSOLUTE_PORT)); + let abs_redirect_server = + Server::bind(&abs_redirect_addr).serve(abs_redirect_svc); + if let Err(e) = abs_redirect_server.await { + eprintln!("Absolute redirect error: {:?}", e); + } +} + +async fn wrap_main_server() { + let main_server_svc = make_service_fn(|_| async { + Ok::<_, hyper::Error>(service_fn(main_server)) + }); + let main_server_addr = SocketAddr::from(([127, 0, 0, 1], PORT)); + let main_server = Server::bind(&main_server_addr).serve(main_server_svc); + if let Err(e) = main_server.await { + eprintln!("HTTP server error: {:?}", e); + } +} + +async fn wrap_main_https_server() { + let main_server_https_addr = SocketAddr::from(([127, 0, 0, 1], HTTPS_PORT)); + let cert_file = "std/http/testdata/tls/localhost.crt"; + let key_file = "std/http/testdata/tls/localhost.key"; + let tls_config = get_tls_config(cert_file, key_file) + .await + .expect("Cannot get TLS config"); + let mut tcp = TcpListener::bind(&main_server_https_addr) + .await + .expect("Cannot bind TCP"); + loop { + let tls_acceptor = TlsAcceptor::from(tls_config.clone()); + // Prepare a long-running future stream to accept and serve cients. 
+ let incoming_tls_stream = tcp + .incoming() + .map_err(|e| { + eprintln!("Error Incoming: {:?}", e); + io::Error::new(io::ErrorKind::Other, e) + }) + .and_then(move |s| { + use futures::TryFutureExt; + tls_acceptor.accept(s).map_err(|e| { + eprintln!("TLS Error {:?}", e); + e + }) + }) + .boxed(); + + let main_server_https_svc = make_service_fn(|_| async { + Ok::<_, hyper::Error>(service_fn(main_server)) + }); + let main_server_https = Server::builder(HyperAcceptor { + acceptor: incoming_tls_stream, + }) + .serve(main_server_https_svc); + + //continue to prevent TLS error stopping the server + if main_server_https.await.is_err() { + continue; + } + } +} + +#[tokio::main] +pub async fn run_all_servers() { + if let Some(port) = env::args().nth(1) { + return hyper_hello(port.parse::().unwrap()).await; + } + + let redirect_server_fut = wrap_redirect_server(); + let double_redirects_server_fut = wrap_double_redirect_server(); + let inf_redirects_server_fut = wrap_inf_redirect_server(); + let another_redirect_server_fut = wrap_another_redirect_server(); + let abs_redirect_server_fut = wrap_abs_redirect_server(); + + let ws_addr = SocketAddr::from(([127, 0, 0, 1], WS_PORT)); + let ws_server_fut = run_ws_server(&ws_addr); + let wss_addr = SocketAddr::from(([127, 0, 0, 1], WSS_PORT)); + let wss_server_fut = run_wss_server(&wss_addr); + + let main_server_fut = wrap_main_server(); + let main_server_https_fut = wrap_main_https_server(); let mut server_fut = async { futures::join!( - http_fut, - https_fut, redirect_server_fut, ws_server_fut, wss_server_fut, another_redirect_server_fut, - inf_redirect_server_fut, - double_redirect_server_fut, - absolute_redirect_server_fut, + inf_redirects_server_fut, + double_redirects_server_fut, + abs_redirect_server_fut, + main_server_fut, + main_server_https_fut, ) } .boxed(); let mut did_print_ready = false; - future::poll_fn(move |cx| { + futures::future::poll_fn(move |cx| { let poll_result = server_fut.poll_unpin(cx); if !replace(&mut did_print_ready, true) { println!("ready"); @@ -492,38 +747,61 @@ pub async fn run_all_servers() { .await; } -fn custom_headers(path: warp::path::Peek, f: warp::fs::File) -> Box { - let p = path.as_str(); +fn custom_headers(p: &str, body: Vec) -> Response { + let mut response = Response::new(Body::from(body)); if p.ends_with("cli/tests/x_deno_warning.js") { - let f = with_header(f, "Content-Type", "application/javascript"); - let f = with_header(f, "X-Deno-Warning", "foobar"); - return Box::new(f); + response.headers_mut().insert( + "Content-Type", + HeaderValue::from_static("application/javascript"), + ); + response + .headers_mut() + .insert("X-Deno-Warning", HeaderValue::from_static("foobar")); + return response; } if p.ends_with("cli/tests/053_import_compression/brotli") { - let f = with_header(f, "Content-Encoding", "br"); - let f = with_header(f, "Content-Type", "application/javascript"); - let f = with_header(f, "Content-Length", "26"); - return Box::new(f); + response + .headers_mut() + .insert("Content-Encoding", HeaderValue::from_static("br")); + response.headers_mut().insert( + "Content-Type", + HeaderValue::from_static("application/javascript"), + ); + response + .headers_mut() + .insert("Content-Length", HeaderValue::from_static("26")); + return response; } if p.ends_with("cli/tests/053_import_compression/gziped") { - let f = with_header(f, "Content-Encoding", "gzip"); - let f = with_header(f, "Content-Type", "application/javascript"); - let f = with_header(f, "Content-Length", "39"); - return Box::new(f); + response 
+ .headers_mut() + .insert("Content-Encoding", HeaderValue::from_static("gzip")); + response.headers_mut().insert( + "Content-Type", + HeaderValue::from_static("application/javascript"), + ); + response + .headers_mut() + .insert("Content-Length", HeaderValue::from_static("39")); + return response; } + if p.contains("cli/tests/encoding/") { let charset = p .split_terminator('/') .last() .unwrap() .trim_end_matches(".ts"); - let f = with_header( - f, + + response.headers_mut().insert( "Content-Type", - &format!("application/typescript;charset={}", charset)[..], + HeaderValue::from_str( + &format!("application/typescript;charset={}", charset)[..], + ) + .unwrap(), ); - return Box::new(f); + return response; } let content_type = if p.contains(".t1.") { @@ -559,10 +837,13 @@ fn custom_headers(path: warp::path::Peek, f: warp::fs::File) -> Box { }; if let Some(t) = content_type { - Box::new(with_header(f, "Content-Type", t)) - } else { - Box::new(f) + response + .headers_mut() + .insert("Content-Type", HeaderValue::from_str(t).unwrap()); + return response; } + + response } #[derive(Default)] @@ -790,7 +1071,7 @@ pub fn deno_cmd() -> Command { pub fn run_powershell_script_file( script_file_path: &str, args: Vec<&str>, -) -> Result<(), i64> { +) -> std::result::Result<(), i64> { let deno_dir = new_deno_dir(); let mut command = Command::new("powershell.exe"); From bfe1b053815f68b121691e9690b8353178d86128 Mon Sep 17 00:00:00 2001 From: Zheyu Zhang Date: Sat, 26 Dec 2020 21:02:37 +0800 Subject: [PATCH 106/135] ci: setup latest version of Node (#8888) --- .github/workflows/ci.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1387ae7efe66bc..662808af932e3e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -93,6 +93,12 @@ jobs: python-version: "2.7" architecture: x64 + - name: Install Node + uses: actions/setup-node@v2 + with: + node-version: "14" + check-latest: true + - name: Remove unused versions of Python if: startsWith(matrix.os, 'windows') run: |- From c1fdb30394ab336ec2e004d563be40180e218b0d Mon Sep 17 00:00:00 2001 From: Yosi Pramajaya Date: Sat, 26 Dec 2020 20:06:00 +0700 Subject: [PATCH 107/135] fix: fetch bad URL will not panic (#8884) --- Cargo.lock | 15 +++++++++++++-- cli/tests/unit/fetch_test.ts | 36 ++++++++++++++++++++++++++++++++---- core/Cargo.toml | 2 +- op_crates/fetch/lib.rs | 8 +++++--- 4 files changed, 51 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0f8f997735ad54..f74fe8436ad2dd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -771,6 +771,16 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "form_urlencoded" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ece68d15c92e84fa4f19d3780f1294e5ca82a78a6d515f1efaabcc144688be00" +dependencies = [ + "matches", + "percent-encoding", +] + [[package]] name = "from_variant" version = "0.1.2" @@ -3019,10 +3029,11 @@ checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" [[package]] name = "url" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "829d4a8476c35c9bf0bbce5a3b23f4106f79728039b726d292bb93bc106787cb" +checksum = "5909f2b0817350449ed73e8bcd81c8c3c8d9a7a5d8acba4b27db277f1868976e" dependencies = [ + "form_urlencoded", "idna", "matches", "percent-encoding", 
diff --git a/cli/tests/unit/fetch_test.ts b/cli/tests/unit/fetch_test.ts index 359a24e9529990..2355d08133e2b4 100644 --- a/cli/tests/unit/fetch_test.ts +++ b/cli/tests/unit/fetch_test.ts @@ -27,12 +27,37 @@ unitTest( async (): Promise => { await fetch("http://localhost:4000"); }, - Deno.errors.Http, + TypeError, "error trying to connect", ); }, ); +unitTest( + { perms: { net: true } }, + async function fetchDnsError(): Promise { + await assertThrowsAsync( + async (): Promise => { + await fetch("http://nil/"); + }, + TypeError, + "error trying to connect", + ); + }, +); + +unitTest( + { perms: { net: true } }, + async function fetchInvalidUriError(): Promise { + await assertThrowsAsync( + async (): Promise => { + await fetch("http:///"); + }, + URIError, + ); + }, +); + unitTest({ perms: { net: true } }, async function fetchJsonSuccess(): Promise< void > { @@ -199,9 +224,12 @@ unitTest({ perms: { net: true } }, async function responseClone(): Promise< unitTest({ perms: { net: true } }, async function fetchEmptyInvalid(): Promise< void > { - await assertThrowsAsync(async () => { - await fetch(""); - }, URIError); + await assertThrowsAsync( + async () => { + await fetch(""); + }, + URIError, + ); }); unitTest( diff --git a/core/Cargo.toml b/core/Cargo.toml index 4e81f935ce869b..2d4abc2cf20dbb 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -23,7 +23,7 @@ rusty_v8 = "0.14.0" serde_json = { version = "1.0", features = ["preserve_order"] } serde = { version = "1.0", features = ["derive"] } smallvec = "1.4.2" -url = { version = "2.1.1", features = ["serde"] } +url = { version = "2.2", features = ["serde"] } pin-project = "1.0.2" [[example]] diff --git a/op_crates/fetch/lib.rs b/op_crates/fetch/lib.rs index 4bc37b998cef4d..91e44f75c287b5 100644 --- a/op_crates/fetch/lib.rs +++ b/op_crates/fetch/lib.rs @@ -8,7 +8,6 @@ use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; -use deno_core::url; use deno_core::url::Url; use deno_core::AsyncRefCell; use deno_core::BufVec; @@ -126,7 +125,7 @@ where None => Method::GET, }; - let url_ = url::Url::parse(&url)?; + let url_ = Url::parse(&url)?; // Check scheme before asking for net permission let scheme = url_.scheme(); @@ -155,7 +154,10 @@ where } //debug!("Before fetch {}", url); - let res = request.send().await?; + let res = match request.send().await { + Ok(res) => res, + Err(e) => return Err(type_error(e.to_string())), + }; //debug!("Fetch response {}", url); let status = res.status(); From e8587c86bf94eec35ccdae5b5aa0cffc70d59563 Mon Sep 17 00:00:00 2001 From: Casper Beyer Date: Sat, 26 Dec 2020 21:29:46 +0800 Subject: [PATCH 108/135] test(cli): ensure await Promise.all does not race (#8868) --- cli/tests/stdout_write_all.out | 4 +--- cli/tests/stdout_write_all.ts | 6 +++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/cli/tests/stdout_write_all.out b/cli/tests/stdout_write_all.out index 49a6d64e94b97c..af5626b4a114ab 100644 --- a/cli/tests/stdout_write_all.out +++ b/cli/tests/stdout_write_all.out @@ -1,3 +1 @@ -done -done -complete +Hello, world! 
diff --git a/cli/tests/stdout_write_all.ts b/cli/tests/stdout_write_all.ts index c82a0ca7d13452..623bd8f5381e67 100644 --- a/cli/tests/stdout_write_all.ts +++ b/cli/tests/stdout_write_all.ts @@ -1,8 +1,8 @@ const encoder = new TextEncoder(); const pending = [ - Deno.stdout.write(encoder.encode("done\n")), - Deno.stdout.write(encoder.encode("done\n")), + Deno.stdout.write(encoder.encode("Hello, ")), + Deno.stdout.write(encoder.encode("world!")), ]; await Promise.all(pending); -await Deno.stdout.write(encoder.encode("complete\n")); +await Deno.stdout.write(encoder.encode("\n")); From 9419c06ab5e9f7fb258e7a48093f17f6d2511ea6 Mon Sep 17 00:00:00 2001 From: Luca Casonato Date: Sat, 26 Dec 2020 18:15:30 +0100 Subject: [PATCH 109/135] chore: move ProgressEvent type to deno.web lib (#8878) --- cli/dts/lib.deno.shared_globals.d.ts | 17 ----------------- op_crates/web/lib.deno_web.d.ts | 17 +++++++++++++++++ 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/cli/dts/lib.deno.shared_globals.d.ts b/cli/dts/lib.deno.shared_globals.d.ts index 6eb9019ad89689..433e90113aee2f 100644 --- a/cli/dts/lib.deno.shared_globals.d.ts +++ b/cli/dts/lib.deno.shared_globals.d.ts @@ -643,12 +643,6 @@ interface PostMessageOptions { transfer?: any[]; } -interface ProgressEventInit extends EventInit { - lengthComputable?: boolean; - loaded?: number; - total?: number; -} - interface AbstractWorkerEventMap { "error": ErrorEvent; } @@ -840,17 +834,6 @@ declare class PerformanceMeasure extends PerformanceEntry { readonly entryType: "measure"; } -/** Events measuring progress of an underlying process, like an HTTP request - * (for an XMLHttpRequest, or the loading of the underlying resource of an - * ,