diff --git a/.travis.yml b/.travis.yml
index 7d937dfb22870..bfc07e2b5100c 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -7,19 +7,33 @@ node_js:
 
 sudo: false
 
-os:
-  - linux
-  - osx
+env:
+  - workerCount=3
 
 matrix:
   fast_finish: true
-  exclude:
+  include:
     - os: osx
-      node_js: '4'
+      node_js: stable
+      osx_image: xcode7.3
+      env: workerCount=2
+  allow_failures:
     - os: osx
-      node_js: '0.10'
 
 branches:
   only:
     - master
-    - transforms
\ No newline at end of file
+    - transforms
+
+install:
+  - npm uninstall typescript
+  - npm uninstall tslint
+  - npm install
+  - npm update
+
+cache:
+  directories:
+    - node_modules
+
+git:
+  depth: 1
diff --git a/Gulpfile.ts b/Gulpfile.ts
index e31bc7d268bb5..1b90256283264 100644
--- a/Gulpfile.ts
+++ b/Gulpfile.ts
@@ -17,7 +17,7 @@ declare module "gulp-typescript" {
         stripInternal?: boolean;
         types?: string[];
     }
-    interface CompileStream extends NodeJS.ReadWriteStream {} // Either gulp or gulp-typescript has some odd typings which don't reflect reality, making this required
+    interface CompileStream extends NodeJS.ReadWriteStream { } // Either gulp or gulp-typescript has some odd typings which don't reflect reality, making this required
 }
 import * as insert from "gulp-insert";
 import * as sourcemaps from "gulp-sourcemaps";
@@ -34,7 +34,7 @@ import through2 = require("through2");
 import merge2 = require("merge2");
 import intoStream = require("into-stream");
 import * as os from "os";
-import Linter = require("tslint");
+import fold = require("travis-fold");
 const gulp = helpMaker(originalGulp);
 const mochaParallel = require("./scripts/mocha-parallel.js");
 const {runTestsInParallel} = mochaParallel;
@@ -59,14 +59,13 @@ const cmdLineOptions = minimist(process.argv.slice(2), {
         browser: process.env.browser || process.env.b || "IE",
         tests: process.env.test || process.env.tests || process.env.t,
         light: process.env.light || false,
-        port: process.env.port || process.env.p || "8888",
         reporter: process.env.reporter || process.env.r,
         lint: process.env.lint || true,
         files: process.env.f || process.env.file || process.env.files || "",
     }
 });
 
-function exec(cmd: string, args: string[], complete: () => void = (() => {}), error: (e: any, status: number) => void = (() => {})) {
+function exec(cmd: string, args: string[], complete: () => void = (() => { }), error: (e: any, status: number) => void = (() => { })) {
     console.log(`${cmd} ${args.join(" ")}`);
     // TODO (weswig): Update child_process types to add windowsVerbatimArguments to the type definition
     const subshellFlag = isWin ? "/c" : "-c";
@@ -117,12 +116,12 @@ const es2015LibrarySources = [
 ];
 
 const es2015LibrarySourceMap = es2015LibrarySources.map(function(source) {
-    return { target: "lib." + source, sources: ["header.d.ts", source] };
+    return { target: "lib." + source, sources: ["header.d.ts", source] };
 });
 
-const es2016LibrarySource = [ "es2016.array.include.d.ts" ];
+const es2016LibrarySource = ["es2016.array.include.d.ts"];
 
-const es2016LibrarySourceMap = es2016LibrarySource.map(function (source) {
+const es2016LibrarySourceMap = es2016LibrarySource.map(function(source) {
     return { target: "lib." + source, sources: ["header.d.ts", source] };
 });
 
@@ -131,38 +130,38 @@ const es2017LibrarySource = [
     "es2017.sharedmemory.d.ts"
 ];
 
-const es2017LibrarySourceMap = es2017LibrarySource.map(function (source) {
+const es2017LibrarySourceMap = es2017LibrarySource.map(function(source) {
     return { target: "lib."
+ source, sources: ["header.d.ts", source] }; }); const hostsLibrarySources = ["dom.generated.d.ts", "webworker.importscripts.d.ts", "scripthost.d.ts"]; const librarySourceMap = [ - // Host library - { target: "lib.dom.d.ts", sources: ["header.d.ts", "dom.generated.d.ts"] }, - { target: "lib.dom.iterable.d.ts", sources: ["header.d.ts", "dom.iterable.d.ts"] }, - { target: "lib.webworker.d.ts", sources: ["header.d.ts", "webworker.generated.d.ts"] }, - { target: "lib.scripthost.d.ts", sources: ["header.d.ts", "scripthost.d.ts"] }, - - // JavaScript library - { target: "lib.es5.d.ts", sources: ["header.d.ts", "es5.d.ts"] }, - { target: "lib.es2015.d.ts", sources: ["header.d.ts", "es2015.d.ts"] }, - { target: "lib.es2016.d.ts", sources: ["header.d.ts", "es2016.d.ts"] }, - { target: "lib.es2017.d.ts", sources: ["header.d.ts", "es2017.d.ts"] }, - - // JavaScript + all host library - { target: "lib.d.ts", sources: ["header.d.ts", "es5.d.ts"].concat(hostsLibrarySources) }, - { target: "lib.es6.d.ts", sources: ["header.d.ts", "es5.d.ts"].concat(es2015LibrarySources, hostsLibrarySources, "dom.iterable.d.ts") } + // Host library + { target: "lib.dom.d.ts", sources: ["header.d.ts", "dom.generated.d.ts"] }, + { target: "lib.dom.iterable.d.ts", sources: ["header.d.ts", "dom.iterable.d.ts"] }, + { target: "lib.webworker.d.ts", sources: ["header.d.ts", "webworker.generated.d.ts"] }, + { target: "lib.scripthost.d.ts", sources: ["header.d.ts", "scripthost.d.ts"] }, + + // JavaScript library + { target: "lib.es5.d.ts", sources: ["header.d.ts", "es5.d.ts"] }, + { target: "lib.es2015.d.ts", sources: ["header.d.ts", "es2015.d.ts"] }, + { target: "lib.es2016.d.ts", sources: ["header.d.ts", "es2016.d.ts"] }, + { target: "lib.es2017.d.ts", sources: ["header.d.ts", "es2017.d.ts"] }, + + // JavaScript + all host library + { target: "lib.d.ts", sources: ["header.d.ts", "es5.d.ts"].concat(hostsLibrarySources) }, + { target: "lib.es6.d.ts", sources: ["header.d.ts", "es5.d.ts"].concat(es2015LibrarySources, hostsLibrarySources, "dom.iterable.d.ts") } ].concat(es2015LibrarySourceMap, es2016LibrarySourceMap, es2017LibrarySourceMap); -const libraryTargets = librarySourceMap.map(function (f) { +const libraryTargets = librarySourceMap.map(function(f) { return path.join(builtLocalDirectory, f.target); }); for (const i in libraryTargets) { const entry = librarySourceMap[i]; const target = libraryTargets[i]; - const sources = [copyright].concat(entry.sources.map(function (s) { + const sources = [copyright].concat(entry.sources.map(function(s) { return path.join(libraryDirectory, s); })); gulp.task(target, false, [], function() { @@ -392,7 +391,7 @@ gulp.task(servicesFile, false, ["lib", "generate-diagnostics"], () => { .pipe(sourcemaps.init()) .pipe(tsc(servicesProject)); const completedJs = js.pipe(prependCopyright()) - .pipe(sourcemaps.write(".")); + .pipe(sourcemaps.write(".")); const completedDts = dts.pipe(prependCopyright(/*outputCopyright*/true)) .pipe(insert.transform((contents, file) => { file.path = standaloneDefinitionsFile; @@ -435,22 +434,22 @@ const tsserverLibraryDefinitionFile = path.join(builtLocalDirectory, "tsserverli gulp.task(tsserverLibraryFile, false, [servicesFile], (done) => { const serverLibraryProject = tsc.createProject("src/server/tsconfig.library.json", getCompilerSettings({}, /*useBuiltCompiler*/ true)); - const {js, dts}: {js: NodeJS.ReadableStream, dts: NodeJS.ReadableStream} = serverLibraryProject.src() + const {js, dts}: { js: NodeJS.ReadableStream, dts: NodeJS.ReadableStream } = 
serverLibraryProject.src() .pipe(sourcemaps.init()) .pipe(newer(tsserverLibraryFile)) .pipe(tsc(serverLibraryProject)); return merge2([ js.pipe(prependCopyright()) - .pipe(sourcemaps.write(".")) - .pipe(gulp.dest(builtLocalDirectory)), + .pipe(sourcemaps.write(".")) + .pipe(gulp.dest(builtLocalDirectory)), dts.pipe(prependCopyright()) - .pipe(gulp.dest(builtLocalDirectory)) + .pipe(gulp.dest(builtLocalDirectory)) ]); }); gulp.task("lssl", "Builds language service server library", [tsserverLibraryFile]); -gulp.task("local", "Builds the full compiler and services", [builtLocalCompiler, servicesFile, serverFile, builtGeneratedDiagnosticMessagesJSON]); +gulp.task("local", "Builds the full compiler and services", [builtLocalCompiler, servicesFile, serverFile, builtGeneratedDiagnosticMessagesJSON, tsserverLibraryFile]); gulp.task("tsc", "Builds only the compiler", [builtLocalCompiler]); @@ -477,7 +476,7 @@ gulp.task(specMd, false, [word2mdJs], (done) => { const specMDFullPath = path.resolve(specMd); const cmd = "cscript //nologo " + word2mdJs + " \"" + specWordFullPath + "\" " + "\"" + specMDFullPath + "\""; console.log(cmd); - cp.exec(cmd, function () { + cp.exec(cmd, function() { done(); }); }); @@ -493,18 +492,18 @@ gulp.task("dontUseDebugMode", false, [], (done) => { useDebugMode = false; done( gulp.task("VerifyLKG", false, [], () => { const expectedFiles = [builtLocalCompiler, servicesFile, serverFile, nodePackageFile, nodeDefinitionsFile, standaloneDefinitionsFile, tsserverLibraryFile, tsserverLibraryDefinitionFile].concat(libraryTargets); - const missingFiles = expectedFiles.filter(function (f) { + const missingFiles = expectedFiles.filter(function(f) { return !fs.existsSync(f); }); if (missingFiles.length > 0) { throw new Error("Cannot replace the LKG unless all built targets are present in directory " + builtLocalDirectory + - ". The following files are missing:\n" + missingFiles.join("\n")); + ". 
The following files are missing:\n" + missingFiles.join("\n")); } // Copy all the targets into the LKG directory return gulp.src(expectedFiles).pipe(gulp.dest(LKGDirectory)); }); -gulp.task("LKGInternal", false, ["lib", "local", "lssl"]); +gulp.task("LKGInternal", false, ["lib", "local"]); gulp.task("LKG", "Makes a new LKG out of the built js files", ["clean", "dontUseDebugMode"], () => { return runSequence("LKGInternal", "VerifyLKG"); @@ -532,8 +531,6 @@ const localRwcBaseline = path.join(internalTests, "baselines/rwc/local"); const refRwcBaseline = path.join(internalTests, "baselines/rwc/reference"); const localTest262Baseline = path.join(internalTests, "baselines/test262/local"); -const refTest262Baseline = path.join(internalTests, "baselines/test262/reference"); - gulp.task("tests", "Builds the test infrastructure using the built compiler", [run]); gulp.task("tests-debug", "Builds the test sources and automation in debug mode", () => { @@ -628,7 +625,7 @@ function runConsoleTests(defaultReporter: string, runInParallel: boolean, done: } args.push(run); setNodeEnvToDevelopment(); - runTestsInParallel(taskConfigsFolder, run, { testTimeout: testTimeout, noColors: colors === " --no-colors " }, function (err) { + runTestsInParallel(taskConfigsFolder, run, { testTimeout: testTimeout, noColors: colors === " --no-colors " }, function(err) { // last worker clean everything and runs linter in case if there were no errors del(taskConfigsFolder).then(() => { if (!err) { @@ -680,7 +677,7 @@ gulp.task("runtests", ["build-rules", "tests"], (done) => { runConsoleTests("mocha-fivemat-progress-reporter", /*runInParallel*/ false, done); -}); + }); const nodeServerOutFile = "tests/webTestServer.js"; const nodeServerInFile = "tests/webTestServer.ts"; @@ -766,7 +763,7 @@ function writeTestConfigFile(tests: string, light: boolean, taskConfigsFolder?: } -gulp.task("runtests-browser", "Runs the tests using the built run.js file like 'gulp runtests'. Syntax is gulp runtests-browser. Additional optional parameters --tests=[regex], --port=, --browser=[chrome|IE]", ["browserify", nodeServerOutFile], (done) => { +gulp.task("runtests-browser", "Runs the tests using the built run.js file like 'gulp runtests'. Syntax is gulp runtests-browser. 
Additional optional parameters --tests=[regex], --browser=[chrome|IE]", ["browserify", nodeServerOutFile], (done) => { cleanTestDirs((err) => { if (err) { console.error(err); done(err); process.exit(1); } host = "node"; @@ -781,9 +778,6 @@ gulp.task("runtests-browser", "Runs the tests using the built run.js file like ' } const args = [nodeServerOutFile]; - if (cmdLineOptions["port"]) { - args.push(cmdLineOptions["port"]); - } if (cmdLineOptions["browser"]) { args.push(cmdLineOptions["browser"]); } @@ -815,32 +809,36 @@ gulp.task("diff-rwc", "Diffs the RWC baselines using the diff tool specified by exec(getDiffTool(), [refRwcBaseline, localRwcBaseline], done, done); }); - -gulp.task("baseline-accept", "Makes the most recent test results the new baseline, overwriting the old baseline", (done) => { - const softAccept = cmdLineOptions["soft"]; - if (!softAccept) { - del(refBaseline).then(() => { - fs.renameSync(localBaseline, refBaseline); - done(); - }, done); - } - else { - gulp.src(localBaseline) - .pipe(gulp.dest(refBaseline)) - .on("end", () => { - del(path.join(refBaseline, "local")).then(() => done(), done); - }); - } +gulp.task("baseline-accept", "Makes the most recent test results the new baseline, overwriting the old baseline", () => { + return baselineAccept(""); }); + +function baselineAccept(subfolder = "") { + return merge2(baselineCopy(subfolder), baselineDelete(subfolder)); +} + +function baselineCopy(subfolder = "") { + return gulp.src([`tests/baselines/local/${subfolder}/**`, `!tests/baselines/local/${subfolder}/**/*.delete`]) + .pipe(gulp.dest(refBaseline)); +} + +function baselineDelete(subfolder = "") { + return gulp.src(["tests/baselines/local/**/*.delete"]) + .pipe(insert.transform((content, fileObj) => { + const target = path.join(refBaseline, fileObj.relative.substr(0, fileObj.relative.length - ".delete".length)); + del.sync(target); + del.sync(fileObj.path); + return ""; + })); +} + gulp.task("baseline-accept-rwc", "Makes the most recent rwc test results the new baseline, overwriting the old baseline", () => { - return del(refRwcBaseline).then(() => { - fs.renameSync(localRwcBaseline, refRwcBaseline); - }); + return baselineAccept("rwc"); }); + + gulp.task("baseline-accept-test262", "Makes the most recent test262 test results the new baseline, overwriting the old baseline", () => { - return del(refTest262Baseline).then(() => { - fs.renameSync(localTest262Baseline, refTest262Baseline); - }); + return baselineAccept("test262"); }); @@ -932,26 +930,6 @@ gulp.task("build-rules", "Compiles tslint rules to js", () => { .pipe(gulp.dest(dest)); }); -function getLinterOptions() { - return { - configuration: require("./tslint.json"), - formatter: "prose", - formattersDirectory: undefined, - rulesDirectory: "built/local/tslint" - }; -} - -function lintFileContents(options, path, contents) { - const ll = new Linter(path, contents, options); - console.log("Linting '" + path + "'."); - return ll.lint(); -} - -function lintFile(options, path) { - const contents = fs.readFileSync(path, "utf8"); - return lintFileContents(options, path, contents); -} - const lintTargets = [ "Gulpfile.ts", "src/compiler/**/*.ts", @@ -960,29 +938,75 @@ const lintTargets = [ "src/server/**/*.ts", "scripts/tslint/**/*.ts", "src/services/**/*.ts", + "tests/*.ts", "tests/webhost/*.ts" // Note: does *not* descend recursively ]; +function sendNextFile(files: {path: string}[], child: cp.ChildProcess, callback: (failures: number) => void, failures: number) { + const file = files.pop(); + if (file) { + 
console.log(`Linting '${file.path}'.`); + child.send({ kind: "file", name: file.path }); + } + else { + child.send({ kind: "close" }); + callback(failures); + } +} + +function spawnLintWorker(files: {path: string}[], callback: (failures: number) => void) { + const child = cp.fork("./scripts/parallel-lint"); + let failures = 0; + child.on("message", function(data) { + switch (data.kind) { + case "result": + if (data.failures > 0) { + failures += data.failures; + console.log(data.output); + } + sendNextFile(files, child, callback, failures); + break; + case "error": + console.error(data.error); + failures++; + sendNextFile(files, child, callback, failures); + break; + } + }); + sendNextFile(files, child, callback, failures); +} gulp.task("lint", "Runs tslint on the compiler sources. Optional arguments are: --f[iles]=regex", ["build-rules"], () => { const fileMatcher = RegExp(cmdLineOptions["files"]); - const lintOptions = getLinterOptions(); - let failed = 0; - return gulp.src(lintTargets) - .pipe(insert.transform((contents, file) => { - if (!fileMatcher.test(file.path)) return contents; - const result = lintFile(lintOptions, file.path); - if (result.failureCount > 0) { - console.log(result.output); - failed += result.failureCount; + if (fold.isTravis()) console.log(fold.start("lint")); + + let files: {stat: fs.Stats, path: string}[] = []; + return gulp.src(lintTargets, { read: false }) + .pipe(through2.obj((chunk, enc, cb) => { + files.push(chunk); + cb(); + }, (cb) => { + files = files.filter(file => fileMatcher.test(file.path)).sort((filea, fileb) => filea.stat.size - fileb.stat.size); + const workerCount = (process.env.workerCount && +process.env.workerCount) || os.cpus().length; + for (let i = 0; i < workerCount; i++) { + spawnLintWorker(files, finished); } - return contents; // TODO (weswig): Automatically apply fixes? 
:3 - })) - .on("end", () => { - if (failed > 0) { - console.error("Linter errors."); - process.exit(1); + + let completed = 0; + let failures = 0; + function finished(fails) { + completed++; + failures += fails; + if (completed === workerCount) { + if (fold.isTravis()) console.log(fold.end("lint")); + if (failures > 0) { + throw new Error(`Linter errors: ${failures}`); + } + else { + cb(); + } + } } - }); + })); }); diff --git a/Jakefile.js b/Jakefile.js index f5012a9a7a446..6f940ca2ab382 100644 --- a/Jakefile.js +++ b/Jakefile.js @@ -4,13 +4,14 @@ var fs = require("fs"); var os = require("os"); var path = require("path"); var child_process = require("child_process"); -var Linter = require("tslint"); +var fold = require("travis-fold"); var runTestsInParallel = require("./scripts/mocha-parallel").runTestsInParallel; // Variables var compilerDirectory = "src/compiler/"; var servicesDirectory = "src/services/"; var serverDirectory = "src/server/"; +var typingsInstallerDirectory = "src/server/typingsInstaller"; var harnessDirectory = "src/harness/"; var libraryDirectory = "src/lib/"; var scriptsDirectory = "scripts/"; @@ -27,9 +28,31 @@ var thirdParty = "ThirdPartyNoticeText.txt"; // add node_modules to path so we don't need global modules, prefer the modules by adding them first var nodeModulesPathPrefix = path.resolve("./node_modules/.bin/") + path.delimiter; if (process.env.path !== undefined) { - process.env.path = nodeModulesPathPrefix + process.env.path; + process.env.path = nodeModulesPathPrefix + process.env.path; } else if (process.env.PATH !== undefined) { - process.env.PATH = nodeModulesPathPrefix + process.env.PATH; + process.env.PATH = nodeModulesPathPrefix + process.env.PATH; +} + +function toNs(diff) { + return diff[0] * 1e9 + diff[1]; +} + +function mark() { + if (!fold.isTravis()) return; + var stamp = process.hrtime(); + var id = Math.floor(Math.random() * 0xFFFFFFFF).toString(16); + console.log("travis_time:start:" + id + "\r"); + return { + stamp: stamp, + id: id + }; +} + +function measure(marker) { + if (!fold.isTravis()) return; + var diff = process.hrtime(marker.stamp); + var total = [marker.stamp[0] + diff[0], marker.stamp[1] + diff[1]]; + console.log("travis_time:end:" + marker.id + ":start=" + toNs(marker.stamp) + ",finish=" + toNs(total) + ",duration=" + toNs(diff) + "\r"); } var compilerSources = [ @@ -102,8 +125,10 @@ var servicesSources = [ })); var serverCoreSources = [ + "types.d.ts", "utilities.ts", "scriptVersionCache.ts", + "typingsCache.ts", "scriptInfo.ts", "lsHost.ts", "project.ts", @@ -121,6 +146,14 @@ var cancellationTokenSources = [ return path.join(serverDirectory, f); }); +var typingsInstallerSources = [ + "../types.d.ts", + "typingsInstaller.ts", + "nodeTypingsInstaller.ts" +].map(function (f) { + return path.join(typingsInstallerDirectory, f); +}); + var serverSources = serverCoreSources.concat(servicesSources); var languageServiceLibrarySources = [ @@ -176,7 +209,8 @@ var harnessSources = harnessCoreSources.concat([ "convertCompilerOptionsFromJson.ts", "convertTypingOptionsFromJson.ts", "tsserverProjectSystem.ts", - "matchFiles.ts" + "matchFiles.ts", + "initializeTSConfig.ts", ].map(function (f) { return path.join(unittestsDirectory, f); })).concat([ @@ -186,6 +220,7 @@ var harnessSources = harnessCoreSources.concat([ "scriptInfo.ts", "lsHost.ts", "project.ts", + "typingsCache.ts", "editorServices.ts", "protocol.d.ts", "session.ts", @@ -205,11 +240,11 @@ var es2015LibrarySources = [ "es2015.symbol.wellknown.d.ts" ]; -var es2015LibrarySourceMap = 
es2015LibrarySources.map(function(source) { - return { target: "lib." + source, sources: ["header.d.ts", source] }; +var es2015LibrarySourceMap = es2015LibrarySources.map(function (source) { + return { target: "lib." + source, sources: ["header.d.ts", source] }; }); -var es2016LibrarySource = [ "es2016.array.include.d.ts" ]; +var es2016LibrarySource = ["es2016.array.include.d.ts"]; var es2016LibrarySourceMap = es2016LibrarySource.map(function (source) { return { target: "lib." + source, sources: ["header.d.ts", source] }; @@ -227,21 +262,21 @@ var es2017LibrarySourceMap = es2017LibrarySource.map(function (source) { var hostsLibrarySources = ["dom.generated.d.ts", "webworker.importscripts.d.ts", "scripthost.d.ts"]; var librarySourceMap = [ - // Host library - { target: "lib.dom.d.ts", sources: ["header.d.ts", "dom.generated.d.ts"] }, - { target: "lib.dom.iterable.d.ts", sources: ["header.d.ts", "dom.iterable.d.ts"] }, - { target: "lib.webworker.d.ts", sources: ["header.d.ts", "webworker.generated.d.ts"] }, - { target: "lib.scripthost.d.ts", sources: ["header.d.ts", "scripthost.d.ts"] }, - - // JavaScript library - { target: "lib.es5.d.ts", sources: ["header.d.ts", "es5.d.ts"] }, - { target: "lib.es2015.d.ts", sources: ["header.d.ts", "es2015.d.ts"] }, - { target: "lib.es2016.d.ts", sources: ["header.d.ts", "es2016.d.ts"] }, - { target: "lib.es2017.d.ts", sources: ["header.d.ts", "es2017.d.ts"] }, - - // JavaScript + all host library - { target: "lib.d.ts", sources: ["header.d.ts", "es5.d.ts"].concat(hostsLibrarySources) }, - { target: "lib.es6.d.ts", sources: ["header.d.ts", "es5.d.ts"].concat(es2015LibrarySources, hostsLibrarySources, "dom.iterable.d.ts") } + // Host library + { target: "lib.dom.d.ts", sources: ["header.d.ts", "dom.generated.d.ts"] }, + { target: "lib.dom.iterable.d.ts", sources: ["header.d.ts", "dom.iterable.d.ts"] }, + { target: "lib.webworker.d.ts", sources: ["header.d.ts", "webworker.generated.d.ts"] }, + { target: "lib.scripthost.d.ts", sources: ["header.d.ts", "scripthost.d.ts"] }, + + // JavaScript library + { target: "lib.es5.d.ts", sources: ["header.d.ts", "es5.d.ts"] }, + { target: "lib.es2015.d.ts", sources: ["header.d.ts", "es2015.d.ts"] }, + { target: "lib.es2016.d.ts", sources: ["header.d.ts", "es2016.d.ts"] }, + { target: "lib.es2017.d.ts", sources: ["header.d.ts", "es2017.d.ts"] }, + + // JavaScript + all host library + { target: "lib.d.ts", sources: ["header.d.ts", "es5.d.ts"].concat(hostsLibrarySources) }, + { target: "lib.es6.d.ts", sources: ["header.d.ts", "es5.d.ts"].concat(es2015LibrarySources, hostsLibrarySources, "dom.iterable.d.ts") } ].concat(es2015LibrarySourceMap, es2016LibrarySourceMap, es2017LibrarySourceMap); var libraryTargets = librarySourceMap.map(function (f) { @@ -257,7 +292,7 @@ function prependFile(prefixFile, destinationFile) { fail(destinationFile + " failed to be created!"); } var temp = "temptemp"; - jake.cpR(prefixFile, temp, {silent: true}); + jake.cpR(prefixFile, temp, { silent: true }); fs.appendFileSync(temp, fs.readFileSync(destinationFile)); fs.renameSync(temp, destinationFile); } @@ -269,11 +304,11 @@ function concatenateFiles(destinationFile, sourceFiles) { if (!fs.existsSync(sourceFiles[0])) { fail(sourceFiles[0] + " does not exist!"); } - jake.cpR(sourceFiles[0], temp, {silent: true}); + jake.cpR(sourceFiles[0], temp, { silent: true }); // append all files in sequence for (var i = 1; i < sourceFiles.length; i++) { if (!fs.existsSync(sourceFiles[i])) { - fail(sourceFiles[i] + " does not exist!"); + fail(sourceFiles[i] + " 
does not exist!"); } fs.appendFileSync(temp, fs.readFileSync(sourceFiles[i])); } @@ -307,10 +342,11 @@ var builtLocalCompiler = path.join(builtLocalDirectory, compilerFilename); * @param callback: a function to execute after the compilation process ends */ function compileFile(outFile, sources, prereqs, prefixes, useBuiltCompiler, opts, callback) { - file(outFile, prereqs, function() { + file(outFile, prereqs, function () { + var startCompileTime = mark(); opts = opts || {}; var compilerPath = useBuiltCompiler ? builtLocalCompiler : LKGCompiler; - var options = "--noImplicitAny --noImplicitThis --noEmitOnError --types " + var options = "--noImplicitAny --noImplicitThis --noEmitOnError --types " if (opts.types) { options += opts.types.join(","); } @@ -340,7 +376,7 @@ function compileFile(outFile, sources, prereqs, prefixes, useBuiltCompiler, opts options += " --module commonjs"; } - if(opts.noResolve) { + if (opts.noResolve) { options += " --noResolve"; } @@ -367,13 +403,13 @@ function compileFile(outFile, sources, prereqs, prefixes, useBuiltCompiler, opts var ex = jake.createExec([cmd]); // Add listeners for output and error - ex.addListener("stdout", function(output) { + ex.addListener("stdout", function (output) { process.stdout.write(output); }); - ex.addListener("stderr", function(error) { + ex.addListener("stderr", function (error) { process.stderr.write(error); }); - ex.addListener("cmdEnd", function() { + ex.addListener("cmdEnd", function () { if (!useDebugMode && prefixes && fs.existsSync(outFile)) { for (var i in prefixes) { prependFile(prefixes[i], outFile); @@ -384,14 +420,16 @@ function compileFile(outFile, sources, prereqs, prefixes, useBuiltCompiler, opts callback(); } + measure(startCompileTime); complete(); }); - ex.addListener("error", function() { + ex.addListener("error", function () { fs.unlinkSync(outFile); fail("Compilation of " + outFile + " unsuccessful"); + measure(startCompileTime); }); ex.run(); - }, {async: true}); + }, { async: true }); } // Prerequisite task for built directory and library typings @@ -404,7 +442,7 @@ for (var i in libraryTargets) { var sources = [copyright].concat(entry.sources.map(function (s) { return path.join(libraryDirectory, s); })); - file(target, [builtLocalDirectory].concat(sources), function() { + file(target, [builtLocalDirectory].concat(sources), function () { concatenateFiles(target, sources); }); })(i); @@ -427,30 +465,30 @@ file(processDiagnosticMessagesTs); // processDiagnosticMessages script compileFile(processDiagnosticMessagesJs, - [processDiagnosticMessagesTs], - [processDiagnosticMessagesTs], - [], + [processDiagnosticMessagesTs], + [processDiagnosticMessagesTs], + [], /*useBuiltCompiler*/ false); // The generated diagnostics map; built for the compiler and for the 'generate-diagnostics' task file(diagnosticInfoMapTs, [processDiagnosticMessagesJs, diagnosticMessagesJson], function () { - var cmd = host + " " + processDiagnosticMessagesJs + " " + diagnosticMessagesJson; + var cmd = host + " " + processDiagnosticMessagesJs + " " + diagnosticMessagesJson; console.log(cmd); var ex = jake.createExec([cmd]); // Add listeners for output and error - ex.addListener("stdout", function(output) { + ex.addListener("stdout", function (output) { process.stdout.write(output); }); - ex.addListener("stderr", function(error) { + ex.addListener("stderr", function (error) { process.stderr.write(error); }); - ex.addListener("cmdEnd", function() { + ex.addListener("cmdEnd", function () { complete(); }); ex.run(); -}, {async: true}); +}, { async: 
true }); -file(builtGeneratedDiagnosticMessagesJSON,[generatedDiagnosticMessagesJSON], function() { +file(builtGeneratedDiagnosticMessagesJSON, [generatedDiagnosticMessagesJSON], function () { if (fs.existsSync(builtLocalDirectory)) { jake.cpR(generatedDiagnosticMessagesJSON, builtGeneratedDiagnosticMessagesJSON); } @@ -468,17 +506,17 @@ var programTs = path.join(compilerDirectory, "program.ts"); file(configureNightlyTs); compileFile(/*outfile*/configureNightlyJs, - /*sources*/ [configureNightlyTs], - /*prereqs*/ [configureNightlyTs], - /*prefixes*/ [], + /*sources*/[configureNightlyTs], + /*prereqs*/[configureNightlyTs], + /*prefixes*/[], /*useBuiltCompiler*/ false, - { noOutFile: false, generateDeclarations: false, keepComments: false, noResolve: false, stripInternal: false }); + { noOutFile: false, generateDeclarations: false, keepComments: false, noResolve: false, stripInternal: false }); -task("setDebugMode", function() { +task("setDebugMode", function () { useDebugMode = true; }); -task("configure-nightly", [configureNightlyJs], function() { +task("configure-nightly", [configureNightlyJs], function () { var cmd = host + " " + configureNightlyJs + " " + packageJson + " " + programTs; console.log(cmd); exec(cmd); @@ -519,66 +557,71 @@ var nodePackageFile = path.join(builtLocalDirectory, "typescript.js"); var nodeDefinitionsFile = path.join(builtLocalDirectory, "typescript.d.ts"); var nodeStandaloneDefinitionsFile = path.join(builtLocalDirectory, "typescript_standalone.d.ts"); -compileFile(servicesFile, servicesSources,[builtLocalDirectory, copyright].concat(servicesSources), - /*prefixes*/ [copyright], +compileFile(servicesFile, servicesSources, [builtLocalDirectory, copyright].concat(servicesSources), + /*prefixes*/[copyright], /*useBuiltCompiler*/ true, - /*opts*/ { noOutFile: false, - generateDeclarations: true, - preserveConstEnums: true, - keepComments: true, - noResolve: false, - stripInternal: true - }, + /*opts*/ { + noOutFile: false, + generateDeclarations: true, + preserveConstEnums: true, + keepComments: true, + noResolve: false, + stripInternal: true + }, /*callback*/ function () { - jake.cpR(servicesFile, nodePackageFile, {silent: true}); + jake.cpR(servicesFile, nodePackageFile, { silent: true }); - prependFile(copyright, standaloneDefinitionsFile); + prependFile(copyright, standaloneDefinitionsFile); - // Stanalone/web definition file using global 'ts' namespace - jake.cpR(standaloneDefinitionsFile, nodeDefinitionsFile, {silent: true}); - var definitionFileContents = fs.readFileSync(nodeDefinitionsFile).toString(); - definitionFileContents = definitionFileContents.replace(/^(\s*)(export )?const enum (\S+) {(\s*)$/gm, '$1$2enum $3 {$4'); - fs.writeFileSync(standaloneDefinitionsFile, definitionFileContents); + // Stanalone/web definition file using global 'ts' namespace + jake.cpR(standaloneDefinitionsFile, nodeDefinitionsFile, { silent: true }); + var definitionFileContents = fs.readFileSync(nodeDefinitionsFile).toString(); + definitionFileContents = definitionFileContents.replace(/^(\s*)(export )?const enum (\S+) {(\s*)$/gm, '$1$2enum $3 {$4'); + fs.writeFileSync(standaloneDefinitionsFile, definitionFileContents); - // Official node package definition file, pointed to by 'typings' in package.json - // Created by appending 'export = ts;' at the end of the standalone file to turn it into an external module - var nodeDefinitionsFileContents = definitionFileContents + "\r\nexport = ts;"; - fs.writeFileSync(nodeDefinitionsFile, nodeDefinitionsFileContents); + // Official node 
package definition file, pointed to by 'typings' in package.json + // Created by appending 'export = ts;' at the end of the standalone file to turn it into an external module + var nodeDefinitionsFileContents = definitionFileContents + "\r\nexport = ts;"; + fs.writeFileSync(nodeDefinitionsFile, nodeDefinitionsFileContents); - // Node package definition file to be distributed without the package. Created by replacing - // 'ts' namespace with '"typescript"' as a module. - var nodeStandaloneDefinitionsFileContents = definitionFileContents.replace(/declare (namespace|module) ts/g, 'declare module "typescript"'); - fs.writeFileSync(nodeStandaloneDefinitionsFile, nodeStandaloneDefinitionsFileContents); - }); + // Node package definition file to be distributed without the package. Created by replacing + // 'ts' namespace with '"typescript"' as a module. + var nodeStandaloneDefinitionsFileContents = definitionFileContents.replace(/declare (namespace|module) ts/g, 'declare module "typescript"'); + fs.writeFileSync(nodeStandaloneDefinitionsFile, nodeStandaloneDefinitionsFileContents); + }); compileFile( servicesFileInBrowserTest, servicesSources, [builtLocalDirectory, copyright].concat(servicesSources), - /*prefixes*/ [copyright], + /*prefixes*/[copyright], /*useBuiltCompiler*/ true, - { noOutFile: false, - generateDeclarations: true, - preserveConstEnums: true, - keepComments: true, - noResolve: false, - stripInternal: true, - noMapRoot: true, - inlineSourceMap: true - }); + { + noOutFile: false, + generateDeclarations: true, + preserveConstEnums: true, + keepComments: true, + noResolve: false, + stripInternal: true, + noMapRoot: true, + inlineSourceMap: true + }); var cancellationTokenFile = path.join(builtLocalDirectory, "cancellationToken.js"); compileFile(cancellationTokenFile, cancellationTokenSources, [builtLocalDirectory].concat(cancellationTokenSources), /*prefixes*/ [copyright], /*useBuiltCompiler*/ true, { outDir: builtLocalDirectory, noOutFile: true }); +var typingsInstallerFile = path.join(builtLocalDirectory, "typingsInstaller.js"); +compileFile(typingsInstallerFile, typingsInstallerSources, [builtLocalDirectory].concat(typingsInstallerSources), /*prefixes*/ [copyright], /*useBuiltCompiler*/ true, { outDir: builtLocalDirectory, noOutFile: false }); + var serverFile = path.join(builtLocalDirectory, "tsserver.js"); -compileFile(serverFile, serverSources,[builtLocalDirectory, copyright, cancellationTokenFile].concat(serverSources), /*prefixes*/ [copyright], /*useBuiltCompiler*/ true, { types: ["node"] }); +compileFile(serverFile, serverSources, [builtLocalDirectory, copyright, cancellationTokenFile, typingsInstallerFile].concat(serverSources), /*prefixes*/ [copyright], /*useBuiltCompiler*/ true, { types: ["node"] }); var tsserverLibraryFile = path.join(builtLocalDirectory, "tsserverlibrary.js"); var tsserverLibraryDefinitionFile = path.join(builtLocalDirectory, "tsserverlibrary.d.ts"); compileFile( tsserverLibraryFile, languageServiceLibrarySources, - [builtLocalDirectory, copyright].concat(languageServiceLibrarySources), - /*prefixes*/ [copyright], + [builtLocalDirectory, copyright, builtLocalCompiler].concat(languageServiceLibrarySources).concat(libraryTargets), + /*prefixes*/[copyright], /*useBuiltCompiler*/ true, { noOutFile: false, generateDeclarations: true }); @@ -586,9 +629,19 @@ compileFile( desc("Builds language service server library"); task("lssl", [tsserverLibraryFile, tsserverLibraryDefinitionFile]); +desc("Emit the start of the build fold"); +task("build-fold-start", [], 
function () { + if (fold.isTravis()) console.log(fold.start("build")); +}); + +desc("Emit the end of the build fold"); +task("build-fold-end", [], function () { + if (fold.isTravis()) console.log(fold.end("build")); +}); + // Local target to build the compiler and services desc("Builds the full compiler and services"); -task("local", ["generate-diagnostics", "lib", tscFile, servicesFile, nodeDefinitionsFile, serverFile, builtGeneratedDiagnosticMessagesJSON]); +task("local", ["build-fold-start", "generate-diagnostics", "lib", tscFile, servicesFile, nodeDefinitionsFile, serverFile, builtGeneratedDiagnosticMessagesJSON, "lssl", "build-fold-end"]); // Local target to build only tsc.js desc("Builds only the compiler"); @@ -596,7 +649,7 @@ task("tsc", ["generate-diagnostics", "lib", tscFile]); // Local target to build the compiler and services desc("Sets release mode flag"); -task("release", function() { +task("release", function () { useDebugMode = false; }); @@ -606,7 +659,7 @@ task("default", ["local"]); // Cleans the built directory desc("Cleans the compiler output, declare files, and tests"); -task("clean", function() { +task("clean", function () { jake.rmRf(builtDirectory); }); @@ -620,9 +673,9 @@ file(word2mdTs); // word2md script compileFile(word2mdJs, - [word2mdTs], - [word2mdTs], - [], + [word2mdTs], + [word2mdTs], + [], /*useBuiltCompiler*/ false); // The generated spec.md; built for the 'generate-spec' task @@ -634,7 +687,7 @@ file(specMd, [word2mdJs, specWord], function () { child_process.exec(cmd, function () { complete(); }); -}, {async: true}); +}, { async: true }); desc("Generates a Markdown version of the Language Specification"); @@ -643,14 +696,14 @@ task("generate-spec", [specMd]); // Makes a new LKG. This target does not build anything, but errors if not all the outputs are present in the built/local directory desc("Makes a new LKG out of the built js files"); -task("LKG", ["clean", "release", "local", "lssl"].concat(libraryTargets), function() { +task("LKG", ["clean", "release", "local"].concat(libraryTargets), function () { var expectedFiles = [tscFile, servicesFile, serverFile, nodePackageFile, nodeDefinitionsFile, standaloneDefinitionsFile, tsserverLibraryFile, tsserverLibraryDefinitionFile].concat(libraryTargets); var missingFiles = expectedFiles.filter(function (f) { return !fs.existsSync(f); }); if (missingFiles.length > 0) { fail("Cannot replace the LKG unless all built targets are present in directory " + builtLocalDirectory + - ". The following files are missing:\n" + missingFiles.join("\n")); + ". 
The following files are missing:\n" + missingFiles.join("\n")); } // Copy all the targets into the LKG directory jake.mkdirP(LKGDirectory); @@ -671,8 +724,8 @@ var run = path.join(builtLocalDirectory, "run.js"); compileFile( /*outFile*/ run, /*source*/ harnessSources, - /*prereqs*/ [builtLocalDirectory, tscFile].concat(libraryTargets).concat(servicesSources).concat(harnessSources), - /*prefixes*/ [], + /*prereqs*/[builtLocalDirectory, tscFile].concat(libraryTargets).concat(servicesSources).concat(harnessSources), + /*prefixes*/[], /*useBuiltCompiler:*/ true, /*opts*/ { inlineSourceMap: true, types: ["node", "mocha", "chai"] }); @@ -691,22 +744,22 @@ desc("Builds the test infrastructure using the built compiler"); task("tests", ["local", run].concat(libraryTargets)); function exec(cmd, completeHandler, errorHandler) { - var ex = jake.createExec([cmd], {windowsVerbatimArguments: true}); + var ex = jake.createExec([cmd], { windowsVerbatimArguments: true }); // Add listeners for output and error - ex.addListener("stdout", function(output) { + ex.addListener("stdout", function (output) { process.stdout.write(output); }); - ex.addListener("stderr", function(error) { + ex.addListener("stderr", function (error) { process.stderr.write(error); }); - ex.addListener("cmdEnd", function() { + ex.addListener("cmdEnd", function () { if (completeHandler) { completeHandler(); } complete(); }); - ex.addListener("error", function(e, status) { - if(errorHandler) { + ex.addListener("error", function (e, status) { + if (errorHandler) { errorHandler(e, status); } else { fail("Process exited with code " + status); @@ -750,7 +803,7 @@ function runConsoleTests(defaultReporter, runInParallel) { tests = process.env.test || process.env.tests || process.env.t; var light = process.env.light || false; var testConfigFile = 'test.config'; - if(fs.existsSync(testConfigFile)) { + if (fs.existsSync(testConfigFile)) { fs.unlinkSync(testConfigFile); } var workerCount, taskConfigsFolder; @@ -783,7 +836,8 @@ function runConsoleTests(defaultReporter, runInParallel) { // timeout normally isn't necessary but Travis-CI has been timing out on compiler baselines occasionally // default timeout is 2sec which really should be enough, but maybe we just need a small amount longer - if(!runInParallel) { + if (!runInParallel) { + var startTime = mark(); tests = tests ? ' -g "' + tests + '"' : ''; var cmd = "mocha" + (debug ? 
" --debug-brk" : "") + " -R " + reporter + tests + colors + bail + ' -t ' + testTimeout + ' ' + run; console.log(cmd); @@ -792,10 +846,12 @@ function runConsoleTests(defaultReporter, runInParallel) { process.env.NODE_ENV = "development"; exec(cmd, function () { process.env.NODE_ENV = savedNodeEnv; + measure(startTime); runLinter(); finish(); - }, function(e, status) { + }, function (e, status) { process.env.NODE_ENV = savedNodeEnv; + measure(startTime); finish(status); }); @@ -803,9 +859,10 @@ function runConsoleTests(defaultReporter, runInParallel) { else { var savedNodeEnv = process.env.NODE_ENV; process.env.NODE_ENV = "development"; + var startTime = mark(); runTestsInParallel(taskConfigsFolder, run, { testTimeout: testTimeout, noColors: colors === " --no-colors " }, function (err) { process.env.NODE_ENV = savedNodeEnv; - + measure(startTime); // last worker clean everything and runs linter in case if there were no errors deleteTemporaryProjectOutput(); jake.rmRf(taskConfigsFolder); @@ -846,14 +903,14 @@ function runConsoleTests(defaultReporter, runInParallel) { var testTimeout = 20000; desc("Runs all the tests in parallel using the built run.js file. Optional arguments are: t[ests]=category1|category2|... d[ebug]=true."); -task("runtests-parallel", ["build-rules", "tests", builtLocalDirectory], function() { +task("runtests-parallel", ["build-rules", "tests", builtLocalDirectory], function () { runConsoleTests('min', /*runInParallel*/ true); -}, {async: true}); +}, { async: true }); desc("Runs the tests using the built run.js file. Optional arguments are: t[ests]=regex r[eporter]=[list|spec|json|] d[ebug]=true color[s]=false lint=true bail=false."); -task("runtests", ["build-rules", "tests", builtLocalDirectory], function() { +task("runtests", ["build-rules", "tests", builtLocalDirectory], function () { runConsoleTests('mocha-fivemat-progress-reporter', /*runInParallel*/ false); -}, {async: true}); +}, { async: true }); desc("Generates code coverage data via instanbul"); task("generate-code-coverage", ["tests", builtLocalDirectory], function () { @@ -868,32 +925,31 @@ var nodeServerInFile = "tests/webTestServer.ts"; compileFile(nodeServerOutFile, [nodeServerInFile], [builtLocalDirectory, tscFile], [], /*useBuiltCompiler:*/ true, { noOutFile: true }); desc("Runs browserify on run.js to produce a file suitable for running tests in the browser"); -task("browserify", ["tests", builtLocalDirectory, nodeServerOutFile], function() { +task("browserify", ["tests", builtLocalDirectory, nodeServerOutFile], function () { var cmd = 'browserify built/local/run.js -d -o built/local/bundle.js'; exec(cmd); -}, {async: true}); +}, { async: true }); -desc("Runs the tests using the built run.js file like 'jake runtests'. Syntax is jake runtests-browser. Additional optional parameters tests=[regex], port=, browser=[chrome|IE]"); -task("runtests-browser", ["tests", "browserify", builtLocalDirectory, servicesFileInBrowserTest], function() { +desc("Runs the tests using the built run.js file like 'jake runtests'. Syntax is jake runtests-browser. 
Additional optional parameters tests=[regex], browser=[chrome|IE]"); +task("runtests-browser", ["tests", "browserify", builtLocalDirectory, servicesFileInBrowserTest], function () { cleanTestDirs(); host = "node"; - port = process.env.port || process.env.p || '8888'; browser = process.env.browser || process.env.b || "IE"; tests = process.env.test || process.env.tests || process.env.t; var light = process.env.light || false; var testConfigFile = 'test.config'; - if(fs.existsSync(testConfigFile)) { + if (fs.existsSync(testConfigFile)) { fs.unlinkSync(testConfigFile); } - if(tests || light) { + if (tests || light) { writeTestConfigFile(tests, light); } tests = tests ? tests : ''; - var cmd = host + " tests/webTestServer.js " + port + " " + browser + " " + JSON.stringify(tests); + var cmd = host + " tests/webTestServer.js " + browser + " " + JSON.stringify(tests); console.log(cmd); exec(cmd); -}, {async: true}); +}, { async: true }); function getDiffTool() { var program = process.env['DIFF']; @@ -906,17 +962,17 @@ function getDiffTool() { // Baseline Diff desc("Diffs the compiler baselines using the diff tool specified by the 'DIFF' environment variable"); task('diff', function () { - var cmd = '"' + getDiffTool() + '" ' + refBaseline + ' ' + localBaseline; + var cmd = '"' + getDiffTool() + '" ' + refBaseline + ' ' + localBaseline; console.log(cmd); exec(cmd); -}, {async: true}); +}, { async: true }); desc("Diffs the RWC baselines using the diff tool specified by the 'DIFF' environment variable"); task('diff-rwc', function () { - var cmd = '"' + getDiffTool() + '" ' + refRwcBaseline + ' ' + localRwcBaseline; + var cmd = '"' + getDiffTool() + '" ' + refRwcBaseline + ' ' + localRwcBaseline; console.log(cmd); exec(cmd); -}, {async: true}); +}, { async: true }); desc("Builds the test sources and automation in debug mode"); task("tests-debug", ["setDebugMode", "tests"]); @@ -924,30 +980,39 @@ task("tests-debug", ["setDebugMode", "tests"]); // Makes the test results the new baseline desc("Makes the most recent test results the new baseline, overwriting the old baseline"); -task("baseline-accept", function(hardOrSoft) { - if (!hardOrSoft || hardOrSoft === "hard") { - jake.rmRf(refBaseline); - fs.renameSync(localBaseline, refBaseline); - } - else if (hardOrSoft === "soft") { - var files = jake.readdirR(localBaseline); - for (var i in files) { - jake.cpR(files[i], refBaseline); +task("baseline-accept", function () { + acceptBaseline(""); +}); + +function acceptBaseline(containerFolder) { + var sourceFolder = path.join(localBaseline, containerFolder); + var targetFolder = path.join(refBaseline, containerFolder); + console.log('Accept baselines from ' + sourceFolder + ' to ' + targetFolder); + var files = fs.readdirSync(sourceFolder); + var deleteEnding = '.delete'; + for (var i in files) { + var filename = files[i]; + if (filename.substr(filename.length - deleteEnding.length) === deleteEnding) { + filename = filename.substr(0, filename.length - deleteEnding.length); + fs.unlinkSync(path.join(targetFolder, filename)); + } else { + var target = path.join(targetFolder, filename); + if (fs.existsSync(target)) { + fs.unlinkSync(target); + } + fs.renameSync(path.join(sourceFolder, filename), target); } - jake.rmRf(path.join(refBaseline, "local")); } -}); +} desc("Makes the most recent rwc test results the new baseline, overwriting the old baseline"); -task("baseline-accept-rwc", function() { - jake.rmRf(refRwcBaseline); - fs.renameSync(localRwcBaseline, refRwcBaseline); +task("baseline-accept-rwc", function 
() { + acceptBaseline("rwc"); }); desc("Makes the most recent test262 test results the new baseline, overwriting the old baseline"); -task("baseline-accept-test262", function() { - jake.rmRf(refTest262Baseline); - fs.renameSync(localTest262Baseline, refTest262Baseline); +task("baseline-accept-test262", function () { + acceptBaseline("test262"); }); @@ -957,8 +1022,8 @@ var webhostJsPath = "tests/webhost/webtsc.js"; compileFile(webhostJsPath, [webhostPath], [tscFile, webhostPath].concat(libraryTargets), [], /*useBuiltCompiler*/true); desc("Builds the tsc web host"); -task("webhost", [webhostJsPath], function() { - jake.cpR(path.join(builtLocalDirectory, "lib.d.ts"), "tests/webhost/", {silent: true}); +task("webhost", [webhostJsPath], function () { + jake.cpR(path.join(builtLocalDirectory, "lib.d.ts"), "tests/webhost/", { silent: true }); }); // Perf compiler @@ -971,38 +1036,38 @@ task("perftsc", [perftscJsPath]); // Instrumented compiler var loggedIOpath = harnessDirectory + 'loggedIO.ts'; var loggedIOJsPath = builtLocalDirectory + 'loggedIO.js'; -file(loggedIOJsPath, [builtLocalDirectory, loggedIOpath], function() { +file(loggedIOJsPath, [builtLocalDirectory, loggedIOpath], function () { var temp = builtLocalDirectory + 'temp'; jake.mkdirP(temp); var options = "--outdir " + temp + ' ' + loggedIOpath; var cmd = host + " " + LKGDirectory + compilerFilename + " " + options + " "; console.log(cmd + "\n"); var ex = jake.createExec([cmd]); - ex.addListener("cmdEnd", function() { + ex.addListener("cmdEnd", function () { fs.renameSync(temp + '/harness/loggedIO.js', loggedIOJsPath); jake.rmRf(temp); complete(); }); ex.run(); -}, {async: true}); +}, { async: true }); var instrumenterPath = harnessDirectory + 'instrumenter.ts'; var instrumenterJsPath = builtLocalDirectory + 'instrumenter.js'; compileFile(instrumenterJsPath, [instrumenterPath], [tscFile, instrumenterPath].concat(libraryTargets), [], /*useBuiltCompiler*/ true); desc("Builds an instrumented tsc.js"); -task('tsc-instrumented', [loggedIOJsPath, instrumenterJsPath, tscFile], function() { +task('tsc-instrumented', [loggedIOJsPath, instrumenterJsPath, tscFile], function () { var cmd = host + ' ' + instrumenterJsPath + ' record iocapture ' + builtLocalDirectory + compilerFilename; console.log(cmd); var ex = jake.createExec([cmd]); - ex.addListener("cmdEnd", function() { + ex.addListener("cmdEnd", function () { complete(); }); ex.run(); }, { async: true }); desc("Updates the sublime plugin's tsserver"); -task("update-sublime", ["local", serverFile], function() { +task("update-sublime", ["local", serverFile], function () { jake.cpR(serverFile, "../TypeScript-Sublime-Plugin/tsserver/"); jake.cpR(serverFile + ".map", "../TypeScript-Sublime-Plugin/tsserver/"); }); @@ -1018,123 +1083,113 @@ var tslintRules = [ "objectLiteralSurroundingSpaceRule", "noTypeAssertionWhitespaceRule" ]; -var tslintRulesFiles = tslintRules.map(function(p) { +var tslintRulesFiles = tslintRules.map(function (p) { return path.join(tslintRuleDir, p + ".ts"); }); -var tslintRulesOutFiles = tslintRules.map(function(p) { +var tslintRulesOutFiles = tslintRules.map(function (p) { return path.join(builtLocalDirectory, "tslint", p + ".js"); }); desc("Compiles tslint rules to js"); -task("build-rules", tslintRulesOutFiles); -tslintRulesFiles.forEach(function(ruleFile, i) { +task("build-rules", ["build-rules-start"].concat(tslintRulesOutFiles).concat(["build-rules-end"])); +tslintRulesFiles.forEach(function (ruleFile, i) { compileFile(tslintRulesOutFiles[i], [ruleFile], [ruleFile], 
[], /*useBuiltCompiler*/ false, - { noOutFile: true, generateDeclarations: false, outDir: path.join(builtLocalDirectory, "tslint")}); + { noOutFile: true, generateDeclarations: false, outDir: path.join(builtLocalDirectory, "tslint") }); }); -function getLinterOptions() { - return { - configuration: require("./tslint.json"), - formatter: "prose", - formattersDirectory: undefined, - rulesDirectory: "built/local/tslint" - }; -} - -function lintFileContents(options, path, contents) { - var ll = new Linter(path, contents, options); - console.log("Linting '" + path + "'."); - return ll.lint(); -} - -function lintFile(options, path) { - var contents = fs.readFileSync(path, "utf8"); - return lintFileContents(options, path, contents); -} +desc("Emit the start of the build-rules fold"); +task("build-rules-start", [], function () { + if (fold.isTravis()) console.log(fold.start("build-rules")); +}); -function lintFileAsync(options, path, cb) { - fs.readFile(path, "utf8", function(err, contents) { - if (err) { - return cb(err); - } - var result = lintFileContents(options, path, contents); - cb(undefined, result); - }); -} +desc("Emit the end of the build-rules fold"); +task("build-rules-end", [], function () { + if (fold.isTravis()) console.log(fold.end("build-rules")); +}); var lintTargets = compilerSources .concat(harnessSources) // Other harness sources - .concat(["instrumenter.ts"].map(function(f) { return path.join(harnessDirectory, f) })) + .concat(["instrumenter.ts"].map(function (f) { return path.join(harnessDirectory, f) })) .concat(serverCoreSources) .concat(tslintRulesFiles) .concat(servicesSources) - .concat(["Gulpfile.ts"]); + .concat(typingsInstallerSources) + .concat(cancellationTokenSources) + .concat(["Gulpfile.ts"]) + .concat([nodeServerInFile, perftscPath, "tests/perfsys.ts", webhostPath]); + +function sendNextFile(files, child, callback, failures) { + var file = files.pop(); + if (file) { + console.log("Linting '" + file + "'."); + child.send({ kind: "file", name: file }); + } + else { + child.send({ kind: "close" }); + callback(failures); + } +} +function spawnLintWorker(files, callback) { + var child = child_process.fork("./scripts/parallel-lint"); + var failures = 0; + child.on("message", function (data) { + switch (data.kind) { + case "result": + if (data.failures > 0) { + failures += data.failures; + console.log(data.output); + } + sendNextFile(files, child, callback, failures); + break; + case "error": + console.error(data.error); + failures++; + sendNextFile(files, child, callback, failures); + break; + } + }); + sendNextFile(files, child, callback, failures); +} desc("Runs tslint on the compiler sources. 
Optional arguments are: f[iles]=regex"); -task("lint", ["build-rules"], function() { - var lintOptions = getLinterOptions(); +task("lint", ["build-rules"], function () { + if (fold.isTravis()) console.log(fold.start("lint")); + var startTime = mark(); var failed = 0; var fileMatcher = RegExp(process.env.f || process.env.file || process.env.files || ""); var done = {}; for (var i in lintTargets) { var target = lintTargets[i]; if (!done[target] && fileMatcher.test(target)) { - var result = lintFile(lintOptions, target); - if (result.failureCount > 0) { - console.log(result.output); - failed += result.failureCount; - } - done[target] = true; + done[target] = fs.statSync(target).size; } } - if (failed > 0) { - fail('Linter errors.', failed); - } -}); -/** - * This is required because file watches on Windows get fires _twice_ - * when a file changes on some node/windows version configuations - * (node v4 and win 10, for example). By not running a lint for a file - * which already has a pending lint, we avoid duplicating our work. - * (And avoid printing duplicate results!) - */ -var lintSemaphores = {}; - -function lintWatchFile(filename) { - fs.watch(filename, {persistent: true}, function(event) { - if (event !== "change") { - return; - } + var workerCount = (process.env.workerCount && +process.env.workerCount) || os.cpus().length; - if (!lintSemaphores[filename]) { - lintSemaphores[filename] = true; - lintFileAsync(getLinterOptions(), filename, function(err, result) { - delete lintSemaphores[filename]; - if (err) { - console.log(err); - return; - } - if (result.failureCount > 0) { - console.log("***Lint failure***"); - for (var i = 0; i < result.failures.length; i++) { - var failure = result.failures[i]; - var start = failure.startPosition.lineAndCharacter; - var end = failure.endPosition.lineAndCharacter; - console.log("warning " + filename + " (" + (start.line + 1) + "," + (start.character + 1) + "," + (end.line + 1) + "," + (end.character + 1) + "): " + failure.failure); - } - console.log("*** Total " + result.failureCount + " failures."); - } - }); - } + var names = Object.keys(done).sort(function (namea, nameb) { + return done[namea] - done[nameb]; }); -} -desc("Watches files for changes to rerun a lint pass"); -task("lint-server", ["build-rules"], function() { - console.log("Watching ./src for changes to linted files"); - for (var i = 0; i < lintTargets.length; i++) { - lintWatchFile(lintTargets[i]); + for (var i = 0; i < workerCount; i++) { + spawnLintWorker(names, finished); } -}); + + var completed = 0; + var failures = 0; + function finished(fails) { + completed++; + failures += fails; + if (completed === workerCount) { + measure(startTime); + if (fold.isTravis()) console.log(fold.end("lint")); + if (failures > 0) { + fail('Linter errors.', failed); + } + else { + complete(); + } + } + } +}, { async: true }); diff --git a/README.md b/README.md index fca2890bc7783..d16bc363b269e 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ [![Join the chat at https://gitter.im/Microsoft/TypeScript](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/Microsoft/TypeScript?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -[TypeScript](http://www.typescriptlang.org/) is a language for application-scale JavaScript. TypeScript adds optional types, classes, and modules to JavaScript. TypeScript supports tools for large-scale JavaScript applications for any browser, for any host, on any OS. TypeScript compiles to readable, standards-based JavaScript. 
Try it out at the [playground](http://www.typescriptlang.org/Playground), and stay up to date via [our blog](http://blogs.msdn.com/typescript) and [Twitter account](https://twitter.com/typescriptlang).
+[TypeScript](http://www.typescriptlang.org/) is a language for application-scale JavaScript. TypeScript adds optional types, classes, and modules to JavaScript. TypeScript supports tools for large-scale JavaScript applications for any browser, for any host, on any OS. TypeScript compiles to readable, standards-based JavaScript. Try it out at the [playground](http://www.typescriptlang.org/Playground), and stay up to date via [our blog](https://blogs.msdn.microsoft.com/typescript) and [Twitter account](https://twitter.com/typescriptlang).
 
 ## Installing
 
diff --git a/package.json b/package.json
index 5606a423a0e10..eb34b6bffbb57 100644
--- a/package.json
+++ b/package.json
@@ -30,8 +30,8 @@
     },
     "devDependencies": {
         "@types/browserify": "latest",
-        "@types/convert-source-map": "latest",
         "@types/chai": "latest",
+        "@types/convert-source-map": "latest",
         "@types/del": "latest",
         "@types/glob": "latest",
         "@types/gulp": "latest",
@@ -69,16 +69,18 @@
         "mkdirp": "latest",
         "mocha": "latest",
         "mocha-fivemat-progress-reporter": "latest",
+        "q": "latest",
         "run-sequence": "latest",
         "sorcery": "latest",
         "through2": "latest",
+        "travis-fold": "latest",
         "ts-node": "latest",
         "tslint": "next",
         "typescript": "next"
     },
     "scripts": {
         "pretest": "jake tests",
-        "test": "jake runtests",
+        "test": "jake runtests-parallel",
         "build": "npm run build:compiler && npm run build:tests",
         "build:compiler": "jake local",
         "build:tests": "jake tests",
diff --git a/scripts/ior.ts b/scripts/ior.ts
index eb67e62a275cf..91580203350f5 100644
--- a/scripts/ior.ts
+++ b/scripts/ior.ts
@@ -1,4 +1,4 @@
-///
+///
 
 import fs = require('fs');
 import path = require('path');
diff --git a/scripts/parallel-lint.js b/scripts/parallel-lint.js
new file mode 100644
index 0000000000000..a9aec06c2dfa0
--- /dev/null
+++ b/scripts/parallel-lint.js
@@ -0,0 +1,45 @@
+var Linter = require("tslint");
+var fs = require("fs");
+
+function getLinterOptions() {
+    return {
+        configuration: require("../tslint.json"),
+        formatter: "prose",
+        formattersDirectory: undefined,
+        rulesDirectory: "built/local/tslint"
+    };
+}
+
+function lintFileContents(options, path, contents) {
+    var ll = new Linter(path, contents, options);
+    return ll.lint();
+}
+
+function lintFileAsync(options, path, cb) {
+    fs.readFile(path, "utf8", function (err, contents) {
+        if (err) {
+            return cb(err);
+        }
+        var result = lintFileContents(options, path, contents);
+        cb(undefined, result);
+    });
+}
+
+process.on("message", function (data) {
+    switch (data.kind) {
+        case "file":
+            var target = data.name;
+            var lintOptions = getLinterOptions();
+            lintFileAsync(lintOptions, target, function (err, result) {
+                if (err) {
+                    process.send({ kind: "error", error: err.toString() });
+                    return;
+                }
+                process.send({ kind: "result", failures: result.failureCount, output: result.output });
+            });
+            break;
+        case "close":
+            process.exit(0);
+            break;
+    }
+});
\ No newline at end of file
diff --git a/scripts/processDiagnosticMessages.ts b/scripts/processDiagnosticMessages.ts
index 26632ba6bab3c..431cf46018050 100644
--- a/scripts/processDiagnosticMessages.ts
+++ b/scripts/processDiagnosticMessages.ts
@@ -69,7 +69,7 @@ function checkForUniqueCodes(messages: string[], diagnosticTable: InputDiagnosti
 }
 
 function buildUniqueNameMap(names: string[]): ts.Map {
-    var nameMap: ts.Map = {};
+    var nameMap = ts.createMap();
 
     var uniqueNames = NameGenerator.ensureUniqueness(names, /* isCaseSensitive */ false, /* isFixed */ undefined);
 
diff --git a/scripts/tslint/preferConstRule.ts b/scripts/tslint/preferConstRule.ts
index aaa1b0e53d5b8..1d3166924686a 100644
--- a/scripts/tslint/preferConstRule.ts
+++ b/scripts/tslint/preferConstRule.ts
@@ -1,7 +1,6 @@
 import * as Lint from "tslint/lib/lint";
 import * as ts from "typescript";
-
 
 export class Rule extends Lint.Rules.AbstractRule {
     public static FAILURE_STRING_FACTORY = (identifier: string) => `Identifier '${identifier}' never appears on the LHS of an assignment - use const instead of let for its declaration.`;
 
@@ -64,7 +63,7 @@ interface DeclarationUsages {
 }
 
 class PreferConstWalker extends Lint.RuleWalker {
-    private inScopeLetDeclarations: ts.Map[] = [];
+    private inScopeLetDeclarations: ts.MapLike[] = [];
     private errors: Lint.RuleFailure[] = [];
     private markAssignment(identifier: ts.Identifier) {
         const name = identifier.text;
@@ -172,7 +171,7 @@ class PreferConstWalker extends Lint.RuleWalker {
     }
 
     private visitAnyForStatement(node: ts.ForOfStatement | ts.ForInStatement) {
-        const names: ts.Map = {};
+        const names: ts.MapLike = {};
         if (isLet(node.initializer)) {
             if (node.initializer.kind === ts.SyntaxKind.VariableDeclarationList) {
                 this.collectLetIdentifiers(node.initializer as ts.VariableDeclarationList, names);
@@ -194,7 +193,7 @@ class PreferConstWalker extends Lint.RuleWalker {
     }
 
     visitBlock(node: ts.Block) {
-        const names: ts.Map = {};
+        const names: ts.MapLike = {};
         for (const statement of node.statements) {
             if (statement.kind === ts.SyntaxKind.VariableStatement) {
                 this.collectLetIdentifiers((statement as ts.VariableStatement).declarationList, names);
@@ -205,7 +204,7 @@ class PreferConstWalker extends Lint.RuleWalker {
         this.popDeclarations();
     }
 
-    private collectLetIdentifiers(list: ts.VariableDeclarationList, ret: ts.Map) {
+    private collectLetIdentifiers(list: ts.VariableDeclarationList, ret: ts.MapLike) {
         for (const node of list.declarations) {
             if (isLet(node) && !isExported(node)) {
                 this.collectNameIdentifiers(node, node.name, ret);
@@ -213,7 +212,7 @@ class PreferConstWalker extends Lint.RuleWalker {
         }
     }
 
-    private collectNameIdentifiers(declaration: ts.VariableDeclaration, node: ts.Identifier | ts.BindingPattern, table: ts.Map) {
+    private collectNameIdentifiers(declaration: ts.VariableDeclaration, node: ts.Identifier | ts.BindingPattern, table: ts.MapLike) {
         if (node.kind === ts.SyntaxKind.Identifier) {
             table[(node as ts.Identifier).text] = { declaration, usages: 0 };
         }
@@ -222,7 +221,7 @@ class PreferConstWalker extends Lint.RuleWalker {
         }
     }
 
-    private collectBindingPatternIdentifiers(value: ts.VariableDeclaration, pattern: ts.BindingPattern, table: ts.Map) {
+    private collectBindingPatternIdentifiers(value: ts.VariableDeclaration, pattern: ts.BindingPattern, table: ts.MapLike) {
         for (const element of pattern.elements) {
             this.collectNameIdentifiers(value, element.name, table);
         }
diff --git a/scripts/types/ambient.d.ts b/scripts/types/ambient.d.ts
index e77e3fe8c5a6d..4f4b118c432db 100644
--- a/scripts/types/ambient.d.ts
+++ b/scripts/types/ambient.d.ts
@@ -10,7 +10,7 @@ declare module "gulp-insert" {
     export function append(text: string | Buffer): NodeJS.ReadWriteStream;
     export function prepend(text: string | Buffer): NodeJS.ReadWriteStream;
     export function wrap(text: string | Buffer, tail: string | Buffer): NodeJS.ReadWriteStream;
-    export function transform(cb: (contents: string, file: {path: string}) => string): NodeJS.ReadWriteStream; // file is a vinyl
file + export function transform(cb: (contents: string, file: {path: string, relative: string}) => string): NodeJS.ReadWriteStream; // file is a vinyl file } declare module "into-stream" { @@ -22,3 +22,4 @@ declare module "into-stream" { } declare module "sorcery"; +declare module "travis-fold"; diff --git a/src/compiler/binder.ts b/src/compiler/binder.ts index 8d8666a67abac..d8017d601adb0 100644 --- a/src/compiler/binder.ts +++ b/src/compiler/binder.ts @@ -89,9 +89,10 @@ namespace ts { const binder = createBinder(); export function bindSourceFile(file: SourceFile, options: CompilerOptions) { - const start = performance.mark(); + performance.mark("beforeBind"); binder(file, options); - performance.measure("Bind", start); + performance.mark("afterBind"); + performance.measure("Bind", "beforeBind", "afterBind"); } function createBinder(): (file: SourceFile, options: CompilerOptions) => void { @@ -135,7 +136,7 @@ namespace ts { options = opts; languageVersion = getEmitScriptTarget(options); inStrictMode = !!file.externalModuleIndicator; - classifiableNames = {}; + classifiableNames = createMap(); symbolCount = 0; Symbol = objectAllocator.getSymbolConstructor(); @@ -183,11 +184,11 @@ namespace ts { symbol.declarations.push(node); if (symbolFlags & SymbolFlags.HasExports && !symbol.exports) { - symbol.exports = {}; + symbol.exports = createMap(); } if (symbolFlags & SymbolFlags.HasMembers && !symbol.members) { - symbol.members = {}; + symbol.members = createMap(); } if (symbolFlags & SymbolFlags.Value) { @@ -298,8 +299,10 @@ namespace ts { const name = isDefaultExport && parent ? "default" : getDeclarationName(node); let symbol: Symbol; - if (name !== undefined) { - + if (name === undefined) { + symbol = createSymbol(SymbolFlags.None, "__missing"); + } + else { // Check and see if the symbol table already has a symbol with this name. If not, // create a new symbol with this name and add it to the table. Note that we don't // give the new symbol any flags *yet*. This ensures that it will not conflict @@ -311,6 +314,11 @@ namespace ts { // declaration we have for this symbol, and then create a new symbol for this // declaration. // + // Note that when properties declared in Javascript constructors + // (marked by isReplaceableByMethod) conflict with another symbol, the property loses. + // Always. This allows the common Javascript pattern of overwriting a prototype method + // with an bound instance method of the same type: `this.method = this.method.bind(this)` + // // If we created a new symbol, either because we didn't have a symbol with this name // in the symbol table, or we conflicted with an existing symbol, then just add this // node as the sole declaration of the new symbol. @@ -318,42 +326,44 @@ namespace ts { // Otherwise, we'll be merging into a compatible existing symbol (for example when // you have multiple 'vars' with the same name in the same container). In this case // just add this node into the declarations list of the symbol. - symbol = hasProperty(symbolTable, name) - ? symbolTable[name] - : (symbolTable[name] = createSymbol(SymbolFlags.None, name)); + symbol = symbolTable[name] || (symbolTable[name] = createSymbol(SymbolFlags.None, name)); if (name && (includes & SymbolFlags.Classifiable)) { classifiableNames[name] = name; } if (symbol.flags & excludes) { - if (node.name) { - node.name.parent = node; + if (symbol.isReplaceableByMethod) { + // Javascript constructor-declared symbols can be discarded in favor of + // prototype symbols like methods. 
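
For context, the pattern the isReplaceableByMethod flag is meant to tolerate (quoted in the comment above) looks like the following JavaScript class as seen under --allowJs; the class and member names are illustrative only.

    // widget.js — checked with --allowJs
    class Widget {
        constructor() {
            // Constructor-declared property with the same name as the prototype
            // method below. The binder marks it replaceable, so the method symbol
            // wins instead of a "Duplicate identifier 'render'" error.
            this.render = this.render.bind(this);
        }

        render() {
            return "rendered";
        }
    }

    new Widget().render();
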
+ symbol = symbolTable[name] = createSymbol(SymbolFlags.None, name); } + else { + if (node.name) { + node.name.parent = node; + } - // Report errors every position with duplicate declaration - // Report errors on previous encountered declarations - let message = symbol.flags & SymbolFlags.BlockScopedVariable - ? Diagnostics.Cannot_redeclare_block_scoped_variable_0 - : Diagnostics.Duplicate_identifier_0; + // Report errors every position with duplicate declaration + // Report errors on previous encountered declarations + let message = symbol.flags & SymbolFlags.BlockScopedVariable + ? Diagnostics.Cannot_redeclare_block_scoped_variable_0 + : Diagnostics.Duplicate_identifier_0; - forEach(symbol.declarations, declaration => { - if (declaration.flags & NodeFlags.Default) { - message = Diagnostics.A_module_cannot_have_multiple_default_exports; - } - }); + forEach(symbol.declarations, declaration => { + if (declaration.flags & NodeFlags.Default) { + message = Diagnostics.A_module_cannot_have_multiple_default_exports; + } + }); - forEach(symbol.declarations, declaration => { - file.bindDiagnostics.push(createDiagnosticForNode(declaration.name || declaration, message, getDisplayName(declaration))); - }); - file.bindDiagnostics.push(createDiagnosticForNode(node.name || node, message, getDisplayName(node))); + forEach(symbol.declarations, declaration => { + file.bindDiagnostics.push(createDiagnosticForNode(declaration.name || declaration, message, getDisplayName(declaration))); + }); + file.bindDiagnostics.push(createDiagnosticForNode(node.name || node, message, getDisplayName(node))); - symbol = createSymbol(SymbolFlags.None, name); + symbol = createSymbol(SymbolFlags.None, name); + } } } - else { - symbol = createSymbol(SymbolFlags.None, "__missing"); - } addDeclarationToSymbol(symbol, node, includes); symbol.parent = parent; @@ -434,7 +444,7 @@ namespace ts { if (containerFlags & ContainerFlags.IsContainer) { container = blockScopeContainer = node; if (containerFlags & ContainerFlags.HasLocals) { - container.locals = {}; + container.locals = createMap(); } addToContainerChain(container); } @@ -1399,7 +1409,8 @@ namespace ts { const typeLiteralSymbol = createSymbol(SymbolFlags.TypeLiteral, "__type"); addDeclarationToSymbol(typeLiteralSymbol, node, SymbolFlags.TypeLiteral); - typeLiteralSymbol.members = { [symbol.name]: symbol }; + typeLiteralSymbol.members = createMap(); + typeLiteralSymbol.members[symbol.name] = symbol; } function bindObjectLiteralExpression(node: ObjectLiteralExpression) { @@ -1409,7 +1420,7 @@ namespace ts { } if (inStrictMode) { - const seen: Map = {}; + const seen = createMap(); for (const prop of node.properties) { if (prop.name.kind !== SyntaxKind.Identifier) { @@ -1465,7 +1476,7 @@ namespace ts { // fall through. default: if (!blockScopeContainer.locals) { - blockScopeContainer.locals = {}; + blockScopeContainer.locals = createMap(); addToContainerChain(blockScopeContainer); } declareSymbol(blockScopeContainer.locals, undefined, node, symbolFlags, symbolExcludes); @@ -1887,18 +1898,17 @@ namespace ts { } function bindExportAssignment(node: ExportAssignment | BinaryExpression) { - const boundExpression = node.kind === SyntaxKind.ExportAssignment ? 
(node).expression : (node).right; if (!container.symbol || !container.symbol.exports) { // Export assignment in some sort of block construct bindAnonymousDeclaration(node, SymbolFlags.Alias, getDeclarationName(node)); } - else if (boundExpression.kind === SyntaxKind.Identifier && node.kind === SyntaxKind.ExportAssignment) { - // An export default clause with an identifier exports all meanings of that identifier - declareSymbol(container.symbol.exports, container.symbol, node, SymbolFlags.Alias, SymbolFlags.PropertyExcludes | SymbolFlags.AliasExcludes); - } else { - // An export default clause with an expression exports a value - declareSymbol(container.symbol.exports, container.symbol, node, SymbolFlags.Property, SymbolFlags.PropertyExcludes | SymbolFlags.AliasExcludes); + const flags = node.kind === SyntaxKind.ExportAssignment && exportAssignmentIsAlias(node) + // An export default clause with an EntityNameExpression exports all meanings of that identifier + ? SymbolFlags.Alias + // An export default clause with any other expression exports a value + : SymbolFlags.Property; + declareSymbol(container.symbol.exports, container.symbol, node, flags, SymbolFlags.PropertyExcludes | SymbolFlags.AliasExcludes); } } @@ -1925,7 +1935,7 @@ namespace ts { } } - file.symbol.globalExports = file.symbol.globalExports || {}; + file.symbol.globalExports = file.symbol.globalExports || createMap(); declareSymbol(file.symbol.globalExports, file.symbol, node, SymbolFlags.Alias, SymbolFlags.AliasExcludes); } @@ -1967,20 +1977,25 @@ namespace ts { } function bindThisPropertyAssignment(node: BinaryExpression) { - // Declare a 'member' in case it turns out the container was an ES5 class or ES6 constructor - let assignee: Node; + Debug.assert(isInJavaScriptFile(node)); + // Declare a 'member' if the container is an ES5 class or ES6 constructor if (container.kind === SyntaxKind.FunctionDeclaration || container.kind === SyntaxKind.FunctionExpression) { - assignee = container; + container.symbol.members = container.symbol.members || createMap(); + // It's acceptable for multiple 'this' assignments of the same identifier to occur + declareSymbol(container.symbol.members, container.symbol, node, SymbolFlags.Property, SymbolFlags.PropertyExcludes & ~SymbolFlags.Property); } else if (container.kind === SyntaxKind.Constructor) { - assignee = container.parent; - } - else { - return; + // this.foo assignment in a JavaScript class + // Bind this property to the containing class + const saveContainer = container; + container = container.parent; + const symbol = bindPropertyOrMethodOrAccessor(node, SymbolFlags.Property, SymbolFlags.None); + if (symbol) { + // constructor-declared symbols can be overwritten by subsequent method declarations + (symbol as Symbol).isReplaceableByMethod = true; + } + container = saveContainer; } - assignee.symbol.members = assignee.symbol.members || {}; - // It's acceptable for multiple 'this' assignments of the same identifier to occur - declareSymbol(assignee.symbol.members, assignee.symbol, node, SymbolFlags.Property, SymbolFlags.PropertyExcludes & ~SymbolFlags.Property); } function bindPrototypePropertyAssignment(node: BinaryExpression) { @@ -2004,7 +2019,7 @@ namespace ts { // Set up the members collection if it doesn't exist already if (!funcSymbol.members) { - funcSymbol.members = {}; + funcSymbol.members = createMap(); } // Declare the method/property @@ -2053,7 +2068,7 @@ namespace ts { // module might have an exported variable called 'prototype'. 
We can't allow that as // that would clash with the built-in 'prototype' for the class. const prototypeSymbol = createSymbol(SymbolFlags.Property | SymbolFlags.Prototype, "prototype"); - if (hasProperty(symbol.exports, prototypeSymbol.name)) { + if (symbol.exports[prototypeSymbol.name]) { if (node.name) { node.name.parent = node; } diff --git a/src/compiler/checker.ts b/src/compiler/checker.ts index 0cdd0b21b6b85..8568cd5feed2f 100644 --- a/src/compiler/checker.ts +++ b/src/compiler/checker.ts @@ -44,7 +44,7 @@ namespace ts { let symbolCount = 0; const emptyArray: any[] = []; - const emptySymbols: SymbolTable = {}; + const emptySymbols = createMap(); const compilerOptions = host.getCompilerOptions(); const languageVersion = compilerOptions.target || ScriptTarget.ES3; @@ -106,11 +106,11 @@ namespace ts { isOptionalParameter }; - const tupleTypes: Map = {}; - const unionTypes: Map = {}; - const intersectionTypes: Map = {}; - const stringLiteralTypes: Map = {}; - const numericLiteralTypes: Map = {}; + const tupleTypes = createMap(); + const unionTypes = createMap(); + const intersectionTypes = createMap(); + const stringLiteralTypes = createMap(); + const numericLiteralTypes = createMap(); const unknownSymbol = createSymbol(SymbolFlags.Property | SymbolFlags.Transient, "unknown"); const resolvingSymbol = createSymbol(SymbolFlags.Transient, "__resolving__"); @@ -132,7 +132,7 @@ namespace ts { const emptyObjectType = createAnonymousType(undefined, emptySymbols, emptyArray, emptyArray, undefined, undefined); const emptyGenericType = createAnonymousType(undefined, emptySymbols, emptyArray, emptyArray, undefined, undefined); - emptyGenericType.instantiations = {}; + emptyGenericType.instantiations = createMap(); const anyFunctionType = createAnonymousType(undefined, emptySymbols, emptyArray, emptyArray, undefined, undefined); // The anyFunctionType contains the anyFunctionType by definition. The flag is further propagated @@ -146,7 +146,7 @@ namespace ts { const enumNumberIndexInfo = createIndexInfo(stringType, /*isReadonly*/ true); - const globals: SymbolTable = {}; + const globals = createMap(); /** * List of every ambient module with a "*" wildcard. * Unlike other ambient modules, these can't be stored in `globals` because symbol tables only deal with exact matches. @@ -215,7 +215,7 @@ namespace ts { const flowLoopKeys: string[] = []; const flowLoopTypes: Type[][] = []; const visitedFlowNodes: FlowNode[] = []; - const visitedFlowTypes: Type[] = []; + const visitedFlowTypes: FlowType[] = []; const potentialThisCollisions: Node[] = []; const awaitedTypeStack: number[] = []; @@ -245,7 +245,8 @@ namespace ts { NEUndefinedOrNull = 1 << 19, // x != undefined / x != null Truthy = 1 << 20, // x Falsy = 1 << 21, // !x - All = (1 << 22) - 1, + Discriminatable = 1 << 22, // May have discriminant property + All = (1 << 23) - 1, // The following members encode facts about particular kinds of types for use in the getTypeFacts function. // The presence of a particular fact means that the given test is true for some (and possibly all) values // of that kind of type. 
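
The Discriminatable fact added above (and folded into the object and function fact sets in the next hunk) is what the isDiscriminantProperty logic later in this file filters on. In user code it supports discriminated-union narrowing of roughly this shape (names illustrative):

    interface Square { kind: "square"; size: number; }
    interface Circle { kind: "circle"; radius: number; }
    type Shape = Square | Circle;

    function area(shape: Shape) {
        // Checking the discriminant property narrows the whole union member.
        if (shape.kind === "square") {
            return shape.size * shape.size;               // shape: Square
        }
        return Math.PI * shape.radius * shape.radius;     // shape: Circle
    }
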
@@ -275,15 +276,15 @@ namespace ts { TrueFacts = BaseBooleanFacts | Truthy, SymbolStrictFacts = TypeofEQSymbol | TypeofNEString | TypeofNENumber | TypeofNEBoolean | TypeofNEObject | TypeofNEFunction | TypeofNEHostObject | NEUndefined | NENull | NEUndefinedOrNull | Truthy, SymbolFacts = SymbolStrictFacts | EQUndefined | EQNull | EQUndefinedOrNull | Falsy, - ObjectStrictFacts = TypeofEQObject | TypeofEQHostObject | TypeofNEString | TypeofNENumber | TypeofNEBoolean | TypeofNESymbol | TypeofNEFunction | NEUndefined | NENull | NEUndefinedOrNull | Truthy, + ObjectStrictFacts = TypeofEQObject | TypeofEQHostObject | TypeofNEString | TypeofNENumber | TypeofNEBoolean | TypeofNESymbol | TypeofNEFunction | NEUndefined | NENull | NEUndefinedOrNull | Truthy | Discriminatable, ObjectFacts = ObjectStrictFacts | EQUndefined | EQNull | EQUndefinedOrNull | Falsy, - FunctionStrictFacts = TypeofEQFunction | TypeofEQHostObject | TypeofNEString | TypeofNENumber | TypeofNEBoolean | TypeofNESymbol | TypeofNEObject | NEUndefined | NENull | NEUndefinedOrNull | Truthy, + FunctionStrictFacts = TypeofEQFunction | TypeofEQHostObject | TypeofNEString | TypeofNENumber | TypeofNEBoolean | TypeofNESymbol | TypeofNEObject | NEUndefined | NENull | NEUndefinedOrNull | Truthy | Discriminatable, FunctionFacts = FunctionStrictFacts | EQUndefined | EQNull | EQUndefinedOrNull | Falsy, UndefinedFacts = TypeofNEString | TypeofNENumber | TypeofNEBoolean | TypeofNESymbol | TypeofNEObject | TypeofNEFunction | TypeofNEHostObject | EQUndefined | EQUndefinedOrNull | NENull | Falsy, NullFacts = TypeofEQObject | TypeofNEString | TypeofNENumber | TypeofNEBoolean | TypeofNESymbol | TypeofNEFunction | TypeofNEHostObject | EQNull | EQUndefinedOrNull | NEUndefined | Falsy, } - const typeofEQFacts: Map = { + const typeofEQFacts = createMap({ "string": TypeFacts.TypeofEQString, "number": TypeFacts.TypeofEQNumber, "boolean": TypeFacts.TypeofEQBoolean, @@ -291,9 +292,9 @@ namespace ts { "undefined": TypeFacts.EQUndefined, "object": TypeFacts.TypeofEQObject, "function": TypeFacts.TypeofEQFunction - }; + }); - const typeofNEFacts: Map = { + const typeofNEFacts = createMap({ "string": TypeFacts.TypeofNEString, "number": TypeFacts.TypeofNENumber, "boolean": TypeFacts.TypeofNEBoolean, @@ -301,19 +302,19 @@ namespace ts { "undefined": TypeFacts.NEUndefined, "object": TypeFacts.TypeofNEObject, "function": TypeFacts.TypeofNEFunction - }; + }); - const typeofTypesByName: Map = { + const typeofTypesByName = createMap({ "string": stringType, "number": numberType, "boolean": booleanType, "symbol": esSymbolType, "undefined": undefinedType - }; + }); let jsxElementType: ObjectType; /** Things we lazy load from the JSX namespace */ - const jsxTypes: Map = {}; + const jsxTypes = createMap(); const JsxNames = { JSX: "JSX", IntrinsicElements: "IntrinsicElements", @@ -324,10 +325,10 @@ namespace ts { IntrinsicClassAttributes: "IntrinsicClassAttributes" }; - const subtypeRelation: Map = {}; - const assignableRelation: Map = {}; - const comparableRelation: Map = {}; - const identityRelation: Map = {}; + const subtypeRelation = createMap(); + const assignableRelation = createMap(); + const comparableRelation = createMap(); + const identityRelation = createMap(); // This is for caching the result of getSymbolDisplayBuilder. Do not access directly. 
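
Much of the mechanical churn in this hunk replaces object-literal tables with createMap(). The helper itself lives in core.ts and is not shown in this diff; the sketch below is a simplified stand-in (not the actual implementation) that captures why the hasProperty guards elsewhere in the diff can be dropped: the returned dictionary has no prototype, so inherited keys such as "toString" or "constructor" can never alias real entries.

    interface MapLike<T> {
        [key: string]: T;
    }

    // Simplified stand-in for a createMap-style helper: a prototype-free
    // dictionary, optionally seeded from a template object.
    function createDictionary<T>(template?: MapLike<T>): MapLike<T> {
        const map: MapLike<T> = Object.create(null);
        if (template) {
            for (const key in template) {
                map[key] = template[key];
            }
        }
        return map;
    }

    const facts = createDictionary({ "string": 1, "number": 2 });
    console.log("toString" in facts);  // false: no Object.prototype members leak in
    console.log(facts["string"]);      // 1
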
let _displayBuilder: SymbolDisplayBuilder; @@ -341,9 +342,8 @@ namespace ts { ResolvedReturnType } - const builtinGlobals: SymbolTable = { - [undefinedSymbol.name]: undefinedSymbol - }; + const builtinGlobals = createMap(); + builtinGlobals[undefinedSymbol.name] = undefinedSymbol; initializeTypeChecker(); @@ -403,8 +403,8 @@ namespace ts { result.parent = symbol.parent; if (symbol.valueDeclaration) result.valueDeclaration = symbol.valueDeclaration; if (symbol.constEnumOnlyModule) result.constEnumOnlyModule = true; - if (symbol.members) result.members = cloneSymbolTable(symbol.members); - if (symbol.exports) result.exports = cloneSymbolTable(symbol.exports); + if (symbol.members) result.members = cloneMap(symbol.members); + if (symbol.exports) result.exports = cloneMap(symbol.exports); recordMergedSymbol(result, symbol); return result; } @@ -426,11 +426,11 @@ namespace ts { target.declarations.push(node); }); if (source.members) { - if (!target.members) target.members = {}; + if (!target.members) target.members = createMap(); mergeSymbolTable(target.members, source.members); } if (source.exports) { - if (!target.exports) target.exports = {}; + if (!target.exports) target.exports = createMap(); mergeSymbolTable(target.exports, source.exports); } recordMergedSymbol(target, source); @@ -447,29 +447,17 @@ namespace ts { } } - function cloneSymbolTable(symbolTable: SymbolTable): SymbolTable { - const result: SymbolTable = {}; - for (const id in symbolTable) { - if (hasProperty(symbolTable, id)) { - result[id] = symbolTable[id]; - } - } - return result; - } - function mergeSymbolTable(target: SymbolTable, source: SymbolTable) { for (const id in source) { - if (hasProperty(source, id)) { - if (!hasProperty(target, id)) { - target[id] = source[id]; - } - else { - let symbol = target[id]; - if (!(symbol.flags & SymbolFlags.Merged)) { - target[id] = symbol = cloneSymbol(symbol); - } - mergeSymbol(symbol, source[id]); + let targetSymbol = target[id]; + if (!targetSymbol) { + target[id] = source[id]; + } + else { + if (!(targetSymbol.flags & SymbolFlags.Merged)) { + target[id] = targetSymbol = cloneSymbol(targetSymbol); } + mergeSymbol(targetSymbol, source[id]); } } } @@ -513,14 +501,12 @@ namespace ts { function addToSymbolTable(target: SymbolTable, source: SymbolTable, message: DiagnosticMessage) { for (const id in source) { - if (hasProperty(source, id)) { - if (hasProperty(target, id)) { - // Error on redeclarations - forEach(target[id].declarations, addDeclarationDiagnostic(id, message)); - } - else { - target[id] = source[id]; - } + if (target[id]) { + // Error on redeclarations + forEach(target[id].declarations, addDeclarationDiagnostic(id, message)); + } + else { + target[id] = source[id]; } } @@ -537,7 +523,7 @@ namespace ts { function getNodeLinks(node: Node): NodeLinks { const nodeId = getNodeId(node); - return nodeLinks[nodeId] || (nodeLinks[nodeId] = {}); + return nodeLinks[nodeId] || (nodeLinks[nodeId] = { flags: 0 }); } function isGlobalSourceFile(node: Node) { @@ -545,18 +531,20 @@ namespace ts { } function getSymbol(symbols: SymbolTable, name: string, meaning: SymbolFlags): Symbol { - if (meaning && hasProperty(symbols, name)) { + if (meaning) { const symbol = symbols[name]; - Debug.assert((symbol.flags & SymbolFlags.Instantiated) === 0, "Should never get an instantiated symbol here."); - if (symbol.flags & meaning) { - return symbol; - } - if (symbol.flags & SymbolFlags.Alias) { - const target = resolveAlias(symbol); - // Unknown symbol means an error occurred in alias resolution, 
treat it as positive answer to avoid cascading errors - if (target === unknownSymbol || target.flags & meaning) { + if (symbol) { + Debug.assert((symbol.flags & SymbolFlags.Instantiated) === 0, "Should never get an instantiated symbol here."); + if (symbol.flags & meaning) { return symbol; } + if (symbol.flags & SymbolFlags.Alias) { + const target = resolveAlias(symbol); + // Unknown symbol means an error occurred in alias resolution, treat it as positive answer to avoid cascading errors + if (target === unknownSymbol || target.flags & meaning) { + return symbol; + } + } } } // return undefined if we can't find a symbol. @@ -664,7 +652,7 @@ namespace ts { // Resolve a given name for a given meaning at a given location. An error is reported if the name was not found and // the nameNotFoundMessage argument is not undefined. Returns the resolved symbol, or undefined if no symbol with // the given name can be found. - function resolveName(location: Node, name: string, meaning: SymbolFlags, nameNotFoundMessage: DiagnosticMessage, nameArg: string | Identifier): Symbol { + function resolveName(location: Node | undefined, name: string, meaning: SymbolFlags, nameNotFoundMessage: DiagnosticMessage, nameArg: string | Identifier): Symbol { let result: Symbol; let lastLocation: Node; let propertyWithInvalidInitializer: Node; @@ -744,7 +732,7 @@ namespace ts { // 2. We check === SymbolFlags.Alias in order to check that the symbol is *purely* // an alias. If we used &, we'd be throwing out symbols that have non alias aspects, // which is not the desired behavior. - if (hasProperty(moduleExports, name) && + if (moduleExports[name] && moduleExports[name].flags === SymbolFlags.Alias && getDeclarationOfKind(moduleExports[name], SyntaxKind.ExportSpecifier)) { break; @@ -881,7 +869,8 @@ namespace ts { if (!result) { if (nameNotFoundMessage) { - if (!checkAndReportErrorForMissingPrefix(errorLocation, name, nameArg) && + if (!errorLocation || + !checkAndReportErrorForMissingPrefix(errorLocation, name, nameArg) && !checkAndReportErrorForExtendingInterface(errorLocation)) { error(errorLocation, nameNotFoundMessage, typeof nameArg === "string" ? 
nameArg : declarationNameToString(nameArg)); } @@ -930,7 +919,7 @@ namespace ts { } function checkAndReportErrorForMissingPrefix(errorLocation: Node, name: string, nameArg: string | Identifier): boolean { - if (!errorLocation || (errorLocation.kind === SyntaxKind.Identifier && (isTypeReferenceIdentifier(errorLocation)) || isInTypeQuery(errorLocation))) { + if ((errorLocation.kind === SyntaxKind.Identifier && (isTypeReferenceIdentifier(errorLocation)) || isInTypeQuery(errorLocation))) { return false; } @@ -968,29 +957,31 @@ namespace ts { function checkAndReportErrorForExtendingInterface(errorLocation: Node): boolean { - let parentClassExpression = errorLocation; - while (parentClassExpression) { - const kind = parentClassExpression.kind; - if (kind === SyntaxKind.Identifier || kind === SyntaxKind.PropertyAccessExpression) { - parentClassExpression = parentClassExpression.parent; - continue; - } - if (kind === SyntaxKind.ExpressionWithTypeArguments) { - break; - } - return false; - } - if (!parentClassExpression) { - return false; - } - const expression = (parentClassExpression).expression; - if (resolveEntityName(expression, SymbolFlags.Interface, /*ignoreErrors*/ true)) { + const expression = getEntityNameForExtendingInterface(errorLocation); + const isError = !!(expression && resolveEntityName(expression, SymbolFlags.Interface, /*ignoreErrors*/ true)); + if (isError) { error(errorLocation, Diagnostics.Cannot_extend_an_interface_0_Did_you_mean_implements, getTextOfNode(expression)); - return true; } - return false; + return isError; + } + /** + * Climbs up parents to an ExpressionWithTypeArguments, and returns its expression, + * but returns undefined if that expression is not an EntityNameExpression. + */ + function getEntityNameForExtendingInterface(node: Node): EntityNameExpression | undefined { + switch (node.kind) { + case SyntaxKind.Identifier: + case SyntaxKind.PropertyAccessExpression: + return node.parent ? getEntityNameForExtendingInterface(node.parent) : undefined; + case SyntaxKind.ExpressionWithTypeArguments: + Debug.assert(isEntityNameExpression((node).expression)); + return (node).expression; + default: + return undefined; + } } + function checkResolvedBlockScopedVariable(result: Symbol, errorLocation: Node): void { Debug.assert((result.flags & SymbolFlags.BlockScopedVariable) !== 0); // Block-scoped variables cannot be used before their definition @@ -1033,7 +1024,7 @@ namespace ts { } function getDeclarationOfAliasSymbol(symbol: Symbol): Declaration { - return forEach(symbol.declarations, d => isAliasSymbolDeclaration(d) ? d : undefined); + return findMap(symbol.declarations, d => isAliasSymbolDeclaration(d) ? 
d : undefined); } function getTargetOfImportEqualsDeclaration(node: ImportEqualsDeclaration): Symbol { @@ -1101,9 +1092,9 @@ namespace ts { function getExportOfModule(symbol: Symbol, name: string): Symbol { if (symbol.flags & SymbolFlags.Module) { - const exports = getExportsOfSymbol(symbol); - if (hasProperty(exports, name)) { - return resolveSymbol(exports[name]); + const exportedSymbol = getExportsOfSymbol(symbol)[name]; + if (exportedSymbol) { + return resolveSymbol(exportedSymbol); } } } @@ -1135,6 +1126,10 @@ namespace ts { else { symbolFromVariable = getPropertyOfVariable(targetSymbol, name.text); } + // If the export member we're looking for is default, and there is no real default but allowSyntheticDefaultImports is on, return the entire module as the default + if (!symbolFromVariable && allowSyntheticDefaultImports && name.text === "default") { + symbolFromVariable = resolveExternalModuleSymbol(moduleSymbol) || resolveSymbol(moduleSymbol); + } // if symbolFromVariable is export - get its final target symbolFromVariable = resolveSymbol(symbolFromVariable); const symbolFromModule = getExportOfModule(targetSymbol, name.text); @@ -1164,7 +1159,7 @@ namespace ts { } function getTargetOfExportAssignment(node: ExportAssignment): Symbol { - return resolveEntityName(node.expression, SymbolFlags.Value | SymbolFlags.Type | SymbolFlags.Namespace); + return resolveEntityName(node.expression, SymbolFlags.Value | SymbolFlags.Type | SymbolFlags.Namespace); } function getTargetOfAliasDeclaration(node: Declaration): Symbol { @@ -1274,7 +1269,7 @@ namespace ts { } // Resolves a qualified name and any involved aliases - function resolveEntityName(name: EntityName | Expression, meaning: SymbolFlags, ignoreErrors?: boolean, dontResolveAlias?: boolean): Symbol { + function resolveEntityName(name: EntityNameOrEntityNameExpression, meaning: SymbolFlags, ignoreErrors?: boolean, dontResolveAlias?: boolean): Symbol | undefined { if (nodeIsMissing(name)) { return undefined; } @@ -1289,7 +1284,7 @@ namespace ts { } } else if (name.kind === SyntaxKind.QualifiedName || name.kind === SyntaxKind.PropertyAccessExpression) { - const left = name.kind === SyntaxKind.QualifiedName ? (name).left : (name).expression; + const left = name.kind === SyntaxKind.QualifiedName ? (name).left : (name).expression; const right = name.kind === SyntaxKind.QualifiedName ? 
(name).right : (name).name; const namespace = resolveEntityName(left, SymbolFlags.Namespace, ignoreErrors); @@ -1366,7 +1361,14 @@ namespace ts { if (moduleNotFoundError) { // report errors only if it was requested - error(moduleReferenceLiteral, moduleNotFoundError, moduleName); + const tsExtension = tryExtractTypeScriptExtension(moduleName); + if (tsExtension) { + const diag = Diagnostics.An_import_path_cannot_end_with_a_0_extension_Consider_importing_1_instead; + error(moduleReferenceLiteral, diag, tsExtension, removeExtension(moduleName, tsExtension)); + } + else { + error(moduleReferenceLiteral, moduleNotFoundError, moduleName); + } } return undefined; } @@ -1417,7 +1419,7 @@ namespace ts { */ function extendExportSymbols(target: SymbolTable, source: SymbolTable, lookupTable?: Map, exportNode?: ExportDeclaration) { for (const id in source) { - if (id !== "default" && !hasProperty(target, id)) { + if (id !== "default" && !target[id]) { target[id] = source[id]; if (lookupTable && exportNode) { lookupTable[id] = { @@ -1425,7 +1427,7 @@ namespace ts { } as ExportCollisionTracker; } } - else if (lookupTable && exportNode && id !== "default" && hasProperty(target, id) && resolveSymbol(target[id]) !== resolveSymbol(source[id])) { + else if (lookupTable && exportNode && id !== "default" && target[id] && resolveSymbol(target[id]) !== resolveSymbol(source[id])) { if (!lookupTable[id].exportsWithDuplicate) { lookupTable[id].exportsWithDuplicate = [exportNode]; } @@ -1447,12 +1449,12 @@ namespace ts { return; } visitedSymbols.push(symbol); - const symbols = cloneSymbolTable(symbol.exports); + const symbols = cloneMap(symbol.exports); // All export * declarations are collected in an __export symbol by the binder const exportStars = symbol.exports["__export"]; if (exportStars) { - const nestedSymbols: SymbolTable = {}; - const lookupTable: Map = {}; + const nestedSymbols = createMap(); + const lookupTable = createMap(); for (const node of exportStars.declarations) { const resolvedModule = resolveExternalModuleName(node, (node as ExportDeclaration).moduleSpecifier); const exportedSymbols = visit(resolvedModule); @@ -1466,7 +1468,7 @@ namespace ts { for (const id in lookupTable) { const { exportsWithDuplicate } = lookupTable[id]; // It's not an error if the file with multiple `export *`s with duplicate names exports a member with that name itself - if (id === "export=" || !(exportsWithDuplicate && exportsWithDuplicate.length) || hasProperty(symbols, id)) { + if (id === "export=" || !(exportsWithDuplicate && exportsWithDuplicate.length) || symbols[id]) { continue; } for (const node of exportsWithDuplicate) { @@ -1534,8 +1536,8 @@ namespace ts { function createType(flags: TypeFlags): Type { const result = new Type(checker, flags); - result.id = typeCount; typeCount++; + result.id = typeCount; return result; } @@ -1572,13 +1574,11 @@ namespace ts { function getNamedMembers(members: SymbolTable): Symbol[] { let result: Symbol[]; for (const id in members) { - if (hasProperty(members, id)) { - if (!isReservedMemberName(id)) { - if (!result) result = []; - const symbol = members[id]; - if (symbolIsValue(symbol)) { - result.push(symbol); - } + if (!isReservedMemberName(id)) { + if (!result) result = []; + const symbol = members[id]; + if (symbolIsValue(symbol)) { + result.push(symbol); } } } @@ -1654,12 +1654,12 @@ namespace ts { } // If symbol is directly available by its name in the symbol table - if (isAccessible(lookUp(symbols, symbol.name))) { + if (isAccessible(symbols[symbol.name])) { return [symbol]; 
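
The lookup table threaded through extendExportSymbols above exists to flag ambiguous re-exports. A small illustration of the case it tracks (file names are made up):

    // a.ts
    export const shared = 1;

    // b.ts
    export const shared = 2;

    // c.ts — both star-exports contribute a member named "shared", so the
    // collision is recorded and reported on these export declarations.
    // It is not an error if c.ts declares and exports its own "shared",
    // which then takes precedence over both re-exports.
    export * from "./a";
    export * from "./b";
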
} // Check if symbol is any of the alias - return forEachValue(symbols, symbolFromSymbolTable => { + return forEachProperty(symbols, symbolFromSymbolTable => { if (symbolFromSymbolTable.flags & SymbolFlags.Alias && symbolFromSymbolTable.name !== "export=" && !getDeclarationOfKind(symbolFromSymbolTable, SyntaxKind.ExportSpecifier)) { @@ -1694,12 +1694,12 @@ namespace ts { let qualify = false; forEachSymbolTableInScope(enclosingDeclaration, symbolTable => { // If symbol of this name is not available in the symbol table we are ok - if (!hasProperty(symbolTable, symbol.name)) { + let symbolFromSymbolTable = symbolTable[symbol.name]; + if (!symbolFromSymbolTable) { // Continue to the next symbol table return false; } // If the symbol with this name is present it should refer to the symbol - let symbolFromSymbolTable = symbolTable[symbol.name]; if (symbolFromSymbolTable === symbol) { // No need to qualify return true; @@ -1845,7 +1845,7 @@ namespace ts { } } - function isEntityNameVisible(entityName: EntityName | Expression, enclosingDeclaration: Node): SymbolVisibilityResult { + function isEntityNameVisible(entityName: EntityNameOrEntityNameExpression, enclosingDeclaration: Node): SymbolVisibilityResult { // get symbol of the first identifier of the entityName let meaning: SymbolFlags; if (entityName.parent.kind === SyntaxKind.TypeQuery || isExpressionWithTypeArgumentsInClassExtendsClause(entityName.parent)) { @@ -3090,7 +3090,7 @@ namespace ts { // If the declaration specifies a binding pattern, use the type implied by the binding pattern if (isBindingPattern(declaration.name)) { - return getTypeFromBindingPattern(declaration.name, /*includePatternInType*/ false); + return getTypeFromBindingPattern(declaration.name, /*includePatternInType*/ false, /*reportErrors*/ true); } // No type specified and nothing can be inferred @@ -3100,24 +3100,22 @@ namespace ts { // Return the type implied by a binding pattern element. This is the type of the initializer of the element if // one is present. Otherwise, if the element is itself a binding pattern, it is the type implied by the binding // pattern. Otherwise, it is the type any. 
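
The reportErrors plumbing added below changes where implicit-any errors for binding elements are reported, but the underlying rule in the comment above is unchanged. A small example of the type a binding pattern implies when the parameter has no annotation:

    // No type annotation: the parameter's type is implied by the pattern,
    // i.e. { size?: number; label?: string }, using the initializer types.
    function draw({ size = 10, label = "node" } = {}) {
        return label + ":" + size;   // size: number, label: string
    }

    draw();                  // ok: the defaults supply both members
    draw({ size: 20 });      // ok: label is optional
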
- function getTypeFromBindingElement(element: BindingElement, includePatternInType?: boolean): Type { + function getTypeFromBindingElement(element: BindingElement, includePatternInType?: boolean, reportErrors?: boolean): Type { if (element.initializer) { - const type = checkExpressionCached(element.initializer); - reportErrorsFromWidening(element, type); - return getWidenedType(type); + return checkExpressionCached(element.initializer); } if (isBindingPattern(element.name)) { - return getTypeFromBindingPattern(element.name, includePatternInType); + return getTypeFromBindingPattern(element.name, includePatternInType, reportErrors); } - if (compilerOptions.noImplicitAny && !declarationBelongsToPrivateAmbientMember(element)) { + if (reportErrors && compilerOptions.noImplicitAny && !declarationBelongsToPrivateAmbientMember(element)) { reportImplicitAnyError(element, anyType); } return anyType; } // Return the type implied by an object binding pattern - function getTypeFromObjectBindingPattern(pattern: BindingPattern, includePatternInType: boolean): Type { - const members: SymbolTable = {}; + function getTypeFromObjectBindingPattern(pattern: BindingPattern, includePatternInType: boolean, reportErrors: boolean): Type { + const members = createMap(); let hasComputedProperties = false; forEach(pattern.elements, e => { const name = e.propertyName || e.name; @@ -3130,7 +3128,7 @@ namespace ts { const text = getTextOfPropertyName(name); const flags = SymbolFlags.Property | SymbolFlags.Transient | (e.initializer ? SymbolFlags.Optional : 0); const symbol = createSymbol(flags, text); - symbol.type = getTypeFromBindingElement(e, includePatternInType); + symbol.type = getTypeFromBindingElement(e, includePatternInType, reportErrors); symbol.bindingElement = e; members[symbol.name] = symbol; }); @@ -3145,13 +3143,13 @@ namespace ts { } // Return the type implied by an array binding pattern - function getTypeFromArrayBindingPattern(pattern: BindingPattern, includePatternInType: boolean): Type { + function getTypeFromArrayBindingPattern(pattern: BindingPattern, includePatternInType: boolean, reportErrors: boolean): Type { const elements = pattern.elements; if (elements.length === 0 || elements[elements.length - 1].dotDotDotToken) { return languageVersion >= ScriptTarget.ES6 ? createIterableType(anyType) : anyArrayType; } // If the pattern has at least one element, and no rest element, then it should imply a tuple type. - const elementTypes = map(elements, e => e.kind === SyntaxKind.OmittedExpression ? anyType : getTypeFromBindingElement(e, includePatternInType)); + const elementTypes = map(elements, e => e.kind === SyntaxKind.OmittedExpression ? anyType : getTypeFromBindingElement(e, includePatternInType, reportErrors)); if (includePatternInType) { const result = createNewTupleType(elementTypes); result.pattern = pattern; @@ -3167,10 +3165,10 @@ namespace ts { // used as the contextual type of an initializer associated with the binding pattern. Also, for a destructuring // parameter with no type annotation or initializer, the type implied by the binding pattern becomes the type of // the parameter. - function getTypeFromBindingPattern(pattern: BindingPattern, includePatternInType?: boolean): Type { + function getTypeFromBindingPattern(pattern: BindingPattern, includePatternInType?: boolean, reportErrors?: boolean): Type { return pattern.kind === SyntaxKind.ObjectBindingPattern - ? 
getTypeFromObjectBindingPattern(pattern, includePatternInType) - : getTypeFromArrayBindingPattern(pattern, includePatternInType); + ? getTypeFromObjectBindingPattern(pattern, includePatternInType, reportErrors) + : getTypeFromArrayBindingPattern(pattern, includePatternInType, reportErrors); } // Return the type associated with a variable, parameter, or property declaration. In the simple case this is the type @@ -3247,6 +3245,13 @@ namespace ts { // * className.prototype.method = expr if (declaration.kind === SyntaxKind.BinaryExpression || declaration.kind === SyntaxKind.PropertyAccessExpression && declaration.parent.kind === SyntaxKind.BinaryExpression) { + // Use JS Doc type if present on parent expression statement + if (declaration.flags & NodeFlags.JavaScriptFile) { + const typeTag = getJSDocTypeTag(declaration.parent); + if (typeTag && typeTag.typeExpression) { + return links.type = getTypeFromTypeNode(typeTag.typeExpression.type); + } + } const declaredTypes = map(symbol.declarations, decl => decl.kind === SyntaxKind.BinaryExpression ? checkExpressionCached((decl).right) : @@ -3675,7 +3680,7 @@ namespace ts { const baseTypeNodes = getInterfaceBaseTypeNodes(declaration); if (baseTypeNodes) { for (const node of baseTypeNodes) { - if (isSupportedExpressionWithTypeArguments(node)) { + if (isEntityNameExpression(node.expression)) { const baseSymbol = resolveEntityName(node.expression, SymbolFlags.Type, /*ignoreErrors*/ true); if (!baseSymbol || !(baseSymbol.flags & SymbolFlags.Interface) || getDeclaredTypeOfClassOrInterface(baseSymbol).thisType) { return false; @@ -3705,7 +3710,7 @@ namespace ts { type.typeParameters = concatenate(outerTypeParameters, localTypeParameters); type.outerTypeParameters = outerTypeParameters; type.localTypeParameters = localTypeParameters; - (type).instantiations = {}; + (type).instantiations = createMap(); (type).instantiations[getTypeListId(type.typeParameters)] = type; (type).target = type; (type).typeArguments = type.typeParameters; @@ -3747,7 +3752,7 @@ namespace ts { if (typeParameters) { // Initialize the instantiation cache for generic type aliases. The declared type corresponds to // an instantiation of the type alias with the type parameters supplied as type arguments. - links.instantiations = {}; + links.instantiations = createMap(); links.instantiations[getTypeListId(links.typeParameters)] = type; } } @@ -3768,7 +3773,7 @@ namespace ts { return expr.kind === SyntaxKind.NumericLiteral || expr.kind === SyntaxKind.PrefixUnaryExpression && (expr).operator === SyntaxKind.MinusToken && (expr).operand.kind === SyntaxKind.NumericLiteral || - expr.kind === SyntaxKind.Identifier && hasProperty(symbol.exports, (expr).text); + expr.kind === SyntaxKind.Identifier && !!symbol.exports[(expr).text]; } function enumHasLiteralMembers(symbol: Symbol) { @@ -3791,7 +3796,7 @@ namespace ts { enumType.symbol = symbol; if (enumHasLiteralMembers(symbol)) { const memberTypeList: Type[] = []; - const memberTypes: Map = {}; + const memberTypes = createMap(); for (const declaration of enumType.symbol.declarations) { if (declaration.kind === SyntaxKind.EnumDeclaration) { computeEnumMemberValues(declaration); @@ -3954,7 +3959,7 @@ namespace ts { } function createSymbolTable(symbols: Symbol[]): SymbolTable { - const result: SymbolTable = {}; + const result = createMap(); for (const symbol of symbols) { result[symbol.name] = symbol; } @@ -3964,7 +3969,7 @@ namespace ts { // The mappingThisOnly flag indicates that the only type parameter being mapped is "this". 
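
The change above that consults getJSDocTypeTag on the parent expression statement means a JSDoc @type tag can now supply the declared type for JavaScript assignment-style declarations such as className.prototype.method = expr, instead of relying solely on the inferred type of the right-hand side. An illustrative .js fragment (names invented):

    // calc.js — checked with --allowJs
    function Calculator() {
        this.total = 0;
    }

    /** @type {function(number): number} */
    Calculator.prototype.add = function (amount) {
        // The @type tag above, rather than just the function expression,
        // provides the declared type of Calculator.prototype.add.
        this.total += amount;
        return this.total;
    };
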
When the flag is true, // we check symbols to see if we can quickly conclude they are free of "this" references, thus needing no instantiation. function createInstantiatedSymbolTable(symbols: Symbol[], mapper: TypeMapper, mappingThisOnly: boolean): SymbolTable { - const result: SymbolTable = {}; + const result = createMap(); for (const symbol of symbols) { result[symbol.name] = mappingThisOnly && isIndependentMember(symbol) ? symbol : instantiateSymbol(symbol, mapper); } @@ -3973,7 +3978,7 @@ namespace ts { function addInheritedMembers(symbols: SymbolTable, baseSymbols: Symbol[]) { for (const s of baseSymbols) { - if (!hasProperty(symbols, s.name)) { + if (!symbols[s.name]) { symbols[s.name] = s; } } @@ -3996,6 +4001,9 @@ namespace ts { return createTypeReference((type).target, concatenate((type).typeArguments, [thisArgument || (type).target.thisType])); } + if (type.flags & TypeFlags.Tuple) { + return createTupleType((type as TupleType).elementTypes, thisArgument); + } return type; } @@ -4087,7 +4095,7 @@ namespace ts { } function createTupleTypeMemberSymbols(memberTypes: Type[]): SymbolTable { - const members: SymbolTable = {}; + const members = createMap(); for (let i = 0; i < memberTypes.length; i++) { const symbol = createSymbol(SymbolFlags.Property | SymbolFlags.Transient, "" + i); symbol.type = memberTypes[i]; @@ -4099,7 +4107,8 @@ namespace ts { function resolveTupleTypeMembers(type: TupleType) { const arrayElementType = getUnionType(type.elementTypes); // Make the tuple type itself the 'this' type by including an extra type argument - const arrayType = resolveStructuredTypeMembers(createTypeFromGenericGlobalType(globalArrayType, [arrayElementType, type])); + // (Unless it's provided in the case that the tuple is a type parameter constraint) + const arrayType = resolveStructuredTypeMembers(createTypeFromGenericGlobalType(globalArrayType, [arrayElementType, type.thisType || type])); const members = createTupleTypeMemberSymbols(type.elementTypes); addInheritedMembers(members, arrayType.properties); setObjectTypeMembers(type, members, arrayType.callSignatures, arrayType.constructSignatures, arrayType.stringIndexInfo, arrayType.numberIndexInfo); @@ -4309,11 +4318,9 @@ namespace ts { function getPropertyOfObjectType(type: Type, name: string): Symbol { if (type.flags & TypeFlags.ObjectType) { const resolved = resolveStructuredTypeMembers(type); - if (hasProperty(resolved.members, name)) { - const symbol = resolved.members[name]; - if (symbolIsValue(symbol)) { - return symbol; - } + const symbol = resolved.members[name]; + if (symbol && symbolIsValue(symbol)) { + return symbol; } } } @@ -4412,10 +4419,19 @@ namespace ts { } const propTypes: Type[] = []; const declarations: Declaration[] = []; + let commonType: Type = undefined; + let hasCommonType = true; for (const prop of props) { if (prop.declarations) { addRange(declarations, prop.declarations); } + const type = getTypeOfSymbol(prop); + if (!commonType) { + commonType = type; + } + else if (type !== commonType) { + hasCommonType = false; + } propTypes.push(getTypeOfSymbol(prop)); } const result = createSymbol( @@ -4425,6 +4441,7 @@ namespace ts { commonFlags, name); result.containingType = containingType; + result.hasCommonType = hasCommonType; result.declarations = declarations; result.isReadonly = isReadonly; result.type = containingType.flags & TypeFlags.Union ? 
getUnionType(propTypes) : getIntersectionType(propTypes); @@ -4432,29 +4449,32 @@ namespace ts { } function getPropertyOfUnionOrIntersectionType(type: UnionOrIntersectionType, name: string): Symbol { - const properties = type.resolvedProperties || (type.resolvedProperties = {}); - if (hasProperty(properties, name)) { - return properties[name]; - } - const property = createUnionOrIntersectionProperty(type, name); - if (property) { - properties[name] = property; + const properties = type.resolvedProperties || (type.resolvedProperties = createMap()); + let property = properties[name]; + if (!property) { + property = createUnionOrIntersectionProperty(type, name); + if (property) { + properties[name] = property; + } } return property; } - // Return the symbol for the property with the given name in the given type. Creates synthetic union properties when - // necessary, maps primitive types and type parameters are to their apparent types, and augments with properties from - // Object and Function as appropriate. + /** + * Return the symbol for the property with the given name in the given type. Creates synthetic union properties when + * necessary, maps primitive types and type parameters are to their apparent types, and augments with properties from + * Object and Function as appropriate. + * + * @param type a type to look up property from + * @param name a name of property to look up in a given type + */ function getPropertyOfType(type: Type, name: string): Symbol { type = getApparentType(type); if (type.flags & TypeFlags.ObjectType) { const resolved = resolveStructuredTypeMembers(type); - if (hasProperty(resolved.members, name)) { - const symbol = resolved.members[name]; - if (symbolIsValue(symbol)) { - return symbol; - } + const symbol = resolved.members[name]; + if (symbol && symbolIsValue(symbol)) { + return symbol; } if (resolved === anyFunctionType || resolved.callSignatures.length || resolved.constructSignatures.length) { const symbol = getPropertyOfObjectType(globalFunctionType, name); @@ -4922,24 +4942,27 @@ namespace ts { } function getTypeListId(types: Type[]) { + let result = ""; if (types) { - switch (types.length) { - case 1: - return "" + types[0].id; - case 2: - return types[0].id + "," + types[1].id; - default: - let result = ""; - for (let i = 0; i < types.length; i++) { - if (i > 0) { - result += ","; - } - result += types[i].id; - } - return result; + const length = types.length; + let i = 0; + while (i < length) { + const startId = types[i].id; + let count = 1; + while (i + count < length && types[i + count].id === startId + count) { + count++; + } + if (result.length) { + result += ","; + } + result += startId; + if (count > 1) { + result += ":" + count; + } + i += count; } } - return ""; + return result; } // This function is used to propagate certain flags when creating new object type references and union types. @@ -5022,7 +5045,7 @@ namespace ts { return getDeclaredTypeOfSymbol(symbol); } - function getTypeReferenceName(node: TypeReferenceNode | ExpressionWithTypeArguments | JSDocTypeReference): LeftHandSideExpression | EntityName { + function getTypeReferenceName(node: TypeReferenceNode | ExpressionWithTypeArguments | JSDocTypeReference): EntityNameOrEntityNameExpression | undefined { switch (node.kind) { case SyntaxKind.TypeReference: return (node).typeName; @@ -5031,8 +5054,9 @@ namespace ts { case SyntaxKind.ExpressionWithTypeArguments: // We only support expressions that are simple qualified names. For other // expressions this produces undefined. 
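
The rewritten getTypeListId above run-length encodes consecutive type ids so that large instantiation lists produce short cache keys. A standalone illustration of the same scheme (not the compiler's code; the ids are made up):

    // Consecutive type ids collapse to "start:count", so instantiations that
    // share long contiguous id ranges get short, stable cache keys.
    function typeListKey(ids: number[]): string {
        let result = "";
        let i = 0;
        while (i < ids.length) {
            const start = ids[i];
            let count = 1;
            while (i + count < ids.length && ids[i + count] === start + count) {
                count++;
            }
            if (result.length) result += ",";
            result += start + (count > 1 ? ":" + count : "");
            i += count;
        }
        return result;
    }

    console.log(typeListKey([21, 22, 23, 24, 30]));  // "21:4,30"
    console.log(typeListKey([21, 30]));              // "21,30"
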
- if (isSupportedExpressionWithTypeArguments(node)) { - return (node).expression; + const expr = (node).expression; + if (isEntityNameExpression(expr)) { + return expr; } // fall through; @@ -5043,7 +5067,7 @@ namespace ts { function resolveTypeReferenceName( node: TypeReferenceNode | ExpressionWithTypeArguments | JSDocTypeReference, - typeReferenceName: LeftHandSideExpression | EntityName) { + typeReferenceName: EntityNameExpression | EntityName) { if (!typeReferenceName) { return unknownSymbol; @@ -5084,15 +5108,14 @@ namespace ts { const typeReferenceName = getTypeReferenceName(node); symbol = resolveTypeReferenceName(node, typeReferenceName); type = getTypeReferenceType(node, symbol); - - links.resolvedSymbol = symbol; - links.resolvedType = type; } else { // We only support expressions that are simple qualified names. For other expressions this produces undefined. - const typeNameOrExpression = node.kind === SyntaxKind.TypeReference ? (node).typeName : - isSupportedExpressionWithTypeArguments(node) ? (node).expression : - undefined; + const typeNameOrExpression: EntityNameOrEntityNameExpression = node.kind === SyntaxKind.TypeReference + ? (node).typeName + : isEntityNameExpression((node).expression) + ? (node).expression + : undefined; symbol = typeNameOrExpression && resolveEntityName(typeNameOrExpression, SymbolFlags.Type) || unknownSymbol; type = symbol === unknownSymbol ? unknownType : symbol.flags & (SymbolFlags.Class | SymbolFlags.Interface) ? getTypeFromClassOrInterfaceReference(node, symbol) : @@ -5211,15 +5234,16 @@ namespace ts { return links.resolvedType; } - function createTupleType(elementTypes: Type[]) { - const id = getTypeListId(elementTypes); - return tupleTypes[id] || (tupleTypes[id] = createNewTupleType(elementTypes)); + function createTupleType(elementTypes: Type[], thisType?: Type) { + const id = getTypeListId(elementTypes) + "," + (thisType ? thisType.id : 0); + return tupleTypes[id] || (tupleTypes[id] = createNewTupleType(elementTypes, thisType)); } - function createNewTupleType(elementTypes: Type[]) { + function createNewTupleType(elementTypes: Type[], thisType?: Type) { const propagatedFlags = getPropagatingFlagsOfTypes(elementTypes, /*excludeKinds*/ 0); const type = createObjectType(TypeFlags.Tuple | propagatedFlags); type.elementTypes = elementTypes; + type.thisType = thisType; return type; } @@ -5321,12 +5345,12 @@ namespace ts { } } - // We deduplicate the constituent types based on object identity. If the subtypeReduction flag is - // specified we also reduce the constituent type set to only include types that aren't subtypes of - // other types. Subtype reduction is expensive for large union types and is possible only when union + // We sort and deduplicate the constituent types based on object identity. If the subtypeReduction + // flag is specified we also reduce the constituent type set to only include types that aren't subtypes + // of other types. Subtype reduction is expensive for large union types and is possible only when union // types are known not to circularly reference themselves (as is the case with union types created by // expression constructs such as array literals and the || and ?: operators). Named types can - // circularly reference themselves and therefore cannot be deduplicated during their declaration. + // circularly reference themselves and therefore cannot be subtype reduced during their declaration. // For example, "type Item = string | (() => Item" is a named type that circularly references itself. 
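
The revised comment above distinguishes subtype reduction, which applies to unions built from expressions, from declared union types, which may reference themselves and are therefore only sorted and deduplicated. A quick illustration:

    class Animal { move() { } }
    class Dog extends Animal { bark() { } }

    // Union built from an expression: Dog is a subtype of Animal, so subtype
    // reduction collapses the element type and the array is typed Animal[].
    const pets = [new Dog(), new Animal()];

    // Declared union type: never subtype-reduced, because it may legally
    // mention itself (the example from the comment above, parenthesized).
    type Item = string | (() => Item);
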
function getUnionType(types: Type[], subtypeReduction?: boolean, aliasSymbol?: Symbol, aliasTypeArguments?: Type[]): Type { if (types.length === 0) { @@ -5348,15 +5372,23 @@ namespace ts { typeSet.containsUndefined ? typeSet.containsNonWideningType ? undefinedType : undefinedWideningType : neverType; } - else if (typeSet.length === 1) { - return typeSet[0]; + return getUnionTypeFromSortedList(typeSet, aliasSymbol, aliasTypeArguments); + } + + // This function assumes the constituent type list is sorted and deduplicated. + function getUnionTypeFromSortedList(types: Type[], aliasSymbol?: Symbol, aliasTypeArguments?: Type[]): Type { + if (types.length === 0) { + return neverType; } - const id = getTypeListId(typeSet); + if (types.length === 1) { + return types[0]; + } + const id = getTypeListId(types); let type = unionTypes[id]; if (!type) { - const propagatedFlags = getPropagatingFlagsOfTypes(typeSet, /*excludeKinds*/ TypeFlags.Nullable); + const propagatedFlags = getPropagatingFlagsOfTypes(types, /*excludeKinds*/ TypeFlags.Nullable); type = unionTypes[id] = createObjectType(TypeFlags.Union | propagatedFlags); - type.types = typeSet; + type.types = types; type.aliasSymbol = aliasSymbol; type.aliasTypeArguments = aliasTypeArguments; } @@ -5448,7 +5480,7 @@ namespace ts { function getLiteralTypeForText(flags: TypeFlags, text: string) { const map = flags & TypeFlags.StringLiteral ? stringLiteralTypes : numericLiteralTypes; - return hasProperty(map, text) ? map[text] : map[text] = createLiteralType(flags, text); + return map[text] || (map[text] = createLiteralType(flags, text)); } function getTypeFromLiteralTypeNode(node: LiteralTypeNode): Type { @@ -5525,6 +5557,8 @@ namespace ts { return getTypeFromThisTypeNode(node); case SyntaxKind.LiteralType: return getTypeFromLiteralTypeNode(node); + case SyntaxKind.JSDocLiteralType: + return getTypeFromLiteralTypeNode((node).literal); case SyntaxKind.TypeReference: case SyntaxKind.JSDocTypeReference: return getTypeFromTypeReference(node); @@ -5905,6 +5939,13 @@ namespace ts { return isTypeRelatedTo(source, target, assignableRelation); } + // A type S is considered to be an instance of a type T if S and T are the same type or if S is a + // subtype of T but not structurally identical to T. This specifically means that two distinct but + // structurally identical types (such as two classes) are not considered instances of each other. + function isTypeInstanceOf(source: Type, target: Type): boolean { + return source === target || isTypeSubtypeOf(source, target) && !isTypeIdenticalTo(source, target); + } + /** * This is *not* a bi-directional relationship. * If one needs to check both directions for comparability, use a second call to this function or 'checkTypeComparableTo'. @@ -6556,7 +6597,7 @@ namespace ts { } sourceStack[depth] = source; targetStack[depth] = target; - maybeStack[depth] = {}; + maybeStack[depth] = createMap(); maybeStack[depth][id] = RelationComparisonResult.Succeeded; depth++; const saveExpandingFlags = expandingFlags; @@ -6587,7 +6628,7 @@ namespace ts { const maybeCache = maybeStack[depth]; // If result is definitely true, copy assumptions to global cache, else copy to next level up const destinationCache = (result === Ternary.True || depth === 0) ? 
relation : maybeStack[depth - 1]; - copyMap(maybeCache, destinationCache); + copyProperties(maybeCache, destinationCache); } else { // A false result goes straight into global cache (when something is false under assumptions it @@ -7197,7 +7238,7 @@ namespace ts { } function transformTypeOfMembers(type: Type, f: (propertyType: Type) => Type) { - const members: SymbolTable = {}; + const members = createMap(); for (const property of getPropertiesOfObjectType(type)) { const original = getTypeOfSymbol(property); const updated = f(original); @@ -7421,7 +7462,7 @@ namespace ts { let targetStack: Type[]; let depth = 0; let inferiority = 0; - const visited: Map = {}; + const visited = createMap(); inferFromTypes(source, target); function isInProcess(source: Type, target: Type) { @@ -7555,7 +7596,7 @@ namespace ts { return; } const key = source.id + "," + target.id; - if (hasProperty(visited, key)) { + if (visited[key]) { return; } visited[key] = true; @@ -7764,16 +7805,17 @@ namespace ts { } function isMatchingReference(source: Node, target: Node): boolean { - if (source.kind === target.kind) { - switch (source.kind) { - case SyntaxKind.Identifier: - return getResolvedSymbol(source) === getResolvedSymbol(target); - case SyntaxKind.ThisKeyword: - return true; - case SyntaxKind.PropertyAccessExpression: - return (source).name.text === (target).name.text && - isMatchingReference((source).expression, (target).expression); - } + switch (source.kind) { + case SyntaxKind.Identifier: + return target.kind === SyntaxKind.Identifier && getResolvedSymbol(source) === getResolvedSymbol(target) || + (target.kind === SyntaxKind.VariableDeclaration || target.kind === SyntaxKind.BindingElement) && + getExportSymbolOfValueSymbolIfExported(getResolvedSymbol(source)) === getSymbolOfNode(target); + case SyntaxKind.ThisKeyword: + return target.kind === SyntaxKind.ThisKeyword; + case SyntaxKind.PropertyAccessExpression: + return target.kind === SyntaxKind.PropertyAccessExpression && + (source).name.text === (target).name.text && + isMatchingReference((source).expression, (target).expression); } return false; } @@ -7788,6 +7830,51 @@ namespace ts { return false; } + // Return true if target is a property access xxx.yyy, source is a property access xxx.zzz, the declared + // type of xxx is a union type, and yyy is a property that is possibly a discriminant. We consider a property + // a possible discriminant if its type differs in the constituents of containing union type, and if every + // choice is a unit type or a union of unit types. + function containsMatchingReferenceDiscriminant(source: Node, target: Node) { + return target.kind === SyntaxKind.PropertyAccessExpression && + containsMatchingReference(source, (target).expression) && + isDiscriminantProperty(getDeclaredTypeOfReference((target).expression), (target).name.text); + } + + function getDeclaredTypeOfReference(expr: Node): Type { + if (expr.kind === SyntaxKind.Identifier) { + return getTypeOfSymbol(getResolvedSymbol(expr)); + } + if (expr.kind === SyntaxKind.PropertyAccessExpression) { + const type = getDeclaredTypeOfReference((expr).expression); + return type && getTypeOfPropertyOfType(type, (expr).name.text); + } + return undefined; + } + + function isDiscriminantProperty(type: Type, name: string) { + if (type && type.flags & TypeFlags.Union) { + let prop = getPropertyOfType(type, name); + if (!prop) { + // The type may be a union that includes nullable or primitive types. 
If filtering + // those out produces a different type, get the property from that type instead. + // Effectively, we're checking if this *could* be a discriminant property once nullable + // and primitive types are removed by other type guards. + const filteredType = getTypeWithFacts(type, TypeFacts.Discriminatable); + if (filteredType !== type && filteredType.flags & TypeFlags.Union) { + prop = getPropertyOfType(filteredType, name); + } + } + if (prop && prop.flags & SymbolFlags.SyntheticProperty) { + if ((prop).isDiscriminantProperty === undefined) { + (prop).isDiscriminantProperty = !(prop).hasCommonType && + isUnitUnionType(getTypeOfSymbol(prop)); + } + return (prop).isDiscriminantProperty; + } + } + return false; + } + function isOrContainsMatchingReference(source: Node, target: Node) { return isMatchingReference(source, target) || containsMatchingReference(source, target); } @@ -7831,10 +7918,10 @@ namespace ts { // For example, when a variable of type number | string | boolean is assigned a value of type number | boolean, // we remove type string. function getAssignmentReducedType(declaredType: UnionType, assignedType: Type) { - if (declaredType !== assignedType && declaredType.flags & TypeFlags.Union) { - const reducedTypes = filter(declaredType.types, t => typeMaybeAssignableTo(assignedType, t)); - if (reducedTypes.length) { - return reducedTypes.length === 1 ? reducedTypes[0] : getUnionType(reducedTypes); + if (declaredType !== assignedType) { + const reducedType = filterType(declaredType, t => typeMaybeAssignableTo(assignedType, t)); + if (reducedType !== neverType) { + return reducedType; } } return declaredType; @@ -7848,6 +7935,14 @@ namespace ts { return result; } + function isFunctionObjectType(type: ObjectType): boolean { + // We do a quick check for a "bind" property before performing the more expensive subtype + // check. This gives us a quicker out in the common case where an object type is not a function. + const resolved = resolveStructuredTypeMembers(type); + return !!(resolved.callSignatures.length || resolved.constructSignatures.length || + resolved.members["bind"] && isTypeSubtypeOf(type, globalFunctionType)); + } + function getTypeFacts(type: Type): TypeFacts { const flags = type.flags; if (flags & TypeFlags.String) { @@ -7876,8 +7971,7 @@ namespace ts { type === falseType ? TypeFacts.FalseFacts : TypeFacts.TrueFacts; } if (flags & TypeFlags.ObjectType) { - const resolved = resolveStructuredTypeMembers(type); - return resolved.callSignatures.length || resolved.constructSignatures.length || isTypeSubtypeOf(type, globalFunctionType) ? + return isFunctionObjectType(type) ? strictNullChecks ? TypeFacts.FunctionStrictFacts : TypeFacts.FunctionFacts : strictNullChecks ? TypeFacts.ObjectStrictFacts : TypeFacts.ObjectFacts; } @@ -7892,7 +7986,7 @@ namespace ts { } if (flags & TypeFlags.TypeParameter) { const constraint = getConstraintOfTypeParameter(type); - return constraint ? getTypeFacts(constraint) : TypeFacts.All; + return getTypeFacts(constraint || emptyObjectType); } if (flags & TypeFlags.UnionOrIntersection) { return getTypeFactsOfTypes((type).types); @@ -7901,25 +7995,7 @@ namespace ts { } function getTypeWithFacts(type: Type, include: TypeFacts) { - if (!(type.flags & TypeFlags.Union)) { - return getTypeFacts(type) & include ? 
type : neverType; - } - let firstType: Type; - let types: Type[]; - for (const t of (type as UnionType).types) { - if (getTypeFacts(t) & include) { - if (!firstType) { - firstType = t; - } - else { - if (!types) { - types = [firstType]; - } - types.push(t); - } - } - } - return firstType ? types ? getUnionType(types) : firstType : neverType; + return filterType(type, t => (getTypeFacts(t) & include) !== 0); } function getTypeWithDefault(type: Type, defaultExpression: Expression) { @@ -8031,6 +8107,12 @@ namespace ts { getInitialTypeOfBindingElement(node); } + function getInitialOrAssignedType(node: VariableDeclaration | BindingElement | Expression) { + return node.kind === SyntaxKind.VariableDeclaration || node.kind === SyntaxKind.BindingElement ? + getInitialType(node) : + getAssignedType(node); + } + function getReferenceCandidate(node: Expression): Expression { switch (node.kind) { case SyntaxKind.ParenthesizedExpression: @@ -8069,27 +8151,61 @@ namespace ts { return source.flags & TypeFlags.Union ? !forEach((source).types, t => !contains(types, t)) : contains(types, source); } + function isTypeSubsetOf(source: Type, target: Type) { + return source === target || target.flags & TypeFlags.Union && isTypeSubsetOfUnion(source, target); + } + + function isTypeSubsetOfUnion(source: Type, target: UnionType) { + if (source.flags & TypeFlags.Union) { + for (const t of (source).types) { + if (!containsType(target.types, t)) { + return false; + } + } + return true; + } + if (source.flags & TypeFlags.EnumLiteral && target.flags & TypeFlags.Enum && (source).baseType === target) { + return true; + } + return containsType(target.types, source); + } + function filterType(type: Type, f: (t: Type) => boolean): Type { - return type.flags & TypeFlags.Union ? - getUnionType(filter((type).types, f)) : - f(type) ? type : neverType; + if (type.flags & TypeFlags.Union) { + const types = (type).types; + const filtered = filter(types, f); + return filtered === types ? type : getUnionTypeFromSortedList(filtered); + } + return f(type) ? type : neverType; } - function getFlowTypeOfReference(reference: Node, declaredType: Type, assumeInitialized: boolean, includeOuterFunctions: boolean) { + function isIncomplete(flowType: FlowType) { + return flowType.flags === 0; + } + + function getTypeFromFlowType(flowType: FlowType) { + return flowType.flags === 0 ? (flowType).type : flowType; + } + + function createFlowType(type: Type, incomplete: boolean): FlowType { + return incomplete ? { flags: 0, type } : type; + } + + function getFlowTypeOfReference(reference: Node, declaredType: Type, assumeInitialized: boolean, flowContainer: Node) { let key: string; if (!reference.flowNode || assumeInitialized && !(declaredType.flags & TypeFlags.Narrowable)) { return declaredType; } const initialType = assumeInitialized ? 
declaredType : includeFalsyTypes(declaredType, TypeFlags.Undefined); const visitedFlowStart = visitedFlowCount; - const result = getTypeAtFlowNode(reference.flowNode); + const result = getTypeFromFlowType(getTypeAtFlowNode(reference.flowNode)); visitedFlowCount = visitedFlowStart; if (reference.parent.kind === SyntaxKind.NonNullExpression && getTypeWithFacts(result, TypeFacts.NEUndefinedOrNull) === neverType) { return declaredType; } return result; - function getTypeAtFlowNode(flow: FlowNode): Type { + function getTypeAtFlowNode(flow: FlowNode): FlowType { while (true) { if (flow.flags & FlowFlags.Shared) { // We cache results of flow type resolution for shared nodes that were previously visited in @@ -8101,7 +8217,7 @@ namespace ts { } } } - let type: Type; + let type: FlowType; if (flow.flags & FlowFlags.Assignment) { type = getTypeAtFlowAssignment(flow); if (!type) { @@ -8127,7 +8243,7 @@ namespace ts { else if (flow.flags & FlowFlags.Start) { // Check if we should continue with the control flow of the containing function. const container = (flow).container; - if (container && includeOuterFunctions) { + if (container && container !== flowContainer && reference.kind !== SyntaxKind.PropertyAccessExpression) { flow = container.flowNode; continue; } @@ -8153,19 +8269,9 @@ namespace ts { const node = flow.node; // Assignments only narrow the computed type if the declared type is a union type. Thus, we // only need to evaluate the assigned type if the declared type is a union type. - if ((node.kind === SyntaxKind.VariableDeclaration || node.kind === SyntaxKind.BindingElement) && - reference.kind === SyntaxKind.Identifier && - getExportSymbolOfValueSymbolIfExported(getResolvedSymbol(reference)) === getSymbolOfNode(node)) { - return declaredType.flags & TypeFlags.Union ? - getAssignmentReducedType(declaredType, getInitialType(node)) : - declaredType; - } - // If the node is not a variable declaration or binding element, it is an identifier - // or a dotted name that is the target of an assignment. If we have a match, reduce - // the declared type by the assigned type. if (isMatchingReference(reference, node)) { return declaredType.flags & TypeFlags.Union ? - getAssignmentReducedType(declaredType, getAssignedType(node)) : + getAssignmentReducedType(declaredType, getInitialOrAssignedType(node)) : declaredType; } // We didn't have a direct match. However, if the reference is a dotted name, this @@ -8179,41 +8285,45 @@ namespace ts { return undefined; } - function getTypeAtFlowCondition(flow: FlowCondition) { - let type = getTypeAtFlowNode(flow.antecedent); + function getTypeAtFlowCondition(flow: FlowCondition): FlowType { + const flowType = getTypeAtFlowNode(flow.antecedent); + let type = getTypeFromFlowType(flowType); if (type !== neverType) { // If we have an antecedent type (meaning we're reachable in some way), we first - // attempt to narrow the antecedent type. If that produces the nothing type, then - // we take the type guard as an indication that control could reach here in a - // manner not understood by the control flow analyzer (e.g. a function argument - // has an invalid type, or a nested function has possibly made an assignment to a - // captured variable). We proceed by reverting to the declared type and then + // attempt to narrow the antecedent type. If that produces the never type, and if + // the antecedent type is incomplete (i.e. a transient type in a loop), then we + // take the type guard as an indication that control *could* reach here once we + // have the complete type. 
We proceed by reverting to the declared type and then // narrow that. const assumeTrue = (flow.flags & FlowFlags.TrueCondition) !== 0; type = narrowType(type, flow.expression, assumeTrue); - if (type === neverType) { + if (type === neverType && isIncomplete(flowType)) { type = narrowType(declaredType, flow.expression, assumeTrue); } } - return type; + return createFlowType(type, isIncomplete(flowType)); } - function getTypeAtSwitchClause(flow: FlowSwitchClause) { - const type = getTypeAtFlowNode(flow.antecedent); + function getTypeAtSwitchClause(flow: FlowSwitchClause): FlowType { + const flowType = getTypeAtFlowNode(flow.antecedent); + let type = getTypeFromFlowType(flowType); const expr = flow.switchStatement.expression; if (isMatchingReference(reference, expr)) { - return narrowTypeBySwitchOnDiscriminant(type, flow.switchStatement, flow.clauseStart, flow.clauseEnd); + type = narrowTypeBySwitchOnDiscriminant(type, flow.switchStatement, flow.clauseStart, flow.clauseEnd); } - if (isMatchingPropertyAccess(expr)) { - return narrowTypeByDiscriminant(type, expr, t => narrowTypeBySwitchOnDiscriminant(t, flow.switchStatement, flow.clauseStart, flow.clauseEnd)); + else if (isMatchingReferenceDiscriminant(expr)) { + type = narrowTypeByDiscriminant(type, expr, t => narrowTypeBySwitchOnDiscriminant(t, flow.switchStatement, flow.clauseStart, flow.clauseEnd)); } - return type; + return createFlowType(type, isIncomplete(flowType)); } - function getTypeAtFlowBranchLabel(flow: FlowLabel) { + function getTypeAtFlowBranchLabel(flow: FlowLabel): FlowType { const antecedentTypes: Type[] = []; + let subtypeReduction = false; + let seenIncomplete = false; for (const antecedent of flow.antecedents) { - const type = getTypeAtFlowNode(antecedent); + const flowType = getTypeAtFlowNode(antecedent); + const type = getTypeFromFlowType(flowType); // If the type at a particular antecedent path is the declared type and the // reference is known to always be assigned (i.e. when declared and initial types // are the same), there is no reason to process more antecedents since the only @@ -8224,15 +8334,24 @@ namespace ts { if (!contains(antecedentTypes, type)) { antecedentTypes.push(type); } + // If an antecedent type is not a subset of the declared type, we need to perform + // subtype reduction. This happens when a "foreign" type is injected into the control + // flow using the instanceof operator or a user defined type predicate. + if (!isTypeSubsetOf(type, declaredType)) { + subtypeReduction = true; + } + if (isIncomplete(flowType)) { + seenIncomplete = true; + } } - return getUnionType(antecedentTypes); + return createFlowType(getUnionType(antecedentTypes, subtypeReduction), seenIncomplete); } - function getTypeAtFlowLoopLabel(flow: FlowLabel) { + function getTypeAtFlowLoopLabel(flow: FlowLabel): FlowType { // If we have previously computed the control flow type for the reference at // this flow loop junction, return the cached type. const id = getFlowNodeId(flow); - const cache = flowLoopCaches[id] || (flowLoopCaches[id] = {}); + const cache = flowLoopCaches[id] || (flowLoopCaches[id] = createMap()); if (!key) { key = getFlowCacheKey(reference); } @@ -8240,24 +8359,30 @@ namespace ts { return cache[key]; } // If this flow loop junction and reference are already being processed, return - // the union of the types computed for each branch so far. We should never see - // an empty array here because the first antecedent of a loop junction is always - // the non-looping control flow path that leads to the top. 
+ // the union of the types computed for each branch so far, marked as incomplete. + // We should never see an empty array here because the first antecedent of a loop + // junction is always the non-looping control flow path that leads to the top. for (let i = flowLoopStart; i < flowLoopCount; i++) { if (flowLoopNodes[i] === flow && flowLoopKeys[i] === key) { - return getUnionType(flowLoopTypes[i]); + return createFlowType(getUnionType(flowLoopTypes[i]), /*incomplete*/ true); } } // Add the flow loop junction and reference to the in-process stack and analyze // each antecedent code path. const antecedentTypes: Type[] = []; + let subtypeReduction = false; + let firstAntecedentType: FlowType; flowLoopNodes[flowLoopCount] = flow; flowLoopKeys[flowLoopCount] = key; flowLoopTypes[flowLoopCount] = antecedentTypes; for (const antecedent of flow.antecedents) { flowLoopCount++; - const type = getTypeAtFlowNode(antecedent); + const flowType = getTypeAtFlowNode(antecedent); flowLoopCount--; + if (!firstAntecedentType) { + firstAntecedentType = flowType; + } + const type = getTypeFromFlowType(flowType); // If we see a value appear in the cache it is a sign that control flow analysis // was restarted and completed by checkExpressionCached. We can simply pick up // the resulting type and bail out. @@ -8267,6 +8392,12 @@ namespace ts { if (!contains(antecedentTypes, type)) { antecedentTypes.push(type); } + // If an antecedent type is not a subset of the declared type, we need to perform + // subtype reduction. This happens when a "foreign" type is injected into the control + // flow using the instanceof operator or a user defined type predicate. + if (!isTypeSubsetOf(type, declaredType)) { + subtypeReduction = true; + } // If the type at a particular antecedent path is the declared type there is no // reason to process more antecedents since the only possible outcome is subtypes // that will be removed in the final union type anyway. @@ -8274,13 +8405,20 @@ namespace ts { break; } } - return cache[key] = getUnionType(antecedentTypes); + // The result is incomplete if the first antecedent (the non-looping control flow path) + // is incomplete. + const result = getUnionType(antecedentTypes, subtypeReduction); + if (isIncomplete(firstAntecedentType)) { + return createFlowType(result, /*incomplete*/ true); + } + return cache[key] = result; } - function isMatchingPropertyAccess(expr: Expression) { + function isMatchingReferenceDiscriminant(expr: Expression) { return expr.kind === SyntaxKind.PropertyAccessExpression && + declaredType.flags & TypeFlags.Union && isMatchingReference(reference, (expr).expression) && - (declaredType.flags & TypeFlags.Union) !== 0; + isDiscriminantProperty(declaredType, (expr).name.text); } function narrowTypeByDiscriminant(type: Type, propAccess: PropertyAccessExpression, narrowType: (t: Type) => Type): Type { @@ -8294,9 +8432,12 @@ namespace ts { if (isMatchingReference(reference, expr)) { return getTypeWithFacts(type, assumeTrue ? TypeFacts.Truthy : TypeFacts.Falsy); } - if (isMatchingPropertyAccess(expr)) { + if (isMatchingReferenceDiscriminant(expr)) { return narrowTypeByDiscriminant(type, expr, t => getTypeWithFacts(t, assumeTrue ? 
TypeFacts.Truthy : TypeFacts.Falsy)); } + if (containsMatchingReferenceDiscriminant(reference, expr)) { + return declaredType; + } return type; } @@ -8323,12 +8464,15 @@ namespace ts { if (isMatchingReference(reference, right)) { return narrowTypeByEquality(type, operator, left, assumeTrue); } - if (isMatchingPropertyAccess(left)) { + if (isMatchingReferenceDiscriminant(left)) { return narrowTypeByDiscriminant(type, left, t => narrowTypeByEquality(t, operator, right, assumeTrue)); } - if (isMatchingPropertyAccess(right)) { + if (isMatchingReferenceDiscriminant(right)) { return narrowTypeByDiscriminant(type, right, t => narrowTypeByEquality(t, operator, left, assumeTrue)); } + if (containsMatchingReferenceDiscriminant(reference, left) || containsMatchingReferenceDiscriminant(reference, right)) { + return declaredType; + } break; case SyntaxKind.InstanceOfKeyword: return narrowTypeByInstanceof(type, expr, assumeTrue); @@ -8381,16 +8525,16 @@ namespace ts { } if (assumeTrue && !(type.flags & TypeFlags.Union)) { // We narrow a non-union type to an exact primitive type if the non-union type - // is a supertype of that primtive type. For example, type 'any' can be narrowed + // is a supertype of that primitive type. For example, type 'any' can be narrowed // to one of the primitive types. - const targetType = getProperty(typeofTypesByName, literal.text); + const targetType = typeofTypesByName[literal.text]; if (targetType && isTypeSubtypeOf(targetType, type)) { return targetType; } } const facts = assumeTrue ? - getProperty(typeofEQFacts, literal.text) || TypeFacts.TypeofEQHostObject : - getProperty(typeofNEFacts, literal.text) || TypeFacts.TypeofNEHostObject; + typeofEQFacts[literal.text] || TypeFacts.TypeofEQHostObject : + typeofNEFacts[literal.text] || TypeFacts.TypeofNEHostObject; return getTypeWithFacts(type, facts); } @@ -8421,10 +8565,6 @@ namespace ts { } return type; } - // We never narrow type any in an instanceof guard - if (isTypeAny(type)) { - return type; - } // Check that right operand is a function type with a prototype property const rightType = checkExpression(expr.right); @@ -8442,6 +8582,11 @@ namespace ts { } } + // Don't narrow from 'any' if the target type is exactly 'Object' or 'Function' + if (isTypeAny(type) && (targetType === globalObjectType || targetType === globalFunctionType)) { + return type; + } + if (!targetType) { // Target type is type of construct signature let constructSignatures: Signature[]; @@ -8465,29 +8610,30 @@ namespace ts { function getNarrowedType(type: Type, candidate: Type, assumeTrue: boolean) { if (!assumeTrue) { - return type.flags & TypeFlags.Union ? - getUnionType(filter((type).types, t => !isTypeSubtypeOf(t, candidate))) : - type; + return filterType(type, t => !isTypeInstanceOf(t, candidate)); } - // If the current type is a union type, remove all constituents that aren't assignable to + // If the current type is a union type, remove all constituents that couldn't be instances of // the candidate type. If one or more constituents remain, return a union of those. if (type.flags & TypeFlags.Union) { - const assignableConstituents = filter((type).types, t => isTypeAssignableTo(t, candidate)); - if (assignableConstituents.length) { - return getUnionType(assignableConstituents); + const assignableType = filterType(type, t => isTypeInstanceOf(t, candidate)); + if (assignableType !== neverType) { + return assignableType; } } - // If the candidate type is assignable to the target type, narrow to the candidate type. 
- // Otherwise, if the current type is assignable to the candidate, keep the current type. - // Otherwise, the types are completely unrelated, so narrow to the empty type. + // If the candidate type is a subtype of the target type, narrow to the candidate type. + // Otherwise, if the target type is assignable to the candidate type, keep the target type. + // Otherwise, if the candidate type is assignable to the target type, narrow to the candidate + // type. Otherwise, the types are completely unrelated, so narrow to an intersection of the + // two types. const targetType = type.flags & TypeFlags.TypeParameter ? getApparentType(type) : type; - return isTypeAssignableTo(candidate, targetType) ? candidate : + return isTypeSubtypeOf(candidate, targetType) ? candidate : isTypeAssignableTo(type, candidate) ? type : - getIntersectionType([type, candidate]); + isTypeAssignableTo(candidate, targetType) ? candidate : + getIntersectionType([type, candidate]); } function narrowTypeByTypePredicate(type: Type, callExpression: CallExpression, assumeTrue: boolean): Type { - if (type.flags & TypeFlags.Any || !hasMatchingArgument(callExpression, reference)) { + if (!hasMatchingArgument(callExpression, reference)) { return type; } const signature = getResolvedSignature(callExpression); @@ -8495,6 +8641,12 @@ namespace ts { if (!predicate) { return type; } + + // Don't narrow from 'any' if the predicate type is exactly 'Object' or 'Function' + if (isTypeAny(type) && (predicate.type === globalObjectType || predicate.type === globalFunctionType)) { + return type; + } + if (isIdentifierTypePredicate(predicate)) { const predicateArgument = callExpression.arguments[predicate.parameterIndex]; if (predicateArgument) { @@ -8580,21 +8732,52 @@ namespace ts { function getControlFlowContainer(node: Node): Node { while (true) { node = node.parent; - if (isFunctionLike(node) || node.kind === SyntaxKind.ModuleBlock || node.kind === SyntaxKind.SourceFile || node.kind === SyntaxKind.PropertyDeclaration) { + if (isFunctionLike(node) && !getImmediatelyInvokedFunctionExpression(node) || + node.kind === SyntaxKind.ModuleBlock || + node.kind === SyntaxKind.SourceFile || + node.kind === SyntaxKind.PropertyDeclaration) { return node; } } } - function isDeclarationIncludedInFlow(reference: Node, declaration: Declaration, includeOuterFunctions: boolean) { - const declarationContainer = getControlFlowContainer(declaration); - let container = getControlFlowContainer(reference); - while (container !== declarationContainer && - (container.kind === SyntaxKind.FunctionExpression || container.kind === SyntaxKind.ArrowFunction) && - (includeOuterFunctions || getImmediatelyInvokedFunctionExpression(container))) { - container = getControlFlowContainer(container); + // Check if a parameter is assigned anywhere within its declaring function. 
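// Illustrative sketch (not part of this diff): the discriminant property
// narrowing wired up earlier in this file (isDiscriminantProperty,
// narrowTypeByDiscriminant, narrowTypeBySwitchOnDiscriminant). "kind" is a
// possible discriminant because its type differs across the constituents of
// the union and every choice is a unit (string literal) type.
interface Square { kind: "square"; size: number; }
interface Circle { kind: "circle"; radius: number; }
type Shape = Square | Circle;

function area(s: Shape) {
    switch (s.kind) {
        case "square": return s.size * s.size;               // s narrowed to Square
        case "circle": return Math.PI * s.radius * s.radius; // s narrowed to Circle
    }
}

function isBig(s: Shape) {
    // Equality checks on the discriminant narrow as well
    // (narrowTypeByEquality delegating to narrowTypeByDiscriminant).
    return s.kind === "circle" ? s.radius > 10 : s.size > 10;
}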
+ function isParameterAssigned(symbol: Symbol) { + const func = getRootDeclaration(symbol.valueDeclaration).parent; + const links = getNodeLinks(func); + if (!(links.flags & NodeCheckFlags.AssignmentsMarked)) { + links.flags |= NodeCheckFlags.AssignmentsMarked; + if (!hasParentWithAssignmentsMarked(func)) { + markParameterAssignments(func); + } + } + return symbol.isAssigned || false; + } + + function hasParentWithAssignmentsMarked(node: Node) { + while (true) { + node = node.parent; + if (!node) { + return false; + } + if (isFunctionLike(node) && getNodeLinks(node).flags & NodeCheckFlags.AssignmentsMarked) { + return true; + } + } + } + + function markParameterAssignments(node: Node) { + if (node.kind === SyntaxKind.Identifier) { + if (isAssignmentTarget(node)) { + const symbol = getResolvedSymbol(node); + if (symbol.valueDeclaration && getRootDeclaration(symbol.valueDeclaration).kind === SyntaxKind.Parameter) { + symbol.isAssigned = true; + } + } + } + else { + forEachChild(node, markParameterAssignments); } - return container === declarationContainer; } function checkIdentifier(node: Identifier): Type { @@ -8649,15 +8832,35 @@ namespace ts { checkNestedBlockScopedBinding(node, symbol); const type = getTypeOfSymbol(localOrExportSymbol); - if (!(localOrExportSymbol.flags & SymbolFlags.Variable) || isAssignmentTarget(node)) { + const declaration = localOrExportSymbol.valueDeclaration; + // We only narrow variables and parameters occurring in a non-assignment position. For all other + // entities we simply return the declared type. + if (!(localOrExportSymbol.flags & SymbolFlags.Variable) || isAssignmentTarget(node) || !declaration) { return type; } - const declaration = localOrExportSymbol.valueDeclaration; - const includeOuterFunctions = isReadonlySymbol(localOrExportSymbol); - const assumeInitialized = !strictNullChecks || (type.flags & TypeFlags.Any) !== 0 || !declaration || - getRootDeclaration(declaration).kind === SyntaxKind.Parameter || isInAmbientContext(declaration) || - !isDeclarationIncludedInFlow(node, declaration, includeOuterFunctions); - const flowType = getFlowTypeOfReference(node, type, assumeInitialized, includeOuterFunctions); + // The declaration container is the innermost function that encloses the declaration of the variable + // or parameter. The flow container is the innermost function starting with which we analyze the control + // flow graph to determine the control flow based type. + const isParameter = getRootDeclaration(declaration).kind === SyntaxKind.Parameter; + const declarationContainer = getControlFlowContainer(declaration); + let flowContainer = getControlFlowContainer(node); + // When the control flow originates in a function expression or arrow function and we are referencing + // a const variable or parameter from an outer function, we extend the origin of the control flow + // analysis to include the immediately enclosing function. + while (flowContainer !== declarationContainer && + (flowContainer.kind === SyntaxKind.FunctionExpression || flowContainer.kind === SyntaxKind.ArrowFunction) && + (isReadonlySymbol(localOrExportSymbol) || isParameter && !isParameterAssigned(localOrExportSymbol))) { + flowContainer = getControlFlowContainer(flowContainer); + } + // We only look for uninitialized variables in strict null checking mode, and only when we can analyze + // the entire control flow graph from the variable's declaration (i.e. when the flow container and + // declaration container are the same). 
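// Illustrative sketch (not part of this diff), assuming --strictNullChecks:
// the effect of the flow-container extension and isParameterAssigned above,
// plus the "used before being assigned" check described in the comment.
function outer(x: string | number) {
    if (typeof x === "string") {
        // 'x' is a parameter that is never assigned anywhere in 'outer', so
        // control flow analysis extends through the arrow function and the
        // narrowing to string is still in effect inside it.
        const shout = () => x.toUpperCase();
        return shout();
    }
    return String(x);
}

function initialize() {
    let s: string;
    // Reading 's' before the assignment below would report
    // "Variable 's' is used before being assigned." because the declared
    // type has no undefined but the control flow based type still does.
    s = "ready";
    return s.length;
}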
+ const assumeInitialized = !strictNullChecks || (type.flags & TypeFlags.Any) !== 0 || isParameter || + flowContainer !== declarationContainer || isInAmbientContext(declaration); + const flowType = getFlowTypeOfReference(node, type, assumeInitialized, flowContainer); + // A variable is considered uninitialized when it is possible to analyze the entire control flow graph + // from declaration to use, and when the variable's declared type doesn't include undefined but the + // control flow based type does include undefined. if (!assumeInitialized && !(getFalsyFlags(type) & TypeFlags.Undefined) && getFalsyFlags(flowType) & TypeFlags.Undefined) { error(node, Diagnostics.Variable_0_is_used_before_being_assigned, symbolToString(symbol)); // Return the declared type to reduce follow-on errors @@ -8910,7 +9113,7 @@ namespace ts { if (isClassLike(container.parent)) { const symbol = getSymbolOfNode(container.parent); const type = container.flags & NodeFlags.Static ? getTypeOfSymbol(symbol) : (getDeclaredTypeOfSymbol(symbol)).thisType; - return getFlowTypeOfReference(node, type, /*assumeInitialized*/ true, /*includeOuterFunctions*/ true); + return getFlowTypeOfReference(node, type, /*assumeInitialized*/ true, /*flowContainer*/ undefined); } if (isInJavaScriptFile(node)) { @@ -9216,7 +9419,7 @@ namespace ts { } } if (isBindingPattern(declaration.name)) { - return getTypeFromBindingPattern(declaration.name, /*includePatternInType*/ true); + return getTypeFromBindingPattern(declaration.name, /*includePatternInType*/ true, /*reportErrors*/ false); } if (isBindingPattern(declaration.parent)) { const parentDeclaration = declaration.parent.parent; @@ -9321,6 +9524,11 @@ namespace ts { const binaryExpression = node.parent; const operator = binaryExpression.operatorToken.kind; if (operator >= SyntaxKind.FirstAssignment && operator <= SyntaxKind.LastAssignment) { + // Don't do this for special property assignments to avoid circularity + if (getSpecialPropertyAssignmentKind(binaryExpression) !== SpecialPropertyAssignmentKind.None) { + return undefined; + } + // In an assignment expression, the right operand is contextually typed by the type of the left operand. if (node === binaryExpression.right) { return checkExpression(binaryExpression.left); @@ -9812,7 +10020,7 @@ namespace ts { // Grammar checking checkGrammarObjectLiteralExpression(node, inDestructuringPattern); - const propertiesTable: SymbolTable = {}; + const propertiesTable = createMap(); const propertiesArray: Symbol[] = []; const contextualType = getApparentTypeOfContextualType(node); const contextualTypeHasPattern = contextualType && contextualType.pattern && @@ -9903,7 +10111,7 @@ namespace ts { // type with those properties for which the binding pattern specifies a default value. if (contextualTypeHasPattern) { for (const prop of getPropertiesOfType(contextualType)) { - if (!hasProperty(propertiesTable, prop.name)) { + if (!propertiesTable[prop.name]) { if (!(prop.flags & SymbolFlags.Optional)) { error(prop.valueDeclaration || (prop).bindingElement, Diagnostics.Initializer_provides_no_value_for_this_binding_element_and_the_binding_element_has_no_default_value); @@ -9993,10 +10201,9 @@ namespace ts { const correspondingPropSymbol = getPropertyOfType(elementAttributesType, node.name.text); correspondingPropType = correspondingPropSymbol && getTypeOfSymbol(correspondingPropSymbol); if (isUnhyphenatedJsxName(node.name.text)) { - // Maybe there's a string indexer? 
- const indexerType = getIndexTypeOfType(elementAttributesType, IndexKind.String); - if (indexerType) { - correspondingPropType = indexerType; + const attributeType = getTypeOfPropertyOfType(elementAttributesType, getTextOfPropertyName(node.name)) || getIndexTypeOfType(elementAttributesType, IndexKind.String); + if (attributeType) { + correspondingPropType = attributeType; } else { // If there's no corresponding property with this name, error @@ -10353,7 +10560,7 @@ namespace ts { const targetAttributesType = getJsxElementAttributesType(node); - const nameTable: Map = {}; + const nameTable = createMap(); // Process this array in right-to-left order so we know which // attributes (mostly from spreads) are being overwritten and // thus should have their types ignored @@ -10377,7 +10584,7 @@ namespace ts { const targetProperties = getPropertiesOfType(targetAttributesType); for (let i = 0; i < targetProperties.length; i++) { if (!(targetProperties[i].flags & SymbolFlags.Optional) && - nameTable[targetProperties[i].name] === undefined) { + !nameTable[targetProperties[i].name]) { error(node, Diagnostics.Property_0_is_missing_in_type_1, targetProperties[i].name, typeToString(targetAttributesType)); } @@ -10571,7 +10778,7 @@ namespace ts { !(prop.flags & SymbolFlags.Method && propType.flags & TypeFlags.Union)) { return propType; } - return getFlowTypeOfReference(node, propType, /*assumeInitialized*/ true, /*includeOuterFunctions*/ false); + return getFlowTypeOfReference(node, propType, /*assumeInitialized*/ true, /*flowContainer*/ undefined); } function isValidPropertyAccess(node: PropertyAccessExpression | QualifiedName, propertyName: string): boolean { @@ -10696,10 +10903,11 @@ namespace ts { } // Check for compatible indexer types. - if (isTypeAnyOrAllConstituentTypesHaveKind(indexType, TypeFlags.StringLike | TypeFlags.NumberLike | TypeFlags.ESSymbol)) { + const allowedNullableFlags = strictNullChecks ? 0 : TypeFlags.Nullable; + if (isTypeAnyOrAllConstituentTypesHaveKind(indexType, TypeFlags.StringLike | TypeFlags.NumberLike | TypeFlags.ESSymbol | allowedNullableFlags)) { // Try to use a number indexer. - if (isTypeAnyOrAllConstituentTypesHaveKind(indexType, TypeFlags.NumberLike) || isForInVariableForNumericPropertyNames(node.argumentExpression)) { + if (isTypeAnyOrAllConstituentTypesHaveKind(indexType, TypeFlags.NumberLike | allowedNullableFlags) || isForInVariableForNumericPropertyNames(node.argumentExpression)) { const numberIndexInfo = getIndexInfoOfType(objectType, IndexKind.Number); if (numberIndexInfo) { getNodeLinks(node).resolvedIndexInfo = numberIndexInfo; @@ -12826,6 +13034,9 @@ namespace ts { return checkDestructuringAssignment(element, type, contextualMapper); } else { + // We still need to check element expression here because we may need to set appropriate flag on the expression + // such as NodeCheckFlags.LexicalThis on "this"expression. + checkExpression(element); if (isTupleType(sourceType)) { error(element, Diagnostics.Tuple_type_0_with_length_1_cannot_be_assigned_to_tuple_with_length_2, typeToString(sourceType), (sourceType).elementTypes.length, elements.length); } @@ -12896,6 +13107,14 @@ namespace ts { return (target.flags & TypeFlags.Nullable) !== 0 || isTypeComparableTo(source, target); } + function getBestChoiceType(type1: Type, type2: Type): Type { + const firstAssignableToSecond = isTypeAssignableTo(type1, type2); + const secondAssignableToFirst = isTypeAssignableTo(type2, type1); + return secondAssignableToFirst && !firstAssignableToSecond ? 
type1 : + firstAssignableToSecond && !secondAssignableToFirst ? type2 : + getUnionType([type1, type2], /*subtypeReduction*/ true); + } + function checkBinaryExpression(node: BinaryExpression, contextualMapper?: TypeMapper) { return checkBinaryLikeExpression(node.left, node.operatorToken, node.right, contextualMapper, node); } @@ -13039,7 +13258,7 @@ namespace ts { leftType; case SyntaxKind.BarBarToken: return getTypeFacts(leftType) & TypeFacts.Falsy ? - getUnionType([removeDefinitelyFalsyTypes(leftType), rightType], /*subtypeReduction*/ true) : + getBestChoiceType(removeDefinitelyFalsyTypes(leftType), rightType) : leftType; case SyntaxKind.EqualsToken: checkAssignmentOperator(rightType); @@ -13166,7 +13385,7 @@ namespace ts { checkExpression(node.condition); const type1 = checkExpression(node.whenTrue, contextualMapper); const type2 = checkExpression(node.whenFalse, contextualMapper); - return getUnionType([type1, type2], /*subtypeReduction*/ true); + return getBestChoiceType(type1, type2); } function typeContainsLiteralFromEnum(type: Type, enumType: EnumType) { @@ -13648,15 +13867,19 @@ namespace ts { } function checkClassForDuplicateDeclarations(node: ClassLikeDeclaration) { - const getter = 1, setter = 2, property = getter | setter; + const enum Accessor { + Getter = 1, + Setter = 2, + Property = Getter | Setter + } - const instanceNames: Map = {}; - const staticNames: Map = {}; + const instanceNames = createMap(); + const staticNames = createMap(); for (const member of node.members) { if (member.kind === SyntaxKind.Constructor) { for (const param of (member as ConstructorDeclaration).parameters) { if (isParameterPropertyDeclaration(param)) { - addName(instanceNames, param.name, (param.name as Identifier).text, property); + addName(instanceNames, param.name, (param.name as Identifier).text, Accessor.Property); } } } @@ -13668,24 +13891,24 @@ namespace ts { if (memberName) { switch (member.kind) { case SyntaxKind.GetAccessor: - addName(names, member.name, memberName, getter); + addName(names, member.name, memberName, Accessor.Getter); break; case SyntaxKind.SetAccessor: - addName(names, member.name, memberName, setter); + addName(names, member.name, memberName, Accessor.Setter); break; case SyntaxKind.PropertyDeclaration: - addName(names, member.name, memberName, property); + addName(names, member.name, memberName, Accessor.Property); break; } } } } - function addName(names: Map, location: Node, name: string, meaning: number) { - if (hasProperty(names, name)) { - const prev = names[name]; + function addName(names: Map, location: Node, name: string, meaning: Accessor) { + const prev = names[name]; + if (prev) { if (prev & meaning) { error(location, Diagnostics.Duplicate_identifier_0, getTextOfNode(location)); } @@ -13700,7 +13923,7 @@ namespace ts { } function checkObjectTypeForDuplicateDeclarations(node: TypeLiteralNode | InterfaceDeclaration) { - const names: Map = {}; + const names = createMap(); for (const member of node.members) { if (member.kind == SyntaxKind.PropertySignature) { let memberName: string; @@ -13714,7 +13937,7 @@ namespace ts { continue; } - if (hasProperty(names, memberName)) { + if (names[memberName]) { error(member.symbol.valueDeclaration.name, Diagnostics.Duplicate_identifier_0, memberName); error(member.name, Diagnostics.Duplicate_identifier_0, memberName); } @@ -13903,12 +14126,7 @@ namespace ts { checkSignatureDeclaration(node); if (node.kind === SyntaxKind.GetAccessor) { if (!isInAmbientContext(node) && nodeIsPresent(node.body) && (node.flags & 
NodeFlags.HasImplicitReturn)) { - if (node.flags & NodeFlags.HasExplicitReturn) { - if (compilerOptions.noImplicitReturns) { - error(node.name, Diagnostics.Not_all_code_paths_return_a_value); - } - } - else { + if (!(node.flags & NodeFlags.HasExplicitReturn)) { error(node.name, Diagnostics.A_get_accessor_must_return_a_value); } } @@ -13938,7 +14156,10 @@ namespace ts { checkAccessorDeclarationTypesIdentical(node, otherAccessor, getThisTypeOfDeclaration, Diagnostics.get_and_set_accessor_must_have_the_same_this_type); } } - getTypeOfAccessors(getSymbolOfNode(node)); + const returnType = getTypeOfAccessors(getSymbolOfNode(node)); + if (node.kind === SyntaxKind.GetAccessor) { + checkAllCodePathsInNonVoidFunctionReturnOrThrow(node, returnType); + } } if (node.parent.kind !== SyntaxKind.ObjectLiteralExpression) { checkSourceElement(node.body); @@ -14926,22 +15147,20 @@ namespace ts { function checkUnusedLocalsAndParameters(node: Node): void { if (node.parent.kind !== SyntaxKind.InterfaceDeclaration && noUnusedIdentifiers && !isInAmbientContext(node)) { for (const key in node.locals) { - if (hasProperty(node.locals, key)) { - const local = node.locals[key]; - if (!local.isReferenced) { - if (local.valueDeclaration && local.valueDeclaration.kind === SyntaxKind.Parameter) { - const parameter = local.valueDeclaration; - if (compilerOptions.noUnusedParameters && - !isParameterPropertyDeclaration(parameter) && - !parameterIsThisKeyword(parameter) && - !parameterNameStartsWithUnderscore(parameter)) { - error(local.valueDeclaration.name, Diagnostics._0_is_declared_but_never_used, local.name); - } - } - else if (compilerOptions.noUnusedLocals) { - forEach(local.declarations, d => error(d.name || d, Diagnostics._0_is_declared_but_never_used, local.name)); + const local = node.locals[key]; + if (!local.isReferenced) { + if (local.valueDeclaration && local.valueDeclaration.kind === SyntaxKind.Parameter) { + const parameter = local.valueDeclaration; + if (compilerOptions.noUnusedParameters && + !isParameterPropertyDeclaration(parameter) && + !parameterIsThisKeyword(parameter) && + !parameterNameStartsWithUnderscore(parameter)) { + error(local.valueDeclaration.name, Diagnostics._0_is_declared_but_never_used, local.name); } } + else if (compilerOptions.noUnusedLocals) { + forEach(local.declarations, d => error(d.name || d, Diagnostics._0_is_declared_but_never_used, local.name)); + } } } } @@ -14967,7 +15186,7 @@ namespace ts { else if (member.kind === SyntaxKind.Constructor) { for (const parameter of (member).parameters) { if (!parameter.symbol.isReferenced && parameter.flags & NodeFlags.Private) { - error(parameter.name, Diagnostics._0_is_declared_but_never_used, parameter.symbol.name); + error(parameter.name, Diagnostics.Property_0_is_declared_but_never_used, parameter.symbol.name); } } } @@ -14998,13 +15217,11 @@ namespace ts { function checkUnusedModuleMembers(node: ModuleDeclaration | SourceFile): void { if (compilerOptions.noUnusedLocals && !isInAmbientContext(node)) { for (const key in node.locals) { - if (hasProperty(node.locals, key)) { - const local = node.locals[key]; - if (!local.isReferenced && !local.exportSymbol) { - for (const declaration of local.declarations) { - if (!isAmbientModule(declaration)) { - error(declaration.name, Diagnostics._0_is_declared_but_never_used, local.name); - } + const local = node.locals[key]; + if (!local.isReferenced && !local.exportSymbol) { + for (const declaration of local.declarations) { + if (!isAmbientModule(declaration)) { + error(declaration.name, 
Diagnostics._0_is_declared_but_never_used, local.name); } } } @@ -16013,7 +16230,7 @@ namespace ts { else { const identifierName = (catchClause.variableDeclaration.name).text; const locals = catchClause.block.locals; - if (locals && hasProperty(locals, identifierName)) { + if (locals) { const localSymbol = locals[identifierName]; if (localSymbol && (localSymbol.flags & SymbolFlags.BlockScopedVariable) !== 0) { grammarErrorOnNode(localSymbol.valueDeclaration, Diagnostics.Cannot_redeclare_identifier_0_in_catch_clause, identifierName); @@ -16227,6 +16444,12 @@ namespace ts { checkTypeAssignableTo(staticType, getTypeWithoutSignatures(staticBaseType), node.name || node, Diagnostics.Class_static_side_0_incorrectly_extends_base_class_static_side_1); + if (baseType.symbol.valueDeclaration && !isInAmbientContext(baseType.symbol.valueDeclaration)) { + if (!isBlockScopedNameDeclaredBeforeUse(baseType.symbol.valueDeclaration, node)) { + error(baseTypeNode, Diagnostics.A_class_must_be_declared_after_its_base_class); + } + } + if (!(staticBaseType.symbol && staticBaseType.symbol.flags & SymbolFlags.Class)) { // When the static base type is a "class-like" constructor function (but not actually a class), we verify // that all instantiated base constructor signatures return the same type. We can simply compare the type @@ -16244,7 +16467,7 @@ namespace ts { const implementedTypeNodes = getClassImplementsHeritageClauseElements(node); if (implementedTypeNodes) { for (const typeRefNode of implementedTypeNodes) { - if (!isSupportedExpressionWithTypeArguments(typeRefNode)) { + if (!isEntityNameExpression(typeRefNode.expression)) { error(typeRefNode.expression, Diagnostics.A_class_can_only_implement_an_identifier_Slashqualified_name_with_optional_type_arguments); } checkTypeReferenceNode(typeRefNode); @@ -16428,18 +16651,18 @@ namespace ts { return true; } - const seen: Map<{ prop: Symbol; containingType: Type }> = {}; + const seen = createMap<{ prop: Symbol; containingType: Type }>(); forEach(resolveDeclaredMembers(type).declaredProperties, p => { seen[p.name] = { prop: p, containingType: type }; }); let ok = true; for (const base of baseTypes) { const properties = getPropertiesOfObjectType(getTypeWithThisArgument(base, type.thisType)); for (const prop of properties) { - if (!hasProperty(seen, prop.name)) { + const existing = seen[prop.name]; + if (!existing) { seen[prop.name] = { prop: prop, containingType: base }; } else { - const existing = seen[prop.name]; const isInheritedProperty = existing.containingType !== type; if (isInheritedProperty && !isPropertyIdenticalTo(existing.prop, prop)) { ok = false; @@ -16486,7 +16709,7 @@ namespace ts { checkObjectTypeForDuplicateDeclarations(node); } forEach(getInterfaceBaseTypeNodes(node), heritageElement => { - if (!isSupportedExpressionWithTypeArguments(heritageElement)) { + if (!isEntityNameExpression(heritageElement.expression)) { error(heritageElement.expression, Diagnostics.An_interface_can_only_extend_an_identifier_Slashqualified_name_with_optional_type_arguments); } checkTypeReferenceNode(heritageElement); @@ -16886,11 +17109,6 @@ namespace ts { } } - if (compilerOptions.noImplicitAny && !node.body) { - // Ambient shorthand module is an implicit any - reportImplicitAnyError(node, anyType); - } - if (node.body) { checkSourceElement(node.body); if (!isGlobalScopeAugmentation(node)) { @@ -16951,20 +17169,21 @@ namespace ts { } } - function getFirstIdentifier(node: EntityName | Expression): Identifier { - while (true) { - if (node.kind === SyntaxKind.QualifiedName) 
{ - node = (node).left; - } - else if (node.kind === SyntaxKind.PropertyAccessExpression) { - node = (node).expression; - } - else { - break; - } + function getFirstIdentifier(node: EntityNameOrEntityNameExpression): Identifier { + switch (node.kind) { + case SyntaxKind.Identifier: + return node; + case SyntaxKind.QualifiedName: + do { + node = (node).left; + } while (node.kind !== SyntaxKind.Identifier); + return node; + case SyntaxKind.PropertyAccessExpression: + do { + node = (node).expression; + } while (node.kind !== SyntaxKind.Identifier); + return node; } - Debug.assert(node.kind === SyntaxKind.Identifier); - return node; } function checkExternalImportOrExportDeclaration(node: ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration): boolean { @@ -17393,11 +17612,10 @@ namespace ts { } function checkSourceFile(node: SourceFile) { - const start = performance.mark(); - + performance.mark("beforeCheck"); checkSourceFileWorker(node); - - performance.measure("Check", start); + performance.mark("afterCheck"); + performance.measure("Check", "beforeCheck", "afterCheck"); } // Fully type check a source file and collect the relevant diagnostics. @@ -17497,7 +17715,7 @@ namespace ts { } function getSymbolsInScope(location: Node, meaning: SymbolFlags): Symbol[] { - const symbols: SymbolTable = {}; + const symbols = createMap(); let memberFlags: NodeFlags = 0; if (isInsideWithStatementBody(location)) { @@ -17575,7 +17793,7 @@ namespace ts { // We will copy all symbol regardless of its reserved name because // symbolsToArray will check whether the key is a reserved name and // it will not copy symbol with reserved name to the array - if (!hasProperty(symbols, id)) { + if (!symbols[id]) { symbols[id] = symbol; } } @@ -17663,7 +17881,7 @@ namespace ts { return getLeftSideOfImportEqualsOrExportAssignment(node) !== undefined; } - function getSymbolOfEntityNameOrPropertyAccessExpression(entityName: EntityName | PropertyAccessExpression): Symbol { + function getSymbolOfEntityNameOrPropertyAccessExpression(entityName: EntityName | PropertyAccessExpression): Symbol | undefined { if (isDeclarationName(entityName)) { return getSymbolOfNode(entityName.parent); } @@ -17682,22 +17900,20 @@ namespace ts { } } - if (entityName.parent.kind === SyntaxKind.ExportAssignment) { - return resolveEntityName(entityName, + if (entityName.parent.kind === SyntaxKind.ExportAssignment && isEntityNameExpression(entityName)) { + return resolveEntityName(entityName, /*all meanings*/ SymbolFlags.Value | SymbolFlags.Type | SymbolFlags.Namespace | SymbolFlags.Alias); } - if (entityName.kind !== SyntaxKind.PropertyAccessExpression) { - if (isInRightSideOfImportOrExportAssignment(entityName)) { - // Since we already checked for ExportAssignment, this really could only be an Import - const importEqualsDeclaration = getAncestor(entityName, SyntaxKind.ImportEqualsDeclaration); - Debug.assert(importEqualsDeclaration !== undefined); - return getSymbolOfPartOfRightHandSideOfImportEquals(entityName, importEqualsDeclaration, /*dontResolveAlias*/ true); - } + if (entityName.kind !== SyntaxKind.PropertyAccessExpression && isInRightSideOfImportOrExportAssignment(entityName)) { + // Since we already checked for ExportAssignment, this really could only be an Import + const importEqualsDeclaration = getAncestor(entityName, SyntaxKind.ImportEqualsDeclaration); + Debug.assert(importEqualsDeclaration !== undefined); + return getSymbolOfPartOfRightHandSideOfImportEquals(entityName, importEqualsDeclaration, /*dontResolveAlias*/ true); } if 
(isRightSideOfQualifiedNameOrPropertyAccess(entityName)) { - entityName = entityName.parent; + entityName = entityName.parent; } if (isHeritageClauseElementIdentifier(entityName)) { @@ -17996,7 +18212,7 @@ namespace ts { const propsByName = createSymbolTable(getPropertiesOfType(type)); if (getSignaturesOfType(type, SignatureKind.Call).length || getSignaturesOfType(type, SignatureKind.Construct).length) { forEach(getPropertiesOfType(globalFunctionType), p => { - if (!hasProperty(propsByName, p.name)) { + if (!propsByName[p.name]) { propsByName[p.name] = p; } }); @@ -18053,7 +18269,7 @@ namespace ts { // otherwise - check if at least one export is value symbolLinks.exportsSomeValue = hasExportAssignment ? !!(moduleSymbol.flags & SymbolFlags.Value) - : forEachValue(getExportsOfModule(moduleSymbol), isValue); + : forEachProperty(getExportsOfModule(moduleSymbol), isValue); } return symbolLinks.exportsSomeValue; @@ -18344,7 +18560,7 @@ namespace ts { } function hasGlobalName(name: string): boolean { - return hasProperty(globals, name); + return !!globals[name]; } function getReferencedValueSymbol(reference: Identifier): Symbol { @@ -18368,9 +18584,6 @@ namespace ts { // populate reverse mapping: file path -> type reference directive that was resolved to this file fileToDirective = createFileMap(); for (const key in resolvedTypeReferenceDirectives) { - if (!hasProperty(resolvedTypeReferenceDirectives, key)) { - continue; - } const resolvedDirective = resolvedTypeReferenceDirectives[key]; if (!resolvedDirective) { continue; @@ -18410,7 +18623,7 @@ namespace ts { }; // defined here to avoid outer scope pollution - function getTypeReferenceDirectivesForEntityName(node: EntityName | PropertyAccessExpression): string[] { + function getTypeReferenceDirectivesForEntityName(node: EntityNameOrEntityNameExpression): string[] { // program does not have any files with type reference directives - bail out if (!fileToDirective) { return undefined; @@ -18625,50 +18838,9 @@ namespace ts { } function checkGrammarModifiers(node: Node): boolean { - switch (node.kind) { - case SyntaxKind.GetAccessor: - case SyntaxKind.SetAccessor: - case SyntaxKind.Constructor: - case SyntaxKind.PropertyDeclaration: - case SyntaxKind.PropertySignature: - case SyntaxKind.MethodDeclaration: - case SyntaxKind.MethodSignature: - case SyntaxKind.IndexSignature: - case SyntaxKind.ModuleDeclaration: - case SyntaxKind.ImportDeclaration: - case SyntaxKind.ImportEqualsDeclaration: - case SyntaxKind.ExportDeclaration: - case SyntaxKind.ExportAssignment: - case SyntaxKind.FunctionExpression: - case SyntaxKind.ArrowFunction: - case SyntaxKind.Parameter: - break; - case SyntaxKind.FunctionDeclaration: - if (node.modifiers && (node.modifiers.length > 1 || node.modifiers[0].kind !== SyntaxKind.AsyncKeyword) && - node.parent.kind !== SyntaxKind.ModuleBlock && node.parent.kind !== SyntaxKind.SourceFile) { - return grammarErrorOnFirstToken(node, Diagnostics.Modifiers_cannot_appear_here); - } - break; - case SyntaxKind.ClassDeclaration: - case SyntaxKind.InterfaceDeclaration: - case SyntaxKind.VariableStatement: - case SyntaxKind.TypeAliasDeclaration: - if (node.modifiers && node.parent.kind !== SyntaxKind.ModuleBlock && node.parent.kind !== SyntaxKind.SourceFile) { - return grammarErrorOnFirstToken(node, Diagnostics.Modifiers_cannot_appear_here); - } - break; - case SyntaxKind.EnumDeclaration: - if (node.modifiers && (node.modifiers.length > 1 || node.modifiers[0].kind !== SyntaxKind.ConstKeyword) && - node.parent.kind !== SyntaxKind.ModuleBlock && 
node.parent.kind !== SyntaxKind.SourceFile) { - return grammarErrorOnFirstToken(node, Diagnostics.Modifiers_cannot_appear_here); - } - break; - default: - return false; - } - - if (!node.modifiers) { - return; + const quickResult = reportObviousModifierErrors(node); + if (quickResult !== undefined) { + return quickResult; } let lastStatic: Node, lastPrivate: Node, lastProtected: Node, lastDeclare: Node, lastAsync: Node, lastReadonly: Node; @@ -18873,6 +19045,61 @@ namespace ts { } } + /** + * true | false: Early return this value from checkGrammarModifiers. + * undefined: Need to do full checking on the modifiers. + */ + function reportObviousModifierErrors(node: Node): boolean | undefined { + return !node.modifiers + ? false + : shouldReportBadModifier(node) + ? grammarErrorOnFirstToken(node, Diagnostics.Modifiers_cannot_appear_here) + : undefined; + } + function shouldReportBadModifier(node: Node): boolean { + switch (node.kind) { + case SyntaxKind.GetAccessor: + case SyntaxKind.SetAccessor: + case SyntaxKind.Constructor: + case SyntaxKind.PropertyDeclaration: + case SyntaxKind.PropertySignature: + case SyntaxKind.MethodDeclaration: + case SyntaxKind.MethodSignature: + case SyntaxKind.IndexSignature: + case SyntaxKind.ModuleDeclaration: + case SyntaxKind.ImportDeclaration: + case SyntaxKind.ImportEqualsDeclaration: + case SyntaxKind.ExportDeclaration: + case SyntaxKind.ExportAssignment: + case SyntaxKind.FunctionExpression: + case SyntaxKind.ArrowFunction: + case SyntaxKind.Parameter: + return false; + default: + if (node.parent.kind === SyntaxKind.ModuleBlock || node.parent.kind === SyntaxKind.SourceFile) { + return false; + } + switch (node.kind) { + case SyntaxKind.FunctionDeclaration: + return nodeHasAnyModifiersExcept(node, SyntaxKind.AsyncKeyword); + case SyntaxKind.ClassDeclaration: + return nodeHasAnyModifiersExcept(node, SyntaxKind.AbstractKeyword); + case SyntaxKind.InterfaceDeclaration: + case SyntaxKind.VariableStatement: + case SyntaxKind.TypeAliasDeclaration: + return true; + case SyntaxKind.EnumDeclaration: + return nodeHasAnyModifiersExcept(node, SyntaxKind.ConstKeyword); + default: + Debug.fail(); + return false; + } + } + } + function nodeHasAnyModifiersExcept(node: Node, allowedModifier: SyntaxKind): boolean { + return node.modifiers.length > 1 || node.modifiers[0].kind !== allowedModifier; + } + function checkGrammarAsyncModifier(node: Node, asyncModifier: Node): boolean { if (languageVersion < ScriptTarget.ES6) { return grammarErrorOnNode(asyncModifier, Diagnostics.Async_functions_are_only_available_when_targeting_ECMAScript_2015_or_higher); @@ -19145,7 +19372,7 @@ namespace ts { } function checkGrammarObjectLiteralExpression(node: ObjectLiteralExpression, inDestructuring: boolean) { - const seen: Map = {}; + const seen = createMap(); const Property = 1; const GetAccessor = 2; const SetAccessor = 4; @@ -19207,7 +19434,7 @@ namespace ts { continue; } - if (!hasProperty(seen, effectiveName)) { + if (!seen[effectiveName]) { seen[effectiveName] = currentKind; } else { @@ -19231,7 +19458,7 @@ namespace ts { } function checkGrammarJsxElement(node: JsxOpeningLikeElement) { - const seen: Map = {}; + const seen = createMap(); for (const attr of node.attributes) { if (attr.kind === SyntaxKind.JsxSpreadAttribute) { continue; @@ -19239,7 +19466,7 @@ namespace ts { const jsxAttr = (attr); const name = jsxAttr.name; - if (!hasProperty(seen, name.text)) { + if (!seen[name.text]) { seen[name.text] = true; } else { @@ -19634,7 +19861,7 @@ namespace ts { } function 
checkGrammarTopLevelElementForRequiredDeclareModifier(node: Node): boolean { - // A declare modifier is required for any top level .d.ts declaration except export=, export default, + // A declare modifier is required for any top level .d.ts declaration except export=, export default, export as namespace // interfaces and imports categories: // // DeclarationElement: @@ -19652,6 +19879,7 @@ namespace ts { node.kind === SyntaxKind.ImportEqualsDeclaration || node.kind === SyntaxKind.ExportDeclaration || node.kind === SyntaxKind.ExportAssignment || + node.kind === SyntaxKind.NamespaceExportDeclaration || (node.flags & NodeFlags.Ambient) || (node.flags & (NodeFlags.Export | NodeFlags.Default))) { diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts index 90770ce1bffe0..fb78b518e2d26 100644 --- a/src/compiler/commandLineParser.ts +++ b/src/compiler/commandLineParser.ts @@ -64,10 +64,10 @@ namespace ts { }, { name: "jsx", - type: { + type: createMap({ "preserve": JsxEmit.Preserve, "react": JsxEmit.React - }, + }), paramType: Diagnostics.KIND, description: Diagnostics.Specify_JSX_code_generation_Colon_preserve_or_react, }, @@ -94,7 +94,7 @@ namespace ts { { name: "module", shortName: "m", - type: { + type: createMap({ "none": ModuleKind.None, "commonjs": ModuleKind.CommonJS, "amd": ModuleKind.AMD, @@ -102,16 +102,16 @@ namespace ts { "umd": ModuleKind.UMD, "es6": ModuleKind.ES6, "es2015": ModuleKind.ES2015, - }, + }), description: Diagnostics.Specify_module_code_generation_Colon_commonjs_amd_system_umd_or_es2015, paramType: Diagnostics.KIND, }, { name: "newLine", - type: { + type: createMap({ "crlf": NewLineKind.CarriageReturnLineFeed, "lf": NewLineKind.LineFeed - }, + }), description: Diagnostics.Specify_the_end_of_line_sequence_to_be_used_when_emitting_files_Colon_CRLF_dos_or_LF_unix, paramType: Diagnostics.NEWLINE, }, @@ -129,6 +129,10 @@ namespace ts { type: "boolean", description: Diagnostics.Do_not_emit_outputs_if_any_errors_were_reported, }, + { + name: "noErrorTruncation", + type: "boolean" + }, { name: "noImplicitAny", type: "boolean", @@ -253,12 +257,12 @@ namespace ts { { name: "target", shortName: "t", - type: { + type: createMap({ "es3": ScriptTarget.ES3, "es5": ScriptTarget.ES5, "es6": ScriptTarget.ES6, "es2015": ScriptTarget.ES2015, - }, + }), description: Diagnostics.Specify_ECMAScript_target_version_Colon_ES3_default_ES5_or_ES2015, paramType: Diagnostics.VERSION, }, @@ -287,10 +291,10 @@ namespace ts { }, { name: "moduleResolution", - type: { + type: createMap({ "node": ModuleResolutionKind.NodeJs, "classic": ModuleResolutionKind.Classic, - }, + }), description: Diagnostics.Specify_module_resolution_strategy_Colon_node_Node_js_or_classic_TypeScript_pre_1_6, }, { @@ -395,7 +399,7 @@ namespace ts { type: "list", element: { name: "lib", - type: { + type: createMap({ // JavaScript only "es5": "lib.es5.d.ts", "es6": "lib.es2015.d.ts", @@ -420,7 +424,7 @@ namespace ts { "es2016.array.include": "lib.es2016.array.include.d.ts", "es2017.object": "lib.es2017.object.d.ts", "es2017.sharedmemory": "lib.es2017.sharedmemory.d.ts" - }, + }), }, description: Diagnostics.Specify_library_files_to_be_included_in_the_compilation_Colon }, @@ -465,6 +469,14 @@ namespace ts { shortOptionNames: Map; } + /* @internal */ + export const defaultInitCompilerOptions: CompilerOptions = { + module: ModuleKind.CommonJS, + target: ScriptTarget.ES5, + noImplicitAny: false, + sourceMap: false, + }; + let optionNameMapCache: OptionNameMap; /* @internal */ @@ -473,8 +485,8 @@ namespace ts 
{ return optionNameMapCache; } - const optionNameMap: Map = {}; - const shortOptionNames: Map = {}; + const optionNameMap = createMap(); + const shortOptionNames = createMap(); forEach(optionDeclarations, option => { optionNameMap[option.name.toLowerCase()] = option; if (option.shortName) { @@ -489,10 +501,9 @@ namespace ts { /* @internal */ export function createCompilerDiagnosticForInvalidCustomType(opt: CommandLineOptionOfCustomType): Diagnostic { const namesOfType: string[] = []; - forEachKey(opt.type, key => { + for (const key in opt.type) { namesOfType.push(` '${key}'`); - }); - + } return createCompilerDiagnostic(Diagnostics.Argument_for_0_option_must_be_Colon_1, `--${opt.name}`, namesOfType); } @@ -500,7 +511,7 @@ namespace ts { export function parseCustomTypeOption(opt: CommandLineOptionOfCustomType, value: string, errors: Diagnostic[]) { const key = trimString((value || "")).toLowerCase(); const map = opt.type; - if (hasProperty(map, key)) { + if (key in map) { return map[key]; } else { @@ -554,11 +565,11 @@ namespace ts { s = s.slice(s.charCodeAt(1) === CharacterCodes.minus ? 2 : 1).toLowerCase(); // Try to translate short option names to their full equivalents. - if (hasProperty(shortOptionNames, s)) { + if (s in shortOptionNames) { s = shortOptionNames[s]; } - if (hasProperty(optionNameMap, s)) { + if (s in optionNameMap) { const opt = optionNameMap[s]; if (opt.isTSConfigOnly) { @@ -671,6 +682,94 @@ namespace ts { } } + /** + * Generate tsconfig configuration when running command line "--init" + * @param options commandlineOptions to be generated into tsconfig.json + * @param fileNames array of filenames to be generated into tsconfig.json + */ + /* @internal */ + export function generateTSConfig(options: CompilerOptions, fileNames: string[]): { compilerOptions: Map } { + const compilerOptions = extend(options, defaultInitCompilerOptions); + const configurations: any = { + compilerOptions: serializeCompilerOptions(compilerOptions) + }; + if (fileNames && fileNames.length) { + // only set the files property if we have at least one file + configurations.files = fileNames; + } + + return configurations; + + function getCustomTypeMapOfCommandLineOption(optionDefinition: CommandLineOption): Map | undefined { + if (optionDefinition.type === "string" || optionDefinition.type === "number" || optionDefinition.type === "boolean") { + // this is of a type CommandLineOptionOfPrimitiveType + return undefined; + } + else if (optionDefinition.type === "list") { + return getCustomTypeMapOfCommandLineOption((optionDefinition).element); + } + else { + return (optionDefinition).type; + } + } + + function getNameOfCompilerOptionValue(value: CompilerOptionsValue, customTypeMap: MapLike): string | undefined { + // There is a typeMap associated with this command-line option so use it to map value back to its name + for (const key in customTypeMap) { + if (customTypeMap[key] === value) { + return key; + } + } + return undefined; + } + + function serializeCompilerOptions(options: CompilerOptions): Map { + const result = createMap(); + const optionsNameMap = getOptionNameMap().optionNameMap; + + for (const name in options) { + if (hasProperty(options, name)) { + // tsconfig only options cannot be specified via command line, + // so we can assume that only types that can appear here string | number | boolean + switch (name) { + case "init": + case "watch": + case "version": + case "help": + case "project": + break; + default: + const value = options[name]; + let optionDefinition = 
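+                            // Look up the option declaration by its lower-cased name (e.g. "module");
+                            // if it carries a custom type map, getNameOfCompilerOptionValue below maps the
+                            // stored enum value back to its user-facing key, e.g. ModuleKind.CommonJS -> "commonjs".
+                            // (Illustrative note on the lookup that follows.)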
optionsNameMap[name.toLowerCase()]; + if (optionDefinition) { + const customTypeMap = getCustomTypeMapOfCommandLineOption(optionDefinition); + if (!customTypeMap) { + // There is no map associated with this compiler option then use the value as-is + // This is the case if the value is expect to be string, number, boolean or list of string + result[name] = value; + } + else { + if (optionDefinition.type === "list") { + const convertedValue: string[] = []; + for (const element of value as (string | number)[]) { + convertedValue.push(getNameOfCompilerOptionValue(element, customTypeMap)); + } + result[name] = convertedValue; + } + else { + // There is a typeMap associated with this command-line option so use it to map value back to its name + result[name] = getNameOfCompilerOptionValue(value, customTypeMap); + } + } + } + break; + } + } + } + return result; + } + } + /** * Remove the comments from a json like text. * Comments can be single line comments (starting with # or //) or multiline comments using / * * / @@ -827,7 +926,7 @@ namespace ts { const optionNameMap = arrayToMap(optionDeclarations, opt => opt.name); for (const id in jsonOptions) { - if (hasProperty(optionNameMap, id)) { + if (id in optionNameMap) { const opt = optionNameMap[id]; defaultOptions[opt.name] = convertJsonOption(opt, jsonOptions[id], basePath, errors); } @@ -864,7 +963,7 @@ namespace ts { function convertJsonOptionOfCustomType(opt: CommandLineOptionOfCustomType, value: string, errors: Diagnostic[]) { const key = value.toLowerCase(); - if (hasProperty(opt.type, key)) { + if (key in opt.type) { return opt.type[key]; } else { @@ -974,12 +1073,12 @@ namespace ts { // Literal file names (provided via the "files" array in tsconfig.json) are stored in a // file map with a possibly case insensitive key. We use this map later when when including // wildcard paths. - const literalFileMap: Map = {}; + const literalFileMap = createMap(); // Wildcard paths (provided via the "includes" array in tsconfig.json) are stored in a // file map with a possibly case insensitive key. We use this map to store paths matched // via wildcard, and to handle extension priority. - const wildcardFileMap: Map = {}; + const wildcardFileMap = createMap(); if (include) { include = validateSpecs(include, errors, /*allowTrailingRecursion*/ false); @@ -1027,7 +1126,7 @@ namespace ts { removeWildcardFilesWithLowerPriorityExtension(file, wildcardFileMap, supportedExtensions, keyMapper); const key = keyMapper(file); - if (!hasProperty(literalFileMap, key) && !hasProperty(wildcardFileMap, key)) { + if (!(key in literalFileMap) && !(key in wildcardFileMap)) { wildcardFileMap[key] = file; } } @@ -1079,7 +1178,7 @@ namespace ts { // /a/b/a?z - Watch /a/b directly to catch any new file matching a?z const rawExcludeRegex = getRegularExpressionForWildcard(exclude, path, "exclude"); const excludeRegex = rawExcludeRegex && new RegExp(rawExcludeRegex, useCaseSensitiveFileNames ? "" : "i"); - const wildcardDirectories: Map = {}; + const wildcardDirectories = createMap(); if (include !== undefined) { const recursiveKeys: string[] = []; for (const file of include) { @@ -1092,7 +1191,7 @@ namespace ts { if (match) { const key = useCaseSensitiveFileNames ? match[0] : match[0].toLowerCase(); const flags = watchRecursivePattern.test(name) ? 
WatchDirectoryFlags.Recursive : WatchDirectoryFlags.None; - const existingFlags = getProperty(wildcardDirectories, key); + const existingFlags = wildcardDirectories[key]; if (existingFlags === undefined || existingFlags < flags) { wildcardDirectories[key] = flags; if (flags === WatchDirectoryFlags.Recursive) { @@ -1104,11 +1203,9 @@ namespace ts { // Remove any subpaths under an existing recursively watched directory. for (const key in wildcardDirectories) { - if (hasProperty(wildcardDirectories, key)) { - for (const recursiveKey of recursiveKeys) { - if (key !== recursiveKey && containsPath(recursiveKey, key, path, !useCaseSensitiveFileNames)) { - delete wildcardDirectories[key]; - } + for (const recursiveKey of recursiveKeys) { + if (key !== recursiveKey && containsPath(recursiveKey, key, path, !useCaseSensitiveFileNames)) { + delete wildcardDirectories[key]; } } } @@ -1131,7 +1228,7 @@ namespace ts { for (let i = ExtensionPriority.Highest; i < adjustedExtensionPriority; i++) { const higherPriorityExtension = extensions[i]; const higherPriorityPath = keyMapper(changeExtension(file, higherPriorityExtension)); - if (hasProperty(literalFiles, higherPriorityPath) || hasProperty(wildcardFiles, higherPriorityPath)) { + if (higherPriorityPath in literalFiles || higherPriorityPath in wildcardFiles) { return true; } } diff --git a/src/compiler/core.ts b/src/compiler/core.ts index d4b59ca923611..77cc379acc27f 100644 --- a/src/compiler/core.ts +++ b/src/compiler/core.ts @@ -19,8 +19,28 @@ namespace ts { True = -1 } + const createObject = Object.create; + + export function createMap(template?: MapLike): Map { + const map: Map = createObject(null); // tslint:disable-line:no-null-keyword + + // Using 'delete' on an object causes V8 to put the object in dictionary mode. + // This disables creation of hidden classes, which are expensive when an object is + // constantly changing shape. + map["__"] = undefined; + delete map["__"]; + + // Copies keys/values from template. Note that for..in will not throw if + // template is undefined, and instead will just exit the loop. + for (const key in template) if (hasOwnProperty.call(template, key)) { + map[key] = template[key]; + } + + return map; + } + export function createFileMap(keyMapper?: (key: string) => string): FileMap { - let files: Map = {}; + let files = createMap(); return { get, set, @@ -55,7 +75,7 @@ namespace ts { } function contains(path: Path) { - return hasProperty(files, toKey(path)); + return toKey(path) in files; } function remove(path: Path) { @@ -64,7 +84,7 @@ namespace ts { } function clear() { - files = {}; + files = createMap(); } function toKey(path: Path): string { @@ -90,7 +110,7 @@ namespace ts { * returns a truthy value, then returns that value. * If no such value is found, the callback is applied to each element of array and undefined is returned. */ - export function forEach(array: T[], callback: (element: T, index: number) => U): U { + export function forEach(array: T[] | undefined, callback: (element: T, index: number) => U | undefined): U | undefined { if (array) { for (let i = 0, len = array.length; i < len; i++) { const result = callback(array[i], i); @@ -102,6 +122,31 @@ namespace ts { return undefined; } + /** Works like Array.prototype.find, returning `undefined` if no element satisfying the predicate is found. 
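+     * E.g. (illustrative): find([1, 2, 3], x => x > 1) === 2, and find([1, 2, 3], x => x > 3) === undefined.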
*/ + export function find(array: T[], predicate: (element: T, index: number) => boolean): T | undefined { + for (let i = 0, len = array.length; i < len; i++) { + const value = array[i]; + if (predicate(value, i)) { + return value; + } + } + return undefined; + } + + /** + * Returns the first truthy result of `callback`, or else fails. + * This is like `forEach`, but never returns undefined. + */ + export function findMap(array: T[], callback: (element: T, index: number) => U | undefined): U { + for (let i = 0, len = array.length; i < len; i++) { + const result = callback(array[i], i); + if (result) { + return result; + } + } + Debug.fail(); + } + export function contains(array: T[], value: T): boolean { if (array) { for (const v of array) { @@ -145,17 +190,44 @@ namespace ts { return count; } + /** + * Filters an array by a predicate function. Returns the same array instance if the predicate is + * true for all elements, otherwise returns a new array instance containing the filtered subset. + */ export function filter(array: T[], f: (x: T) => boolean): T[] { - let result: T[]; if (array) { - result = []; - for (const item of array) { - if (f(item)) { - result.push(item); + const len = array.length; + let i = 0; + while (i < len && f(array[i])) i++; + if (i < len) { + const result = array.slice(0, i); + i++; + while (i < len) { + const item = array[i]; + if (f(item)) { + result.push(item); + } + i++; } + return result; } } - return result; + return array; + } + + export function removeWhere(array: T[], f: (x: T) => boolean): boolean { + let outIndex = 0; + for (const item of array) { + if (!f(item)) { + array[outIndex] = item; + outIndex++; + } + } + if (outIndex !== array.length) { + array.length = outIndex; + return true; + } + return false; } export function filterMutate(array: T[], f: (x: T) => boolean): void { @@ -328,76 +400,140 @@ namespace ts { const hasOwnProperty = Object.prototype.hasOwnProperty; - export function hasProperty(map: Map, key: string): boolean { + /** + * Indicates whether a map-like contains an own property with the specified key. + * + * NOTE: This is intended for use only with MapLike objects. For Map objects, use + * the 'in' operator. + * + * @param map A map-like. + * @param key A property key. + */ + export function hasProperty(map: MapLike, key: string): boolean { return hasOwnProperty.call(map, key); } - export function getKeys(map: Map): string[] { + /** + * Gets the value of an owned property in a map-like. + * + * NOTE: This is intended for use only with MapLike objects. For Map objects, use + * an indexer. + * + * @param map A map-like. + * @param key A property key. + */ + export function getProperty(map: MapLike, key: string): T | undefined { + return hasOwnProperty.call(map, key) ? map[key] : undefined; + } + + /** + * Gets the owned, enumerable property keys of a map-like. + * + * NOTE: This is intended for use with MapLike objects. For Map objects, use + * Object.keys instead as it offers better performance. + * + * @param map A map-like. + */ + export function getOwnKeys(map: MapLike): string[] { const keys: string[] = []; - for (const key in map) { + for (const key in map) if (hasOwnProperty.call(map, key)) { keys.push(key); } return keys; } - export function getProperty(map: Map, key: string): T { - return hasOwnProperty.call(map, key) ? 
map[key] : undefined; - } - - export function isEmpty(map: Map) { - for (const id in map) { - if (hasProperty(map, id)) { - return false; - } + /** + * Enumerates the properties of a Map, invoking a callback and returning the first truthy result. + * + * @param map A map for which properties should be enumerated. + * @param callback A callback to invoke for each property. + */ + export function forEachProperty(map: Map, callback: (value: T, key: string) => U): U { + let result: U; + for (const key in map) { + if (result = callback(map[key], key)) break; } - return true; + return result; } - export function clone(object: T): T { - const result: any = {}; - for (const id in object) { - result[id] = (object)[id]; + /** + * Returns true if a Map has some matching property. + * + * @param map A map whose properties should be tested. + * @param predicate An optional callback used to test each property. + */ + export function someProperties(map: Map, predicate?: (value: T, key: string) => boolean) { + for (const key in map) { + if (!predicate || predicate(map[key], key)) return true; } - return result; + return false; } - export function extend, T2 extends Map<{}>>(first: T1 , second: T2): T1 & T2 { - const result: T1 & T2 = {}; - for (const id in first) { - (result as any)[id] = first[id]; - } - for (const id in second) { - if (!hasProperty(result, id)) { - (result as any)[id] = second[id]; - } + /** + * Performs a shallow copy of the properties from a source Map to a target MapLike + * + * @param source A map from which properties should be copied. + * @param target A map to which properties should be copied. + */ + export function copyProperties(source: Map, target: MapLike): void { + for (const key in source) { + target[key] = source[key]; } - return result; } - export function forEachValue(map: Map, callback: (value: T) => U): U { - let result: U; - for (const id in map) { - if (result = callback(map[id])) break; + /** + * Reduce the properties of a map. + * + * NOTE: This is intended for use with Map objects. For MapLike objects, use + * reduceOwnProperties instead as it offers better runtime safety. + * + * @param map The map to reduce + * @param callback An aggregation function that is called for each entry in the map + * @param initial The initial value for the reduction. + */ + export function reduceProperties(map: Map, callback: (aggregate: U, value: T, key: string) => U, initial: U): U { + let result = initial; + for (const key in map) { + result = callback(result, map[key], String(key)); } return result; } - export function forEachKey(map: Map, callback: (key: string) => U): U { - let result: U; - for (const id in map) { - if (result = callback(id)) break; + /** + * Reduce the properties defined on a map-like (but not from its prototype chain). + * + * NOTE: This is intended for use with MapLike objects. For Map objects, use + * reduceProperties instead as it offers better performance. + * + * @param map The map-like to reduce + * @param callback An aggregation function that is called for each entry in the map + * @param initial The initial value for the reduction. + */ + export function reduceOwnProperties(map: MapLike, callback: (aggregate: U, value: T, key: string) => U, initial: U): U { + let result = initial; + for (const key in map) if (hasOwnProperty.call(map, key)) { + result = callback(result, map[key], String(key)); } return result; } - export function lookUp(map: Map, key: string): T { - return hasProperty(map, key) ? 
map[key] : undefined; - } - - export function copyMap(source: Map, target: Map): void { - for (const p in source) { - target[p] = source[p]; + /** + * Performs a shallow equality comparison of the contents of two map-likes. + * + * @param left A map-like whose properties should be compared. + * @param right A map-like whose properties should be compared. + */ + export function equalOwnProperties(left: MapLike, right: MapLike, equalityComparer?: (left: T, right: T) => boolean) { + if (left === right) return true; + if (!left || !right) return false; + for (const key in left) if (hasOwnProperty.call(left, key)) { + if (!hasOwnProperty.call(right, key) === undefined) return false; + if (equalityComparer ? !equalityComparer(left[key], right[key]) : left[key] !== right[key]) return false; + } + for (const key in right) if (hasOwnProperty.call(right, key)) { + if (!hasOwnProperty.call(left, key)) return false; } + return true; } /** @@ -410,33 +546,40 @@ namespace ts { * the same key with the given 'makeKey' function, then the element with the higher * index in the array will be the one associated with the produced key. */ - export function arrayToMap(array: T[], makeKey: (value: T) => string): Map { - const result: Map = {}; - - forEach(array, value => { - result[makeKey(value)] = value; - }); - + export function arrayToMap(array: T[], makeKey: (value: T) => string): Map; + export function arrayToMap(array: T[], makeKey: (value: T) => string, makeValue: (value: T) => U): Map; + export function arrayToMap(array: T[], makeKey: (value: T) => string, makeValue?: (value: T) => U): Map { + const result = createMap(); + for (const value of array) { + result[makeKey(value)] = makeValue ? makeValue(value) : value; + } return result; } - /** - * Reduce the properties of a map. - * - * @param map The map to reduce - * @param callback An aggregation function that is called for each entry in the map - * @param initial The initial value for the reduction. - */ - export function reduceProperties(map: Map, callback: (aggregate: U, value: T, key: string) => U, initial: U): U { - let result = initial; - if (map) { - for (const key in map) { - if (hasProperty(map, key)) { - result = callback(result, map[key], String(key)); - } + export function cloneMap(map: Map) { + const clone = createMap(); + copyProperties(map, clone); + return clone; + } + + export function clone(object: T): T { + const result: any = {}; + for (const id in object) { + if (hasOwnProperty.call(object, id)) { + result[id] = (object)[id]; } } + return result; + } + export function extend(first: T1 , second: T2): T1 & T2 { + const result: T1 & T2 = {}; + for (const id in second) if (hasOwnProperty.call(second, id)) { + (result as any)[id] = (second as any)[id]; + } + for (const id in first) if (hasOwnProperty.call(first, id)) { + (result as any)[id] = (first as any)[id]; + } return result; } @@ -467,9 +610,7 @@ namespace ts { export let localizedDiagnosticMessages: Map = undefined; export function getLocaleSpecificMessage(message: DiagnosticMessage) { - return localizedDiagnosticMessages && localizedDiagnosticMessages[message.key] - ? localizedDiagnosticMessages[message.key] - : message.message; + return localizedDiagnosticMessages && localizedDiagnosticMessages[message.key] || message.message; } export function createFileDiagnostic(file: SourceFile, start: number, length: number, message: DiagnosticMessage, ...args: any[]): Diagnostic; @@ -1205,6 +1346,8 @@ namespace ts { * List of supported extensions in order of file resolution precedence. 
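     * For example, if "./foo.ts", "./foo.tsx" and "./foo.d.ts" all exist, an import of "./foo"
     * resolves to "./foo.ts" (illustrative).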
*/ export const supportedTypeScriptExtensions = [".ts", ".tsx", ".d.ts"]; + /** Must have ".d.ts" first because if ".ts" goes first, that will be detected as the extension instead of ".d.ts". */ + export const supportedTypescriptExtensionsForExtractExtension = [".d.ts", ".ts", ".tsx"]; export const supportedJavascriptExtensions = [".js", ".jsx"]; const allSupportedExtensions = supportedTypeScriptExtensions.concat(supportedJavascriptExtensions); @@ -1212,6 +1355,14 @@ namespace ts { return options && options.allowJs ? allSupportedExtensions : supportedTypeScriptExtensions; } + export function hasJavaScriptFileExtension(fileName: string) { + return forEach(supportedJavascriptExtensions, extension => fileExtensionIs(fileName, extension)); + } + + export function hasTypeScriptFileExtension(fileName: string) { + return forEach(supportedTypeScriptExtensions, extension => fileExtensionIs(fileName, extension)); + } + export function isSupportedSourceFileName(fileName: string, compilerOptions?: CompilerOptions) { if (!fileName) { return false; } @@ -1287,8 +1438,12 @@ namespace ts { return path; } - export function tryRemoveExtension(path: string, extension: string): string { - return fileExtensionIs(path, extension) ? path.substring(0, path.length - extension.length) : undefined; + export function tryRemoveExtension(path: string, extension: string): string | undefined { + return fileExtensionIs(path, extension) ? removeExtension(path, extension) : undefined; + } + + export function removeExtension(path: string, extension: string): string { + return path.substring(0, path.length - extension.length); } export function isJsxOrTsxExtension(ext: string): boolean { @@ -1386,4 +1541,665 @@ namespace ts { : ((fileName) => fileName.toLowerCase()); } + + /* @internal */ + export function trace(host: ModuleResolutionHost, message: DiagnosticMessage, ...args: any[]): void; + export function trace(host: ModuleResolutionHost, message: DiagnosticMessage): void { + host.trace(formatMessage.apply(undefined, arguments)); + } + + /* @internal */ + export function isTraceEnabled(compilerOptions: CompilerOptions, host: ModuleResolutionHost): boolean { + return compilerOptions.traceResolution && host.trace !== undefined; + } + + /* @internal */ + export function hasZeroOrOneAsteriskCharacter(str: string): boolean { + let seenAsterisk = false; + for (let i = 0; i < str.length; i++) { + if (str.charCodeAt(i) === CharacterCodes.asterisk) { + if (!seenAsterisk) { + seenAsterisk = true; + } + else { + // have already seen asterisk + return false; + } + } + } + return true; + } + + function createResolvedModule(resolvedFileName: string, isExternalLibraryImport: boolean, failedLookupLocations: string[]): ResolvedModuleWithFailedLookupLocations { + return { resolvedModule: resolvedFileName ? { resolvedFileName, isExternalLibraryImport } : undefined, failedLookupLocations }; + } + + /* @internal */ + export function isExternalModuleNameRelative(moduleName: string): boolean { + // TypeScript 1.0 spec (April 2014): 11.2.1 + // An external module name is "relative" if the first term is "." or "..". 
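+        // For example (illustrative):
+        //   isExternalModuleNameRelative("./foo")      returns true
+        //   isExternalModuleNameRelative("../foo/bar") returns true
+        //   isExternalModuleNameRelative("foo")        returns false
+        //   isExternalModuleNameRelative("@scope/foo") returns false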
+ return /^\.\.?($|[\\/])/.test(moduleName); + } + + function moduleHasNonRelativeName(moduleName: string): boolean { + return !(isRootedDiskPath(moduleName) || isExternalModuleNameRelative(moduleName)); + } + + /* @internal */ + export interface ModuleResolutionState { + host: ModuleResolutionHost; + compilerOptions: CompilerOptions; + traceEnabled: boolean; + // skip .tsx files if jsx is not enabled + skipTsx: boolean; + } + + function tryReadTypesSection(packageJsonPath: string, baseDirectory: string, state: ModuleResolutionState): string { + const jsonContent = readJson(packageJsonPath, state.host); + + function tryReadFromField(fieldName: string) { + if (hasProperty(jsonContent, fieldName)) { + const typesFile = (jsonContent)[fieldName]; + if (typeof typesFile === "string") { + const typesFilePath = normalizePath(combinePaths(baseDirectory, typesFile)); + if (state.traceEnabled) { + trace(state.host, Diagnostics.package_json_has_0_field_1_that_references_2, fieldName, typesFile, typesFilePath); + } + return typesFilePath; + } + else { + if (state.traceEnabled) { + trace(state.host, Diagnostics.Expected_type_of_0_field_in_package_json_to_be_string_got_1, fieldName, typeof typesFile); + } + } + } + } + + const typesFilePath = tryReadFromField("typings") || tryReadFromField("types"); + if (typesFilePath) { + return typesFilePath; + } + + // Use the main module for inferring types if no types package specified and the allowJs is set + if (state.compilerOptions.allowJs && jsonContent.main && typeof jsonContent.main === "string") { + if (state.traceEnabled) { + trace(state.host, Diagnostics.No_types_specified_in_package_json_but_allowJs_is_set_so_returning_main_value_of_0, jsonContent.main); + } + const mainFilePath = normalizePath(combinePaths(baseDirectory, jsonContent.main)); + return mainFilePath; + } + return undefined; + } + + /* @internal */ + export function readJson(path: string, host: ModuleResolutionHost): { typings?: string, types?: string, main?: string } { + try { + const jsonText = host.readFile(path); + return jsonText ? JSON.parse(jsonText) : {}; + } + catch (e) { + // gracefully handle if readFile fails or returns not JSON + return {}; + } + } + + /* @internal */ + export function getEmitModuleKind(compilerOptions: CompilerOptions) { + return typeof compilerOptions.module === "number" ? + compilerOptions.module : + getEmitScriptTarget(compilerOptions) === ScriptTarget.ES6 ? ModuleKind.ES6 : ModuleKind.CommonJS; + } + + /* @internal */ + export function getEmitScriptTarget(compilerOptions: CompilerOptions) { + return compilerOptions.target || ScriptTarget.ES3; + } + + export function resolveModuleName(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost): ResolvedModuleWithFailedLookupLocations { + const traceEnabled = isTraceEnabled(compilerOptions, host); + if (traceEnabled) { + trace(host, Diagnostics.Resolving_module_0_from_1, moduleName, containingFile); + } + + let moduleResolution = compilerOptions.moduleResolution; + if (moduleResolution === undefined) { + moduleResolution = getEmitModuleKind(compilerOptions) === ModuleKind.CommonJS ? 
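+                // When --moduleResolution is not specified it is derived from the emit module kind:
+                // CommonJS emit defaults to Node-style resolution, all other module kinds to classic
+                // (illustrative restatement of the expression below).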
ModuleResolutionKind.NodeJs : ModuleResolutionKind.Classic; + if (traceEnabled) { + trace(host, Diagnostics.Module_resolution_kind_is_not_specified_using_0, ModuleResolutionKind[moduleResolution]); + } + } + else { + if (traceEnabled) { + trace(host, Diagnostics.Explicitly_specified_module_resolution_kind_Colon_0, ModuleResolutionKind[moduleResolution]); + } + } + + let result: ResolvedModuleWithFailedLookupLocations; + switch (moduleResolution) { + case ModuleResolutionKind.NodeJs: + result = nodeModuleNameResolver(moduleName, containingFile, compilerOptions, host); + break; + case ModuleResolutionKind.Classic: + result = classicNameResolver(moduleName, containingFile, compilerOptions, host); + break; + } + + if (traceEnabled) { + if (result.resolvedModule) { + trace(host, Diagnostics.Module_name_0_was_successfully_resolved_to_1, moduleName, result.resolvedModule.resolvedFileName); + } + else { + trace(host, Diagnostics.Module_name_0_was_not_resolved, moduleName); + } + } + + return result; + } + + /* + * Every module resolution kind can has its specific understanding how to load module from a specific path on disk + * I.e. for path '/a/b/c': + * - Node loader will first to try to check if '/a/b/c' points to a file with some supported extension and if this fails + * it will try to load module from directory: directory '/a/b/c' should exist and it should have either 'package.json' with + * 'typings' entry or file 'index' with some supported extension + * - Classic loader will only try to interpret '/a/b/c' as file. + */ + type ResolutionKindSpecificLoader = (candidate: string, extensions: string[], failedLookupLocations: string[], onlyRecordFailures: boolean, state: ModuleResolutionState) => string; + + /** + * Any module resolution kind can be augmented with optional settings: 'baseUrl', 'paths' and 'rootDirs' - they are used to + * mitigate differences between design time structure of the project and its runtime counterpart so the same import name + * can be resolved successfully by TypeScript compiler and runtime module loader. + * If these settings are set then loading procedure will try to use them to resolve module name and it can of failure it will + * fallback to standard resolution routine. + * + * - baseUrl - this setting controls how non-relative module names are resolved. If this setting is specified then non-relative + * names will be resolved relative to baseUrl: i.e. if baseUrl is '/a/b' then candidate location to resolve module name 'c/d' will + * be '/a/b/c/d' + * - paths - this setting can only be used when baseUrl is specified. allows to tune how non-relative module names + * will be resolved based on the content of the module name. + * Structure of 'paths' compiler options + * 'paths': { + * pattern-1: [...substitutions], + * pattern-2: [...substitutions], + * ... + * pattern-n: [...substitutions] + * } + * Pattern here is a string that can contain zero or one '*' character. During module resolution module name will be matched against + * all patterns in the list. Matching for patterns that don't contain '*' means that module name must be equal to pattern respecting the case. + * If pattern contains '*' then to match pattern "*" module name must start with the and end with . + * denotes part of the module name between and . + * If module name can be matches with multiple patterns then pattern with the longest prefix will be picked. 
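+     * (For example, the module name "components/button" prefers the pattern "components/*" over "*",
+     * because the prefix "components/" is longer - illustrative.)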
+ * After selecting pattern we'll use list of substitutions to get candidate locations of the module and the try to load module + * from the candidate location. + * Substitution is a string that can contain zero or one '*'. To get candidate location from substitution we'll pick every + * substitution in the list and replace '*' with string. If candidate location is not rooted it + * will be converted to absolute using baseUrl. + * For example: + * baseUrl: /a/b/c + * "paths": { + * // match all module names + * "*": [ + * "*", // use matched name as is, + * // will be looked as /a/b/c/ + * + * "folder1/*" // substitution will convert matched name to 'folder1/', + * // since it is not rooted then final candidate location will be /a/b/c/folder1/ + * ], + * // match module names that start with 'components/' + * "components/*": [ "/root/components/*" ] // substitution will convert /components/folder1/ to '/root/components/folder1/', + * // it is rooted so it will be final candidate location + * } + * + * 'rootDirs' allows the project to be spreaded across multiple locations and resolve modules with relative names as if + * they were in the same location. For example lets say there are two files + * '/local/src/content/file1.ts' + * '/shared/components/contracts/src/content/protocols/file2.ts' + * After bundling content of '/shared/components/contracts/src' will be merged with '/local/src' so + * if file1 has the following import 'import {x} from "./protocols/file2"' it will be resolved successfully in runtime. + * 'rootDirs' provides the way to tell compiler that in order to get the whole project it should behave as if content of all + * root dirs were merged together. + * I.e. for the example above 'rootDirs' will have two entries: [ '/local/src', '/shared/components/contracts/src' ]. + * Compiler will first convert './protocols/file2' into absolute path relative to the location of containing file: + * '/local/src/content/protocols/file2' and try to load it - failure. + * Then it will search 'rootDirs' looking for a longest matching prefix of this absolute path and if such prefix is found - absolute path will + * be converted to a path relative to found rootDir entry './content/protocols/file2' (*). As a last step compiler will check all remaining + * entries in 'rootDirs', use them to build absolute path out of (*) and try to resolve module from this location. 
+ */ + function tryLoadModuleUsingOptionalResolutionSettings(moduleName: string, containingDirectory: string, loader: ResolutionKindSpecificLoader, + failedLookupLocations: string[], supportedExtensions: string[], state: ModuleResolutionState): string { + + if (moduleHasNonRelativeName(moduleName)) { + return tryLoadModuleUsingBaseUrl(moduleName, loader, failedLookupLocations, supportedExtensions, state); + } + else { + return tryLoadModuleUsingRootDirs(moduleName, containingDirectory, loader, failedLookupLocations, supportedExtensions, state); + } + } + + function tryLoadModuleUsingRootDirs(moduleName: string, containingDirectory: string, loader: ResolutionKindSpecificLoader, + failedLookupLocations: string[], supportedExtensions: string[], state: ModuleResolutionState): string { + + if (!state.compilerOptions.rootDirs) { + return undefined; + } + + if (state.traceEnabled) { + trace(state.host, Diagnostics.rootDirs_option_is_set_using_it_to_resolve_relative_module_name_0, moduleName); + } + + const candidate = normalizePath(combinePaths(containingDirectory, moduleName)); + + let matchedRootDir: string; + let matchedNormalizedPrefix: string; + for (const rootDir of state.compilerOptions.rootDirs) { + // rootDirs are expected to be absolute + // in case of tsconfig.json this will happen automatically - compiler will expand relative names + // using location of tsconfig.json as base location + let normalizedRoot = normalizePath(rootDir); + if (!endsWith(normalizedRoot, directorySeparator)) { + normalizedRoot += directorySeparator; + } + const isLongestMatchingPrefix = + startsWith(candidate, normalizedRoot) && + (matchedNormalizedPrefix === undefined || matchedNormalizedPrefix.length < normalizedRoot.length); + + if (state.traceEnabled) { + trace(state.host, Diagnostics.Checking_if_0_is_the_longest_matching_prefix_for_1_2, normalizedRoot, candidate, isLongestMatchingPrefix); + } + + if (isLongestMatchingPrefix) { + matchedNormalizedPrefix = normalizedRoot; + matchedRootDir = rootDir; + } + } + if (matchedNormalizedPrefix) { + if (state.traceEnabled) { + trace(state.host, Diagnostics.Longest_matching_prefix_for_0_is_1, candidate, matchedNormalizedPrefix); + } + const suffix = candidate.substr(matchedNormalizedPrefix.length); + + // first - try to load from a initial location + if (state.traceEnabled) { + trace(state.host, Diagnostics.Loading_0_from_the_root_dir_1_candidate_location_2, suffix, matchedNormalizedPrefix, candidate); + } + const resolvedFileName = loader(candidate, supportedExtensions, failedLookupLocations, !directoryProbablyExists(containingDirectory, state.host), state); + if (resolvedFileName) { + return resolvedFileName; + } + + if (state.traceEnabled) { + trace(state.host, Diagnostics.Trying_other_entries_in_rootDirs); + } + // then try to resolve using remaining entries in rootDirs + for (const rootDir of state.compilerOptions.rootDirs) { + if (rootDir === matchedRootDir) { + // skip the initially matched entry + continue; + } + const candidate = combinePaths(normalizePath(rootDir), suffix); + if (state.traceEnabled) { + trace(state.host, Diagnostics.Loading_0_from_the_root_dir_1_candidate_location_2, suffix, rootDir, candidate); + } + const baseDirectory = getDirectoryPath(candidate); + const resolvedFileName = loader(candidate, supportedExtensions, failedLookupLocations, !directoryProbablyExists(baseDirectory, state.host), state); + if (resolvedFileName) { + return resolvedFileName; + } + } + if (state.traceEnabled) { + trace(state.host, 
Diagnostics.Module_resolution_using_rootDirs_has_failed); + } + } + return undefined; + } + + function tryLoadModuleUsingBaseUrl(moduleName: string, loader: ResolutionKindSpecificLoader, failedLookupLocations: string[], + supportedExtensions: string[], state: ModuleResolutionState): string { + + if (!state.compilerOptions.baseUrl) { + return undefined; + } + if (state.traceEnabled) { + trace(state.host, Diagnostics.baseUrl_option_is_set_to_0_using_this_value_to_resolve_non_relative_module_name_1, state.compilerOptions.baseUrl, moduleName); + } + + // string is for exact match + let matchedPattern: Pattern | string | undefined = undefined; + if (state.compilerOptions.paths) { + if (state.traceEnabled) { + trace(state.host, Diagnostics.paths_option_is_specified_looking_for_a_pattern_to_match_module_name_0, moduleName); + } + matchedPattern = matchPatternOrExact(getOwnKeys(state.compilerOptions.paths), moduleName); + } + + if (matchedPattern) { + const matchedStar = typeof matchedPattern === "string" ? undefined : matchedText(matchedPattern, moduleName); + const matchedPatternText = typeof matchedPattern === "string" ? matchedPattern : patternText(matchedPattern); + if (state.traceEnabled) { + trace(state.host, Diagnostics.Module_name_0_matched_pattern_1, moduleName, matchedPatternText); + } + for (const subst of state.compilerOptions.paths[matchedPatternText]) { + const path = matchedStar ? subst.replace("*", matchedStar) : subst; + const candidate = normalizePath(combinePaths(state.compilerOptions.baseUrl, path)); + if (state.traceEnabled) { + trace(state.host, Diagnostics.Trying_substitution_0_candidate_module_location_Colon_1, subst, path); + } + const resolvedFileName = loader(candidate, supportedExtensions, failedLookupLocations, !directoryProbablyExists(getDirectoryPath(candidate), state.host), state); + if (resolvedFileName) { + return resolvedFileName; + } + } + return undefined; + } + else { + const candidate = normalizePath(combinePaths(state.compilerOptions.baseUrl, moduleName)); + + if (state.traceEnabled) { + trace(state.host, Diagnostics.Resolving_module_name_0_relative_to_base_url_1_2, moduleName, state.compilerOptions.baseUrl, candidate); + } + + return loader(candidate, supportedExtensions, failedLookupLocations, !directoryProbablyExists(getDirectoryPath(candidate), state.host), state); + } + } + + /** + * patternStrings contains both pattern strings (containing "*") and regular strings. + * Return an exact match if possible, or a pattern match, or undefined. + * (These are verified by verifyCompilerOptions to have 0 or 1 "*" characters.) + */ + function matchPatternOrExact(patternStrings: string[], candidate: string): string | Pattern | undefined { + const patterns: Pattern[] = []; + for (const patternString of patternStrings) { + const pattern = tryParsePattern(patternString); + if (pattern) { + patterns.push(pattern); + } + else if (patternString === candidate) { + // pattern was matched as is - no need to search further + return patternString; + } + } + + return findBestPatternMatch(patterns, _ => _, candidate); + } + + function patternText({prefix, suffix}: Pattern): string { + return `${prefix}*${suffix}`; + } + + /** + * Given that candidate matches pattern, returns the text matching the '*'. 
+ * E.g.: matchedText(tryParsePattern("foo*baz"), "foobarbaz") === "bar" + */ + function matchedText(pattern: Pattern, candidate: string): string { + Debug.assert(isPatternMatch(pattern, candidate)); + return candidate.substr(pattern.prefix.length, candidate.length - pattern.suffix.length); + } + + /** Return the object corresponding to the best pattern to match `candidate`. */ + /* @internal */ + export function findBestPatternMatch(values: T[], getPattern: (value: T) => Pattern, candidate: string): T | undefined { + let matchedValue: T | undefined = undefined; + // use length of prefix as betterness criteria + let longestMatchPrefixLength = -1; + + for (const v of values) { + const pattern = getPattern(v); + if (isPatternMatch(pattern, candidate) && pattern.prefix.length > longestMatchPrefixLength) { + longestMatchPrefixLength = pattern.prefix.length; + matchedValue = v; + } + } + + return matchedValue; + } + + function isPatternMatch({prefix, suffix}: Pattern, candidate: string) { + return candidate.length >= prefix.length + suffix.length && + startsWith(candidate, prefix) && + endsWith(candidate, suffix); + } + + /* @internal */ + export function tryParsePattern(pattern: string): Pattern | undefined { + // This should be verified outside of here and a proper error thrown. + Debug.assert(hasZeroOrOneAsteriskCharacter(pattern)); + const indexOfStar = pattern.indexOf("*"); + return indexOfStar === -1 ? undefined : { + prefix: pattern.substr(0, indexOfStar), + suffix: pattern.substr(indexOfStar + 1) + }; + } + + export function nodeModuleNameResolver(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost): ResolvedModuleWithFailedLookupLocations { + const containingDirectory = getDirectoryPath(containingFile); + const supportedExtensions = getSupportedExtensions(compilerOptions); + const traceEnabled = isTraceEnabled(compilerOptions, host); + + const failedLookupLocations: string[] = []; + const state = { compilerOptions, host, traceEnabled, skipTsx: false }; + let resolvedFileName = tryLoadModuleUsingOptionalResolutionSettings(moduleName, containingDirectory, nodeLoadModuleByRelativeName, + failedLookupLocations, supportedExtensions, state); + + let isExternalLibraryImport = false; + if (!resolvedFileName) { + if (moduleHasNonRelativeName(moduleName)) { + if (traceEnabled) { + trace(host, Diagnostics.Loading_module_0_from_node_modules_folder, moduleName); + } + resolvedFileName = loadModuleFromNodeModules(moduleName, containingDirectory, failedLookupLocations, state); + isExternalLibraryImport = resolvedFileName !== undefined; + } + else { + const candidate = normalizePath(combinePaths(containingDirectory, moduleName)); + resolvedFileName = nodeLoadModuleByRelativeName(candidate, supportedExtensions, failedLookupLocations, /*onlyRecordFailures*/ false, state); + } + } + + if (resolvedFileName && host.realpath) { + const originalFileName = resolvedFileName; + resolvedFileName = normalizePath(host.realpath(resolvedFileName)); + if (traceEnabled) { + trace(host, Diagnostics.Resolving_real_path_for_0_result_1, originalFileName, resolvedFileName); + } + } + + return createResolvedModule(resolvedFileName, isExternalLibraryImport, failedLookupLocations); + } + + function nodeLoadModuleByRelativeName(candidate: string, supportedExtensions: string[], failedLookupLocations: string[], + onlyRecordFailures: boolean, state: ModuleResolutionState): string { + + if (state.traceEnabled) { + trace(state.host, 
Diagnostics.Loading_module_as_file_Slash_folder_candidate_module_location_0, candidate); + } + + const resolvedFileName = loadModuleFromFile(candidate, supportedExtensions, failedLookupLocations, onlyRecordFailures, state); + + return resolvedFileName || loadNodeModuleFromDirectory(supportedExtensions, candidate, failedLookupLocations, onlyRecordFailures, state); + } + + /* @internal */ + export function directoryProbablyExists(directoryName: string, host: { directoryExists?: (directoryName: string) => boolean }): boolean { + // if host does not support 'directoryExists' assume that directory will exist + return !host.directoryExists || host.directoryExists(directoryName); + } + + /** + * @param {boolean} onlyRecordFailures - if true then function won't try to actually load files but instead record all attempts as failures. This flag is necessary + * in cases when we know upfront that all load attempts will fail (because containing folder does not exists) however we still need to record all failed lookup locations. + */ + function loadModuleFromFile(candidate: string, extensions: string[], failedLookupLocation: string[], onlyRecordFailures: boolean, state: ModuleResolutionState): string | undefined { + // First, try adding an extension. An import of "foo" could be matched by a file "foo.ts", or "foo.js" by "foo.js.ts" + const resolvedByAddingExtension = tryAddingExtensions(candidate, extensions, failedLookupLocation, onlyRecordFailures, state); + if (resolvedByAddingExtension) { + return resolvedByAddingExtension; + } + + // If that didn't work, try stripping a ".js" or ".jsx" extension and replacing it with a TypeScript one; + // e.g. "./foo.js" can be matched by "./foo.ts" or "./foo.d.ts" + if (hasJavaScriptFileExtension(candidate)) { + const extensionless = removeFileExtension(candidate); + if (state.traceEnabled) { + const extension = candidate.substring(extensionless.length); + trace(state.host, Diagnostics.File_name_0_has_a_1_extension_stripping_it, candidate, extension); + } + return tryAddingExtensions(extensionless, extensions, failedLookupLocation, onlyRecordFailures, state); + } + } + + /** Try to return an existing file that adds one of the `extensions` to `candidate`. */ + function tryAddingExtensions(candidate: string, extensions: string[], failedLookupLocation: string[], onlyRecordFailures: boolean, state: ModuleResolutionState): string | undefined { + if (!onlyRecordFailures) { + // check if containing folder exists - if it doesn't then just record failures for all supported extensions without disk probing + const directory = getDirectoryPath(candidate); + if (directory) { + onlyRecordFailures = !directoryProbablyExists(directory, state.host); + } + } + return forEach(extensions, ext => + !(state.skipTsx && isJsxOrTsxExtension(ext)) && tryFile(candidate + ext, failedLookupLocation, onlyRecordFailures, state)); + } + + /** Return the file if it exists. 
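+      * Otherwise the file name is recorded in `failedLookupLocation` and undefined is returned.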
*/ + function tryFile(fileName: string, failedLookupLocation: string[], onlyRecordFailures: boolean, state: ModuleResolutionState): string | undefined { + if (!onlyRecordFailures && state.host.fileExists(fileName)) { + if (state.traceEnabled) { + trace(state.host, Diagnostics.File_0_exist_use_it_as_a_name_resolution_result, fileName); + } + return fileName; + } + else { + if (state.traceEnabled) { + trace(state.host, Diagnostics.File_0_does_not_exist, fileName); + } + failedLookupLocation.push(fileName); + return undefined; + } + } + + /* @internal */ + export function loadNodeModuleFromDirectory(extensions: string[], candidate: string, failedLookupLocation: string[], onlyRecordFailures: boolean, state: ModuleResolutionState): string { + const packageJsonPath = pathToPackageJson(candidate); + const directoryExists = !onlyRecordFailures && directoryProbablyExists(candidate, state.host); + if (directoryExists && state.host.fileExists(packageJsonPath)) { + if (state.traceEnabled) { + trace(state.host, Diagnostics.Found_package_json_at_0, packageJsonPath); + } + const typesFile = tryReadTypesSection(packageJsonPath, candidate, state); + if (typesFile) { + const onlyRecordFailures = !directoryProbablyExists(getDirectoryPath(typesFile), state.host); + // The package.json "typings" property must specify the file with extension, so just try that exact filename. + const result = tryFile(typesFile, failedLookupLocation, onlyRecordFailures, state); + if (result) { + return result; + } + } + else { + if (state.traceEnabled) { + trace(state.host, Diagnostics.package_json_does_not_have_types_field); + } + } + } + else { + if (state.traceEnabled) { + trace(state.host, Diagnostics.File_0_does_not_exist, packageJsonPath); + } + // record package json as one of failed lookup locations - in the future if this file will appear it will invalidate resolution results + failedLookupLocation.push(packageJsonPath); + } + + return loadModuleFromFile(combinePaths(candidate, "index"), extensions, failedLookupLocation, !directoryExists, state); + } + + /* @internal */ + export function pathToPackageJson(directory: string): string { + return combinePaths(directory, "package.json"); + } + + function loadModuleFromNodeModulesFolder(moduleName: string, directory: string, failedLookupLocations: string[], state: ModuleResolutionState): string { + const nodeModulesFolder = combinePaths(directory, "node_modules"); + const nodeModulesFolderExists = directoryProbablyExists(nodeModulesFolder, state.host); + const candidate = normalizePath(combinePaths(nodeModulesFolder, moduleName)); + const supportedExtensions = getSupportedExtensions(state.compilerOptions); + + let result = loadModuleFromFile(candidate, supportedExtensions, failedLookupLocations, !nodeModulesFolderExists, state); + if (result) { + return result; + } + result = loadNodeModuleFromDirectory(supportedExtensions, candidate, failedLookupLocations, !nodeModulesFolderExists, state); + if (result) { + return result; + } + } + + /* @internal */ + export function loadModuleFromNodeModules(moduleName: string, directory: string, failedLookupLocations: string[], state: ModuleResolutionState): string { + directory = normalizeSlashes(directory); + while (true) { + const baseName = getBaseFileName(directory); + if (baseName !== "node_modules") { + // Try to load source from the package + const packageResult = loadModuleFromNodeModulesFolder(moduleName, directory, failedLookupLocations, state); + if (packageResult && hasTypeScriptFileExtension(packageResult)) { + // Always 
prefer a TypeScript (.ts, .tsx, .d.ts) file shipped with the package + return packageResult; + } + else { + // Else prefer a types package over non-TypeScript results (e.g. JavaScript files) + const typesResult = loadModuleFromNodeModulesFolder(combinePaths("@types", moduleName), directory, failedLookupLocations, state); + if (typesResult || packageResult) { + return typesResult || packageResult; + } + } + } + + const parentPath = getDirectoryPath(directory); + if (parentPath === directory) { + break; + } + + directory = parentPath; + } + return undefined; + } + + export function classicNameResolver(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost): ResolvedModuleWithFailedLookupLocations { + const traceEnabled = isTraceEnabled(compilerOptions, host); + const state = { compilerOptions, host, traceEnabled, skipTsx: !compilerOptions.jsx }; + const failedLookupLocations: string[] = []; + const supportedExtensions = getSupportedExtensions(compilerOptions); + let containingDirectory = getDirectoryPath(containingFile); + + const resolvedFileName = tryLoadModuleUsingOptionalResolutionSettings(moduleName, containingDirectory, loadModuleFromFile, failedLookupLocations, supportedExtensions, state); + if (resolvedFileName) { + return createResolvedModule(resolvedFileName, /*isExternalLibraryImport*/false, failedLookupLocations); + } + + let referencedSourceFile: string; + if (moduleHasNonRelativeName(moduleName)) { + while (true) { + const searchName = normalizePath(combinePaths(containingDirectory, moduleName)); + referencedSourceFile = loadModuleFromFile(searchName, supportedExtensions, failedLookupLocations, /*onlyRecordFailures*/ false, state); + if (referencedSourceFile) { + break; + } + const parentPath = getDirectoryPath(containingDirectory); + if (parentPath === containingDirectory) { + break; + } + containingDirectory = parentPath; + } + } + else { + const candidate = normalizePath(combinePaths(containingDirectory, moduleName)); + referencedSourceFile = loadModuleFromFile(candidate, supportedExtensions, failedLookupLocations, /*onlyRecordFailures*/ false, state); + } + + + return referencedSourceFile + ? 
{ resolvedModule: { resolvedFileName: referencedSourceFile }, failedLookupLocations } + : { resolvedModule: undefined, failedLookupLocations }; + } } diff --git a/src/compiler/declarationEmitter.ts b/src/compiler/declarationEmitter.ts index d93a8a0aed027..3642ee7f10a50 100644 --- a/src/compiler/declarationEmitter.ts +++ b/src/compiler/declarationEmitter.ts @@ -157,9 +157,7 @@ namespace ts { if (usedTypeDirectiveReferences) { for (const directive in usedTypeDirectiveReferences) { - if (hasProperty(usedTypeDirectiveReferences, directive)) { - referencesOutput += `/// ${newLine}`; - } + referencesOutput += `/// ${newLine}`; } } @@ -269,10 +267,10 @@ namespace ts { } if (!usedTypeDirectiveReferences) { - usedTypeDirectiveReferences = {}; + usedTypeDirectiveReferences = createMap(); } for (const directive of typeReferenceDirectives) { - if (!hasProperty(usedTypeDirectiveReferences, directive)) { + if (!(directive in usedTypeDirectiveReferences)) { usedTypeDirectiveReferences[directive] = directive; } } @@ -441,7 +439,7 @@ namespace ts { } } - function emitEntityName(entityName: EntityName | PropertyAccessExpression) { + function emitEntityName(entityName: EntityNameOrEntityNameExpression) { const visibilityResult = resolver.isEntityNameVisible(entityName, // Aliases can be written asynchronously so use correct enclosing declaration entityName.parent.kind === SyntaxKind.ImportEqualsDeclaration ? entityName.parent : enclosingDeclaration); @@ -452,9 +450,9 @@ namespace ts { } function emitExpressionWithTypeArguments(node: ExpressionWithTypeArguments) { - if (isSupportedExpressionWithTypeArguments(node)) { + if (isEntityNameExpression(node.expression)) { Debug.assert(node.expression.kind === SyntaxKind.Identifier || node.expression.kind === SyntaxKind.PropertyAccessExpression); - emitEntityName(node.expression); + emitEntityName(node.expression); if (node.typeArguments) { write("<"); emitCommaList(node.typeArguments, emitType); @@ -537,14 +535,14 @@ namespace ts { // do not need to keep track of created temp names. function getExportDefaultTempVariableName(): string { const baseName = "_default"; - if (!hasProperty(currentIdentifiers, baseName)) { + if (!(baseName in currentIdentifiers)) { return baseName; } let count = 0; while (true) { count++; const name = baseName + "_" + count; - if (!hasProperty(currentIdentifiers, name)) { + if (!(name in currentIdentifiers)) { return name; } } @@ -1019,7 +1017,7 @@ namespace ts { } function emitTypeOfTypeReference(node: ExpressionWithTypeArguments) { - if (isSupportedExpressionWithTypeArguments(node)) { + if (isEntityNameExpression(node.expression)) { emitTypeWithNewGetSymbolAccessibilityDiagnostic(node, getHeritageClauseVisibilityError); } else if (!isImplementsList && node.expression.kind === SyntaxKind.NullKeyword) { diff --git a/src/compiler/diagnosticMessages.json b/src/compiler/diagnosticMessages.json index 044c74c17012d..6210a20afa64c 100644 --- a/src/compiler/diagnosticMessages.json +++ b/src/compiler/diagnosticMessages.json @@ -1947,6 +1947,14 @@ "category": "Error", "code": 2689 }, + "A class must be declared after its base class.": { + "category": "Error", + "code": 2690 + }, + "An import path cannot end with a '{0}' extension. Consider importing '{1}' instead.": { + "category": "Error", + "code": 2691 + }, "Import declaration '{0}' is using private name '{1}'.": { "category": "Error", "code": 4000 @@ -2227,7 +2235,7 @@ "category": "Error", "code": 4082 }, - "Conflicting library definitions for '{0}' found at '{1}' and '{2}'. 
Copy the correct file to the 'typings' folder to resolve this conflict.": { + "Conflicting definitions for '{0}' found at '{1}' and '{2}'. Consider installing a specific version of this library to resolve the conflict.": { "category": "Message", "code": 4090 }, @@ -2824,6 +2832,10 @@ "category": "Message", "code": 6137 }, + "Property '{0}' is declared but never used.": { + "category": "Error", + "code": 6138 + }, "Variable '{0}' implicitly has an '{1}' type.": { "category": "Error", "code": 7005 diff --git a/src/compiler/emitter.ts b/src/compiler/emitter.ts index 64bb4b70e9048..ca015edc73ea3 100644 --- a/src/compiler/emitter.ts +++ b/src/compiler/emitter.ts @@ -24,7 +24,7 @@ namespace ts { Return = 1 << 3 } - const entities: Map = { + const entities = createMap({ "quot": 0x0022, "amp": 0x0026, "apos": 0x0027, @@ -278,7 +278,7 @@ namespace ts { "clubs": 0x2663, "hearts": 0x2665, "diams": 0x2666 - }; + }); // Flags enum to track count of temp variables and a few dedicated names const enum TempFlags { @@ -407,7 +407,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge function isUniqueLocalName(name: string, container: Node): boolean { for (let node = container; isNodeDescendentOf(node, container); node = node.nextContainer) { - if (node.locals && hasProperty(node.locals, name)) { + if (node.locals && name in node.locals) { // We conservatively include alias symbols to cover cases where they're emitted as locals if (node.locals[name].flags & (SymbolFlags.Value | SymbolFlags.ExportValue | SymbolFlags.Alias)) { return false; @@ -489,13 +489,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge function setLabeledJump(state: ConvertedLoopState, isBreak: boolean, labelText: string, labelMarker: string): void { if (isBreak) { if (!state.labeledNonLocalBreaks) { - state.labeledNonLocalBreaks = {}; + state.labeledNonLocalBreaks = createMap(); } state.labeledNonLocalBreaks[labelText] = labelMarker; } else { if (!state.labeledNonLocalContinues) { - state.labeledNonLocalContinues = {}; + state.labeledNonLocalContinues = createMap(); } state.labeledNonLocalContinues[labelText] = labelMarker; } @@ -577,27 +577,27 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge const setSourceMapWriterEmit = compilerOptions.sourceMap || compilerOptions.inlineSourceMap ? 
changeSourceMapEmit : function (writer: SourceMapWriter) { }; - const moduleEmitDelegates: Map<(node: SourceFile, emitRelativePathAsModuleName?: boolean) => void> = { + const moduleEmitDelegates = createMap<(node: SourceFile, emitRelativePathAsModuleName?: boolean) => void>({ [ModuleKind.ES6]: emitES6Module, [ModuleKind.AMD]: emitAMDModule, [ModuleKind.System]: emitSystemModule, [ModuleKind.UMD]: emitUMDModule, [ModuleKind.CommonJS]: emitCommonJSModule, - }; + }); - const bundleEmitDelegates: Map<(node: SourceFile, emitRelativePathAsModuleName?: boolean) => void> = { + const bundleEmitDelegates = createMap<(node: SourceFile, emitRelativePathAsModuleName?: boolean) => void>({ [ModuleKind.ES6]() {}, [ModuleKind.AMD]: emitAMDModule, [ModuleKind.System]: emitSystemModule, [ModuleKind.UMD]() {}, [ModuleKind.CommonJS]() {}, - }; + }); return doEmit; function doEmit(jsFilePath: string, sourceMapFilePath: string, sourceFiles: SourceFile[], isBundledEmit: boolean) { sourceMap.initialize(jsFilePath, sourceMapFilePath, sourceFiles, isBundledEmit); - generatedNameSet = {}; + generatedNameSet = createMap(); nodeToGeneratedName = []; decoratedClassAliases = []; isOwnFileEmit = !isBundledEmit; @@ -669,8 +669,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge function isUniqueName(name: string): boolean { return !resolver.hasGlobalName(name) && - !hasProperty(currentFileIdentifiers, name) && - !hasProperty(generatedNameSet, name); + !(name in currentFileIdentifiers) && + !(name in generatedNameSet); } // Return the next available name in the pattern _a ... _z, _0, _1, ... @@ -2578,7 +2578,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge operand = (operand).expression; } - // We have an expression of the form: (SubExpr) + // We have an expression of the form: (SubExpr) or (SubExpr as Type) // Emitting this as (SubExpr) is really not desirable. We would like to emit the subexpr as is. // Omitting the parentheses, however, could cause change in the semantics of the generated // code if the casted expression has a lower precedence than the rest of the expression, e.g.: @@ -2592,6 +2592,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge operand.kind !== SyntaxKind.DeleteExpression && operand.kind !== SyntaxKind.PostfixUnaryExpression && operand.kind !== SyntaxKind.NewExpression && + !(operand.kind === SyntaxKind.BinaryExpression && node.expression.kind === SyntaxKind.AsExpression) && !(operand.kind === SyntaxKind.CallExpression && node.parent.kind === SyntaxKind.NewExpression) && !(operand.kind === SyntaxKind.FunctionExpression && node.parent.kind === SyntaxKind.CallExpression) && !(operand.kind === SyntaxKind.NumericLiteral && node.parent.kind === SyntaxKind.PropertyAccessExpression)) { @@ -2645,7 +2646,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge return false; } - return !exportEquals && exportSpecifiers && hasProperty(exportSpecifiers, (node).text); + return !exportEquals && exportSpecifiers && (node).text in exportSpecifiers; } function emitPrefixUnaryExpression(node: PrefixUnaryExpression) { @@ -3256,13 +3257,13 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge // Don't initialize seen unless we have at least one element. // Emit a comma to separate for all but the first element. 
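Several hunks in emitter.ts above and below replace plain object literals with `createMap` and drop `hasProperty` in favour of the bare `in` operator. The sketch below illustrates why that is safe; the helper name `createDictionary` and its exact shape are assumptions for illustration only, not the compiler's actual `createMap`:

```ts
// Illustrative sketch: a string-keyed store backed by an object with no
// prototype, so `"toString" in dict`, `dict["constructor"]` or a bare
// `for...in` loop can never be confused with members inherited from
// Object.prototype.
function createDictionary<T>(template?: { [key: string]: T }): { [key: string]: T } {
    const dict: { [key: string]: T } = Object.create(null); // no prototype chain
    if (template) {
        for (const key in template) {
            dict[key] = template[key];
        }
    }
    return dict;
}

const seen = createDictionary<string>();
const collides = "hasOwnProperty" in seen; // false; would be true for a plain `{}` literal
seen["toString"] = "ok";                   // any identifier text is now a safe key
```

That property is what checks such as `id.text in seen` and `name in generatedNameSet` in these hunks rely on.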
if (!seen) { - seen = {}; + seen = createMap(); } else { write(", "); } - if (!hasProperty(seen, id.text)) { + if (!(id.text in seen)) { emit(id); seen[id.text] = id.text; } @@ -3855,7 +3856,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge if (convertedLoopState) { if (!convertedLoopState.labels) { - convertedLoopState.labels = {}; + convertedLoopState.labels = createMap(); } convertedLoopState.labels[node.label.text] = node.label.text; } @@ -3969,7 +3970,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge return; } - if (!exportEquals && exportSpecifiers && hasProperty(exportSpecifiers, name.text)) { + if (!exportEquals && exportSpecifiers && name.text in exportSpecifiers) { for (const specifier of exportSpecifiers[name.text]) { writeLine(); emitStart(specifier.name); @@ -5310,18 +5311,22 @@ const _super = (function (geti, seti) { emitSignatureParameters(ctor); } else { - // Based on EcmaScript6 section 14.5.14: Runtime Semantics: ClassDefinitionEvaluation. - // If constructor is empty, then, - // If ClassHeritageopt is present, then - // Let constructor be the result of parsing the String "constructor(... args){ super (...args);}" using the syntactic grammar with the goal symbol MethodDefinition. + // The ES2015 spec specifies in 14.5.14. Runtime Semantics: ClassDefinitionEvaluation: + // If constructor is empty, then + // If ClassHeritag_eopt is present and protoParent is not null, then + // Let constructor be the result of parsing the source text + // constructor(...args) { super (...args);} + // using the syntactic grammar with the goal symbol MethodDefinition[~Yield]. // Else, - // Let constructor be the result of parsing the String "constructor( ){ }" using the syntactic grammar with the goal symbol MethodDefinition - if (baseTypeElement) { - write("(...args)"); - } - else { - write("()"); - } + // Let constructor be the result of parsing the source text + // constructor( ){ } + // using the syntactic grammar with the goal symbol MethodDefinition[~Yield]. + // + // While we could emit the '...args' rest parameter, certain later tools in the pipeline might + // downlevel the '...args' portion less efficiently by naively copying the contents of 'arguments' to an array. + // Instead, we'll avoid using a rest parameter and spread into the super call as + // 'super(...arguments)' instead of 'super(...args)', as you can see below. + write("()"); } } @@ -5359,7 +5364,8 @@ const _super = (function (geti, seti) { write("_super.apply(this, arguments);"); } else { - write("super(...args);"); + // See comment above on using '...arguments' instead of '...args'. + write("super(...arguments);"); } emitEnd(baseTypeElement); } @@ -6460,7 +6466,7 @@ const _super = (function (geti, seti) { * Here we check if alternative name was provided for a given moduleName and return it if possible. 
*/ function tryRenameExternalModule(moduleName: LiteralExpression): string { - if (renamedDependencies && hasProperty(renamedDependencies, moduleName.text)) { + if (renamedDependencies && moduleName.text in renamedDependencies) { return `"${renamedDependencies[moduleName.text]}"`; } return undefined; @@ -6802,7 +6808,7 @@ const _super = (function (geti, seti) { function collectExternalModuleInfo(sourceFile: SourceFile) { externalImports = []; - exportSpecifiers = {}; + exportSpecifiers = createMap(); exportEquals = undefined; hasExportStarsToExportValues = false; for (const node of sourceFile.statements) { @@ -6940,7 +6946,7 @@ const _super = (function (geti, seti) { } // local names set should only be added if we have anything exported - if (!exportedDeclarations && isEmpty(exportSpecifiers)) { + if (!exportedDeclarations && !someProperties(exportSpecifiers)) { // no exported declarations (export var ...) or export specifiers (export {x}) // check if we have any non star export declarations. let hasExportDeclarationWithExportClause = false; @@ -7080,7 +7086,7 @@ const _super = (function (geti, seti) { if (hoistedVars) { writeLine(); write("var "); - const seen: Map = {}; + const seen = createMap(); for (let i = 0; i < hoistedVars.length; i++) { const local = hoistedVars[i]; const name = local.kind === SyntaxKind.Identifier @@ -7090,7 +7096,7 @@ const _super = (function (geti, seti) { if (name) { // do not emit duplicate entries (in case of declaration merging) in the list of hoisted variables const text = unescapeIdentifier(name.text); - if (hasProperty(seen, text)) { + if (text in seen) { continue; } else { @@ -7446,7 +7452,7 @@ const _super = (function (geti, seti) { writeModuleName(node, emitRelativePathAsModuleName); write("["); - const groupIndices: Map = {}; + const groupIndices = createMap(); const dependencyGroups: DependencyGroup[] = []; for (let i = 0; i < externalImports.length; i++) { @@ -7459,7 +7465,7 @@ const _super = (function (geti, seti) { // for deduplication purposes in key remove leading and trailing quotes so 'a' and "a" will be considered the same const key = text.substr(1, text.length - 2); - if (hasProperty(groupIndices, key)) { + if (key in groupIndices) { // deduplicate/group entries in dependency list by the dependency name const groupIndex = groupIndices[key]; dependencyGroups[groupIndex].push(externalImports[i]); diff --git a/src/compiler/parser.ts b/src/compiler/parser.ts index 17ad4ea155f95..3e20f899c2dab 100644 --- a/src/compiler/parser.ts +++ b/src/compiler/parser.ts @@ -417,14 +417,16 @@ namespace ts { case SyntaxKind.JSDocPropertyTag: return visitNode(cbNode, (node).typeExpression) || visitNode(cbNode, (node).name); + case SyntaxKind.JSDocLiteralType: + return visitNode(cbNode, (node).literal); } } export function createSourceFile(fileName: string, sourceText: string, languageVersion: ScriptTarget, setParentNodes = false, scriptKind?: ScriptKind): SourceFile { - const start = performance.mark(); + performance.mark("beforeParse"); const result = Parser.parseSourceFile(fileName, sourceText, languageVersion, /*syntaxCursor*/ undefined, setParentNodes, scriptKind); - - performance.measure("Parse", start); + performance.mark("afterParse"); + performance.measure("Parse", "beforeParse", "afterParse"); return result; } @@ -628,7 +630,7 @@ namespace ts { parseDiagnostics = []; parsingContext = 0; - identifiers = {}; + identifiers = createMap(); identifierCount = 0; nodeCount = 0; @@ -1085,7 +1087,7 @@ namespace ts { function internIdentifier(text: string): 
string { text = escapeIdentifier(text); - return hasProperty(identifiers, text) ? identifiers[text] : (identifiers[text] = text); + return identifiers[text] || (identifiers[text] = text); } // An identifier that starts with two underscores has an extra underscore character prepended to it to avoid issues @@ -3416,6 +3418,7 @@ namespace ts { * 6) - UnaryExpression[?yield] * 7) ~ UnaryExpression[?yield] * 8) ! UnaryExpression[?yield] + * 9) [+Await] await UnaryExpression[?yield] */ function parseSimpleUnaryExpression(): UnaryExpression { switch (token()) { @@ -3430,6 +3433,8 @@ namespace ts { return parseTypeOfExpression(); case SyntaxKind.VoidKeyword: return parseVoidExpression(); + case SyntaxKind.AwaitKeyword: + return parseAwaitExpression(); case SyntaxKind.LessThanToken: // This is modified UnaryExpression grammar in TypeScript // UnaryExpression (modified): @@ -5890,9 +5895,13 @@ namespace ts { case SyntaxKind.SymbolKeyword: case SyntaxKind.VoidKeyword: return parseTokenNode(); + case SyntaxKind.StringLiteral: + case SyntaxKind.NumericLiteral: + case SyntaxKind.TrueKeyword: + case SyntaxKind.FalseKeyword: + return parseJSDocLiteralType(); } - // TODO (drosen): Parse string literal types in JSDoc as well. return parseJSDocTypeReference(); } @@ -6071,6 +6080,12 @@ namespace ts { return finishNode(result); } + function parseJSDocLiteralType(): JSDocLiteralType { + const result = createNode(SyntaxKind.JSDocLiteralType); + result.literal = parseLiteralTypeNode(); + return finishNode(result); + } + function parseJSDocUnknownOrNullableType(): JSDocUnknownType | JSDocNullableType { const pos = scanner.getStartPos(); // skip the ? diff --git a/src/compiler/performance.ts b/src/compiler/performance.ts index 89db876ae5e48..e8f064e81cdf3 100644 --- a/src/compiler/performance.ts +++ b/src/compiler/performance.ts @@ -6,104 +6,90 @@ namespace ts { } /*@internal*/ +/** Performance measurements for the compiler. */ namespace ts.performance { - /** Performance measurements for the compiler. */ declare const onProfilerEvent: { (markName: string): void; profiler: boolean; }; - let profilerEvent: (markName: string) => void; - let counters: Map; + + const profilerEvent = typeof onProfilerEvent === "function" && onProfilerEvent.profiler === true + ? onProfilerEvent + : (markName: string) => { }; + + let enabled = false; + let profilerStart = 0; + let counts: Map; + let marks: Map; let measures: Map; /** - * Emit a performance event if ts-profiler is connected. This is primarily used - * to generate heap snapshots. + * Marks a performance event. * - * @param eventName A name for the event. + * @param markName The name of the mark. */ - export function emit(eventName: string) { - if (profilerEvent) { - profilerEvent(eventName); + export function mark(markName: string) { + if (enabled) { + marks[markName] = timestamp(); + counts[markName] = (counts[markName] || 0) + 1; + profilerEvent(markName); } } /** - * Increments a counter with the specified name. + * Adds a performance measurement with the specified name. * - * @param counterName The name of the counter. + * @param measureName The name of the performance measurement. + * @param startMarkName The name of the starting mark. If not supplied, the point at which the + * profiler was enabled is used. + * @param endMarkName The name of the ending mark. If not supplied, the current timestamp is + * used. 
*/ - export function increment(counterName: string) { - if (counters) { - counters[counterName] = (getProperty(counters, counterName) || 0) + 1; + export function measure(measureName: string, startMarkName?: string, endMarkName?: string) { + if (enabled) { + const end = endMarkName && marks[endMarkName] || timestamp(); + const start = startMarkName && marks[startMarkName] || profilerStart; + measures[measureName] = (measures[measureName] || 0) + (end - start); } } /** - * Gets the value of the counter with the specified name. + * Gets the number of times a marker was encountered. * - * @param counterName The name of the counter. - */ - export function getCount(counterName: string) { - return counters && getProperty(counters, counterName) || 0; - } - - /** - * Marks the start of a performance measurement. + * @param markName The name of the mark. */ - export function mark() { - return measures ? timestamp() : 0; + export function getCount(markName: string) { + return counts && counts[markName] || 0; } /** - * Adds a performance measurement with the specified name. + * Gets the total duration of all measurements with the supplied name. * - * @param measureName The name of the performance measurement. - * @param marker The timestamp of the starting mark. + * @param measureName The name of the measure whose durations should be accumulated. */ - export function measure(measureName: string, marker: number) { - if (measures) { - measures[measureName] = (getProperty(measures, measureName) || 0) + (timestamp() - marker); - } + export function getDuration(measureName: string) { + return measures && measures[measureName] || 0; } /** * Iterate over each measure, performing some action - * + * * @param cb The action to perform for each measure */ export function forEachMeasure(cb: (measureName: string, duration: number) => void) { - return forEachKey(measures, key => cb(key, measures[key])); - } - - /** - * Gets the total duration of all measurements with the supplied name. - * - * @param measureName The name of the measure whose durations should be accumulated. - */ - export function getDuration(measureName: string) { - return measures && getProperty(measures, measureName) || 0; + for (const key in measures) { + cb(key, measures[key]); + } } /** Enables (and resets) performance measurements for the compiler. */ export function enable() { - counters = { }; - measures = { - "I/O Read": 0, - "I/O Write": 0, - "Program": 0, - "Parse": 0, - "Bind": 0, - "Check": 0, - "Emit": 0, - }; - - profilerEvent = typeof onProfilerEvent === "function" && onProfilerEvent.profiler === true - ? onProfilerEvent - : undefined; + counts = createMap(); + marks = createMap(); + measures = createMap(); + enabled = true; + profilerStart = timestamp(); } - /** Disables (and clears) performance measurements for the compiler. */ + /** Disables performance measurements for the compiler. 
*/ export function disable() { - counters = undefined; - measures = undefined; - profilerEvent = undefined; + enabled = false; } } diff --git a/src/compiler/program.ts b/src/compiler/program.ts index f0db090797d3c..932f400660f7b 100644 --- a/src/compiler/program.ts +++ b/src/compiler/program.ts @@ -76,103 +76,6 @@ namespace ts { return getNormalizedPathFromPathComponents(commonPathComponents); } - function trace(host: ModuleResolutionHost, message: DiagnosticMessage, ...args: any[]): void; - function trace(host: ModuleResolutionHost, message: DiagnosticMessage): void { - host.trace(formatMessage.apply(undefined, arguments)); - } - - function isTraceEnabled(compilerOptions: CompilerOptions, host: ModuleResolutionHost): boolean { - return compilerOptions.traceResolution && host.trace !== undefined; - } - - /* @internal */ - export function hasZeroOrOneAsteriskCharacter(str: string): boolean { - let seenAsterisk = false; - for (let i = 0; i < str.length; i++) { - if (str.charCodeAt(i) === CharacterCodes.asterisk) { - if (!seenAsterisk) { - seenAsterisk = true; - } - else { - // have already seen asterisk - return false; - } - } - } - return true; - } - - function createResolvedModule(resolvedFileName: string, isExternalLibraryImport: boolean, failedLookupLocations: string[]): ResolvedModuleWithFailedLookupLocations { - return { resolvedModule: resolvedFileName ? { resolvedFileName, isExternalLibraryImport } : undefined, failedLookupLocations }; - } - - function moduleHasNonRelativeName(moduleName: string): boolean { - return !(isRootedDiskPath(moduleName) || isExternalModuleNameRelative(moduleName)); - } - - interface ModuleResolutionState { - host: ModuleResolutionHost; - compilerOptions: CompilerOptions; - traceEnabled: boolean; - // skip .tsx files if jsx is not enabled - skipTsx: boolean; - } - - function tryReadTypesSection(packageJsonPath: string, baseDirectory: string, state: ModuleResolutionState): string { - let jsonContent: { typings?: string, types?: string, main?: string }; - try { - const jsonText = state.host.readFile(packageJsonPath); - jsonContent = jsonText ? 
<{ typings?: string, types?: string, main?: string }>JSON.parse(jsonText) : {}; - } - catch (e) { - // gracefully handle if readFile fails or returns not JSON - jsonContent = {}; - } - - let typesFile: string; - let fieldName: string; - // first try to read content of 'typings' section (backward compatibility) - if (jsonContent.typings) { - if (typeof jsonContent.typings === "string") { - fieldName = "typings"; - typesFile = jsonContent.typings; - } - else { - if (state.traceEnabled) { - trace(state.host, Diagnostics.Expected_type_of_0_field_in_package_json_to_be_string_got_1, "typings", typeof jsonContent.typings); - } - } - } - // then read 'types' - if (!typesFile && jsonContent.types) { - if (typeof jsonContent.types === "string") { - fieldName = "types"; - typesFile = jsonContent.types; - } - else { - if (state.traceEnabled) { - trace(state.host, Diagnostics.Expected_type_of_0_field_in_package_json_to_be_string_got_1, "types", typeof jsonContent.types); - } - } - } - if (typesFile) { - const typesFilePath = normalizePath(combinePaths(baseDirectory, typesFile)); - if (state.traceEnabled) { - trace(state.host, Diagnostics.package_json_has_0_field_1_that_references_2, fieldName, typesFile, typesFilePath); - } - return typesFilePath; - } - // Use the main module for inferring types if no types package specified and the allowJs is set - if (state.compilerOptions.allowJs && jsonContent.main && typeof jsonContent.main === "string") { - if (state.traceEnabled) { - trace(state.host, Diagnostics.No_types_specified_in_package_json_but_allowJs_is_set_so_returning_main_value_of_0, jsonContent.main); - } - const mainFilePath = normalizePath(combinePaths(baseDirectory, jsonContent.main)); - return mainFilePath; - } - return undefined; - } - const typeReferenceExtensions = [".d.ts"]; function getEffectiveTypeRoots(options: CompilerOptions, host: ModuleResolutionHost) { @@ -293,552 +196,6 @@ namespace ts { }; } - export function resolveModuleName(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost): ResolvedModuleWithFailedLookupLocations { - const traceEnabled = isTraceEnabled(compilerOptions, host); - if (traceEnabled) { - trace(host, Diagnostics.Resolving_module_0_from_1, moduleName, containingFile); - } - - let moduleResolution = compilerOptions.moduleResolution; - if (moduleResolution === undefined) { - moduleResolution = getEmitModuleKind(compilerOptions) === ModuleKind.CommonJS ? 
ModuleResolutionKind.NodeJs : ModuleResolutionKind.Classic; - if (traceEnabled) { - trace(host, Diagnostics.Module_resolution_kind_is_not_specified_using_0, ModuleResolutionKind[moduleResolution]); - } - } - else { - if (traceEnabled) { - trace(host, Diagnostics.Explicitly_specified_module_resolution_kind_Colon_0, ModuleResolutionKind[moduleResolution]); - } - } - - let result: ResolvedModuleWithFailedLookupLocations; - switch (moduleResolution) { - case ModuleResolutionKind.NodeJs: - result = nodeModuleNameResolver(moduleName, containingFile, compilerOptions, host); - break; - case ModuleResolutionKind.Classic: - result = classicNameResolver(moduleName, containingFile, compilerOptions, host); - break; - } - - if (traceEnabled) { - if (result.resolvedModule) { - trace(host, Diagnostics.Module_name_0_was_successfully_resolved_to_1, moduleName, result.resolvedModule.resolvedFileName); - } - else { - trace(host, Diagnostics.Module_name_0_was_not_resolved, moduleName); - } - } - - return result; - } - - /* - * Every module resolution kind can has its specific understanding how to load module from a specific path on disk - * I.e. for path '/a/b/c': - * - Node loader will first to try to check if '/a/b/c' points to a file with some supported extension and if this fails - * it will try to load module from directory: directory '/a/b/c' should exist and it should have either 'package.json' with - * 'typings' entry or file 'index' with some supported extension - * - Classic loader will only try to interpret '/a/b/c' as file. - */ - type ResolutionKindSpecificLoader = (candidate: string, extensions: string[], failedLookupLocations: string[], onlyRecordFailures: boolean, state: ModuleResolutionState) => string; - - /** - * Any module resolution kind can be augmented with optional settings: 'baseUrl', 'paths' and 'rootDirs' - they are used to - * mitigate differences between design time structure of the project and its runtime counterpart so the same import name - * can be resolved successfully by TypeScript compiler and runtime module loader. - * If these settings are set then loading procedure will try to use them to resolve module name and it can of failure it will - * fallback to standard resolution routine. - * - * - baseUrl - this setting controls how non-relative module names are resolved. If this setting is specified then non-relative - * names will be resolved relative to baseUrl: i.e. if baseUrl is '/a/b' then candidate location to resolve module name 'c/d' will - * be '/a/b/c/d' - * - paths - this setting can only be used when baseUrl is specified. allows to tune how non-relative module names - * will be resolved based on the content of the module name. - * Structure of 'paths' compiler options - * 'paths': { - * pattern-1: [...substitutions], - * pattern-2: [...substitutions], - * ... - * pattern-n: [...substitutions] - * } - * Pattern here is a string that can contain zero or one '*' character. During module resolution module name will be matched against - * all patterns in the list. Matching for patterns that don't contain '*' means that module name must be equal to pattern respecting the case. - * If pattern contains '*' then to match pattern "*" module name must start with the and end with . - * denotes part of the module name between and . - * If module name can be matches with multiple patterns then pattern with the longest prefix will be picked. 
- * After selecting pattern we'll use list of substitutions to get candidate locations of the module and the try to load module - * from the candidate location. - * Substitution is a string that can contain zero or one '*'. To get candidate location from substitution we'll pick every - * substitution in the list and replace '*' with string. If candidate location is not rooted it - * will be converted to absolute using baseUrl. - * For example: - * baseUrl: /a/b/c - * "paths": { - * // match all module names - * "*": [ - * "*", // use matched name as is, - * // will be looked as /a/b/c/ - * - * "folder1/*" // substitution will convert matched name to 'folder1/', - * // since it is not rooted then final candidate location will be /a/b/c/folder1/ - * ], - * // match module names that start with 'components/' - * "components/*": [ "/root/components/*" ] // substitution will convert /components/folder1/ to '/root/components/folder1/', - * // it is rooted so it will be final candidate location - * } - * - * 'rootDirs' allows the project to be spreaded across multiple locations and resolve modules with relative names as if - * they were in the same location. For example lets say there are two files - * '/local/src/content/file1.ts' - * '/shared/components/contracts/src/content/protocols/file2.ts' - * After bundling content of '/shared/components/contracts/src' will be merged with '/local/src' so - * if file1 has the following import 'import {x} from "./protocols/file2"' it will be resolved successfully in runtime. - * 'rootDirs' provides the way to tell compiler that in order to get the whole project it should behave as if content of all - * root dirs were merged together. - * I.e. for the example above 'rootDirs' will have two entries: [ '/local/src', '/shared/components/contracts/src' ]. - * Compiler will first convert './protocols/file2' into absolute path relative to the location of containing file: - * '/local/src/content/protocols/file2' and try to load it - failure. - * Then it will search 'rootDirs' looking for a longest matching prefix of this absolute path and if such prefix is found - absolute path will - * be converted to a path relative to found rootDir entry './content/protocols/file2' (*). As a last step compiler will check all remaining - * entries in 'rootDirs', use them to build absolute path out of (*) and try to resolve module from this location. 
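To make the 'baseUrl'/'paths' rules described in the comment above concrete, here is a small illustrative options fragment reusing the comment's own values (the import specifier "components/grid" is made up for the example):

```ts
import * as ts from "typescript";

// With baseUrl "/a/b/c", importing "components/grid" matches the "components/*"
// pattern (the '*' captures "grid"), so the only candidate location is the
// rooted substitution "/root/components/grid". Any other non-relative name
// falls through to the "*" pattern and is probed at "/a/b/c/<name>" and then
// "/a/b/c/folder1/<name>".
const options: ts.CompilerOptions = {
    baseUrl: "/a/b/c",
    paths: {
        "*": ["*", "folder1/*"],
        "components/*": ["/root/components/*"]
    }
};
```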
- */ - function tryLoadModuleUsingOptionalResolutionSettings(moduleName: string, containingDirectory: string, loader: ResolutionKindSpecificLoader, - failedLookupLocations: string[], supportedExtensions: string[], state: ModuleResolutionState): string { - - if (moduleHasNonRelativeName(moduleName)) { - return tryLoadModuleUsingBaseUrl(moduleName, loader, failedLookupLocations, supportedExtensions, state); - } - else { - return tryLoadModuleUsingRootDirs(moduleName, containingDirectory, loader, failedLookupLocations, supportedExtensions, state); - } - } - - function tryLoadModuleUsingRootDirs(moduleName: string, containingDirectory: string, loader: ResolutionKindSpecificLoader, - failedLookupLocations: string[], supportedExtensions: string[], state: ModuleResolutionState): string { - - if (!state.compilerOptions.rootDirs) { - return undefined; - } - - if (state.traceEnabled) { - trace(state.host, Diagnostics.rootDirs_option_is_set_using_it_to_resolve_relative_module_name_0, moduleName); - } - - const candidate = normalizePath(combinePaths(containingDirectory, moduleName)); - - let matchedRootDir: string; - let matchedNormalizedPrefix: string; - for (const rootDir of state.compilerOptions.rootDirs) { - // rootDirs are expected to be absolute - // in case of tsconfig.json this will happen automatically - compiler will expand relative names - // using location of tsconfig.json as base location - let normalizedRoot = normalizePath(rootDir); - if (!endsWith(normalizedRoot, directorySeparator)) { - normalizedRoot += directorySeparator; - } - const isLongestMatchingPrefix = - startsWith(candidate, normalizedRoot) && - (matchedNormalizedPrefix === undefined || matchedNormalizedPrefix.length < normalizedRoot.length); - - if (state.traceEnabled) { - trace(state.host, Diagnostics.Checking_if_0_is_the_longest_matching_prefix_for_1_2, normalizedRoot, candidate, isLongestMatchingPrefix); - } - - if (isLongestMatchingPrefix) { - matchedNormalizedPrefix = normalizedRoot; - matchedRootDir = rootDir; - } - } - if (matchedNormalizedPrefix) { - if (state.traceEnabled) { - trace(state.host, Diagnostics.Longest_matching_prefix_for_0_is_1, candidate, matchedNormalizedPrefix); - } - const suffix = candidate.substr(matchedNormalizedPrefix.length); - - // first - try to load from a initial location - if (state.traceEnabled) { - trace(state.host, Diagnostics.Loading_0_from_the_root_dir_1_candidate_location_2, suffix, matchedNormalizedPrefix, candidate); - } - const resolvedFileName = loader(candidate, supportedExtensions, failedLookupLocations, !directoryProbablyExists(containingDirectory, state.host), state); - if (resolvedFileName) { - return resolvedFileName; - } - - if (state.traceEnabled) { - trace(state.host, Diagnostics.Trying_other_entries_in_rootDirs); - } - // then try to resolve using remaining entries in rootDirs - for (const rootDir of state.compilerOptions.rootDirs) { - if (rootDir === matchedRootDir) { - // skip the initially matched entry - continue; - } - const candidate = combinePaths(normalizePath(rootDir), suffix); - if (state.traceEnabled) { - trace(state.host, Diagnostics.Loading_0_from_the_root_dir_1_candidate_location_2, suffix, rootDir, candidate); - } - const baseDirectory = getDirectoryPath(candidate); - const resolvedFileName = loader(candidate, supportedExtensions, failedLookupLocations, !directoryProbablyExists(baseDirectory, state.host), state); - if (resolvedFileName) { - return resolvedFileName; - } - } - if (state.traceEnabled) { - trace(state.host, 
Diagnostics.Module_resolution_using_rootDirs_has_failed); - } - } - return undefined; - } - - function tryLoadModuleUsingBaseUrl(moduleName: string, loader: ResolutionKindSpecificLoader, failedLookupLocations: string[], - supportedExtensions: string[], state: ModuleResolutionState): string { - - if (!state.compilerOptions.baseUrl) { - return undefined; - } - if (state.traceEnabled) { - trace(state.host, Diagnostics.baseUrl_option_is_set_to_0_using_this_value_to_resolve_non_relative_module_name_1, state.compilerOptions.baseUrl, moduleName); - } - - // string is for exact match - let matchedPattern: Pattern | string | undefined = undefined; - if (state.compilerOptions.paths) { - if (state.traceEnabled) { - trace(state.host, Diagnostics.paths_option_is_specified_looking_for_a_pattern_to_match_module_name_0, moduleName); - } - matchedPattern = matchPatternOrExact(getKeys(state.compilerOptions.paths), moduleName); - } - - if (matchedPattern) { - const matchedStar = typeof matchedPattern === "string" ? undefined : matchedText(matchedPattern, moduleName); - const matchedPatternText = typeof matchedPattern === "string" ? matchedPattern : patternText(matchedPattern); - if (state.traceEnabled) { - trace(state.host, Diagnostics.Module_name_0_matched_pattern_1, moduleName, matchedPatternText); - } - for (const subst of state.compilerOptions.paths[matchedPatternText]) { - const path = matchedStar ? subst.replace("*", matchedStar) : subst; - const candidate = normalizePath(combinePaths(state.compilerOptions.baseUrl, path)); - if (state.traceEnabled) { - trace(state.host, Diagnostics.Trying_substitution_0_candidate_module_location_Colon_1, subst, path); - } - const resolvedFileName = loader(candidate, supportedExtensions, failedLookupLocations, !directoryProbablyExists(getDirectoryPath(candidate), state.host), state); - if (resolvedFileName) { - return resolvedFileName; - } - } - return undefined; - } - else { - const candidate = normalizePath(combinePaths(state.compilerOptions.baseUrl, moduleName)); - - if (state.traceEnabled) { - trace(state.host, Diagnostics.Resolving_module_name_0_relative_to_base_url_1_2, moduleName, state.compilerOptions.baseUrl, candidate); - } - - return loader(candidate, supportedExtensions, failedLookupLocations, !directoryProbablyExists(getDirectoryPath(candidate), state.host), state); - } - } - - /** - * patternStrings contains both pattern strings (containing "*") and regular strings. - * Return an exact match if possible, or a pattern match, or undefined. - * (These are verified by verifyCompilerOptions to have 0 or 1 "*" characters.) - */ - function matchPatternOrExact(patternStrings: string[], candidate: string): string | Pattern | undefined { - const patterns: Pattern[] = []; - for (const patternString of patternStrings) { - const pattern = tryParsePattern(patternString); - if (pattern) { - patterns.push(pattern); - } - else if (patternString === candidate) { - // pattern was matched as is - no need to search further - return patternString; - } - } - - return findBestPatternMatch(patterns, _ => _, candidate); - } - - function patternText({prefix, suffix}: Pattern): string { - return `${prefix}*${suffix}`; - } - - /** - * Given that candidate matches pattern, returns the text matching the '*'. 
- * E.g.: matchedText(tryParsePattern("foo*baz"), "foobarbaz") === "bar" - */ - function matchedText(pattern: Pattern, candidate: string): string { - Debug.assert(isPatternMatch(pattern, candidate)); - return candidate.substr(pattern.prefix.length, candidate.length - pattern.suffix.length); - } - - /** Return the object corresponding to the best pattern to match `candidate`. */ - /* @internal */ - export function findBestPatternMatch(values: T[], getPattern: (value: T) => Pattern, candidate: string): T | undefined { - let matchedValue: T | undefined = undefined; - // use length of prefix as betterness criteria - let longestMatchPrefixLength = -1; - - for (const v of values) { - const pattern = getPattern(v); - if (isPatternMatch(pattern, candidate) && pattern.prefix.length > longestMatchPrefixLength) { - longestMatchPrefixLength = pattern.prefix.length; - matchedValue = v; - } - } - - return matchedValue; - } - - function isPatternMatch({prefix, suffix}: Pattern, candidate: string) { - return candidate.length >= prefix.length + suffix.length && - startsWith(candidate, prefix) && - endsWith(candidate, suffix); - } - - /* @internal */ - export function tryParsePattern(pattern: string): Pattern | undefined { - // This should be verified outside of here and a proper error thrown. - Debug.assert(hasZeroOrOneAsteriskCharacter(pattern)); - const indexOfStar = pattern.indexOf("*"); - return indexOfStar === -1 ? undefined : { - prefix: pattern.substr(0, indexOfStar), - suffix: pattern.substr(indexOfStar + 1) - }; - } - - export function nodeModuleNameResolver(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost): ResolvedModuleWithFailedLookupLocations { - const containingDirectory = getDirectoryPath(containingFile); - const supportedExtensions = getSupportedExtensions(compilerOptions); - const traceEnabled = isTraceEnabled(compilerOptions, host); - - const failedLookupLocations: string[] = []; - const state = { compilerOptions, host, traceEnabled, skipTsx: false }; - let resolvedFileName = tryLoadModuleUsingOptionalResolutionSettings(moduleName, containingDirectory, nodeLoadModuleByRelativeName, - failedLookupLocations, supportedExtensions, state); - - let isExternalLibraryImport = false; - if (!resolvedFileName) { - if (moduleHasNonRelativeName(moduleName)) { - if (traceEnabled) { - trace(host, Diagnostics.Loading_module_0_from_node_modules_folder, moduleName); - } - resolvedFileName = loadModuleFromNodeModules(moduleName, containingDirectory, failedLookupLocations, state); - isExternalLibraryImport = resolvedFileName !== undefined; - } - else { - const candidate = normalizePath(combinePaths(containingDirectory, moduleName)); - resolvedFileName = nodeLoadModuleByRelativeName(candidate, supportedExtensions, failedLookupLocations, /*onlyRecordFailures*/ false, state); - } - } - - if (resolvedFileName && host.realpath) { - const originalFileName = resolvedFileName; - resolvedFileName = normalizePath(host.realpath(resolvedFileName)); - if (traceEnabled) { - trace(host, Diagnostics.Resolving_real_path_for_0_result_1, originalFileName, resolvedFileName); - } - } - - return createResolvedModule(resolvedFileName, isExternalLibraryImport, failedLookupLocations); - } - - function nodeLoadModuleByRelativeName(candidate: string, supportedExtensions: string[], failedLookupLocations: string[], - onlyRecordFailures: boolean, state: ModuleResolutionState): string { - - if (state.traceEnabled) { - trace(state.host, 
Diagnostics.Loading_module_as_file_Slash_folder_candidate_module_location_0, candidate); - } - - const resolvedFileName = loadModuleFromFile(candidate, supportedExtensions, failedLookupLocations, onlyRecordFailures, state); - - return resolvedFileName || loadNodeModuleFromDirectory(supportedExtensions, candidate, failedLookupLocations, onlyRecordFailures, state); - } - - /* @internal */ - export function directoryProbablyExists(directoryName: string, host: { directoryExists?: (directoryName: string) => boolean }): boolean { - // if host does not support 'directoryExists' assume that directory will exist - return !host.directoryExists || host.directoryExists(directoryName); - } - - /** - * @param {boolean} onlyRecordFailures - if true then function won't try to actually load files but instead record all attempts as failures. This flag is necessary - * in cases when we know upfront that all load attempts will fail (because containing folder does not exists) however we still need to record all failed lookup locations. - */ - function loadModuleFromFile(candidate: string, extensions: string[], failedLookupLocation: string[], onlyRecordFailures: boolean, state: ModuleResolutionState): string { - // First try to keep/add an extension: importing "./foo.ts" can be matched by a file "./foo.ts", and "./foo" by "./foo.d.ts" - const resolvedByAddingOrKeepingExtension = loadModuleFromFileWorker(candidate, extensions, failedLookupLocation, onlyRecordFailures, state); - if (resolvedByAddingOrKeepingExtension) { - return resolvedByAddingOrKeepingExtension; - } - // Then try stripping a ".js" or ".jsx" extension and replacing it with a TypeScript one, e.g. "./foo.js" can be matched by "./foo.ts" or "./foo.d.ts" - if (hasJavaScriptFileExtension(candidate)) { - const extensionless = removeFileExtension(candidate); - if (state.traceEnabled) { - const extension = candidate.substring(extensionless.length); - trace(state.host, Diagnostics.File_name_0_has_a_1_extension_stripping_it, candidate, extension); - } - return loadModuleFromFileWorker(extensionless, extensions, failedLookupLocation, onlyRecordFailures, state); - } - } - - function loadModuleFromFileWorker(candidate: string, extensions: string[], failedLookupLocation: string[], onlyRecordFailures: boolean, state: ModuleResolutionState): string { - if (!onlyRecordFailures) { - // check if containing folder exists - if it doesn't then just record failures for all supported extensions without disk probing - const directory = getDirectoryPath(candidate); - if (directory) { - onlyRecordFailures = !directoryProbablyExists(directory, state.host); - } - } - return forEach(extensions, tryLoad); - - function tryLoad(ext: string): string { - if (state.skipTsx && isJsxOrTsxExtension(ext)) { - return undefined; - } - const fileName = fileExtensionIs(candidate, ext) ? 
candidate : candidate + ext; - if (!onlyRecordFailures && state.host.fileExists(fileName)) { - if (state.traceEnabled) { - trace(state.host, Diagnostics.File_0_exist_use_it_as_a_name_resolution_result, fileName); - } - return fileName; - } - else { - if (state.traceEnabled) { - trace(state.host, Diagnostics.File_0_does_not_exist, fileName); - } - failedLookupLocation.push(fileName); - return undefined; - } - } - } - - function loadNodeModuleFromDirectory(extensions: string[], candidate: string, failedLookupLocation: string[], onlyRecordFailures: boolean, state: ModuleResolutionState): string { - const packageJsonPath = combinePaths(candidate, "package.json"); - const directoryExists = !onlyRecordFailures && directoryProbablyExists(candidate, state.host); - if (directoryExists && state.host.fileExists(packageJsonPath)) { - if (state.traceEnabled) { - trace(state.host, Diagnostics.Found_package_json_at_0, packageJsonPath); - } - const typesFile = tryReadTypesSection(packageJsonPath, candidate, state); - if (typesFile) { - const result = loadModuleFromFile(typesFile, extensions, failedLookupLocation, !directoryProbablyExists(getDirectoryPath(typesFile), state.host), state); - if (result) { - return result; - } - } - else { - if (state.traceEnabled) { - trace(state.host, Diagnostics.package_json_does_not_have_types_field); - } - } - } - else { - if (state.traceEnabled) { - trace(state.host, Diagnostics.File_0_does_not_exist, packageJsonPath); - } - // record package json as one of failed lookup locations - in the future if this file will appear it will invalidate resolution results - failedLookupLocation.push(packageJsonPath); - } - - return loadModuleFromFile(combinePaths(candidate, "index"), extensions, failedLookupLocation, !directoryExists, state); - } - - function loadModuleFromNodeModulesFolder(moduleName: string, directory: string, failedLookupLocations: string[], state: ModuleResolutionState): string { - const nodeModulesFolder = combinePaths(directory, "node_modules"); - const nodeModulesFolderExists = directoryProbablyExists(nodeModulesFolder, state.host); - const candidate = normalizePath(combinePaths(nodeModulesFolder, moduleName)); - const supportedExtensions = getSupportedExtensions(state.compilerOptions); - - let result = loadModuleFromFile(candidate, supportedExtensions, failedLookupLocations, !nodeModulesFolderExists, state); - if (result) { - return result; - } - result = loadNodeModuleFromDirectory(supportedExtensions, candidate, failedLookupLocations, !nodeModulesFolderExists, state); - if (result) { - return result; - } - } - - function loadModuleFromNodeModules(moduleName: string, directory: string, failedLookupLocations: string[], state: ModuleResolutionState): string { - directory = normalizeSlashes(directory); - while (true) { - const baseName = getBaseFileName(directory); - if (baseName !== "node_modules") { - // Try to load source from the package - const packageResult = loadModuleFromNodeModulesFolder(moduleName, directory, failedLookupLocations, state); - if (packageResult && hasTypeScriptFileExtension(packageResult)) { - // Always prefer a TypeScript (.ts, .tsx, .d.ts) file shipped with the package - return packageResult; - } - else { - // Else prefer a types package over non-TypeScript results (e.g. 
JavaScript files) - const typesResult = loadModuleFromNodeModulesFolder(combinePaths("@types", moduleName), directory, failedLookupLocations, state); - if (typesResult || packageResult) { - return typesResult || packageResult; - } - } - } - - const parentPath = getDirectoryPath(directory); - if (parentPath === directory) { - break; - } - - directory = parentPath; - } - return undefined; - } - - export function classicNameResolver(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost): ResolvedModuleWithFailedLookupLocations { - const traceEnabled = isTraceEnabled(compilerOptions, host); - const state = { compilerOptions, host, traceEnabled, skipTsx: !compilerOptions.jsx }; - const failedLookupLocations: string[] = []; - const supportedExtensions = getSupportedExtensions(compilerOptions); - let containingDirectory = getDirectoryPath(containingFile); - - const resolvedFileName = tryLoadModuleUsingOptionalResolutionSettings(moduleName, containingDirectory, loadModuleFromFile, failedLookupLocations, supportedExtensions, state); - if (resolvedFileName) { - return createResolvedModule(resolvedFileName, /*isExternalLibraryImport*/false, failedLookupLocations); - } - - let referencedSourceFile: string; - if (moduleHasNonRelativeName(moduleName)) { - while (true) { - const searchName = normalizePath(combinePaths(containingDirectory, moduleName)); - referencedSourceFile = loadModuleFromFile(searchName, supportedExtensions, failedLookupLocations, /*onlyRecordFailures*/ false, state); - if (referencedSourceFile) { - break; - } - const parentPath = getDirectoryPath(containingDirectory); - if (parentPath === containingDirectory) { - break; - } - containingDirectory = parentPath; - } - } - else { - const candidate = normalizePath(combinePaths(containingDirectory, moduleName)); - referencedSourceFile = loadModuleFromFile(candidate, supportedExtensions, failedLookupLocations, /*onlyRecordFailures*/ false, state); - } - - - return referencedSourceFile - ? { resolvedModule: { resolvedFileName: referencedSourceFile }, failedLookupLocations } - : { resolvedModule: undefined, failedLookupLocations }; - } - - /* @internal */ - export const defaultInitCompilerOptions: CompilerOptions = { - module: ModuleKind.CommonJS, - target: ScriptTarget.ES5, - noImplicitAny: false, - sourceMap: false, - }; - interface OutputFingerprint { hash: string; byteOrderMark: boolean; @@ -846,7 +203,7 @@ namespace ts { } export function createCompilerHost(options: CompilerOptions, setParentNodes?: boolean): CompilerHost { - const existingDirectories: Map = {}; + const existingDirectories = createMap(); function getCanonicalFileName(fileName: string): string { // if underlying system can distinguish between two files whose names differs only in cases then file name already in canonical form. 
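The block removed above (mirrored by the additions earlier in this diff) walks one `node_modules` directory at a time, preferring a TypeScript file shipped with the package itself and only then falling back to `@types`. A quick way to see the resulting probe order is the public `resolveModuleName` entry point; the paths below are made up, and `failedLookupLocations` is public as of this version of the API:

```ts
import * as ts from "typescript";

// Illustrative: resolve "foo" from a (made-up) file using the node strategy and
// inspect where the compiler looked. failedLookupLocations records the probe
// order described above: the package's own .ts/.tsx/.d.ts files and its
// package.json "typings"/"types" entry first, then the "@types/foo" fallback,
// walking up one node_modules level at a time.
const result = ts.resolveModuleName(
    "foo",
    "/project/src/app.ts",
    { moduleResolution: ts.ModuleResolutionKind.NodeJs },
    ts.sys
);

console.log(result.resolvedModule && result.resolvedModule.resolvedFileName);
console.log(result.failedLookupLocations);
```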
@@ -860,9 +217,10 @@ namespace ts { function getSourceFile(fileName: string, languageVersion: ScriptTarget, onError?: (message: string) => void): SourceFile { let text: string; try { - const start = performance.mark(); + performance.mark("beforeIORead"); text = sys.readFile(fileName, options.charset); - performance.measure("I/O Read", start); + performance.mark("afterIORead"); + performance.measure("I/O Read", "beforeIORead", "afterIORead"); } catch (e) { if (onError) { @@ -877,7 +235,7 @@ namespace ts { } function directoryExists(directoryPath: string): boolean { - if (hasProperty(existingDirectories, directoryPath)) { + if (directoryPath in existingDirectories) { return true; } if (sys.directoryExists(directoryPath)) { @@ -899,13 +257,13 @@ namespace ts { function writeFileIfUpdated(fileName: string, data: string, writeByteOrderMark: boolean): void { if (!outputFingerprints) { - outputFingerprints = {}; + outputFingerprints = createMap(); } const hash = sys.createHash(data); const mtimeBefore = sys.getModifiedTime(fileName); - if (mtimeBefore && hasProperty(outputFingerprints, fileName)) { + if (mtimeBefore && fileName in outputFingerprints) { const fingerprint = outputFingerprints[fileName]; // If output has not been changed, and the file has no external modification @@ -929,7 +287,7 @@ namespace ts { function writeFile(fileName: string, data: string, writeByteOrderMark: boolean, onError?: (message: string) => void) { try { - const start = performance.mark(); + performance.mark("beforeIOWrite"); ensureDirectoriesExist(getDirectoryPath(normalizePath(fileName))); if (isWatchSet(options) && sys.createHash && sys.getModifiedTime) { @@ -939,7 +297,8 @@ namespace ts { sys.writeFile(fileName, data, writeByteOrderMark); } - performance.measure("I/O Write", start); + performance.mark("afterIOWrite"); + performance.measure("I/O Write", "beforeIOWrite", "afterIOWrite"); } catch (e) { if (onError) { @@ -1040,47 +399,47 @@ namespace ts { return []; } const resolutions: T[] = []; - const cache: Map = {}; + const cache = createMap(); for (const name of names) { - let result: T; - if (hasProperty(cache, name)) { - result = cache[name]; - } - else { - result = loader(name, containingFile); - cache[name] = result; - } + const result = name in cache + ? cache[name] + : cache[name] = loader(name, containingFile); resolutions.push(result); } return resolutions; } - function getInferredTypesRoot(options: CompilerOptions, rootFiles: string[], host: CompilerHost) { - return computeCommonSourceDirectoryOfFilenames(rootFiles, host.getCurrentDirectory(), f => host.getCanonicalFileName(f)); - } - /** - * Given a set of options and a set of root files, returns the set of type directive names + * Given a set of options, returns the set of type directive names * that should be included for this program automatically. * This list could either come from the config file, * or from enumerating the types root + initial secondary types lookup location. * More type directives might appear in the program later as a result of loading actual source files; * this list is only the set of defaults that are implicitly included. 
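The caching loop above resolves each distinct name once per containing file and reuses the cached entry for repeats, including a cached `undefined` for names that failed to resolve. A minimal standalone sketch of the same idiom (helper and names are illustrative only):

```ts
// Illustrative sketch of the `name in cache ? cache[name] : cache[name] = loader(...)`
// idiom: the loader runs once per distinct name, and even an `undefined` result
// is reused because the key itself is recorded in the cache.
function resolveAll<T>(names: string[], load: (name: string) => T): T[] {
    const cache: { [name: string]: T } = Object.create(null);
    return names.map(name => name in cache ? cache[name] : (cache[name] = load(name)));
}

let calls = 0;
const resolved = resolveAll(["a", "b", "a"], name => (calls++, "resolved:" + name));
// resolved: ["resolved:a", "resolved:b", "resolved:a"]; calls: 2
```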
*/ - export function getAutomaticTypeDirectiveNames(options: CompilerOptions, rootFiles: string[], host: CompilerHost): string[] { + export function getAutomaticTypeDirectiveNames(options: CompilerOptions, host: ModuleResolutionHost): string[] { // Use explicit type list from tsconfig.json if (options.types) { return options.types; } // Walk the primary type lookup locations - let result: string[] = []; + const result: string[] = []; if (host.directoryExists && host.getDirectories) { const typeRoots = getEffectiveTypeRoots(options, host); if (typeRoots) { for (const root of typeRoots) { if (host.directoryExists(root)) { - result = result.concat(host.getDirectories(root)); + for (const typeDirectivePath of host.getDirectories(root)) { + const normalized = normalizePath(typeDirectivePath); + const packageJsonPath = pathToPackageJson(combinePaths(root, normalized)); + // tslint:disable-next-line:no-null-keyword + const isNotNeededPackage = host.fileExists(packageJsonPath) && readJson(packageJsonPath, host).typings === null; + if (!isNotNeededPackage) { + // Return just the type directive names + result.push(getBaseFileName(normalized)); + } + } } } } @@ -1096,7 +455,7 @@ namespace ts { let noDiagnosticsTypeChecker: TypeChecker; let classifiableNames: Map; - let resolvedTypeReferenceDirectives: Map = {}; + let resolvedTypeReferenceDirectives = createMap(); let fileProcessingDiagnostics = createDiagnosticCollection(); // The below settings are to track if a .js file should be add to the program if loaded via searching under node_modules. @@ -1111,12 +470,12 @@ namespace ts { // If a module has some of its imports skipped due to being at the depth limit under node_modules, then track // this, as it may be imported at a shallower depth later, and then it will need its skipped imports processed. - const modulesWithElidedImports: Map = {}; + const modulesWithElidedImports = createMap(); // Track source files that are source files found by searching under node_modules, as these shouldn't be compiled. 
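The rewritten `getAutomaticTypeDirectiveNames` above no longer returns raw directory listings: each directory under a type root is skipped when its package.json declares `"typings": null`, marking a stub that is not needed at compile time. A rough standalone sketch of that filter, using plain `fs` instead of the compiler host (function name and behaviour simplified, illustration only):

```ts
import * as fs from "fs";
import * as path from "path";

// Simplified sketch: list the automatic type-directive names under one type
// root, skipping packages whose package.json explicitly sets "typings": null.
function listAutomaticTypeDirectives(typeRoot: string): string[] {
    return fs.readdirSync(typeRoot).filter(dir => {
        const packageJsonPath = path.join(typeRoot, dir, "package.json");
        if (!fs.existsSync(packageJsonPath)) {
            return true; // no package.json: keep the directory name
        }
        const json = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
        return json.typings !== null; // "typings": null marks a not-needed stub
    });
}

// e.g. listAutomaticTypeDirectives("/project/node_modules/@types")
```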
- const sourceFilesFoundSearchingNodeModules: Map = {}; + const sourceFilesFoundSearchingNodeModules = createMap(); - const start = performance.mark(); + performance.mark("beforeProgram"); host = host || createCompilerHost(options); @@ -1155,11 +514,11 @@ namespace ts { forEach(rootNames, name => processRootFile(name, /*isDefaultLib*/ false)); // load type declarations specified via 'types' argument or implicitly from types/ and node_modules/@types folders - const typeReferences: string[] = getAutomaticTypeDirectiveNames(options, rootNames, host); + const typeReferences: string[] = getAutomaticTypeDirectiveNames(options, host); if (typeReferences) { - const inferredRoot = getInferredTypesRoot(options, rootNames, host); - const containingFilename = combinePaths(inferredRoot, "__inferred type names__.ts"); + // This containingFilename needs to match with the one used in managed-side + const containingFilename = combinePaths(host.getCurrentDirectory(), "__inferred type names__.ts"); const resolutions = resolveTypeReferenceDirectiveNamesWorker(typeReferences, containingFilename); for (let i = 0; i < typeReferences.length; i++) { processTypeReferenceDirective(typeReferences[i], resolutions[i]); @@ -1215,8 +574,8 @@ namespace ts { }; verifyCompilerOptions(); - - performance.measure("Program", start); + performance.mark("afterProgram"); + performance.measure("Program", "beforeProgram", "afterProgram"); return program; @@ -1243,10 +602,10 @@ namespace ts { if (!classifiableNames) { // Initialize a checker so that all our files are bound. getTypeChecker(); - classifiableNames = {}; + classifiableNames = createMap(); for (const sourceFile of files) { - copyMap(sourceFile.classifiableNames, classifiableNames); + copyProperties(sourceFile.classifiableNames, classifiableNames); } } @@ -1274,7 +633,7 @@ namespace ts { (oldOptions.maxNodeModuleJsDepth !== options.maxNodeModuleJsDepth) || !arrayIsEqualTo(oldOptions.typeRoots, oldOptions.typeRoots) || !arrayIsEqualTo(oldOptions.rootDirs, options.rootDirs) || - !mapIsEqualTo(oldOptions.paths, options.paths)) { + !equalOwnProperties(oldOptions.paths, options.paths)) { return false; } @@ -1396,7 +755,7 @@ namespace ts { getSourceFile: program.getSourceFile, getSourceFileByPath: program.getSourceFileByPath, getSourceFiles: program.getSourceFiles, - isSourceFileFromExternalLibrary: (file: SourceFile) => !!lookUp(sourceFilesFoundSearchingNodeModules, file.path), + isSourceFileFromExternalLibrary: (file: SourceFile) => !!sourceFilesFoundSearchingNodeModules[file.path], writeFile: writeFileCallback || ( (fileName, data, writeByteOrderMark, onError, sourceFiles) => host.writeFile(fileName, data, writeByteOrderMark, onError, sourceFiles)), isEmitBlocked, @@ -1463,7 +822,7 @@ namespace ts { // checked is to not pass the file to getEmitResolver. const emitResolver = getDiagnosticsProducingTypeChecker().getEmitResolver((options.outFile || options.out) ? undefined : sourceFile); - const start = performance.mark(); + performance.mark("beforeEmit"); const emitResult = emitFiles( emitResolver, @@ -1471,7 +830,8 @@ namespace ts { sourceFile, emitOnlyDtsFiles); - performance.measure("Emit", start); + performance.mark("afterEmit"); + performance.measure("Emit", "beforeEmit", "afterEmit"); return emitResult; } @@ -1938,7 +1298,7 @@ namespace ts { // If the file was previously found via a node_modules search, but is now being processed as a root file, // then everything it sucks in may also be marked incorrectly, and needs to be checked again. 
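The `beforeProgram`/`afterProgram` marks above use the reworked internal `ts.performance` namespace from earlier in this diff. Since it is not part of the public API, the snippet below is only a sketch of how the new shape is driven from inside the compiler, with the reporting step invented for illustration:

```ts
// Illustrative: named marks replace the old numeric start tokens; counts are
// tracked per mark and measured durations accumulate per measure name between
// enable() and disable().
performance.enable();

performance.mark("beforeParse");
// ... parsing happens here ...
performance.mark("afterParse");
performance.measure("Parse", "beforeParse", "afterParse");

// Report whatever was gathered (for example when --diagnostics is set).
performance.forEachMeasure((name, duration) => {
    console.log(`${name} time: ${duration.toFixed(0)}ms`);
});
console.log(`Parse count: ${performance.getCount("beforeParse")}`);

performance.disable();
```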
- if (file && lookUp(sourceFilesFoundSearchingNodeModules, file.path) && currentNodeModulesDepth == 0) { + if (file && sourceFilesFoundSearchingNodeModules[file.path] && currentNodeModulesDepth == 0) { sourceFilesFoundSearchingNodeModules[file.path] = false; if (!options.noResolve) { processReferencedFiles(file, getDirectoryPath(fileName), isDefaultLib); @@ -1949,7 +1309,7 @@ namespace ts { processImportedModules(file, getDirectoryPath(fileName)); } // See if we need to reprocess the imports due to prior skipped imports - else if (file && lookUp(modulesWithElidedImports, file.path)) { + else if (file && modulesWithElidedImports[file.path]) { if (currentNodeModulesDepth < maxNodeModulesJsDepth) { modulesWithElidedImports[file.path] = false; processImportedModules(file, getDirectoryPath(fileName)); @@ -2016,15 +1376,17 @@ namespace ts { } function processTypeReferenceDirectives(file: SourceFile) { - const typeDirectives = map(file.typeReferenceDirectives, l => l.fileName); + // We lower-case all type references because npm automatically lowercases all packages. See GH#9824. + const typeDirectives = map(file.typeReferenceDirectives, ref => ref.fileName.toLocaleLowerCase()); const resolutions = resolveTypeReferenceDirectiveNamesWorker(typeDirectives, file.fileName); for (let i = 0; i < typeDirectives.length; i++) { const ref = file.typeReferenceDirectives[i]; const resolvedTypeReferenceDirective = resolutions[i]; // store resolved type directive on the file - setResolvedTypeReferenceDirective(file, ref.fileName, resolvedTypeReferenceDirective); - processTypeReferenceDirective(ref.fileName, resolvedTypeReferenceDirective, file, ref.pos, ref.end); + const fileName = ref.fileName.toLocaleLowerCase(); + setResolvedTypeReferenceDirective(file, fileName, resolvedTypeReferenceDirective); + processTypeReferenceDirective(fileName, resolvedTypeReferenceDirective, file, ref.pos, ref.end); } } @@ -2049,7 +1411,7 @@ namespace ts { const otherFileText = host.readFile(resolvedTypeReferenceDirective.resolvedFileName); if (otherFileText !== getSourceFile(previousResolution.resolvedFileName).text) { fileProcessingDiagnostics.add(createDiagnostic(refFile, refPos, refEnd, - Diagnostics.Conflicting_library_definitions_for_0_found_at_1_and_2_Copy_the_correct_file_to_the_typings_folder_to_resolve_this_conflict, + Diagnostics.Conflicting_definitions_for_0_found_at_1_and_2_Consider_installing_a_specific_version_of_this_library_to_resolve_the_conflict, typeReferenceDirective, resolvedTypeReferenceDirective.resolvedFileName, previousResolution.resolvedFileName @@ -2089,7 +1451,7 @@ namespace ts { function processImportedModules(file: SourceFile, basePath: string) { collectExternalModuleReferences(file); if (file.imports.length || file.moduleAugmentations.length) { - file.resolvedModules = {}; + file.resolvedModules = createMap(); const moduleNames = map(concatenate(file.imports, file.moduleAugmentations), getTextOfLiteral); const resolutions = resolveModuleNamesWorker(moduleNames, getNormalizedAbsolutePath(file.fileName, currentDirectory)); for (let i = 0; i < moduleNames.length; i++) { diff --git a/src/compiler/scanner.ts b/src/compiler/scanner.ts index 6dd84298008ce..c1431ca23ed50 100644 --- a/src/compiler/scanner.ts +++ b/src/compiler/scanner.ts @@ -55,7 +55,7 @@ namespace ts { tryScan(callback: () => T): T; } - const textToToken: Map = { + const textToToken = createMap({ "abstract": SyntaxKind.AbstractKeyword, "any": SyntaxKind.AnyKeyword, "as": SyntaxKind.AsKeyword, @@ -179,7 +179,7 @@ namespace ts { "|=": 
SyntaxKind.BarEqualsToken, "^=": SyntaxKind.CaretEqualsToken, "@": SyntaxKind.AtToken, - }; + }); /* As per ECMAScript Language Specification 3th Edition, Section 7.6: Identifiers @@ -274,9 +274,7 @@ namespace ts { function makeReverseMap(source: Map): string[] { const result: string[] = []; for (const name in source) { - if (source.hasOwnProperty(name)) { - result[source[name]] = name; - } + result[source[name]] = name; } return result; } diff --git a/src/compiler/sourcemap.ts b/src/compiler/sourcemap.ts index 2d8c36a3d0252..4046867faf108 100644 --- a/src/compiler/sourcemap.ts +++ b/src/compiler/sourcemap.ts @@ -46,6 +46,7 @@ namespace ts { export function createSourceMapWriter(host: EmitHost, writer: EmitTextWriter): SourceMapWriter { const compilerOptions = host.getCompilerOptions(); + const extendedDiagnostics = compilerOptions.extendedDiagnostics; let currentSourceFile: SourceFile; let sourceMapDir: string; // The directory in which sourcemap will be let stopOverridingSpan = false; @@ -240,7 +241,9 @@ namespace ts { return; } - const start = performance.mark(); + if (extendedDiagnostics) { + performance.mark("beforeSourcemap"); + } const sourceLinePos = getLineAndCharacterOfPosition(currentSourceFile, pos); @@ -282,7 +285,10 @@ namespace ts { updateLastEncodedAndRecordedSpans(); - performance.measure("Source Map", start); + if (extendedDiagnostics) { + performance.mark("afterSourcemap"); + performance.measure("Source Map", "beforeSourcemap", "afterSourcemap"); + } } function getStartPos(range: TextRange) { diff --git a/src/compiler/sys.ts b/src/compiler/sys.ts index 29ae2c60af165..350d75429b754 100644 --- a/src/compiler/sys.ts +++ b/src/compiler/sys.ts @@ -233,15 +233,15 @@ namespace ts { const useNonPollingWatchers = process.env["TSC_NONPOLLING_WATCHER"]; function createWatchedFileSet() { - const dirWatchers: Map = {}; + const dirWatchers = createMap(); // One file can have multiple watchers - const fileWatcherCallbacks: Map = {}; + const fileWatcherCallbacks = createMap(); return { addFile, removeFile }; function reduceDirWatcherRefCountForFile(fileName: string) { const dirName = getDirectoryPath(fileName); - if (hasProperty(dirWatchers, dirName)) { - const watcher = dirWatchers[dirName]; + const watcher = dirWatchers[dirName]; + if (watcher) { watcher.referenceCount -= 1; if (watcher.referenceCount <= 0) { watcher.close(); @@ -251,13 +251,12 @@ namespace ts { } function addDirWatcher(dirPath: string): void { - if (hasProperty(dirWatchers, dirPath)) { - const watcher = dirWatchers[dirPath]; + let watcher = dirWatchers[dirPath]; + if (watcher) { watcher.referenceCount += 1; return; } - - const watcher: DirectoryWatcher = _fs.watch( + watcher = _fs.watch( dirPath, { persistent: true }, (eventName: string, relativeFileName: string) => fileEventHandler(eventName, relativeFileName, dirPath) @@ -268,12 +267,7 @@ namespace ts { } function addFileWatcherCallback(filePath: string, callback: FileWatcherCallback): void { - if (hasProperty(fileWatcherCallbacks, filePath)) { - fileWatcherCallbacks[filePath].push(callback); - } - else { - fileWatcherCallbacks[filePath] = [callback]; - } + (fileWatcherCallbacks[filePath] || (fileWatcherCallbacks[filePath] = [])).push(callback); } function addFile(fileName: string, callback: FileWatcherCallback): WatchedFile { @@ -289,8 +283,9 @@ namespace ts { } function removeFileWatcherCallback(filePath: string, callback: FileWatcherCallback) { - if (hasProperty(fileWatcherCallbacks, filePath)) { - const newCallbacks = copyListRemovingItem(callback, 
fileWatcherCallbacks[filePath]); + const callbacks = fileWatcherCallbacks[filePath]; + if (callbacks) { + const newCallbacks = copyListRemovingItem(callback, callbacks); if (newCallbacks.length === 0) { delete fileWatcherCallbacks[filePath]; } @@ -306,7 +301,7 @@ namespace ts { ? undefined : ts.getNormalizedAbsolutePath(relativeFileName, baseDirPath); // Some applications save a working file via rename operations - if ((eventName === "change" || eventName === "rename") && hasProperty(fileWatcherCallbacks, fileName)) { + if ((eventName === "change" || eventName === "rename") && fileWatcherCallbacks[fileName]) { for (const fileCallback of fileWatcherCallbacks[fileName]) { fileCallback(fileName); } diff --git a/src/compiler/tsc.ts b/src/compiler/tsc.ts index 10538d0c009ee..62b58609086e6 100644 --- a/src/compiler/tsc.ts +++ b/src/compiler/tsc.ts @@ -122,11 +122,11 @@ namespace ts { const gutterSeparator = " "; const resetEscapeSequence = "\u001b[0m"; const ellipsis = "..."; - const categoryFormatMap: Map = { + const categoryFormatMap = createMap({ [DiagnosticCategory.Warning]: yellowForegroundEscapeSequence, [DiagnosticCategory.Error]: redForegroundEscapeSequence, [DiagnosticCategory.Message]: blueForegroundEscapeSequence, - }; + }); function formatAndReset(text: string, formatStyle: string) { return formatStyle + text + resetEscapeSequence; @@ -432,7 +432,7 @@ namespace ts { } // reset the cache of existing files - cachedExistingFiles = {}; + cachedExistingFiles = createMap(); const compileResult = compile(rootFileNames, compilerOptions, compilerHost); @@ -445,10 +445,9 @@ namespace ts { } function cachedFileExists(fileName: string): boolean { - if (hasProperty(cachedExistingFiles, fileName)) { - return cachedExistingFiles[fileName]; - } - return cachedExistingFiles[fileName] = hostFileExists(fileName); + return fileName in cachedExistingFiles + ? cachedExistingFiles[fileName] + : cachedExistingFiles[fileName] = hostFileExists(fileName); } function getSourceFile(fileName: string, languageVersion: ScriptTarget, onError?: (message: string) => void) { @@ -676,7 +675,7 @@ namespace ts { const usageColumn: string[] = []; // Things like "-d, --declaration" go in here. 
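Most of the object-literal maps in these hunks become createMap() calls, and lookups drop their hasProperty guards (for example, cachedFileExists now uses a plain `in` check). A plausible sketch of such a helper, assuming an Object.create(null)-based implementation, which may differ from the real one:

interface MapLike<T> { [key: string]: T; }

// A string-keyed store with no Object.prototype members, so `key in map` and
// `for...in` need no hasOwnProperty guard. (The real Map type in types.ts adds a
// brand property on top of MapLike so ordinary object literals cannot masquerade as maps.)
function createMap<T>(template?: MapLike<T>): MapLike<T> {
    const map: MapLike<T> = Object.create(null);
    if (template) {
        for (const key in template) {
            map[key] = template[key];
        }
    }
    return map;
}

const cachedExistingFiles = createMap<boolean>();

function cachedFileExists(fileName: string, hostFileExists: (f: string) => boolean): boolean {
    return fileName in cachedExistingFiles
        ? cachedExistingFiles[fileName]
        : cachedExistingFiles[fileName] = hostFileExists(fileName);
}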
const descriptionColumn: string[] = []; - const optionsDescriptionMap: Map = {}; // Map between option.description and list of option.type if it is a kind + const optionsDescriptionMap = createMap(); // Map between option.description and list of option.type if it is a kind for (let i = 0; i < optsList.length; i++) { const option = optsList[i]; @@ -704,9 +703,10 @@ namespace ts { description = getDiagnosticText(option.description); const options: string[] = []; const element = (option).element; - forEachKey(>element.type, key => { + const typeMap = >element.type; + for (const key in typeMap) { options.push(`'${key}'`); - }); + } optionsDescriptionMap[description] = options; } else { @@ -763,68 +763,11 @@ namespace ts { reportDiagnostic(createCompilerDiagnostic(Diagnostics.A_tsconfig_json_file_is_already_defined_at_Colon_0, file), /* host */ undefined); } else { - const compilerOptions = extend(options, defaultInitCompilerOptions); - const configurations: any = { - compilerOptions: serializeCompilerOptions(compilerOptions) - }; - - if (fileNames && fileNames.length) { - // only set the files property if we have at least one file - configurations.files = fileNames; - } - else { - configurations.exclude = ["node_modules"]; - if (compilerOptions.outDir) { - configurations.exclude.push(compilerOptions.outDir); - } - } - - sys.writeFile(file, JSON.stringify(configurations, undefined, 4)); + sys.writeFile(file, JSON.stringify(generateTSConfig(options, fileNames), undefined, 4)); reportDiagnostic(createCompilerDiagnostic(Diagnostics.Successfully_created_a_tsconfig_json_file), /* host */ undefined); } return; - - function serializeCompilerOptions(options: CompilerOptions): Map { - const result: Map = {}; - const optionsNameMap = getOptionNameMap().optionNameMap; - - for (const name in options) { - if (hasProperty(options, name)) { - // tsconfig only options cannot be specified via command line, - // so we can assume that only types that can appear here string | number | boolean - const value = options[name]; - switch (name) { - case "init": - case "watch": - case "version": - case "help": - case "project": - break; - default: - let optionDefinition = optionsNameMap[name.toLowerCase()]; - if (optionDefinition) { - if (typeof optionDefinition.type === "string") { - // string, number or boolean - result[name] = value; - } - else { - // Enum - const typeMap = >optionDefinition.type; - for (const key in typeMap) { - if (hasProperty(typeMap, key)) { - if (typeMap[key] === value) - result[name] = key; - } - } - } - } - break; - } - } - } - return result; - } } } diff --git a/src/compiler/types.ts b/src/compiler/types.ts index f2e95458032d1..202f45eb15388 100644 --- a/src/compiler/types.ts +++ b/src/compiler/types.ts @@ -1,9 +1,13 @@ - namespace ts { - export interface Map { + + export interface MapLike { [index: string]: T; } + export interface Map extends MapLike { + __mapBrand: any; + } + // branded string type used to store absolute, normalized and canonicalized paths // arbitrary file name can be converted to Path via toPath function export type Path = string & { __pathBrand: any }; @@ -347,6 +351,7 @@ namespace ts { JSDocTypedefTag, JSDocPropertyTag, JSDocTypeLiteral, + JSDocLiteralType, // Synthesized list SyntaxList, @@ -377,9 +382,9 @@ namespace ts { LastBinaryOperator = CaretEqualsToken, FirstNode = QualifiedName, FirstJSDocNode = JSDocTypeExpression, - LastJSDocNode = JSDocTypeLiteral, + LastJSDocNode = JSDocLiteralType, FirstJSDocTagNode = JSDocComment, - LastJSDocTagNode = JSDocTypeLiteral + 
LastJSDocTagNode = JSDocLiteralType } export const enum NodeFlags { @@ -983,13 +988,19 @@ namespace ts { multiLine?: boolean; } + export type EntityNameExpression = Identifier | PropertyAccessEntityNameExpression; + export type EntityNameOrEntityNameExpression = EntityName | EntityNameExpression; + // @kind(SyntaxKind.PropertyAccessExpression) export interface PropertyAccessExpression extends MemberExpression, Declaration { expression: LeftHandSideExpression; name: Identifier; } - - export type IdentifierOrPropertyAccess = Identifier | PropertyAccessExpression; + /** Brand for a PropertyAccessExpression which, like a QualifiedName, consists of a sequence of identifiers separated by dots. */ + export interface PropertyAccessEntityNameExpression extends PropertyAccessExpression { + _propertyAccessExpressionLikeQualifiedNameBrand?: any; + expression: EntityNameExpression; + } // @kind(SyntaxKind.ElementAccessExpression) export interface ElementAccessExpression extends MemberExpression { @@ -1493,6 +1504,10 @@ namespace ts { type: JSDocType; } + export interface JSDocLiteralType extends JSDocType { + literal: LiteralTypeNode; + } + export type JSDocTypeReferencingNode = JSDocThisType | JSDocConstructorType | JSDocVariadicType | JSDocOptionalType | JSDocNullableType | JSDocNonNullableType; // @kind(SyntaxKind.JSDocRecordMember) @@ -1607,6 +1622,16 @@ namespace ts { antecedent: FlowNode; } + export type FlowType = Type | IncompleteType; + + // Incomplete types occur during control flow analysis of loops. An IncompleteType + // is distinguished from a regular type by a flags value of zero. Incomplete type + // objects are internal to the getFlowTypeOfRefecence function and never escape it. + export interface IncompleteType { + flags: TypeFlags; // No flags set + type: Type; // The type marked incomplete + } + export interface AmdDependency { path: string; name: string; @@ -2023,7 +2048,7 @@ namespace ts { writeTypeOfExpression(expr: Expression, enclosingDeclaration: Node, flags: TypeFormatFlags, writer: SymbolWriter): void; writeBaseConstructorTypeOfClass(node: ClassLikeDeclaration, enclosingDeclaration: Node, flags: TypeFormatFlags, writer: SymbolWriter): void; isSymbolAccessible(symbol: Symbol, enclosingDeclaration: Node, meaning: SymbolFlags): SymbolAccessibilityResult; - isEntityNameVisible(entityName: EntityName | Expression, enclosingDeclaration: Node): SymbolVisibilityResult; + isEntityNameVisible(entityName: EntityNameOrEntityNameExpression, enclosingDeclaration: Node): SymbolVisibilityResult; // Returns the constant value this property access resolves to, or 'undefined' for a non-constant getConstantValue(node: EnumMember | PropertyAccessExpression | ElementAccessExpression): number; getReferencedValueDeclaration(reference: Identifier): Declaration; @@ -2032,7 +2057,7 @@ namespace ts { moduleExportsSomeValue(moduleReferenceExpression: Expression): boolean; isArgumentsLocalBinding(node: Identifier): boolean; getExternalModuleFileFromDeclaration(declaration: ImportEqualsDeclaration | ImportDeclaration | ExportDeclaration | ModuleDeclaration): SourceFile; - getTypeReferenceDirectivesForEntityName(name: EntityName | PropertyAccessExpression): string[]; + getTypeReferenceDirectivesForEntityName(name: EntityNameOrEntityNameExpression): string[]; getTypeReferenceDirectivesForSymbol(symbol: Symbol, meaning?: SymbolFlags): string[]; } @@ -2139,6 +2164,8 @@ namespace ts { /* @internal */ exportSymbol?: Symbol; // Exported symbol associated with this symbol /* @internal */ constEnumOnlyModule?: 
boolean; // True if module contains only const enums or other modules with only const enums /* @internal */ isReferenced?: boolean; // True if the symbol is referenced elsewhere + /* @internal */ isReplaceableByMethod?: boolean; // Can this Javascript class property be replaced by a method symbol? + /* @internal */ isAssigned?: boolean; // True if the symbol is a parameter with assignments } /* @internal */ @@ -2152,6 +2179,8 @@ namespace ts { mapper?: TypeMapper; // Type mapper for instantiation alias referenced?: boolean; // True if alias symbol has been referenced as a value containingType?: UnionOrIntersectionType; // Containing union or intersection type for synthetic property + hasCommonType?: boolean; // True if constituents of synthetic property all have same type + isDiscriminantProperty?: boolean; // True if discriminant synthetic property resolvedExports?: SymbolTable; // Resolved exports of module exportsChecked?: boolean; // True if exports of external module have been checked isDeclarationWithCollidingName?: boolean; // True if symbol is block scoped redeclaration @@ -2162,9 +2191,7 @@ namespace ts { /* @internal */ export interface TransientSymbol extends Symbol, SymbolLinks { } - export interface SymbolTable { - [index: string]: Symbol; - } + export type SymbolTable = Map; /** Represents a "prefix*suffix" pattern. */ /* @internal */ @@ -2191,23 +2218,24 @@ namespace ts { AsyncMethodWithSuper = 0x00000800, // An async method that reads a value from a member of 'super'. AsyncMethodWithSuperBinding = 0x00001000, // An async method that assigns a value to a member of 'super'. CaptureArguments = 0x00002000, // Lexical 'arguments' used in body (for async functions) - EnumValuesComputed = 0x00004000, // Values for enum members have been computed, and any errors have been reported for them. - LexicalModuleMergesWithClass = 0x00008000, // Instantiated lexical module declaration is merged with a previous class declaration. - LoopWithCapturedBlockScopedBinding = 0x00010000, // Loop that contains block scoped variable captured in closure - CapturedBlockScopedBinding = 0x00020000, // Block-scoped binding that is captured in some function - BlockScopedBindingInLoop = 0x00040000, // Block-scoped binding with declaration nested inside iteration statement - ClassWithBodyScopedClassBinding = 0x00080000, // Decorated class that contains a binding to itself inside of the class body. - BodyScopedClassBinding = 0x00100000, // Binding to a decorated class inside of the class's body. - NeedsLoopOutParameter = 0x00200000, // Block scoped binding whose value should be explicitly copied outside of the converted loop + EnumValuesComputed = 0x00004000, // Values for enum members have been computed, and any errors have been reported for them. + LexicalModuleMergesWithClass = 0x00008000, // Instantiated lexical module declaration is merged with a previous class declaration. + LoopWithCapturedBlockScopedBinding = 0x00010000, // Loop that contains block scoped variable captured in closure + CapturedBlockScopedBinding = 0x00020000, // Block-scoped binding that is captured in some function + BlockScopedBindingInLoop = 0x00040000, // Block-scoped binding with declaration nested inside iteration statement + ClassWithBodyScopedClassBinding = 0x00080000, // Decorated class that contains a binding to itself inside of the class body. + BodyScopedClassBinding = 0x00100000, // Binding to a decorated class inside of the class's body. 
+ NeedsLoopOutParameter = 0x00200000, // Block scoped binding whose value should be explicitly copied outside of the converted loop + AssignmentsMarked = 0x00400000, // Parameter assignments have been marked } /* @internal */ export interface NodeLinks { + flags?: NodeCheckFlags; // Set of flags specific to Node resolvedType?: Type; // Cached type of type node resolvedSignature?: Signature; // Cached signature of signature node or call expression resolvedSymbol?: Symbol; // Cached name resolution result resolvedIndexInfo?: IndexInfo; // Cached indexing info resolution result - flags?: NodeCheckFlags; // Set of flags specific to Node enumMemberValue?: number; // Constant value of enum member isVisible?: boolean; // Is this node visible hasReportedStatementInAmbientContext?: boolean; // Cache boolean if we report statements in ambient context @@ -2361,6 +2389,7 @@ namespace ts { export interface TupleType extends ObjectType { elementTypes: Type[]; // Element types + thisType?: Type; // This-type of tuple (only needed for tuples that are constraints of type parameters) } export interface UnionOrIntersectionType extends Type { @@ -2547,7 +2576,7 @@ namespace ts { } export type RootPaths = string[]; - export type PathSubstitutions = Map; + export type PathSubstitutions = MapLike; export type TsConfigOnlyOptions = RootPaths | PathSubstitutions; export type CompilerOptionsValue = string | number | boolean | (string | number)[] | TsConfigOnlyOptions; @@ -2707,7 +2736,7 @@ namespace ts { fileNames: string[]; raw?: any; errors: Diagnostic[]; - wildcardDirectories?: Map; + wildcardDirectories?: MapLike; compileOnSave?: boolean; } @@ -2718,7 +2747,7 @@ namespace ts { export interface ExpandResult { fileNames: string[]; - wildcardDirectories: Map; + wildcardDirectories: MapLike; } /* @internal */ @@ -2740,7 +2769,7 @@ namespace ts { /* @internal */ export interface CommandLineOptionOfCustomType extends CommandLineOptionBase { - type: Map; // an object literal mapping named values to actual values + type: Map; // an object literal mapping named values to actual values } /* @internal */ @@ -2903,6 +2932,7 @@ namespace ts { directoryExists?(directoryName: string): boolean; realpath?(path: string): string; getCurrentDirectory?(): string; + getDirectories?(path: string): string[]; } export interface ResolvedModule { diff --git a/src/compiler/utilities.ts b/src/compiler/utilities.ts index 576c7c8741446..9d60521d76bfe 100644 --- a/src/compiler/utilities.ts +++ b/src/compiler/utilities.ts @@ -87,26 +87,7 @@ namespace ts { return node.end - node.pos; } - export function mapIsEqualTo(map1: Map, map2: Map): boolean { - if (!map1 || !map2) { - return map1 === map2; - } - return containsAll(map1, map2) && containsAll(map2, map1); - } - - function containsAll(map: Map, other: Map): boolean { - for (const key in map) { - if (!hasProperty(map, key)) { - continue; - } - if (!hasProperty(other, key) || map[key] !== other[key]) { - return false; - } - } - return true; - } - - export function arrayIsEqualTo(array1: T[], array2: T[], equaler?: (a: T, b: T) => boolean): boolean { + export function arrayIsEqualTo(array1: ReadonlyArray, array2: ReadonlyArray, equaler?: (a: T, b: T) => boolean): boolean { if (!array1 || !array2) { return array1 === array2; } @@ -126,7 +107,7 @@ namespace ts { } export function hasResolvedModule(sourceFile: SourceFile, moduleNameText: string): boolean { - return sourceFile.resolvedModules && hasProperty(sourceFile.resolvedModules, moduleNameText); + return !!(sourceFile.resolvedModules && 
sourceFile.resolvedModules[moduleNameText]); } export function getResolvedModule(sourceFile: SourceFile, moduleNameText: string): ResolvedModule { @@ -135,7 +116,7 @@ namespace ts { export function setResolvedModule(sourceFile: SourceFile, moduleNameText: string, resolvedModule: ResolvedModule): void { if (!sourceFile.resolvedModules) { - sourceFile.resolvedModules = {}; + sourceFile.resolvedModules = createMap(); } sourceFile.resolvedModules[moduleNameText] = resolvedModule; @@ -143,7 +124,7 @@ namespace ts { export function setResolvedTypeReferenceDirective(sourceFile: SourceFile, typeReferenceDirectiveName: string, resolvedTypeReferenceDirective: ResolvedTypeReferenceDirective): void { if (!sourceFile.resolvedTypeReferenceDirectiveNames) { - sourceFile.resolvedTypeReferenceDirectiveNames = {}; + sourceFile.resolvedTypeReferenceDirectiveNames = createMap(); } sourceFile.resolvedTypeReferenceDirectiveNames[typeReferenceDirectiveName] = resolvedTypeReferenceDirective; @@ -166,7 +147,7 @@ namespace ts { } for (let i = 0; i < names.length; i++) { const newResolution = newResolutions[i]; - const oldResolution = oldResolutions && hasProperty(oldResolutions, names[i]) ? oldResolutions[names[i]] : undefined; + const oldResolution = oldResolutions && oldResolutions[names[i]]; const changed = oldResolution ? !newResolution || !comparer(oldResolution, newResolution) @@ -1033,14 +1014,14 @@ namespace ts { && (node).expression.kind === SyntaxKind.SuperKeyword; } - - export function getEntityNameFromTypeNode(node: TypeNode): EntityName | Expression { + export function getEntityNameFromTypeNode(node: TypeNode): EntityNameOrEntityNameExpression { if (node) { switch (node.kind) { case SyntaxKind.TypeReference: return (node).typeName; case SyntaxKind.ExpressionWithTypeArguments: - return (node).expression; + Debug.assert(isEntityNameExpression((node).expression)); + return (node).expression; case SyntaxKind.Identifier: case SyntaxKind.QualifiedName: return (node); @@ -1215,12 +1196,6 @@ namespace ts { return false; } - export function isExternalModuleNameRelative(moduleName: string): boolean { - // TypeScript 1.0 spec (April 2014): 11.2.1 - // An external module name is "relative" if the first term is "." or "..". - return /^\.\.?($|[\\/])/.test(moduleName); - } - export function isInstantiatedModule(node: ModuleDeclaration, preserveConstEnums: boolean) { const moduleState = getModuleInstanceState(node); return moduleState === ModuleInstanceState.Instantiated || @@ -1569,6 +1544,7 @@ namespace ts { case SyntaxKind.MethodSignature: case SyntaxKind.ModuleDeclaration: case SyntaxKind.NamespaceImport: + case SyntaxKind.NamespaceExportDeclaration: case SyntaxKind.Parameter: case SyntaxKind.PropertyAssignment: case SyntaxKind.PropertyDeclaration: @@ -1693,8 +1669,8 @@ namespace ts { // import * as from ... // import { x as } from ... // export { x as } from ... - // export = ... - // export default ... 
+ // export = + // export default export function isAliasSymbolDeclaration(node: Node): boolean { return node.kind === SyntaxKind.ImportEqualsDeclaration || node.kind === SyntaxKind.NamespaceExportDeclaration || @@ -1702,7 +1678,11 @@ namespace ts { node.kind === SyntaxKind.NamespaceImport || node.kind === SyntaxKind.ImportSpecifier || node.kind === SyntaxKind.ExportSpecifier || - node.kind === SyntaxKind.ExportAssignment && (node).expression.kind === SyntaxKind.Identifier; + node.kind === SyntaxKind.ExportAssignment && exportAssignmentIsAlias(node); + } + + export function exportAssignmentIsAlias(node: ExportAssignment): boolean { + return isEntityNameExpression(node.expression); } export function getClassExtendsHeritageClauseElement(node: ClassLikeDeclaration | InterfaceDeclaration) { @@ -1965,7 +1945,7 @@ namespace ts { export function createDiagnosticCollection(): DiagnosticCollection { let nonFileDiagnostics: Diagnostic[] = []; - const fileDiagnostics: Map = {}; + const fileDiagnostics = createMap(); let diagnosticsModified = false; let modificationCount = 0; @@ -1983,12 +1963,11 @@ namespace ts { } function reattachFileDiagnostics(newFile: SourceFile): void { - if (!hasProperty(fileDiagnostics, newFile.fileName)) { - return; - } - - for (const diagnostic of fileDiagnostics[newFile.fileName]) { - diagnostic.file = newFile; + const diagnostics = fileDiagnostics[newFile.fileName]; + if (diagnostics) { + for (const diagnostic of diagnostics) { + diagnostic.file = newFile; + } } } @@ -2029,9 +2008,7 @@ namespace ts { forEach(nonFileDiagnostics, pushDiagnostic); for (const key in fileDiagnostics) { - if (hasProperty(fileDiagnostics, key)) { - forEach(fileDiagnostics[key], pushDiagnostic); - } + forEach(fileDiagnostics[key], pushDiagnostic); } return sortAndDeduplicateDiagnostics(allDiagnostics); @@ -2046,9 +2023,7 @@ namespace ts { nonFileDiagnostics = sortAndDeduplicateDiagnostics(nonFileDiagnostics); for (const key in fileDiagnostics) { - if (hasProperty(fileDiagnostics, key)) { - fileDiagnostics[key] = sortAndDeduplicateDiagnostics(fileDiagnostics[key]); - } + fileDiagnostics[key] = sortAndDeduplicateDiagnostics(fileDiagnostics[key]); } } } @@ -2059,7 +2034,7 @@ namespace ts { // the map below must be updated. Note that this regexp *does not* include the 'delete' character. // There is no reason for this other than that JSON.stringify does not handle it either. const escapedCharsRegExp = /[\\\"\u0000-\u001f\t\v\f\b\r\n\u2028\u2029\u0085]/g; - const escapedCharsMap: Map = { + const escapedCharsMap = createMap({ "\0": "\\0", "\t": "\\t", "\v": "\\v", @@ -2072,7 +2047,7 @@ namespace ts { "\u2028": "\\u2028", // lineSeparator "\u2029": "\\u2029", // paragraphSeparator "\u0085": "\\u0085" // nextLine - }; + }); /** @@ -2249,16 +2224,6 @@ namespace ts { return removeFileExtension(path) + ".d.ts"; } - export function getEmitScriptTarget(compilerOptions: CompilerOptions) { - return compilerOptions.target || ScriptTarget.ES3; - } - - export function getEmitModuleKind(compilerOptions: CompilerOptions) { - return typeof compilerOptions.module === "number" ? - compilerOptions.module : - getEmitScriptTarget(compilerOptions) === ScriptTarget.ES6 ? ModuleKind.ES6 : ModuleKind.CommonJS; - } - export interface EmitFileNames { jsFilePath: string; sourceMapFilePath: string; @@ -2680,22 +2645,9 @@ namespace ts { isClassLike(node.parent.parent); } - // Returns false if this heritage clause element's expression contains something unsupported - // (i.e. not a name or dotted name). 
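The utilities.ts hunk just below replaces isSupportedExpressionWithTypeArguments with the reusable isEntityNameExpression predicate. A stand-alone sketch of the same recursive idea over a simplified AST (these node shapes are illustrative, not ts.Node):

// A dotted name such as `a.b.c` is either an identifier or a property access whose
// expression part is itself a dotted name.
type SimpleNode =
    | { kind: "Identifier"; text: string }
    | { kind: "PropertyAccess"; expression: SimpleNode; name: string };

function isDottedName(node: SimpleNode): boolean {
    return node.kind === "Identifier" ||
        (node.kind === "PropertyAccess" && isDottedName(node.expression));
}

const example: SimpleNode = {
    kind: "PropertyAccess",
    name: "c",
    expression: { kind: "Identifier", text: "a" }
};
const isEntityName = isDottedName(example); // true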
- export function isSupportedExpressionWithTypeArguments(node: ExpressionWithTypeArguments): boolean { - return isSupportedExpressionWithTypeArgumentsRest(node.expression); - } - - function isSupportedExpressionWithTypeArgumentsRest(node: Expression): boolean { - if (node.kind === SyntaxKind.Identifier) { - return true; - } - else if (isPropertyAccessExpression(node)) { - return isSupportedExpressionWithTypeArgumentsRest(node.expression); - } - else { - return false; - } + export function isEntityNameExpression(node: Expression): node is EntityNameExpression { + return node.kind === SyntaxKind.Identifier || + node.kind === SyntaxKind.PropertyAccessExpression && isEntityNameExpression((node).expression); } export function isRightSideOfQualifiedNameOrPropertyAccess(node: Node) { @@ -2718,14 +2670,10 @@ namespace ts { return symbol && symbol.valueDeclaration && (symbol.valueDeclaration.flags & NodeFlags.Default) ? symbol.valueDeclaration.localSymbol : undefined; } - export function hasJavaScriptFileExtension(fileName: string) { - return forEach(supportedJavascriptExtensions, extension => fileExtensionIs(fileName, extension)); - } - - export function hasTypeScriptFileExtension(fileName: string) { - return forEach(supportedTypeScriptExtensions, extension => fileExtensionIs(fileName, extension)); + /** Return ".ts", ".d.ts", or ".tsx", if that is the extension. */ + export function tryExtractTypeScriptExtension(fileName: string): string | undefined { + return find(supportedTypescriptExtensionsForExtractExtension, extension => fileExtensionIs(fileName, extension)); } - /** * Replace each instance of non-ascii characters by one, two, three, or four escape sequences * representing the UTF-8 encoding of the character, and return the expanded char code list. @@ -2805,7 +2753,7 @@ namespace ts { } function stringifyObject(value: any) { - return `{${reduceProperties(value, stringifyProperty, "")}}`; + return `{${reduceOwnProperties(value, stringifyProperty, "")}}`; } function stringifyProperty(memo: string, value: any, key: string) { diff --git a/src/harness/compilerRunner.ts b/src/harness/compilerRunner.ts index ecdc18394b079..a64435b7ddcbe 100644 --- a/src/harness/compilerRunner.ts +++ b/src/harness/compilerRunner.ts @@ -1,8 +1,6 @@ /// /// /// -// In harness baselines, null is different than undefined. See `generateActual` in `harness.ts`. -/* tslint:disable:no-null-keyword */ const enum CompilerTestType { Conformance, @@ -52,7 +50,7 @@ class CompilerBaselineRunner extends RunnerBase { private makeUnitName(name: string, root: string) { const path = ts.toPath(name, root, (fileName) => Harness.Compiler.getCanonicalFileName(fileName)); const pathStart = ts.toPath(Harness.IO.getCurrentDirectory(), "", (fileName) => Harness.Compiler.getCanonicalFileName(fileName)); - return path.replace(pathStart, "/"); + return pathStart ? path.replace(pathStart, "/") : path; }; public checkTestCodeOutput(fileName: string) { @@ -136,27 +134,16 @@ class CompilerBaselineRunner extends RunnerBase { otherFiles = undefined; }); - function getByteOrderMarkText(file: Harness.Compiler.GeneratedFile): string { - return file.writeByteOrderMark ? 
"\u00EF\u00BB\u00BF" : ""; - } - - function getErrorBaseline(toBeCompiled: Harness.Compiler.TestFile[], otherFiles: Harness.Compiler.TestFile[], result: Harness.Compiler.CompilerResult) { - return Harness.Compiler.getErrorBaseline(toBeCompiled.concat(otherFiles), result.errors); - } - // check errors it("Correct errors for " + fileName, () => { if (this.errors) { - Harness.Baseline.runBaseline("Correct errors for " + fileName, justName.replace(/\.tsx?$/, ".errors.txt"), (): string => { - if (result.errors.length === 0) return null; - return getErrorBaseline(toBeCompiled, otherFiles, result); - }); + Harness.Compiler.doErrorBaseline(justName, toBeCompiled.concat(otherFiles), result.errors); } }); it (`Correct module resolution tracing for ${fileName}`, () => { if (options.traceResolution) { - Harness.Baseline.runBaseline("Correct module resolution tracing for " + fileName, justName.replace(/\.tsx?$/, ".trace.json"), () => { + Harness.Baseline.runBaseline(justName.replace(/\.tsx?$/, ".trace.json"), () => { return JSON.stringify(result.traceResults || [], undefined, 4); }); } @@ -165,11 +152,13 @@ class CompilerBaselineRunner extends RunnerBase { // Source maps? it("Correct sourcemap content for " + fileName, () => { if (options.sourceMap || options.inlineSourceMap) { - Harness.Baseline.runBaseline("Correct sourcemap content for " + fileName, justName.replace(/\.tsx?$/, ".sourcemap.txt"), () => { + Harness.Baseline.runBaseline(justName.replace(/\.tsx?$/, ".sourcemap.txt"), () => { const record = result.getSourceMapRecord(); if (options.noEmitOnError && result.errors.length !== 0 && record === undefined) { - // Because of the noEmitOnError option no files are created. We need to return null because baselining isn"t required. + // Because of the noEmitOnError option no files are created. We need to return null because baselining isn't required. + /* tslint:disable:no-null-keyword */ return null; + /* tslint:enable:no-null-keyword */ } return record; }); @@ -178,87 +167,12 @@ class CompilerBaselineRunner extends RunnerBase { it("Correct JS output for " + fileName, () => { if (hasNonDtsFiles && this.emit) { - if (!options.noEmit && result.files.length === 0 && result.errors.length === 0) { - throw new Error("Expected at least one js file to be emitted or at least one error to be created."); - } - - // check js output - Harness.Baseline.runBaseline("Correct JS output for " + fileName, justName.replace(/\.tsx?/, ".js"), () => { - let tsCode = ""; - const tsSources = otherFiles.concat(toBeCompiled); - if (tsSources.length > 1) { - tsCode += "//// [" + fileName + "] ////\r\n\r\n"; - } - for (let i = 0; i < tsSources.length; i++) { - tsCode += "//// [" + Harness.Path.getFileName(tsSources[i].unitName) + "]\r\n"; - tsCode += tsSources[i].content + (i < (tsSources.length - 1) ? 
"\r\n" : ""); - } - - let jsCode = ""; - for (let i = 0; i < result.files.length; i++) { - jsCode += "//// [" + Harness.Path.getFileName(result.files[i].fileName) + "]\r\n"; - jsCode += getByteOrderMarkText(result.files[i]); - jsCode += result.files[i].code; - } - - if (result.declFilesCode.length > 0) { - jsCode += "\r\n\r\n"; - for (let i = 0; i < result.declFilesCode.length; i++) { - jsCode += "//// [" + Harness.Path.getFileName(result.declFilesCode[i].fileName) + "]\r\n"; - jsCode += getByteOrderMarkText(result.declFilesCode[i]); - jsCode += result.declFilesCode[i].code; - } - } - - const declFileCompilationResult = - Harness.Compiler.compileDeclarationFiles( - toBeCompiled, otherFiles, result, harnessSettings, options, /*currentDirectory*/ undefined); - - if (declFileCompilationResult && declFileCompilationResult.declResult.errors.length) { - jsCode += "\r\n\r\n//// [DtsFileErrors]\r\n"; - jsCode += "\r\n\r\n"; - jsCode += getErrorBaseline(declFileCompilationResult.declInputFiles, declFileCompilationResult.declOtherFiles, declFileCompilationResult.declResult); - } - - if (jsCode.length > 0) { - return tsCode + "\r\n\r\n" + jsCode; - } - else { - return null; - } - }); + Harness.Compiler.doJsEmitBaseline(justName, fileName, options, result, toBeCompiled, otherFiles, harnessSettings); } }); it("Correct Sourcemap output for " + fileName, () => { - if (options.inlineSourceMap) { - if (result.sourceMaps.length > 0) { - throw new Error("No sourcemap files should be generated if inlineSourceMaps was set."); - } - return null; - } - else if (options.sourceMap) { - if (result.sourceMaps.length !== result.files.length) { - throw new Error("Number of sourcemap files should be same as js files."); - } - - Harness.Baseline.runBaseline("Correct Sourcemap output for " + fileName, justName.replace(/\.tsx?/, ".js.map"), () => { - if (options.noEmitOnError && result.errors.length !== 0 && result.sourceMaps.length === 0) { - // We need to return null here or the runBaseLine will actually create a empty file. - // Baselining isn't required here because there is no output. - return null; - } - - let sourceMapCode = ""; - for (let i = 0; i < result.sourceMaps.length; i++) { - sourceMapCode += "//// [" + Harness.Path.getFileName(result.sourceMaps[i].fileName) + "]\r\n"; - sourceMapCode += getByteOrderMarkText(result.sourceMaps[i]); - sourceMapCode += result.sourceMaps[i].code; - } - - return sourceMapCode; - }); - } + Harness.Compiler.doSourcemapBaseline(justName, options, result); }); it("Correct type/symbol baselines for " + fileName, () => { @@ -266,129 +180,7 @@ class CompilerBaselineRunner extends RunnerBase { return; } - // NEWTODO: Type baselines - if (result.errors.length !== 0) { - return; - } - - // The full walker simulates the types that you would get from doing a full - // compile. The pull walker simulates the types you get when you just do - // a type query for a random node (like how the LS would do it). Most of the - // time, these will be the same. However, occasionally, they can be different. - // Specifically, when the compiler internally depends on symbol IDs to order - // things, then we may see different results because symbols can be created in a - // different order with 'pull' operations, and thus can produce slightly differing - // output. 
- // - // For example, with a full type check, we may see a type displayed as: number | string - // But with a pull type check, we may see it as: string | number - // - // These types are equivalent, but depend on what order the compiler observed - // certain parts of the program. - - const program = result.program; - const allFiles = toBeCompiled.concat(otherFiles).filter(file => !!program.getSourceFile(file.unitName)); - - const fullWalker = new TypeWriterWalker(program, /*fullTypeCheck*/ true); - - const fullResults: ts.Map = {}; - const pullResults: ts.Map = {}; - - for (const sourceFile of allFiles) { - fullResults[sourceFile.unitName] = fullWalker.getTypeAndSymbols(sourceFile.unitName); - pullResults[sourceFile.unitName] = fullWalker.getTypeAndSymbols(sourceFile.unitName); - } - - // Produce baselines. The first gives the types for all expressions. - // The second gives symbols for all identifiers. - let e1: Error, e2: Error; - try { - checkBaseLines(/*isSymbolBaseLine*/ false); - } - catch (e) { - e1 = e; - } - - try { - checkBaseLines(/*isSymbolBaseLine*/ true); - } - catch (e) { - e2 = e; - } - - if (e1 || e2) { - throw e1 || e2; - } - - return; - - function checkBaseLines(isSymbolBaseLine: boolean) { - const fullBaseLine = generateBaseLine(fullResults, isSymbolBaseLine); - const pullBaseLine = generateBaseLine(pullResults, isSymbolBaseLine); - - const fullExtension = isSymbolBaseLine ? ".symbols" : ".types"; - const pullExtension = isSymbolBaseLine ? ".symbols.pull" : ".types.pull"; - - if (fullBaseLine !== pullBaseLine) { - Harness.Baseline.runBaseline("Correct full information for " + fileName, justName.replace(/\.tsx?/, fullExtension), () => fullBaseLine); - Harness.Baseline.runBaseline("Correct pull information for " + fileName, justName.replace(/\.tsx?/, pullExtension), () => pullBaseLine); - } - else { - Harness.Baseline.runBaseline("Correct information for " + fileName, justName.replace(/\.tsx?/, fullExtension), () => fullBaseLine); - } - } - - function generateBaseLine(typeWriterResults: ts.Map, isSymbolBaseline: boolean): string { - const typeLines: string[] = []; - const typeMap: { [fileName: string]: { [lineNum: number]: string[]; } } = {}; - - allFiles.forEach(file => { - const codeLines = file.content.split("\n"); - typeWriterResults[file.unitName].forEach(result => { - if (isSymbolBaseline && !result.symbol) { - return; - } - - const typeOrSymbolString = isSymbolBaseline ? 
result.symbol : result.type; - const formattedLine = result.sourceText.replace(/\r?\n/g, "") + " : " + typeOrSymbolString; - if (!typeMap[file.unitName]) { - typeMap[file.unitName] = {}; - } - - let typeInfo = [formattedLine]; - const existingTypeInfo = typeMap[file.unitName][result.line]; - if (existingTypeInfo) { - typeInfo = existingTypeInfo.concat(typeInfo); - } - typeMap[file.unitName][result.line] = typeInfo; - }); - - typeLines.push("=== " + file.unitName + " ===\r\n"); - for (let i = 0; i < codeLines.length; i++) { - const currentCodeLine = codeLines[i]; - typeLines.push(currentCodeLine + "\r\n"); - if (typeMap[file.unitName]) { - const typeInfo = typeMap[file.unitName][i]; - if (typeInfo) { - typeInfo.forEach(ty => { - typeLines.push(">" + ty + "\r\n"); - }); - if (i + 1 < codeLines.length && (codeLines[i + 1].match(/^\s*[{|}]\s*$/) || codeLines[i + 1].trim() === "")) { - } - else { - typeLines.push("\r\n"); - } - } - } - else { - typeLines.push("No type information for this code."); - } - } - }); - - return typeLines.join(""); - } - + Harness.Compiler.doTypeAndSymbolBaseline(justName, result, toBeCompiled.concat(otherFiles).filter(file => !!result.program.getSourceFile(file.unitName))); }); }); } diff --git a/src/harness/fourslash.ts b/src/harness/fourslash.ts index 18580e5dad2aa..91b641f621810 100644 --- a/src/harness/fourslash.ts +++ b/src/harness/fourslash.ts @@ -95,14 +95,14 @@ namespace FourSlash { export import IndentStyle = ts.IndentStyle; - const entityMap: ts.Map = { + const entityMap = ts.createMap({ "&": "&", "\"": """, "'": "'", "/": "/", "<": "<", ">": ">" - }; + }); export function escapeXmlAttributeValue(s: string) { return s.replace(/[&<>"'\/]/g, ch => entityMap[ch]); @@ -204,7 +204,7 @@ namespace FourSlash { public formatCodeSettings: ts.FormatCodeSettings; - private inputFiles: ts.Map = {}; // Map between inputFile's fileName and its content for easily looking up when resolving references + private inputFiles = ts.createMap(); // Map between inputFile's fileName and its content for easily looking up when resolving references // Add input file which has matched file name with the given reference-file path. 
// This is necessary when resolveReference flag is specified @@ -249,6 +249,7 @@ namespace FourSlash { if (compilationOptions.typeRoots) { compilationOptions.typeRoots = compilationOptions.typeRoots.map(p => ts.getNormalizedAbsolutePath(p, this.basePath)); } + compilationOptions.skipDefaultLibCheck = true; const languageServiceAdapter = this.getLanguageServiceAdapter(testType, this.cancellationToken, compilationOptions); this.languageServiceAdapterHost = languageServiceAdapter.getHost(); @@ -300,11 +301,11 @@ namespace FourSlash { } else { // resolveReference file-option is not specified then do not resolve any files and include all inputFiles - ts.forEachKey(this.inputFiles, fileName => { + for (const fileName in this.inputFiles) { if (!Harness.isDefaultLibraryFile(fileName)) { this.languageServiceAdapterHost.addScript(fileName, this.inputFiles[fileName], /*isRootFile*/ true); } - }); + } this.languageServiceAdapterHost.addScript(Harness.Compiler.defaultLibFileName, Harness.Compiler.getDefaultLibrarySourceFile().text, /*isRootFile*/ false); } @@ -376,7 +377,7 @@ namespace FourSlash { public verifyErrorExistsBetweenMarkers(startMarkerName: string, endMarkerName: string, negative: boolean) { const startMarker = this.getMarkerByName(startMarkerName); const endMarker = this.getMarkerByName(endMarkerName); - const predicate = function (errorMinChar: number, errorLimChar: number, startPos: number, endPos: number) { + const predicate = function(errorMinChar: number, errorLimChar: number, startPos: number, endPos: number) { return ((errorMinChar === startPos) && (errorLimChar === endPos)) ? true : false; }; @@ -428,12 +429,12 @@ namespace FourSlash { let predicate: (errorMinChar: number, errorLimChar: number, startPos: number, endPos: number) => boolean; if (after) { - predicate = function (errorMinChar: number, errorLimChar: number, startPos: number, endPos: number) { + predicate = function(errorMinChar: number, errorLimChar: number, startPos: number, endPos: number) { return ((errorMinChar >= startPos) && (errorLimChar >= startPos)) ? true : false; }; } else { - predicate = function (errorMinChar: number, errorLimChar: number, startPos: number, endPos: number) { + predicate = function(errorMinChar: number, errorLimChar: number, startPos: number, endPos: number) { return ((errorMinChar <= startPos) && (errorLimChar <= startPos)) ? true : false; }; } @@ -458,7 +459,7 @@ namespace FourSlash { endPos = endMarker.position; } - errors.forEach(function (error: ts.Diagnostic) { + errors.forEach(function(error: ts.Diagnostic) { if (predicate(error.start, error.start + error.length, startPos, endPos)) { exists = true; } @@ -475,7 +476,7 @@ namespace FourSlash { Harness.IO.log("Unexpected error(s) found. 
Error list is:"); } - errors.forEach(function (error: ts.Diagnostic) { + errors.forEach(function(error: ts.Diagnostic) { Harness.IO.log(" minChar: " + error.start + ", limChar: " + (error.start + error.length) + ", message: " + ts.flattenDiagnosticMessageText(error.messageText, Harness.IO.newLine()) + "\n"); @@ -593,9 +594,9 @@ namespace FourSlash { public noItemsWithSameNameButDifferentKind(): void { const completions = this.getCompletionListAtCaret(); - const uniqueItems: ts.Map = {}; + const uniqueItems = ts.createMap(); for (const item of completions.entries) { - if (!ts.hasProperty(uniqueItems, item.name)) { + if (!(item.name in uniqueItems)) { uniqueItems[item.name] = item.kind; } else { @@ -773,7 +774,7 @@ namespace FourSlash { } public verifyRangesWithSameTextReferenceEachOther() { - ts.forEachValue(this.rangesByText(), ranges => this.verifyRangesReferenceEachOther(ranges)); + ts.forEachProperty(this.rangesByText(), ranges => this.verifyRangesReferenceEachOther(ranges)); } private verifyReferencesWorker(references: ts.ReferenceEntry[], fileName: string, start: number, end: number, isWriteAccess?: boolean, isDefinition?: boolean) { @@ -1131,12 +1132,10 @@ namespace FourSlash { } Harness.Baseline.runBaseline( - "Breakpoint Locations for " + this.activeFile.fileName, baselineFile, () => { return this.baselineCurrentFileLocations(pos => this.getBreakpointStatementLocation(pos)); - }, - true /* run immediately */); + }); } public baselineGetEmitOutput() { @@ -1158,7 +1157,6 @@ namespace FourSlash { } Harness.Baseline.runBaseline( - "Generate getEmitOutput baseline : " + emitFiles.join(" "), this.testData.globalOptions[metadataOptionNames.baselineFile], () => { let resultString = ""; @@ -1184,8 +1182,7 @@ namespace FourSlash { }); return resultString; - }, - true /* run immediately */); + }); } public printBreakpointLocation(pos: number) { @@ -1348,14 +1345,7 @@ namespace FourSlash { } // Enters lines of text at the current caret position - public type(text: string) { - return this.typeHighFidelity(text); - } - - // Enters lines of text at the current caret position, invoking - // language service APIs to mimic Visual Studio's behavior - // as much as possible - private typeHighFidelity(text: string) { + public type(text: string, highFidelity = false) { let offset = this.currentCaretPosition; const prevChar = " "; const checkCadence = (text.length >> 2) + 1; @@ -1364,24 +1354,26 @@ namespace FourSlash { // Make the edit const ch = text.charAt(i); this.languageServiceAdapterHost.editScript(this.activeFile.fileName, offset, offset, ch); - this.languageService.getBraceMatchingAtPosition(this.activeFile.fileName, offset); + if (highFidelity) { + this.languageService.getBraceMatchingAtPosition(this.activeFile.fileName, offset); + } this.updateMarkersForEdit(this.activeFile.fileName, offset, offset, ch); offset++; - if (ch === "(" || ch === ",") { - /* Signature help*/ - this.languageService.getSignatureHelpItems(this.activeFile.fileName, offset); - } - else if (prevChar === " " && /A-Za-z_/.test(ch)) { - /* Completions */ - this.languageService.getCompletionsAtPosition(this.activeFile.fileName, offset); - } + if (highFidelity) { + if (ch === "(" || ch === ",") { + /* Signature help*/ + this.languageService.getSignatureHelpItems(this.activeFile.fileName, offset); + } + else if (prevChar === " " && /A-Za-z_/.test(ch)) { + /* Completions */ + this.languageService.getCompletionsAtPosition(this.activeFile.fileName, offset); + } - if (i % checkCadence === 0) { - this.checkPostEditInvariants(); - // 
this.languageService.getSyntacticDiagnostics(this.activeFile.fileName); - // this.languageService.getSemanticDiagnostics(this.activeFile.fileName); + if (i % checkCadence === 0) { + this.checkPostEditInvariants(); + } } // Handle post-keystroke formatting @@ -1389,14 +1381,12 @@ namespace FourSlash { const edits = this.languageService.getFormattingEditsAfterKeystroke(this.activeFile.fileName, offset, ch, this.formatCodeSettings); if (edits.length) { offset += this.applyEdits(this.activeFile.fileName, edits, /*isFormattingEdit*/ true); - // this.checkPostEditInvariants(); } } } // Move the caret to wherever we ended up this.currentCaretPosition = offset; - this.fixCaretPosition(); this.checkPostEditInvariants(); } @@ -1415,7 +1405,6 @@ namespace FourSlash { const edits = this.languageService.getFormattingEditsForRange(this.activeFile.fileName, start, offset, this.formatCodeSettings); if (edits.length) { offset += this.applyEdits(this.activeFile.fileName, edits, /*isFormattingEdit*/ true); - this.checkPostEditInvariants(); } } @@ -1639,10 +1628,11 @@ namespace FourSlash { } public rangesByText(): ts.Map { - const result: ts.Map = {}; + const result = ts.createMap(); for (const range of this.getRanges()) { const text = this.rangeText(range); - (ts.getProperty(result, text) || (result[text] = [])).push(range); + const ranges = result[text] || (result[text] = []); + ranges.push(range); } return result; } @@ -1734,13 +1724,11 @@ namespace FourSlash { public baselineCurrentFileNameOrDottedNameSpans() { Harness.Baseline.runBaseline( - "Name OrDottedNameSpans for " + this.activeFile.fileName, this.testData.globalOptions[metadataOptionNames.baselineFile], () => { return this.baselineCurrentFileLocations(pos => this.getNameOrDottedNameSpan(pos)); - }, - true /* run immediately */); + }); } public printNameOrDottedNameSpans(pos: number) { @@ -1895,7 +1883,7 @@ namespace FourSlash { public verifyBraceCompletionAtPosition(negative: boolean, openingBrace: string) { - const openBraceMap: ts.Map = { + const openBraceMap = ts.createMap({ "(": ts.CharacterCodes.openParen, "{": ts.CharacterCodes.openBrace, "[": ts.CharacterCodes.openBracket, @@ -1903,7 +1891,7 @@ namespace FourSlash { '"': ts.CharacterCodes.doubleQuote, "`": ts.CharacterCodes.backtick, "<": ts.CharacterCodes.lessThan - }; + }); const charCode = openBraceMap[openingBrace]; @@ -2267,40 +2255,12 @@ namespace FourSlash { export function runFourSlashTestContent(basePath: string, testType: FourSlashTestType, content: string, fileName: string): void { // Parse out the files and their metadata const testData = parseTestData(basePath, content, fileName); - const state = new TestState(basePath, testType, testData); - - let result = ""; - const fourslashFile: Harness.Compiler.TestFile = { - unitName: Harness.Compiler.fourslashFileName, - content: undefined, - }; - const testFile: Harness.Compiler.TestFile = { - unitName: fileName, - content: content - }; - - const host = Harness.Compiler.createCompilerHost( - [fourslashFile, testFile], - (fn, contents) => result = contents, - ts.ScriptTarget.Latest, - Harness.IO.useCaseSensitiveFileNames(), - Harness.IO.getCurrentDirectory()); - - const program = ts.createProgram([Harness.Compiler.fourslashFileName, fileName], { outFile: "fourslashTestOutput.js", noResolve: true, target: ts.ScriptTarget.ES3 }, host); - - const sourceFile = host.getSourceFile(fileName, ts.ScriptTarget.ES3); - - const diagnostics = ts.getPreEmitDiagnostics(program, sourceFile); - if (diagnostics.length > 0) { - throw new Error(`Error 
compiling ${fileName}: ` + - diagnostics.map(e => ts.flattenDiagnosticMessageText(e.messageText, Harness.IO.newLine())).join("\r\n")); + const output = ts.transpileModule(content, { reportDiagnostics: true }); + if (output.diagnostics.length > 0) { + throw new Error(`Syntax error in ${basePath}: ${output.diagnostics[0].messageText}`); } - - program.emit(sourceFile); - - ts.Debug.assert(!!result); - runCode(result, state); + runCode(output.outputText, state); } function runCode(code: string, state: TestState): void { @@ -2393,13 +2353,14 @@ ${code} // Comment line, check for global/file @options and record them const match = optionRegex.exec(line.substr(2)); if (match) { - const fileMetadataNamesIndex = fileMetadataNames.indexOf(match[1]); + const [key, value] = match.slice(1); + const fileMetadataNamesIndex = fileMetadataNames.indexOf(key); if (fileMetadataNamesIndex === -1) { // Check if the match is already existed in the global options - if (globalOptions[match[1]] !== undefined) { - throw new Error("Global Option : '" + match[1] + "' is already existed"); + if (globalOptions[key] !== undefined) { + throw new Error(`Global option '${key}' already exists`); } - globalOptions[match[1]] = match[2]; + globalOptions[key] = value; } else { if (fileMetadataNamesIndex === fileMetadataNames.indexOf(metadataOptionNames.fileName)) { @@ -2414,12 +2375,12 @@ ${code} resetLocalData(); } - currentFileName = basePath + "/" + match[2]; - currentFileOptions[match[1]] = match[2]; + currentFileName = basePath + "/" + value; + currentFileOptions[key] = value; } else { // Add other fileMetadata flag - currentFileOptions[match[1]] = match[2]; + currentFileOptions[key] = value; } } } @@ -2507,7 +2468,7 @@ ${code} } const marker: Marker = { - fileName: fileName, + fileName, position: location.position, data: markerValue }; @@ -2524,7 +2485,7 @@ ${code} function recordMarker(fileName: string, location: LocationInformation, name: string, markerMap: MarkerMap, markers: Marker[]): Marker { const marker: Marker = { - fileName: fileName, + fileName, position: location.position }; diff --git a/src/harness/harness.ts b/src/harness/harness.ts index c5831c1427422..e93d3d4518953 100644 --- a/src/harness/harness.ts +++ b/src/harness/harness.ts @@ -77,12 +77,6 @@ namespace Utils { return Buffer ? Buffer.byteLength(s, encoding) : s.length; } - export function compress(s: string): any { - return Buffer ? 
new Buffer(s, "utf8") : { data: s, length: s.length }; - } - - export const maxUncompressedMessageSize = Number.MAX_VALUE; - export function evalFile(fileContents: string, fileName: string, nodeContext?: any) { const environment = getExecutionEnvironment(); switch (environment) { @@ -756,7 +750,7 @@ namespace Harness { export function readDirectory(path: string, extension?: string[], exclude?: string[], include?: string[]) { const fs = new Utils.VirtualFileSystem(path, useCaseSensitiveFileNames()); - for (const file in listFiles(path)) { + for (const file of listFiles(path)) { fs.addFile(file); } return ts.matchFiles(path, extension, exclude, include, useCaseSensitiveFileNames(), getCurrentDirectory(), path => { @@ -854,9 +848,9 @@ namespace Harness { export const defaultLibFileName = "lib.d.ts"; export const es2015DefaultLibFileName = "lib.es2015.d.ts"; - const libFileNameSourceFileMap: ts.Map = { + const libFileNameSourceFileMap= ts.createMap({ [defaultLibFileName]: createSourceFileAndAssertInvariants(defaultLibFileName, IO.readFile(libFolder + "lib.es5.d.ts"), /*languageVersion*/ ts.ScriptTarget.Latest) - }; + }); export function getDefaultLibrarySourceFile(fileName = defaultLibFileName): ts.SourceFile { if (!isDefaultLibraryFile(fileName)) { @@ -1011,13 +1005,13 @@ namespace Harness { let optionsIndex: ts.Map; function getCommandLineOption(name: string): ts.CommandLineOption { if (!optionsIndex) { - optionsIndex = {}; + optionsIndex = ts.createMap(); const optionDeclarations = harnessOptionDeclarations.concat(ts.optionDeclarations); for (const option of optionDeclarations) { optionsIndex[option.name.toLowerCase()] = option; } } - return ts.lookUp(optionsIndex, name.toLowerCase()); + return optionsIndex[name.toLowerCase()]; } export function setCompilerOptionsFromHarnessSetting(settings: Harness.TestCaseParser.CompilerSettings, options: ts.CompilerOptions & HarnessOptions): void { @@ -1334,6 +1328,220 @@ namespace Harness { Harness.IO.newLine() + Harness.IO.newLine() + outputLines.join("\r\n"); } + export function doErrorBaseline(baselinePath: string, inputFiles: TestFile[], errors: ts.Diagnostic[]) { + Harness.Baseline.runBaseline(baselinePath.replace(/\.tsx?$/, ".errors.txt"), (): string => { + if (errors.length === 0) { + /* tslint:disable:no-null-keyword */ + return null; + /* tslint:enable:no-null-keyword */ + } + return getErrorBaseline(inputFiles, errors); + }); + } + + export function doTypeAndSymbolBaseline(baselinePath: string, result: CompilerResult, allFiles: {unitName: string, content: string}[], opts?: Harness.Baseline.BaselineOptions) { + if (result.errors.length !== 0) { + return; + } + // The full walker simulates the types that you would get from doing a full + // compile. The pull walker simulates the types you get when you just do + // a type query for a random node (like how the LS would do it). Most of the + // time, these will be the same. However, occasionally, they can be different. + // Specifically, when the compiler internally depends on symbol IDs to order + // things, then we may see different results because symbols can be created in a + // different order with 'pull' operations, and thus can produce slightly differing + // output. + // + // For example, with a full type check, we may see a type displayed as: number | string + // But with a pull type check, we may see it as: string | number + // + // These types are equivalent, but depend on what order the compiler observed + // certain parts of the program. 
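The comment block above explains that a full check and a pull check can print equivalent types with their members in different orders. A toy illustration of why textually different output can still describe the same type:

function formatUnion(members: string[]): string {
    return members.join(" | ");
}

const fullOrder = formatUnion(["number", "string"]); // "number | string"
const pullOrder = formatUnion(["string", "number"]); // "string | number" -- same union, different text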
+ + const program = result.program; + + const fullWalker = new TypeWriterWalker(program, /*fullTypeCheck*/ true); + + const fullResults = ts.createMap(); + + for (const sourceFile of allFiles) { + fullResults[sourceFile.unitName] = fullWalker.getTypeAndSymbols(sourceFile.unitName); + } + + // Produce baselines. The first gives the types for all expressions. + // The second gives symbols for all identifiers. + let e1: Error, e2: Error; + try { + checkBaseLines(/*isSymbolBaseLine*/ false); + } + catch (e) { + e1 = e; + } + + try { + checkBaseLines(/*isSymbolBaseLine*/ true); + } + catch (e) { + e2 = e; + } + + if (e1 || e2) { + throw e1 || e2; + } + + return; + + function checkBaseLines(isSymbolBaseLine: boolean) { + const fullBaseLine = generateBaseLine(fullResults, isSymbolBaseLine); + + const fullExtension = isSymbolBaseLine ? ".symbols" : ".types"; + + Harness.Baseline.runBaseline(baselinePath.replace(/\.tsx?/, fullExtension), () => fullBaseLine, opts); + } + + function generateBaseLine(typeWriterResults: ts.Map, isSymbolBaseline: boolean): string { + const typeLines: string[] = []; + const typeMap: { [fileName: string]: { [lineNum: number]: string[]; } } = {}; + + allFiles.forEach(file => { + const codeLines = file.content.split("\n"); + typeWriterResults[file.unitName].forEach(result => { + if (isSymbolBaseline && !result.symbol) { + return; + } + + const typeOrSymbolString = isSymbolBaseline ? result.symbol : result.type; + const formattedLine = result.sourceText.replace(/\r?\n/g, "") + " : " + typeOrSymbolString; + if (!typeMap[file.unitName]) { + typeMap[file.unitName] = {}; + } + + let typeInfo = [formattedLine]; + const existingTypeInfo = typeMap[file.unitName][result.line]; + if (existingTypeInfo) { + typeInfo = existingTypeInfo.concat(typeInfo); + } + typeMap[file.unitName][result.line] = typeInfo; + }); + + typeLines.push("=== " + file.unitName + " ===\r\n"); + for (let i = 0; i < codeLines.length; i++) { + const currentCodeLine = codeLines[i]; + typeLines.push(currentCodeLine + "\r\n"); + if (typeMap[file.unitName]) { + const typeInfo = typeMap[file.unitName][i]; + if (typeInfo) { + typeInfo.forEach(ty => { + typeLines.push(">" + ty + "\r\n"); + }); + if (i + 1 < codeLines.length && (codeLines[i + 1].match(/^\s*[{|}]\s*$/) || codeLines[i + 1].trim() === "")) { + } + else { + typeLines.push("\r\n"); + } + } + } + else { + typeLines.push("No type information for this code."); + } + } + }); + + return typeLines.join(""); + } + } + + function getByteOrderMarkText(file: Harness.Compiler.GeneratedFile): string { + return file.writeByteOrderMark ? "\u00EF\u00BB\u00BF" : ""; + } + + export function doSourcemapBaseline(baselinePath: string, options: ts.CompilerOptions, result: CompilerResult) { + if (options.inlineSourceMap) { + if (result.sourceMaps.length > 0) { + throw new Error("No sourcemap files should be generated if inlineSourceMaps was set."); + } + return; + } + else if (options.sourceMap) { + if (result.sourceMaps.length !== result.files.length) { + throw new Error("Number of sourcemap files should be same as js files."); + } + + Harness.Baseline.runBaseline(baselinePath.replace(/\.tsx?/, ".js.map"), () => { + if (options.noEmitOnError && result.errors.length !== 0 && result.sourceMaps.length === 0) { + // We need to return null here or the runBaseLine will actually create a empty file. + // Baselining isn't required here because there is no output. 
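The comment above states the convention every baseline callback in this patch follows: returning null (never an empty string, never undefined) tells the harness that no baseline file is needed. A minimal self-contained sketch of that convention; the function name and parameters are made up for illustration.

    // Minimal sketch of the callback convention described above; names are made up.
    function sourceMapBaselineContent(sourceMaps: { code: string }[], hadEmitBlockingErrors: boolean): string | null {
        if (hadEmitBlockingErrors && sourceMaps.length === 0) {
            // Nothing was emitted, so returning null tells runBaseline not to create a file.
            /* tslint:disable:no-null-keyword */
            return null;
            /* tslint:enable:no-null-keyword */
        }
        return sourceMaps.map(m => m.code).join("\r\n");
    }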
+ /* tslint:disable:no-null-keyword */ + return null; + /* tslint:enable:no-null-keyword */ + } + + let sourceMapCode = ""; + for (let i = 0; i < result.sourceMaps.length; i++) { + sourceMapCode += "//// [" + Harness.Path.getFileName(result.sourceMaps[i].fileName) + "]\r\n"; + sourceMapCode += getByteOrderMarkText(result.sourceMaps[i]); + sourceMapCode += result.sourceMaps[i].code; + } + + return sourceMapCode; + }); + } + } + + export function doJsEmitBaseline(baselinePath: string, header: string, options: ts.CompilerOptions, result: CompilerResult, toBeCompiled: Harness.Compiler.TestFile[], otherFiles: Harness.Compiler.TestFile[], harnessSettings: Harness.TestCaseParser.CompilerSettings) { + if (!options.noEmit && result.files.length === 0 && result.errors.length === 0) { + throw new Error("Expected at least one js file to be emitted or at least one error to be created."); + } + + // check js output + Harness.Baseline.runBaseline(baselinePath.replace(/\.tsx?/, ".js"), () => { + let tsCode = ""; + const tsSources = otherFiles.concat(toBeCompiled); + if (tsSources.length > 1) { + tsCode += "//// [" + header + "] ////\r\n\r\n"; + } + for (let i = 0; i < tsSources.length; i++) { + tsCode += "//// [" + Harness.Path.getFileName(tsSources[i].unitName) + "]\r\n"; + tsCode += tsSources[i].content + (i < (tsSources.length - 1) ? "\r\n" : ""); + } + + let jsCode = ""; + for (let i = 0; i < result.files.length; i++) { + jsCode += "//// [" + Harness.Path.getFileName(result.files[i].fileName) + "]\r\n"; + jsCode += getByteOrderMarkText(result.files[i]); + jsCode += result.files[i].code; + } + + if (result.declFilesCode.length > 0) { + jsCode += "\r\n\r\n"; + for (let i = 0; i < result.declFilesCode.length; i++) { + jsCode += "//// [" + Harness.Path.getFileName(result.declFilesCode[i].fileName) + "]\r\n"; + jsCode += getByteOrderMarkText(result.declFilesCode[i]); + jsCode += result.declFilesCode[i].code; + } + } + + const declFileCompilationResult = + Harness.Compiler.compileDeclarationFiles( + toBeCompiled, otherFiles, result, harnessSettings, options, /*currentDirectory*/ undefined); + + if (declFileCompilationResult && declFileCompilationResult.declResult.errors.length) { + jsCode += "\r\n\r\n//// [DtsFileErrors]\r\n"; + jsCode += "\r\n\r\n"; + jsCode += Harness.Compiler.getErrorBaseline(declFileCompilationResult.declInputFiles.concat(declFileCompilationResult.declOtherFiles), declFileCompilationResult.declResult.errors); + } + + if (jsCode.length > 0) { + return tsCode + "\r\n\r\n" + jsCode; + } + else { + /* tslint:disable:no-null-keyword */ + return null; + /* tslint:enable:no-null-keyword */ + } + }); + } + export function collateOutputs(outputFiles: Harness.Compiler.GeneratedFile[]): string { // Collect, test, and sort the fileNames outputFiles.sort((a, b) => cleanName(a.fileName).localeCompare(cleanName(b.fileName))); @@ -1575,6 +1783,7 @@ namespace Harness { /** Support class for baseline files */ export namespace Baseline { + const NoContent = ""; export interface BaselineOptions { Subfolder?: string; @@ -1609,31 +1818,7 @@ namespace Harness { } const fileCache: { [idx: string]: boolean } = {}; - function generateActual(actualFileName: string, generateContent: () => string): string { - // For now this is written using TypeScript, because sys is not available when running old test cases. 
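Referring back to doJsEmitBaseline above: each section of a .js baseline is introduced by a "//// [fileName]" banner, first for the TypeScript sources and then for every emitted file. A self-contained sketch of that collation pattern; GeneratedFileLike is an assumed simplification of Harness.Compiler.GeneratedFile and the inputs are made up.

    // Sketch of the banner-and-concatenate pattern used for .js baselines; types are assumed.
    interface GeneratedFileLike { fileName: string; code: string; }

    function collateWithBanners(files: GeneratedFileLike[]): string {
        let result = "";
        for (const file of files) {
            result += "//// [" + file.fileName + "]\r\n";
            result += file.code;
        }
        return result;
    }

    collateWithBanners([
        { fileName: "a.js", code: "var x = 1;\r\n" },
        { fileName: "a.d.ts", code: "declare var x: number;\r\n" }
    ]);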
- // But we need to move to sys once we have - // Creates the directory including its parent if not already present - function createDirectoryStructure(dirName: string) { - if (fileCache[dirName] || IO.directoryExists(dirName)) { - fileCache[dirName] = true; - return; - } - - const parentDirectory = IO.directoryName(dirName); - if (parentDirectory != "") { - createDirectoryStructure(parentDirectory); - } - IO.createDirectory(dirName); - fileCache[dirName] = true; - } - - // Create folders if needed - createDirectoryStructure(Harness.IO.directoryName(actualFileName)); - - // Delete the actual file in case it fails - if (IO.fileExists(actualFileName)) { - IO.deleteFile(actualFileName); - } + function generateActual(generateContent: () => string): string { const actual = generateContent(); @@ -1641,14 +1826,6 @@ namespace Harness { throw new Error("The generated content was \"undefined\". Return \"null\" if no baselining is required.\""); } - // Store the content in the 'local' folder so we - // can accept it later (manually) - /* tslint:disable:no-null-keyword */ - if (actual !== null) { - /* tslint:enable:no-null-keyword */ - IO.writeFile(actualFileName, actual); - } - return actual; } @@ -1665,7 +1842,7 @@ namespace Harness { /* tslint:disable:no-null-keyword */ if (actual === null) { /* tslint:enable:no-null-keyword */ - actual = ""; + actual = NoContent; } let expected = ""; @@ -1676,36 +1853,51 @@ namespace Harness { return { expected, actual }; } - function writeComparison(expected: string, actual: string, relativeFileName: string, actualFileName: string, descriptionForDescribe: string) { + function writeComparison(expected: string, actual: string, relativeFileName: string, actualFileName: string) { + // For now this is written using TypeScript, because sys is not available when running old test cases. 
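The guard in the slimmed-down generateActual above distinguishes two very different callback results: null means "intentionally no baseline", while undefined is treated as a bug in the test. A small illustrative sketch of that contract; generateActualSketch is not a harness function.

    // Illustrative contract check, mirroring generateActual above.
    type BaselineContent = string | null;

    function generateActualSketch(generateContent: () => BaselineContent | undefined): BaselineContent {
        const actual = generateContent();
        if (actual === undefined) {
            // undefined means the callback forgot to decide; null is the deliberate "skip" value.
            throw new Error("The generated content was \"undefined\". Return \"null\" if no baselining is required.");
        }
        return actual;
    }

    /* tslint:disable:no-null-keyword */
    generateActualSketch(() => null);        // fine: nothing to baseline
    /* tslint:enable:no-null-keyword */
    generateActualSketch(() => "expected");  // fine: baseline content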
+ // But we need to move to sys once we have + // Creates the directory including its parent if not already present + function createDirectoryStructure(dirName: string) { + if (fileCache[dirName] || IO.directoryExists(dirName)) { + fileCache[dirName] = true; + return; + } + + const parentDirectory = IO.directoryName(dirName); + if (parentDirectory != "") { + createDirectoryStructure(parentDirectory); + } + IO.createDirectory(dirName); + fileCache[dirName] = true; + } + + // Create folders if needed + createDirectoryStructure(Harness.IO.directoryName(actualFileName)); + + // Delete the actual file in case it fails + if (IO.fileExists(actualFileName)) { + IO.deleteFile(actualFileName); + } + const encoded_actual = Utils.encodeString(actual); - if (expected != encoded_actual) { - // Overwrite & issue error - const errMsg = "The baseline file " + relativeFileName + " has changed."; - throw new Error(errMsg); + if (expected !== encoded_actual) { + if (actual === NoContent) { + IO.writeFile(actualFileName + ".delete", ""); + } + else { + IO.writeFile(actualFileName, actual); + } + throw new Error(`The baseline file ${relativeFileName} has changed.`); } } - export function runBaseline( - descriptionForDescribe: string, - relativeFileName: string, - generateContent: () => string, - runImmediately = false, - opts?: BaselineOptions): void { + export function runBaseline(relativeFileName: string, generateContent: () => string, opts?: BaselineOptions): void { - let actual = undefined; const actualFileName = localPath(relativeFileName, opts && opts.Baselinefolder, opts && opts.Subfolder); - if (runImmediately) { - actual = generateActual(actualFileName, generateContent); - const comparison = compareToBaseline(actual, relativeFileName, opts); - writeComparison(comparison.expected, comparison.actual, relativeFileName, actualFileName, descriptionForDescribe); - } - else { - actual = generateActual(actualFileName, generateContent); - - const comparison = compareToBaseline(actual, relativeFileName, opts); - writeComparison(comparison.expected, comparison.actual, relativeFileName, actualFileName, descriptionForDescribe); - } + const actual = generateActual(generateContent); + const comparison = compareToBaseline(actual, relativeFileName, opts); + writeComparison(comparison.expected, comparison.actual, relativeFileName, actualFileName); } } diff --git a/src/harness/harnessLanguageService.ts b/src/harness/harnessLanguageService.ts index 527705d8b4f2c..5da61fd542ad4 100644 --- a/src/harness/harnessLanguageService.ts +++ b/src/harness/harnessLanguageService.ts @@ -123,7 +123,7 @@ namespace Harness.LanguageService { } export class LanguageServiceAdapterHost { - protected fileNameToScript: ts.Map = {}; + protected fileNameToScript = ts.createMap(); constructor(protected cancellationToken = DefaultHostCancellationToken.Instance, protected settings = ts.getDefaultCompilerOptions()) { @@ -135,7 +135,7 @@ namespace Harness.LanguageService { public getFilenames(): string[] { const fileNames: string[] = []; - ts.forEachValue(this.fileNameToScript, (scriptInfo) => { + ts.forEachProperty(this.fileNameToScript, (scriptInfo) => { if (scriptInfo.isRootFile) { // only include root files here // usually it means that we won't include lib.d.ts in the list of root files so it won't mess the computation of compilation root dir. 
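For the many call sites updated in the rest of this patch, this is the new shape of runBaseline: the leading description string and the runImmediately flag are gone, leaving only the relative file name, the content callback, and optional BaselineOptions. A hedged usage sketch; the declarations below are simplified stand-ins for the harness and mocha globals so the snippet type-checks on its own, and the file name, option values, and getCollatedOutput are made up.

    // Simplified stand-ins (assumptions) so the sketch is self-contained.
    declare namespace Harness.Baseline {
        interface BaselineOptions { Subfolder?: string; Baselinefolder?: string; }
        function runBaseline(relativeFileName: string, generateContent: () => string, opts?: BaselineOptions): void;
    }
    declare function it(name: string, body: () => void): void;
    declare function getCollatedOutput(): string;

    it("has the expected emitted code", () => {
        Harness.Baseline.runBaseline("example.output.js", () => {
            return getCollatedOutput();
        }, { Subfolder: "local" });
    });

Note also the new failure path in writeComparison above: when the regenerated content is empty, the harness now writes an "<actualFileName>.delete" marker instead of an empty local baseline, presumably so the accept step can remove the corresponding reference file.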
@@ -146,7 +146,7 @@ namespace Harness.LanguageService { } public getScriptInfo(fileName: string): ScriptInfo { - return ts.lookUp(this.fileNameToScript, fileName); + return this.fileNameToScript[fileName]; } public addScript(fileName: string, content: string, isRootFile: boolean): void { @@ -235,7 +235,7 @@ namespace Harness.LanguageService { this.getModuleResolutionsForFile = (fileName) => { const scriptInfo = this.getScriptInfo(fileName); const preprocessInfo = ts.preProcessFile(scriptInfo.content, /*readImportFiles*/ true); - const imports: ts.Map = {}; + const imports = ts.createMap(); for (const module of preprocessInfo.importedFiles) { const resolutionInfo = ts.resolveModuleName(module.fileName, fileName, compilerOptions, moduleResolutionHost); if (resolutionInfo.resolvedModule) { @@ -248,7 +248,7 @@ namespace Harness.LanguageService { const scriptInfo = this.getScriptInfo(fileName); if (scriptInfo) { const preprocessInfo = ts.preProcessFile(scriptInfo.content, /*readImportFiles*/ false); - const resolutions: ts.Map = {}; + const resolutions = ts.createMap(); const settings = this.nativeHost.getCompilationSettings(); for (const typeReferenceDirective of preprocessInfo.typeReferenceDirectives) { const resolutionInfo = ts.resolveTypeReferenceDirective(typeReferenceDirective.fileName, fileName, settings, moduleResolutionHost); @@ -646,6 +646,10 @@ namespace Harness.LanguageService { return true; } + getLogFileName(): string { + return undefined; + } + hasLevel() { return false; } @@ -692,9 +696,8 @@ namespace Harness.LanguageService { const server = new ts.server.Session(serverHost, { isCancellationRequested: () => false }, /*useOneInferredProject*/ false, + /*typingsInstaller*/ undefined, Utils.byteLength, - Utils.maxUncompressedMessageSize, - Utils.compress, process.hrtime, serverHost); // Fake the connection between the client and the server diff --git a/src/harness/projectsRunner.ts b/src/harness/projectsRunner.ts index 038b2dbe35953..080c6edfa879f 100644 --- a/src/harness/projectsRunner.ts +++ b/src/harness/projectsRunner.ts @@ -253,18 +253,15 @@ class ProjectRunner extends RunnerBase { moduleResolution: ts.ModuleResolutionKind.Classic, // currently all tests use classic module resolution kind, this will change in the future }; // Set the values specified using json - const optionNameMap: ts.Map = {}; - ts.forEach(ts.optionDeclarations, option => { - optionNameMap[option.name] = option; - }); + const optionNameMap = ts.arrayToMap(ts.optionDeclarations, option => option.name); for (const name in testCase) { - if (name !== "mapRoot" && name !== "sourceRoot" && ts.hasProperty(optionNameMap, name)) { + if (name !== "mapRoot" && name !== "sourceRoot" && name in optionNameMap) { const option = optionNameMap[name]; const optType = option.type; let value = testCase[name]; if (typeof optType !== "string") { const key = value.toLowerCase(); - if (ts.hasProperty(optType, key)) { + if (key in optType) { value = optType[key]; } } @@ -462,7 +459,7 @@ class ProjectRunner extends RunnerBase { }); it("Resolution information of (" + moduleNameToString(moduleKind) + "): " + testCaseFileName, () => { - Harness.Baseline.runBaseline("Resolution information of (" + moduleNameToString(compilerResult.moduleKind) + "): " + testCaseFileName, getBaselineFolder(compilerResult.moduleKind) + testCaseJustName + ".json", () => { + Harness.Baseline.runBaseline(getBaselineFolder(compilerResult.moduleKind) + testCaseJustName + ".json", () => { return JSON.stringify(getCompilerResolutionInfo(), undefined, " "); }); 
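A recurring pattern in this patch, here and in the unit tests below, is replacing object literals typed as ts.Map with ts.createMap() and replacing hasProperty/lookUp with plain `in` checks and indexing. createMap itself is not shown in this diff; the sketch below captures the assumed idea, a prototype-free dictionary, which is what makes bare `key in map` lookups safe even for keys such as "toString".

    // Assumed sketch of a createMap-style factory (the real one lives in the compiler's core and
    // is not part of this diff): a dictionary with no prototype, optionally seeded from a template.
    interface MapLike<T> { [key: string]: T; }

    function createMapSketch<T>(template?: MapLike<T>): MapLike<T> {
        const map: MapLike<T> = Object.create(null); // no Object.prototype members to collide with
        if (template) {
            for (const key in template) {
                if (Object.prototype.hasOwnProperty.call(template, key)) {
                    map[key] = template[key];
                }
            }
        }
        return map;
    }

    const scripts = createMapSketch({ "/a/b/app.ts": "let x = 1;" });
    console.log("toString" in scripts);    // false - would be true for a plain {} literal
    console.log("/a/b/app.ts" in scripts); // true

That is also why the `path in fileMap`-style checks introduced in the test hosts below no longer need hasProperty.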
}); @@ -470,7 +467,7 @@ class ProjectRunner extends RunnerBase { it("Errors for (" + moduleNameToString(moduleKind) + "): " + testCaseFileName, () => { if (compilerResult.errors.length) { - Harness.Baseline.runBaseline("Errors for (" + moduleNameToString(compilerResult.moduleKind) + "): " + testCaseFileName, getBaselineFolder(compilerResult.moduleKind) + testCaseJustName + ".errors.txt", () => { + Harness.Baseline.runBaseline(getBaselineFolder(compilerResult.moduleKind) + testCaseJustName + ".errors.txt", () => { return getErrorsBaseline(compilerResult); }); } @@ -484,7 +481,7 @@ class ProjectRunner extends RunnerBase { // There may be multiple files with different baselines. Run all and report at the end, else // it stops copying the remaining emitted files from 'local/projectOutput' to 'local/project'. try { - Harness.Baseline.runBaseline("Baseline of emitted result (" + moduleNameToString(compilerResult.moduleKind) + "): " + testCaseFileName, getBaselineFolder(compilerResult.moduleKind) + outputFile.fileName, () => { + Harness.Baseline.runBaseline(getBaselineFolder(compilerResult.moduleKind) + outputFile.fileName, () => { try { return Harness.IO.readFile(getProjectOutputFolder(outputFile.fileName, compilerResult.moduleKind)); } @@ -506,7 +503,7 @@ class ProjectRunner extends RunnerBase { it("SourceMapRecord for (" + moduleNameToString(moduleKind) + "): " + testCaseFileName, () => { if (compilerResult.sourceMapData) { - Harness.Baseline.runBaseline("SourceMapRecord for (" + moduleNameToString(compilerResult.moduleKind) + "): " + testCaseFileName, getBaselineFolder(compilerResult.moduleKind) + testCaseJustName + ".sourcemap.txt", () => { + Harness.Baseline.runBaseline(getBaselineFolder(compilerResult.moduleKind) + testCaseJustName + ".sourcemap.txt", () => { return Harness.SourceMapRecorder.getSourceMapRecord(compilerResult.sourceMapData, compilerResult.program, ts.filter(compilerResult.outputFiles, outputFile => Harness.Compiler.isJS(outputFile.emittedFileName))); }); @@ -519,7 +516,7 @@ class ProjectRunner extends RunnerBase { if (!compilerResult.errors.length && testCase.declaration) { const dTsCompileResult = compileCompileDTsFiles(compilerResult); if (dTsCompileResult && dTsCompileResult.errors.length) { - Harness.Baseline.runBaseline("Errors in generated Dts files for (" + moduleNameToString(compilerResult.moduleKind) + "): " + testCaseFileName, getBaselineFolder(compilerResult.moduleKind) + testCaseJustName + ".dts.errors.txt", () => { + Harness.Baseline.runBaseline(getBaselineFolder(compilerResult.moduleKind) + testCaseJustName + ".dts.errors.txt", () => { return getErrorsBaseline(dTsCompileResult); }); } diff --git a/src/harness/rwcRunner.ts b/src/harness/rwcRunner.ts index 1266ffa5d3783..d56e8d6d35f63 100644 --- a/src/harness/rwcRunner.ts +++ b/src/harness/rwcRunner.ts @@ -158,55 +158,56 @@ namespace RWC { it("has the expected emitted code", () => { - Harness.Baseline.runBaseline("has the expected emitted code", baseName + ".output.js", () => { + Harness.Baseline.runBaseline(baseName + ".output.js", () => { return Harness.Compiler.collateOutputs(compilerResult.files); - }, false, baselineOpts); + }, baselineOpts); }); it("has the expected declaration file content", () => { - Harness.Baseline.runBaseline("has the expected declaration file content", baseName + ".d.ts", () => { + Harness.Baseline.runBaseline(baseName + ".d.ts", () => { if (!compilerResult.declFilesCode.length) { return null; } return Harness.Compiler.collateOutputs(compilerResult.declFilesCode); - }, false, 
baselineOpts); + }, baselineOpts); }); it("has the expected source maps", () => { - Harness.Baseline.runBaseline("has the expected source maps", baseName + ".map", () => { + Harness.Baseline.runBaseline(baseName + ".map", () => { if (!compilerResult.sourceMaps.length) { return null; } return Harness.Compiler.collateOutputs(compilerResult.sourceMaps); - }, false, baselineOpts); + }, baselineOpts); }); /*it("has correct source map record", () => { if (compilerOptions.sourceMap) { - Harness.Baseline.runBaseline("has correct source map record", baseName + ".sourcemap.txt", () => { + Harness.Baseline.runBaseline(baseName + ".sourcemap.txt", () => { return compilerResult.getSourceMapRecord(); - }, false, baselineOpts); + }, baselineOpts); } });*/ it("has the expected errors", () => { - Harness.Baseline.runBaseline("has the expected errors", baseName + ".errors.txt", () => { + Harness.Baseline.runBaseline(baseName + ".errors.txt", () => { if (compilerResult.errors.length === 0) { return null; } // Do not include the library in the baselines to avoid noise const baselineFiles = inputFiles.concat(otherFiles).filter(f => !Harness.isDefaultLibraryFile(f.unitName)); - return Harness.Compiler.getErrorBaseline(baselineFiles, compilerResult.errors); - }, false, baselineOpts); + const errors = compilerResult.errors.filter(e => !Harness.isDefaultLibraryFile(e.file.fileName)); + return Harness.Compiler.getErrorBaseline(baselineFiles, errors); + }, baselineOpts); }); // Ideally, a generated declaration file will have no errors. But we allow generated // declaration file errors as part of the baseline. it("has the expected errors in generated declaration files", () => { if (compilerOptions.declaration && !compilerResult.errors.length) { - Harness.Baseline.runBaseline("has the expected errors in generated declaration files", baseName + ".dts.errors.txt", () => { + Harness.Baseline.runBaseline(baseName + ".dts.errors.txt", () => { const declFileCompilationResult = Harness.Compiler.compileDeclarationFiles( inputFiles, otherFiles, compilerResult, /*harnessSettings*/ undefined, compilerOptions, currentDirectory); @@ -217,11 +218,16 @@ namespace RWC { return Harness.Compiler.minimalDiagnosticsToString(declFileCompilationResult.declResult.errors) + Harness.IO.newLine() + Harness.IO.newLine() + Harness.Compiler.getErrorBaseline(declFileCompilationResult.declInputFiles.concat(declFileCompilationResult.declOtherFiles), declFileCompilationResult.declResult.errors); - }, false, baselineOpts); + }, baselineOpts); } }); - // TODO: Type baselines (need to refactor out from compilerRunner) + it("has the expected types", () => { + Harness.Compiler.doTypeAndSymbolBaseline(baseName, compilerResult, inputFiles + .concat(otherFiles) + .filter(file => !!compilerResult.program.getSourceFile(file.unitName)) + .filter(e => !Harness.isDefaultLibraryFile(e.unitName)), baselineOpts); + }); }); } } diff --git a/src/harness/test262Runner.ts b/src/harness/test262Runner.ts index c44b2286b832d..66cf474824b14 100644 --- a/src/harness/test262Runner.ts +++ b/src/harness/test262Runner.ts @@ -67,21 +67,21 @@ class Test262BaselineRunner extends RunnerBase { }); it("has the expected emitted code", () => { - Harness.Baseline.runBaseline("has the expected emitted code", testState.filename + ".output.js", () => { + Harness.Baseline.runBaseline(testState.filename + ".output.js", () => { const files = testState.compilerResult.files.filter(f => f.fileName !== Test262BaselineRunner.helpersFilePath); return Harness.Compiler.collateOutputs(files); - }, 
false, Test262BaselineRunner.baselineOptions); + }, Test262BaselineRunner.baselineOptions); }); it("has the expected errors", () => { - Harness.Baseline.runBaseline("has the expected errors", testState.filename + ".errors.txt", () => { + Harness.Baseline.runBaseline(testState.filename + ".errors.txt", () => { const errors = testState.compilerResult.errors; if (errors.length === 0) { return null; } return Harness.Compiler.getErrorBaseline(testState.inputFiles, errors); - }, false, Test262BaselineRunner.baselineOptions); + }, Test262BaselineRunner.baselineOptions); }); it("satisfies invariants", () => { @@ -90,10 +90,10 @@ class Test262BaselineRunner extends RunnerBase { }); it("has the expected AST", () => { - Harness.Baseline.runBaseline("has the expected AST", testState.filename + ".AST.txt", () => { + Harness.Baseline.runBaseline(testState.filename + ".AST.txt", () => { const sourceFile = testState.compilerResult.program.getSourceFile(Test262BaselineRunner.getTestFilePath(testState.filename)); return Utils.sourceFileToJSON(sourceFile); - }, false, Test262BaselineRunner.baselineOptions); + }, Test262BaselineRunner.baselineOptions); }); }); } diff --git a/src/harness/tsconfig.json b/src/harness/tsconfig.json index 3b9025c27c362..a21faf9dd0745 100644 --- a/src/harness/tsconfig.json +++ b/src/harness/tsconfig.json @@ -89,6 +89,7 @@ "./unittests/convertCompilerOptionsFromJson.ts", "./unittests/convertTypingOptionsFromJson.ts", "./unittests/tsserverProjectSystem.ts", - "./unittests/matchFiles.ts" + "./unittests/matchFiles.ts", + "./unittests/initializeTSConfig.ts" ] } diff --git a/src/harness/unittests/cachingInServerLSHost.ts b/src/harness/unittests/cachingInServerLSHost.ts index 432df9ac2789c..94135b9fd5cae 100644 --- a/src/harness/unittests/cachingInServerLSHost.ts +++ b/src/harness/unittests/cachingInServerLSHost.ts @@ -7,16 +7,16 @@ namespace ts { } function createDefaultServerHost(fileMap: Map): server.ServerHost { - const existingDirectories: Map = {}; - forEachValue(fileMap, v => { - let dir = getDirectoryPath(v.name); + const existingDirectories = createMap(); + for (const name in fileMap) { + let dir = getDirectoryPath(name); let previous: string; do { existingDirectories[dir] = true; previous = dir; dir = getDirectoryPath(dir); } while (dir !== previous); - }); + } return { args: [], newLine: "\r\n", @@ -24,7 +24,7 @@ namespace ts { write: (s: string) => { }, readFile: (path: string, encoding?: string): string => { - return hasProperty(fileMap, path) && fileMap[path].content; + return path in fileMap ? 
fileMap[path].content : undefined; }, writeFile: (path: string, data: string, writeByteOrderMark?: boolean) => { throw new Error("NYI"); @@ -36,10 +36,10 @@ namespace ts { throw new Error("NYI"); }, fileExists: (path: string): boolean => { - return hasProperty(fileMap, path); + return path in fileMap; }, directoryExists: (path: string): boolean => { - return hasProperty(existingDirectories, path); + return existingDirectories[path] || false; }, createDirectory: (path: string) => { }, @@ -81,10 +81,11 @@ namespace ts { info: (s: string) => { }, startGroup: () => { }, endGroup: () => { }, - msg: (s: string, type?: string) => { } + msg: (s: string, type?: string) => { }, + getLogFileName: (): string => undefined }; - const projectService = new server.ProjectService(serverHost, logger, { isCancellationRequested: () => false }, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(serverHost, logger, { isCancellationRequested: () => false }, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); const rootScriptInfo = projectService.getOrCreateScriptInfo(rootFile, /* openedByClient */true, /*containingProject*/ undefined); const project = projectService.createInferredProjectWithRootFileIfNecessary(rootScriptInfo); project.setCompilerOptions({ module: ts.ModuleKind.AMD } ); @@ -106,7 +107,7 @@ namespace ts { content: `foo()` }; - const serverHost = createDefaultServerHost({ [root.name]: root, [imported.name]: imported }); + const serverHost = createDefaultServerHost(createMap({ [root.name]: root, [imported.name]: imported })); const { project, rootScriptInfo } = createProject(root.name, serverHost); // ensure that imported file was found @@ -193,7 +194,7 @@ namespace ts { content: `export var y = 1` }; - const fileMap: Map = { [root.name]: root }; + const fileMap = createMap({ [root.name]: root }); const serverHost = createDefaultServerHost(fileMap); const originalFileExists = serverHost.fileExists; diff --git a/src/harness/unittests/initializeTSConfig.ts b/src/harness/unittests/initializeTSConfig.ts new file mode 100644 index 0000000000000..cb995212a944a --- /dev/null +++ b/src/harness/unittests/initializeTSConfig.ts @@ -0,0 +1,44 @@ +/// +/// + +namespace ts { + describe("initTSConfig", () => { + function initTSConfigCorrectly(name: string, commandLinesArgs: string[]) { + describe(name, () => { + const commandLine = parseCommandLine(commandLinesArgs); + const initResult = generateTSConfig(commandLine.options, commandLine.fileNames); + const outputFileName = `tsConfig/${name.replace(/[^a-z0-9\-. 
]/ig, "")}/tsconfig.json`; + + it(`Correct output for ${outputFileName}`, () => { + Harness.Baseline.runBaseline(outputFileName, () => { + if (initResult) { + return JSON.stringify(initResult, undefined, 4); + } + else { + // This can happen if compiler recieve invalid compiler-options + /* tslint:disable:no-null-keyword */ + return null; + /* tslint:enable:no-null-keyword */ + } + }); + }); + }); + } + + initTSConfigCorrectly("Default initialized TSConfig", ["--init"]); + + initTSConfigCorrectly("Initialized TSConfig with files options", ["--init", "file0.st", "file1.ts", "file2.ts"]); + + initTSConfigCorrectly("Initialized TSConfig with boolean value compiler options", ["--init", "--noUnusedLocals"]); + + initTSConfigCorrectly("Initialized TSConfig with enum value compiler options", ["--init", "--target", "es5", "--jsx", "react"]); + + initTSConfigCorrectly("Initialized TSConfig with list compiler options", ["--init", "--types", "jquery,mocha"]); + + initTSConfigCorrectly("Initialized TSConfig with list compiler options with enum value", ["--init", "--lib", "es5,es2015.core"]); + + initTSConfigCorrectly("Initialized TSConfig with incorrect compiler option", ["--init", "--someNonExistOption"]); + + initTSConfigCorrectly("Initialized TSConfig with incorrect compiler option value", ["--init", "--lib", "nonExistLib,es5,es2015.promise"]); + }); +} \ No newline at end of file diff --git a/src/harness/unittests/moduleResolution.ts b/src/harness/unittests/moduleResolution.ts index 5f63889b08498..3e1ac171216e0 100644 --- a/src/harness/unittests/moduleResolution.ts +++ b/src/harness/unittests/moduleResolution.ts @@ -10,7 +10,7 @@ namespace ts { const map = arrayToMap(files, f => f.name); if (hasDirectoryExists) { - const directories: Map = {}; + const directories = createMap(); for (const f of files) { let name = getDirectoryPath(f.name); while (true) { @@ -25,19 +25,19 @@ namespace ts { return { readFile, directoryExists: path => { - return hasProperty(directories, path); + return path in directories; }, fileExists: path => { - assert.isTrue(hasProperty(directories, getDirectoryPath(path)), `'fileExists' '${path}' request in non-existing directory`); - return hasProperty(map, path); + assert.isTrue(getDirectoryPath(path) in directories, `'fileExists' '${path}' request in non-existing directory`); + return path in map; } }; } else { - return { readFile, fileExists: path => hasProperty(map, path), }; + return { readFile, fileExists: path => path in map, }; } function readFile(path: string): string { - return hasProperty(map, path) ? map[path].content : undefined; + return path in map ? map[path].content : undefined; } } @@ -287,7 +287,7 @@ namespace ts { const host: CompilerHost = { getSourceFile: (fileName: string, languageVersion: ScriptTarget) => { const path = normalizePath(combinePaths(currentDirectory, fileName)); - return hasProperty(files, path) ? createSourceFile(fileName, files[path], languageVersion) : undefined; + return path in files ? 
createSourceFile(fileName, files[path], languageVersion) : undefined; }, getDefaultLibFileName: () => "lib.d.ts", writeFile: (fileName, content): void => { throw new Error("NotImplemented"); }, @@ -298,7 +298,7 @@ namespace ts { useCaseSensitiveFileNames: () => false, fileExists: fileName => { const path = normalizePath(combinePaths(currentDirectory, fileName)); - return hasProperty(files, path); + return path in files; }, readFile: (fileName): string => { throw new Error("NotImplemented"); } }; @@ -318,7 +318,7 @@ namespace ts { } it("should find all modules", () => { - const files: Map = { + const files = createMap({ "/a/b/c/first/shared.ts": ` class A {} export = A`, @@ -332,23 +332,23 @@ import Shared = require('../first/shared'); class C {} export = C; ` - }; + }); test(files, "/a/b/c/first/second", ["class_a.ts"], 3, ["../../../c/third/class_c.ts"]); }); it("should find modules in node_modules", () => { - const files: Map = { + const files = createMap({ "/parent/node_modules/mod/index.d.ts": "export var x", "/parent/app/myapp.ts": `import {x} from "mod"` - }; + }); test(files, "/parent/app", ["myapp.ts"], 2, []); }); it("should find file referenced via absolute and relative names", () => { - const files: Map = { + const files = createMap({ "/a/b/c.ts": `/// `, "/a/b/b.ts": "var x" - }; + }); test(files, "/a/b", ["c.ts", "/a/b/b.ts"], 2, []); }); }); @@ -358,11 +358,7 @@ export = C; function test(files: Map, options: CompilerOptions, currentDirectory: string, useCaseSensitiveFileNames: boolean, rootFiles: string[], diagnosticCodes: number[]): void { const getCanonicalFileName = createGetCanonicalFileName(useCaseSensitiveFileNames); if (!useCaseSensitiveFileNames) { - const f: Map = {}; - for (const fileName in files) { - f[getCanonicalFileName(fileName)] = files[fileName]; - } - files = f; + files = reduceProperties(files, (files, file, fileName) => (files[getCanonicalFileName(fileName)] = file, files), createMap()); } const host: CompilerHost = { @@ -371,7 +367,7 @@ export = C; return library; } const path = getCanonicalFileName(normalizePath(combinePaths(currentDirectory, fileName))); - return hasProperty(files, path) ? createSourceFile(fileName, files[path], languageVersion) : undefined; + return path in files ? 
createSourceFile(fileName, files[path], languageVersion) : undefined; }, getDefaultLibFileName: () => "lib.d.ts", writeFile: (fileName, content): void => { throw new Error("NotImplemented"); }, @@ -382,7 +378,7 @@ export = C; useCaseSensitiveFileNames: () => useCaseSensitiveFileNames, fileExists: fileName => { const path = getCanonicalFileName(normalizePath(combinePaths(currentDirectory, fileName))); - return hasProperty(files, path); + return path in files; }, readFile: (fileName): string => { throw new Error("NotImplemented"); } }; @@ -395,77 +391,77 @@ export = C; } it("should succeed when the same file is referenced using absolute and relative names", () => { - const files: Map = { + const files = createMap({ "/a/b/c.ts": `/// `, "/a/b/d.ts": "var x" - }; + }); test(files, { module: ts.ModuleKind.AMD }, "/a/b", /*useCaseSensitiveFileNames*/ false, ["c.ts", "/a/b/d.ts"], []); }); it("should fail when two files used in program differ only in casing (tripleslash references)", () => { - const files: Map = { + const files = createMap({ "/a/b/c.ts": `/// `, "/a/b/d.ts": "var x" - }; + }); test(files, { module: ts.ModuleKind.AMD, forceConsistentCasingInFileNames: true }, "/a/b", /*useCaseSensitiveFileNames*/ false, ["c.ts", "d.ts"], [1149]); }); it("should fail when two files used in program differ only in casing (imports)", () => { - const files: Map = { + const files = createMap({ "/a/b/c.ts": `import {x} from "D"`, "/a/b/d.ts": "export var x" - }; + }); test(files, { module: ts.ModuleKind.AMD, forceConsistentCasingInFileNames: true }, "/a/b", /*useCaseSensitiveFileNames*/ false, ["c.ts", "d.ts"], [1149]); }); it("should fail when two files used in program differ only in casing (imports, relative module names)", () => { - const files: Map = { + const files = createMap({ "moduleA.ts": `import {x} from "./ModuleB"`, "moduleB.ts": "export var x" - }; + }); test(files, { module: ts.ModuleKind.CommonJS, forceConsistentCasingInFileNames: true }, "", /*useCaseSensitiveFileNames*/ false, ["moduleA.ts", "moduleB.ts"], [1149]); }); it("should fail when two files exist on disk that differs only in casing", () => { - const files: Map = { + const files = createMap({ "/a/b/c.ts": `import {x} from "D"`, "/a/b/D.ts": "export var x", "/a/b/d.ts": "export var y" - }; + }); test(files, { module: ts.ModuleKind.AMD }, "/a/b", /*useCaseSensitiveFileNames*/ true, ["c.ts", "d.ts"], [1149]); }); it("should fail when module name in 'require' calls has inconsistent casing", () => { - const files: Map = { + const files = createMap({ "moduleA.ts": `import a = require("./ModuleC")`, "moduleB.ts": `import a = require("./moduleC")`, "moduleC.ts": "export var x" - }; + }); test(files, { module: ts.ModuleKind.CommonJS, forceConsistentCasingInFileNames: true }, "", /*useCaseSensitiveFileNames*/ false, ["moduleA.ts", "moduleB.ts", "moduleC.ts"], [1149, 1149]); }); it("should fail when module names in 'require' calls has inconsistent casing and current directory has uppercase chars", () => { - const files: Map = { + const files = createMap({ "/a/B/c/moduleA.ts": `import a = require("./ModuleC")`, "/a/B/c/moduleB.ts": `import a = require("./moduleC")`, "/a/B/c/moduleC.ts": "export var x", "/a/B/c/moduleD.ts": ` -import a = require("./moduleA.ts"); -import b = require("./moduleB.ts"); +import a = require("./moduleA"); +import b = require("./moduleB"); ` - }; + }); test(files, { module: ts.ModuleKind.CommonJS, forceConsistentCasingInFileNames: true }, "/a/B/c", /*useCaseSensitiveFileNames*/ false, ["moduleD.ts"], [1149]); }); 
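A brief aside on the casing tests above and below: they rely on the host canonicalizing paths before comparison, so on a case-insensitive file system "./ModuleC" and "./moduleC" resolve to the same file, and error 1149 is reported only when forceConsistentCasingInFileNames is on. A sketch of the assumed canonicalization (mirroring, but not copied from, ts.createGetCanonicalFileName):

    // Assumed behavior of the canonicalizer used by these tests: identity when the host is
    // case-sensitive, lower-casing otherwise.
    function createGetCanonicalFileNameSketch(useCaseSensitiveFileNames: boolean): (fileName: string) => string {
        return useCaseSensitiveFileNames
            ? fileName => fileName
            : fileName => fileName.toLowerCase();
    }

    const canonical = createGetCanonicalFileNameSketch(/*useCaseSensitiveFileNames*/ false);
    console.log(canonical("/a/B/c/ModuleC.ts") === canonical("/a/B/c/moduleC.ts")); // true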
it("should not fail when module names in 'require' calls has consistent casing and current directory has uppercase chars", () => { - const files: Map = { + const files = createMap({ "/a/B/c/moduleA.ts": `import a = require("./moduleC")`, "/a/B/c/moduleB.ts": `import a = require("./moduleC")`, "/a/B/c/moduleC.ts": "export var x", "/a/B/c/moduleD.ts": ` -import a = require("./moduleA.ts"); -import b = require("./moduleB.ts"); +import a = require("./moduleA"); +import b = require("./moduleB"); ` - }; + }); test(files, { module: ts.ModuleKind.CommonJS, forceConsistentCasingInFileNames: true }, "/a/B/c", /*useCaseSensitiveFileNames*/ false, ["moduleD.ts"], []); }); }); @@ -1023,7 +1019,7 @@ import b = require("./moduleB.ts"); const names = map(files, f => f.name); const sourceFiles = arrayToMap(map(files, f => createSourceFile(f.name, f.content, ScriptTarget.ES6)), f => f.fileName); const compilerHost: CompilerHost = { - fileExists : fileName => hasProperty(sourceFiles, fileName), + fileExists : fileName => fileName in sourceFiles, getSourceFile: fileName => sourceFiles[fileName], getDefaultLibFileName: () => "lib.d.ts", writeFile(file, text) { @@ -1034,7 +1030,7 @@ import b = require("./moduleB.ts"); getCanonicalFileName: f => f.toLowerCase(), getNewLine: () => "\r\n", useCaseSensitiveFileNames: () => false, - readFile: fileName => hasProperty(sourceFiles, fileName) ? sourceFiles[fileName].text : undefined + readFile: fileName => fileName in sourceFiles ? sourceFiles[fileName].text : undefined }; const program1 = createProgram(names, {}, compilerHost); const diagnostics1 = program1.getFileProcessingDiagnostics().getDiagnostics(); diff --git a/src/harness/unittests/reuseProgramStructure.ts b/src/harness/unittests/reuseProgramStructure.ts index 8b2b15c15b30d..60687d34c55af 100644 --- a/src/harness/unittests/reuseProgramStructure.ts +++ b/src/harness/unittests/reuseProgramStructure.ts @@ -95,13 +95,14 @@ namespace ts { } } + function createSourceFileWithText(fileName: string, sourceText: SourceText, target: ScriptTarget) { + const file = createSourceFile(fileName, sourceText.getFullText(), target); + file.sourceText = sourceText; + return file; + } + function createTestCompilerHost(texts: NamedSourceText[], target: ScriptTarget): CompilerHost { - const files: Map = {}; - for (const t of texts) { - const file = createSourceFile(t.name, t.text.getFullText(), target); - file.sourceText = t.text; - files[t.name] = file; - } + const files = arrayToMap(texts, t => t.name, t => createSourceFileWithText(t.name, t.text, target)); return { getSourceFile(fileName): SourceFile { @@ -128,10 +129,9 @@ namespace ts { getNewLine(): string { return sys ? sys.newLine : newLine; }, - fileExists: fileName => hasProperty(files, fileName), + fileExists: fileName => fileName in files, readFile: fileName => { - const file = lookUp(files, fileName); - return file && file.text; + return fileName in files ? 
files[fileName].text : undefined; } }; } @@ -152,29 +152,29 @@ namespace ts { return program; } - function getSizeOfMap(map: Map): number { - let size = 0; - for (const id in map) { - if (hasProperty(map, id)) { - size++; + function checkResolvedModule(expected: ResolvedModule, actual: ResolvedModule): boolean { + if (!expected === !actual) { + if (expected) { + assert.isTrue(expected.resolvedFileName === actual.resolvedFileName, `'resolvedFileName': expected '${expected.resolvedFileName}' to be equal to '${actual.resolvedFileName}'`); + assert.isTrue(expected.isExternalLibraryImport === actual.isExternalLibraryImport, `'isExternalLibraryImport': expected '${expected.isExternalLibraryImport}' to be equal to '${actual.isExternalLibraryImport}'`); } + return true; } - return size; - } - - function checkResolvedModule(expected: ResolvedModule, actual: ResolvedModule): void { - assert.isTrue(actual !== undefined); - assert.isTrue(expected.resolvedFileName === actual.resolvedFileName, `'resolvedFileName': expected '${expected.resolvedFileName}' to be equal to '${actual.resolvedFileName}'`); - assert.isTrue(expected.isExternalLibraryImport === actual.isExternalLibraryImport, `'isExternalLibraryImport': expected '${expected.isExternalLibraryImport}' to be equal to '${actual.isExternalLibraryImport}'`); + return false; } - function checkResolvedTypeDirective(expected: ResolvedTypeReferenceDirective, actual: ResolvedTypeReferenceDirective): void { - assert.isTrue(actual !== undefined); - assert.isTrue(expected.resolvedFileName === actual.resolvedFileName, `'resolvedFileName': expected '${expected.resolvedFileName}' to be equal to '${actual.resolvedFileName}'`); - assert.isTrue(expected.primary === actual.primary, `'primary': expected '${expected.primary}' to be equal to '${actual.primary}'`); + function checkResolvedTypeDirective(expected: ResolvedTypeReferenceDirective, actual: ResolvedTypeReferenceDirective): boolean { + if (!expected === !actual) { + if (expected) { + assert.isTrue(expected.resolvedFileName === actual.resolvedFileName, `'resolvedFileName': expected '${expected.resolvedFileName}' to be equal to '${actual.resolvedFileName}'`); + assert.isTrue(expected.primary === actual.primary, `'primary': expected '${expected.primary}' to be equal to '${actual.primary}'`); + } + return true; + } + return false; } - function checkCache(caption: string, program: Program, fileName: string, expectedContent: Map, getCache: (f: SourceFile) => Map, entryChecker: (expected: T, original: T) => void): void { + function checkCache(caption: string, program: Program, fileName: string, expectedContent: Map, getCache: (f: SourceFile) => Map, entryChecker: (expected: T, original: T) => boolean): void { const file = program.getSourceFile(fileName); assert.isTrue(file !== undefined, `cannot find file ${fileName}`); const cache = getCache(file); @@ -183,23 +183,7 @@ namespace ts { } else { assert.isTrue(cache !== undefined, `expected ${caption} to be set`); - const actualCacheSize = getSizeOfMap(cache); - const expectedSize = getSizeOfMap(expectedContent); - assert.isTrue(actualCacheSize === expectedSize, `expected actual size: ${actualCacheSize} to be equal to ${expectedSize}`); - - for (const id in expectedContent) { - if (hasProperty(expectedContent, id)) { - - if (expectedContent[id]) { - const expected = expectedContent[id]; - const actual = cache[id]; - entryChecker(expected, actual); - } - } - else { - assert.isTrue(cache[id] === undefined); - } - } + assert.isTrue(equalOwnProperties(expectedContent, cache, 
entryChecker), `contents of ${caption} did not match the expected contents.`); } } @@ -313,6 +297,8 @@ namespace ts { }); it("resolution cache follows imports", () => { + (Error).stackTraceLimit = Infinity; + const files = [ { name: "a.ts", text: SourceText.New("", "import {_} from 'b'", "var x = 1") }, { name: "b.ts", text: SourceText.New("", "", "var y = 2") }, @@ -320,7 +306,7 @@ namespace ts { const options: CompilerOptions = { target }; const program_1 = newProgram(files, ["a.ts"], options); - checkResolvedModulesCache(program_1, "a.ts", { "b": { resolvedFileName: "b.ts" } }); + checkResolvedModulesCache(program_1, "a.ts", createMap({ "b": { resolvedFileName: "b.ts" } })); checkResolvedModulesCache(program_1, "b.ts", undefined); const program_2 = updateProgram(program_1, ["a.ts"], options, files => { @@ -329,7 +315,7 @@ namespace ts { assert.isTrue(program_1.structureIsReused); // content of resolution cache should not change - checkResolvedModulesCache(program_1, "a.ts", { "b": { resolvedFileName: "b.ts" } }); + checkResolvedModulesCache(program_1, "a.ts", createMap({ "b": { resolvedFileName: "b.ts" } })); checkResolvedModulesCache(program_1, "b.ts", undefined); // imports has changed - program is not reused @@ -346,7 +332,7 @@ namespace ts { files[0].text = files[0].text.updateImportsAndExports(newImports); }); assert.isTrue(!program_3.structureIsReused); - checkResolvedModulesCache(program_4, "a.ts", { "b": { resolvedFileName: "b.ts" }, "c": undefined }); + checkResolvedModulesCache(program_4, "a.ts", createMap({ "b": { resolvedFileName: "b.ts" }, "c": undefined })); }); it("resolved type directives cache follows type directives", () => { @@ -357,7 +343,7 @@ namespace ts { const options: CompilerOptions = { target, typeRoots: ["/types"] }; const program_1 = newProgram(files, ["/a.ts"], options); - checkResolvedTypeDirectivesCache(program_1, "/a.ts", { "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }); + checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMap({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } })); checkResolvedTypeDirectivesCache(program_1, "/types/typedefs/index.d.ts", undefined); const program_2 = updateProgram(program_1, ["/a.ts"], options, files => { @@ -366,7 +352,7 @@ namespace ts { assert.isTrue(program_1.structureIsReused); // content of resolution cache should not change - checkResolvedTypeDirectivesCache(program_1, "/a.ts", { "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }); + checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMap({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } })); checkResolvedTypeDirectivesCache(program_1, "/types/typedefs/index.d.ts", undefined); // type reference directives has changed - program is not reused @@ -384,7 +370,7 @@ namespace ts { files[0].text = files[0].text.updateReferences(newReferences); }); assert.isTrue(!program_3.structureIsReused); - checkResolvedTypeDirectivesCache(program_1, "/a.ts", { "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }); + checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMap({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } })); }); }); diff --git a/src/harness/unittests/session.ts b/src/harness/unittests/session.ts index 9239d57154f92..0d1fb3681e085 100644 --- a/src/harness/unittests/session.ts +++ b/src/harness/unittests/session.ts @@ -36,6 +36,7 @@ namespace ts.server { startGroup(): 
void {}, endGroup(): void {}, msg(s: string, type?: string): void {}, + getLogFileName: (): string => undefined }; describe("the Session class", () => { @@ -43,7 +44,7 @@ namespace ts.server { let lastSent: protocol.Message; beforeEach(() => { - session = new Session(mockHost, nullCancellationToken, /*useOneInferredProject*/ false, Utils.byteLength, Utils.maxUncompressedMessageSize, Utils.compress, process.hrtime, mockLogger); + session = new Session(mockHost, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined, Utils.byteLength, process.hrtime, mockLogger); session.send = (msg: protocol.Message) => { lastSent = msg; }; @@ -110,7 +111,7 @@ namespace ts.server { describe("onMessage", () => { it("should not throw when commands are executed with invalid arguments", () => { let i = 0; - for (name in CommandNames) { + for (const name in CommandNames) { if (!Object.prototype.hasOwnProperty.call(CommandNames, name)) { continue; } @@ -181,7 +182,7 @@ namespace ts.server { session.send = Session.prototype.send; assert(session.send); - expect(session.send(msg, /*canCompressResponse*/ false)).to.not.exist; + expect(session.send(msg)).to.not.exist; expect(lastWrittenToHost).to.equal(resultMsg); }); }); @@ -249,7 +250,7 @@ namespace ts.server { }; const command = "test"; - session.output(body, command, /*canCompressResponse*/ false); + session.output(body, command); expect(lastSent).to.deep.equal({ seq: 0, @@ -268,7 +269,7 @@ namespace ts.server { lastSent: protocol.Message; customHandler = "testhandler"; constructor() { - super(mockHost, nullCancellationToken, /*useOneInferredProject*/ false, Utils.byteLength, Utils.maxUncompressedMessageSize, Utils.compress, process.hrtime, mockLogger); + super(mockHost, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined, Utils.byteLength, process.hrtime, mockLogger); this.addProtocolHandler(this.customHandler, () => { return { response: undefined, responseRequired: true }; }); @@ -287,7 +288,7 @@ namespace ts.server { }; const command = "test"; - session.output(body, command, /*canCompressResponse*/ false); + session.output(body, command); expect(session.lastSent).to.deep.equal({ seq: 0, @@ -326,7 +327,7 @@ namespace ts.server { class InProcSession extends Session { private queue: protocol.Request[] = []; constructor(private client: InProcClient) { - super(mockHost, nullCancellationToken, /*useOneInferredProject*/ false, Utils.byteLength, Utils.maxUncompressedMessageSize, Utils.compress, process.hrtime, mockLogger); + super(mockHost, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined, Utils.byteLength, process.hrtime, mockLogger); this.addProtocolHandler("echo", (req: protocol.Request) => ({ response: req.arguments, responseRequired: true @@ -347,11 +348,11 @@ namespace ts.server { ({ response } = this.executeCommand(msg)); } catch (e) { - this.output(undefined, msg.command, /*canCompressResponse*/ false, msg.seq, e.toString()); + this.output(undefined, msg.command, msg.seq, e.toString()); return; } if (response) { - this.output(response, msg.command, /*canCompressResponse*/ false, msg.seq); + this.output(response, msg.command, msg.seq); } } @@ -366,13 +367,13 @@ namespace ts.server { class InProcClient { private server: InProcSession; private seq = 0; - private callbacks: ts.Map<(resp: protocol.Response) => void> = {}; - private eventHandlers: ts.Map<(args: any) => void> = {}; + private callbacks = createMap<(resp: protocol.Response) => void>(); + private 
eventHandlers = createMap<(args: any) => void>(); handle(msg: protocol.Message): void { if (msg.type === "response") { const response = msg; - if (this.callbacks[response.request_seq]) { + if (response.request_seq in this.callbacks) { this.callbacks[response.request_seq](response); delete this.callbacks[response.request_seq]; } @@ -384,7 +385,7 @@ namespace ts.server { } emit(name: string, args: any): void { - if (this.eventHandlers[name]) { + if (name in this.eventHandlers) { this.eventHandlers[name](args); } } diff --git a/src/harness/unittests/transpile.ts b/src/harness/unittests/transpile.ts index 547d10b9fbc02..2dd9c89a1cb51 100644 --- a/src/harness/unittests/transpile.ts +++ b/src/harness/unittests/transpile.ts @@ -62,7 +62,7 @@ namespace ts { }); it("Correct errors for " + justName, () => { - Harness.Baseline.runBaseline("Correct errors", justName.replace(/\.tsx?$/, ".errors.txt"), () => { + Harness.Baseline.runBaseline(justName.replace(/\.tsx?$/, ".errors.txt"), () => { if (transpileResult.diagnostics.length === 0) { /* tslint:disable:no-null-keyword */ return null; @@ -75,7 +75,7 @@ namespace ts { if (canUseOldTranspile) { it("Correct errors (old transpile) for " + justName, () => { - Harness.Baseline.runBaseline("Correct errors", justName.replace(/\.tsx?$/, ".oldTranspile.errors.txt"), () => { + Harness.Baseline.runBaseline(justName.replace(/\.tsx?$/, ".oldTranspile.errors.txt"), () => { if (oldTranspileDiagnostics.length === 0) { /* tslint:disable:no-null-keyword */ return null; @@ -88,7 +88,7 @@ namespace ts { } it("Correct output for " + justName, () => { - Harness.Baseline.runBaseline("Correct output", justName.replace(/\.tsx?$/, ".js"), () => { + Harness.Baseline.runBaseline(justName.replace(/\.tsx?$/, ".js"), () => { if (transpileResult.outputText) { return transpileResult.outputText; } @@ -104,7 +104,7 @@ namespace ts { if (canUseOldTranspile) { it("Correct output (old transpile) for " + justName, () => { - Harness.Baseline.runBaseline("Correct output", justName.replace(/\.tsx?$/, ".oldTranspile.js"), () => { + Harness.Baseline.runBaseline(justName.replace(/\.tsx?$/, ".oldTranspile.js"), () => { return oldTranspileResult; }); }); diff --git a/src/harness/unittests/tsserverProjectSystem.ts b/src/harness/unittests/tsserverProjectSystem.ts index 9ff3095b7915e..b3a190e88632f 100644 --- a/src/harness/unittests/tsserverProjectSystem.ts +++ b/src/harness/unittests/tsserverProjectSystem.ts @@ -1,4 +1,5 @@ /// +/// namespace ts { function notImplemented(): any { @@ -13,7 +14,8 @@ namespace ts { info: () => void 0, startGroup: () => void 0, endGroup: () => void 0, - msg: () => void 0 + msg: () => void 0, + getLogFileName: (): string => undefined }; const nullCancellationToken: HostCancellationToken = { @@ -26,6 +28,59 @@ namespace ts { content: libFileContent }; + class TestTypingsInstaller extends server.typingsInstaller.TypingsInstaller implements server.ITypingsInstaller { + protected projectService: server.ProjectService; + constructor(readonly cachePath: string, readonly installTypingHost: server.ServerHost) { + super(cachePath, ""); + this.init(); + } + + safeFileList = ""; + postInstallActions: (( map: (t: string[]) => string[]) => void)[] = []; + + runPostInstallActions(map: (t: string[]) => string[]) { + for (const f of this.postInstallActions) { + f(map); + } + this.postInstallActions = []; + } + + onProjectClosed(p: server.Project) { + + } + + attach(projectService: server.ProjectService) { + this.projectService = projectService; + } + + getInstallTypingHost() { + return 
this.installTypingHost; + } + + installPackage(packageName: string) { + return true; + } + + isPackageInstalled(packageName: string) { + return true; + } + + runTsd(cachePath: string, typingsToInstall: string[], postInstallAction: (installedTypings: string[]) => void) { + this.postInstallActions.push(map => { + postInstallAction(map(typingsToInstall)); + }); + } + + sendResponse(response: server.SetTypings | server.InvalidateCachedTypings) { + this.projectService.updateTypingsForProject(response); + } + + enqueueInstallTypingsRequest(project: server.Project, typingOptions: TypingOptions) { + const request = server.createInstallTypingsRequest(project, typingOptions, this.cachePath); + this.install(request); + } + } + function getExecutingFilePathFromLibFile(libFilePath: string): string { return combinePaths(getDirectoryPath(libFile.path), "tsc.js"); } @@ -105,20 +160,10 @@ namespace ts { return entry; } - function sizeOfMap(map: Map): number { - let n = 0; - for (const name in map) { - if (hasProperty(map, name)) { - n++; - } - } - return n; - } - function checkMapKeys(caption: string, map: Map, expectedKeys: string[]) { - assert.equal(sizeOfMap(map), expectedKeys.length, `${caption}: incorrect size of map`); + assert.equal(reduceProperties(map, count => count + 1, 0), expectedKeys.length, `${caption}: incorrect size of map`); for (const name of expectedKeys) { - assert.isTrue(hasProperty(map, name), `${caption} is expected to contain ${name}, actual keys: ${getKeys(map)}`); + assert.isTrue(name in map, `${caption} is expected to contain ${name}, actual keys: ${Object.keys(map)}`); } } @@ -180,7 +225,13 @@ namespace ts { } count() { - return sizeOfMap(this.map); + let n = 0; +/* tslint:disable:no-unused-variable */ + for (const _ in this.map) { +/* tslint:enable:no-unused-variable */ + n++; + } + return n; } invoke() { @@ -202,12 +253,11 @@ namespace ts { private fs: ts.FileMap; private getCanonicalFileName: (s: string) => string; private toPath: (f: string) => Path; - private timeoutCallbacks = new Callbacks(); private immediateCallbacks = new Callbacks(); - readonly watchedDirectories: Map<{ cb: DirectoryWatcherCallback, recursive: boolean }[]> = {}; - readonly watchedFiles: Map = {}; + readonly watchedDirectories = createMap<{ cb: DirectoryWatcherCallback, recursive: boolean }[]>(); + readonly watchedFiles = createMap(); private filesOrFolders: FileOrFolder[]; @@ -291,7 +341,7 @@ namespace ts { watchDirectory(directoryName: string, callback: DirectoryWatcherCallback, recursive: boolean): DirectoryWatcher { const path = this.toPath(directoryName); - const callbacks = lookUp(this.watchedDirectories, path) || (this.watchedDirectories[path] = []); + const callbacks = this.watchedDirectories[path] || (this.watchedDirectories[path] = []); callbacks.push({ cb: callback, recursive }); return { referenceCount: 0, @@ -312,7 +362,7 @@ namespace ts { triggerDirectoryWatcherCallback(directoryName: string, fileName: string): void { const path = this.toPath(directoryName); - const callbacks = lookUp(this.watchedDirectories, path); + const callbacks = this.watchedDirectories[path]; if (callbacks) { for (const callback of callbacks) { callback.cb(fileName); @@ -322,7 +372,7 @@ namespace ts { triggerFileWatcherCallback(fileName: string, removed?: boolean): void { const path = this.toPath(fileName); - const callbacks = lookUp(this.watchedFiles, path); + const callbacks = this.watchedFiles[path]; if (callbacks) { for (const callback of callbacks) { callback(path, removed); @@ -332,7 +382,7 @@ namespace ts { 
watchFile(fileName: string, callback: FileWatcherCallback) { const path = this.toPath(fileName); - const callbacks = lookUp(this.watchedFiles, path) || (this.watchedFiles[path] = []); + const callbacks = this.watchedFiles[path] || (this.watchedFiles[path] = []); callbacks.push(callback); return { close: () => { @@ -369,12 +419,16 @@ namespace ts { clearImmediate(timeoutId: any): void { this.immediateCallbacks.unregister(timeoutId); - } + }; createDirectory(directoryName: string): void { this.createFileOrFolder({ path: directoryName }); } + writeFile(path: string, content: string): void { + this.createFileOrFolder({ path, content, fileSize: content.length }); + } + createFileOrFolder(f: FileOrFolder, createParentDirectory = false): void { const base = getDirectoryPath(f.path); if (base !== f.path && !this.directoryExists(base)) { @@ -391,16 +445,6 @@ namespace ts { this.reloadFS(filesOrFolders); } - writeFile(fileName: string, content: string) { - const path = this.toPath(fileName); - const newEntry: File = { - content, - fullPath: path, - path - }; - this.fs.set(path, newEntry); - }; - readonly readFile = (s: string) => (this.fs.get(this.toPath(s))).content; readonly resolvePath = (s: string) => s; readonly getExecutingFilePath = () => this.executingFilePath; @@ -452,7 +496,7 @@ namespace ts { content: `export let x: number` }; const host = createServerHost([appFile, moduleFile, libFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); const { configFileName } = projectService.openClientFile(appFile.path); assert(!configFileName, `should not find config, got: '${configFileName}`); @@ -490,7 +534,7 @@ namespace ts { }; const host = createServerHost([configFile, libFile, file1, file2, file3]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); const { configFileName, configFileErrors } = projectService.openClientFile(file1.path); assert(configFileName, "should find config file"); @@ -517,7 +561,7 @@ namespace ts { const host = createServerHost(filesWithoutConfig); const filesWithConfig = [libFile, commonFile1, commonFile2, configFile]; - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(commonFile1.path); projectService.openClientFile(commonFile2.path); @@ -548,7 +592,7 @@ namespace ts { content: `{}` }; const host = createServerHost([commonFile1, libFile, configFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(commonFile1.path); checkWatchedDirectories(host, ["/a/b"]); checkNumberOfConfiguredProjects(projectService, 1); @@ -576,7 +620,7 @@ namespace ts { }` }; const host = createServerHost([commonFile1, 
commonFile2, configFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(commonFile1.path); projectService.openClientFile(commonFile2.path); @@ -592,7 +636,7 @@ namespace ts { content: `{}` }; const host = createServerHost([commonFile1, commonFile2, configFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(commonFile1.path); checkNumberOfConfiguredProjects(projectService, 1); @@ -622,7 +666,7 @@ namespace ts { }; const files = [commonFile1, commonFile2, configFile]; const host = createServerHost(files); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(commonFile1.path); const project = projectService.configuredProjects[0]; @@ -655,7 +699,7 @@ namespace ts { }; const host = createServerHost([commonFile1, commonFile2, excludedFile1, configFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(commonFile1.path); checkNumberOfConfiguredProjects(projectService, 1); @@ -689,7 +733,7 @@ namespace ts { }; const files = [file1, nodeModuleFile, classicModuleFile, configFile]; const host = createServerHost(files); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); projectService.openClientFile(nodeModuleFile.path); projectService.openClientFile(classicModuleFile.path); @@ -730,7 +774,7 @@ namespace ts { }` }; const host = createServerHost([file1, file2, configFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); projectService.closeClientFile(file1.path); projectService.openClientFile(file2.path); @@ -757,7 +801,7 @@ namespace ts { }` }; const host = createServerHost([file1, file2, configFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); projectService.closeClientFile(file1.path); projectService.openClientFile(file2.path); @@ 
-790,7 +834,7 @@ namespace ts { }; const host = createServerHost([file1, file2, file3, libFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ true); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ true, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); projectService.openClientFile(file2.path); projectService.openClientFile(file3.path); @@ -823,7 +867,7 @@ namespace ts { }` }; const host = createServerHost([file1, configFile, libFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ true); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ true, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); checkNumberOfConfiguredProjects(projectService, 1); @@ -842,7 +886,7 @@ namespace ts { }; const externalProjectName = "externalproject"; const host = createServerHost([file1, file2]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openExternalProject({ rootFiles: toExternalFiles([file1.path, file2.path]), options: {}, @@ -900,7 +944,7 @@ namespace ts { }; const externalProjectName = "externalproject"; const host = createServerHost([file1, file2, file3, config1, config2]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openExternalProject({ rootFiles: toExternalFiles([config1.path, config2.path, file3.path]), options: {}, @@ -938,7 +982,7 @@ namespace ts { }; const externalProjectName = "externalproject"; const host = createServerHost([file1, configFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); checkNumberOfProjects(projectService, { configuredProjects: 1 }); @@ -969,7 +1013,7 @@ namespace ts { }; const externalProjectName = "externalproject"; const host = createServerHost([file1, configFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useOneInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); checkNumberOfProjects(projectService, { configuredProjects: 1 }); @@ -1004,7 +1048,7 @@ namespace ts { content: `export let y = 1;` }; const host = createServerHost([file1, file2, file3]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false, /*typingsInstaller*/ undefined); 
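Each of these hunks threads an explicit /*typingsInstaller*/ undefined through as the fifth argument to new server.ProjectService(...). A hypothetical test-side factory (not part of this change) could centralize that construction so a future signature change only has to be made once; it assumes the identifiers visible in these tests (server, nullLogger, nullCancellationToken) and uses server.ServerHost as the host parameter type, which is an assumption.

    // Hypothetical helper, not part of the diff: wraps the ProjectService
    // construction repeated throughout these tests so that extra constructor
    // parameters (such as the typingsInstaller slot) are added in one place.
    function createDefaultProjectService(host: server.ServerHost, useSingleInferredProject = false) {
        return new server.ProjectService(
            host,
            nullLogger,
            nullCancellationToken,
            useSingleInferredProject,
            /*typingsInstaller*/ undefined);
    }

    // Usage inside a test body:
    // const projectService = createDefaultProjectService(createServerHost([file1, configFile]));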
projectService.openClientFile(file1.path); @@ -1041,7 +1085,7 @@ namespace ts { content: `export let y = 1;` }; const host = createServerHost([file1, file2, file3]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); @@ -1080,7 +1124,7 @@ namespace ts { }; const host = createServerHost([file1, file2, file3]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); checkNumberOfProjects(projectService, { inferredProjects: 1 }); @@ -1101,8 +1145,8 @@ namespace ts { const file1 = { path: "/a/b/f1.ts", content: ` - export * from "../c/f2.ts"; - export * from "../d/f3.ts";` + export * from "../c/f2"; + export * from "../d/f3";` }; const file2 = { path: "/a/c/f2.ts", @@ -1113,7 +1157,7 @@ namespace ts { content: "export let y = 1;" }; const host = createServerHost([file1, file2, file3]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file2.path); checkNumberOfProjects(projectService, { inferredProjects: 1 }); @@ -1148,7 +1192,7 @@ namespace ts { }; const host = createServerHost([file1, configFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); checkNumberOfProjects(projectService, { configuredProjects: 1 }); @@ -1179,7 +1223,7 @@ namespace ts { }; const host = createServerHost([file1, file2, configFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); checkNumberOfProjects(projectService, { configuredProjects: 1 }); @@ -1212,7 +1256,7 @@ namespace ts { }; const host = createServerHost([file1, file2, configFile]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); checkNumberOfProjects(projectService, { configuredProjects: 1 }); @@ -1240,7 +1284,7 @@ namespace ts { content: "let y = 1" }; const host = createServerHost([file1, file2]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, 
nullCancellationToken, /*useSingleInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openExternalProject({ projectFileName: "project", options: {}, rootFiles: toExternalFiles([file1.path]) }); checkNumberOfProjects(projectService, { externalProjects: 1 }); @@ -1266,7 +1310,7 @@ namespace ts { }; const host = createServerHost([file1, file2, file3]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openExternalProject({ projectFileName: "project", options: { moduleResolution: ModuleResolutionKind.NodeJs }, rootFiles: toExternalFiles([file1.path, file2.path]) }); checkNumberOfProjects(projectService, { externalProjects: 1 }); @@ -1293,7 +1337,7 @@ namespace ts { content: JSON.stringify({ compilerOptions: {} }) }; const host = createServerHost([file1, file2, config]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); checkNumberOfProjects(projectService, { configuredProjects: 1 }); @@ -1321,7 +1365,7 @@ namespace ts { content: "export let x = 1" }; const host = createServerHost([file1, file2]); - const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false); + const projectService = new server.ProjectService(host, nullLogger, nullCancellationToken, /*useSingleInferredProject*/ false, /*typingsInstaller*/ undefined); projectService.openClientFile(file1.path); projectService.openClientFile(file2.path); @@ -1346,7 +1390,7 @@ namespace ts { content: `