diff --git a/src/analyzer.ts b/src/analyzer.ts index 5b06c37ed..8b54aebd0 100644 --- a/src/analyzer.ts +++ b/src/analyzer.ts @@ -9,25 +9,31 @@ */ import * as fs from 'fs'; -import {Analyzer, Deferred, Loader, Resolver, DocumentDescriptor} from 'hydrolysis'; -import {posix as posixPath} from 'path'; -import * as osPath from 'path'; -import {Transform} from 'stream'; +import {Analyzer, Deferred, Loader, Resolver, DocumentDescriptor} + from 'hydrolysis'; +import * as path from 'path'; +import {PassThrough, Transform} from 'stream'; import File = require('vinyl'); import {parse as parseUrl} from 'url'; import * as logging from 'plylog'; import {Node, queryAll, predicates, getAttribute} from 'dom5'; -import {FileCB} from './streams'; -import urlFromPath from './url-from-path'; +import {FileCB, VinylReaderTransform} from './streams'; +import {urlFromPath, pathFromUrl} from './path-transformers'; +import {DocumentDeps, getDependenciesFromDocument} + from './get-dependencies-from-document'; const minimatchAll = require('minimatch-all'); const logger = logging.getLogger('cli.build.analyzer'); -export interface DocumentDeps { - imports?: Array<string>; - scripts?: Array<string>; - styles?: Array<string>; +export interface DepsIndex { + // An index of dependency -> fragments that depend on it + depsToFragments: Map<string, string[]>; + // TODO(garlicnation): Remove this map. + // An index of fragments -> html dependencies + fragmentToDeps: Map<string, string[]>; + // A map from fragment urls to html, js, and css dependencies. + fragmentToFullDeps: Map<string, DocumentDeps>; } export class StreamAnalyzer extends Transform { @@ -37,33 +43,45 @@ export class StreamAnalyzer extends Transform { shell: string; fragments: string[]; allFragments: string[]; + sourceGlobs: string[]; resolver: StreamResolver; loader: Loader; analyzer: Analyzer; - files = new Map<string, File>(); - - _analyzeResolve: (index: DepsIndex) => void; - analyze: Promise<DepsIndex>; + _dependenciesStream = new PassThrough({ objectMode: true }); + _dependenciesProcessingStream = new VinylReaderTransform(); - constructor(root: string, entrypoint: string, shell: string, fragments: string[]) { + files = new Map<string, File>(); + allFragmentsToAnalyze: Set<string>; + foundDependencies = new Set<string>(); + + analyzeDependencies: Promise<DepsIndex>; + _dependencyAnalysis: DepsIndex = { + depsToFragments: new Map(), + fragmentToDeps: new Map(), + fragmentToFullDeps: new Map() + }; + _resolveDependencyAnalysis: (index: DepsIndex) => void; + + constructor(root: string, entrypoint: string, shell: string, + fragments: string[], sourceGlobs: string[]) { super({objectMode: true}); + this.root = root; this.entrypoint = entrypoint; this.shell = shell; this.fragments = fragments; - + this.sourceGlobs = sourceGlobs; this.allFragments = []; + // It's important that shell is first for document-ordering of imports if (shell) { this.allFragments.push(shell); } - if (entrypoint && !shell && fragments.length === 0) { this.allFragments.push(entrypoint); } - if (fragments) { this.allFragments = this.allFragments.concat(fragments); } @@ -72,37 +90,83 @@ export class StreamAnalyzer extends Transform { this.loader = new Loader(); this.loader.addResolver(this.resolver); this.analyzer = new Analyzer(false, this.loader); - this.analyze = new Promise((resolve, reject) => { - this._analyzeResolve = resolve; + + // Connect the dependencies stream that the analyzer pushes into to the + // processing stream, which loads each file and attaches the file contents.
+ this._dependenciesStream.pipe(this._dependenciesProcessingStream); + + this.allFragmentsToAnalyze = new Set(this.allFragments); + this.analyzeDependencies = new Promise((resolve, reject) => { + this._resolveDependencyAnalysis = resolve; }); } + /** + * The source dependency stream that Analyzer pushes discovered dependencies + * into is connected to the post-processing stream. We want consumers to only + * use the post-processed data so that all file objects have contents + * loaded by default. This also makes Analyzer easier for us to test. + */ + get dependencies(): Transform { + return this._dependenciesProcessingStream; + } + _transform(file: File, encoding: string, callback: FileCB): void { + let filePath = file.path; + let fileUrl = urlFromPath(this.root, file.path); this.addFile(file); - // If this is the entrypoint, hold on to the file, so that it's fully - // analyzed by the time down-stream transforms see it. + // If our resolver is waiting for this file, resolve its deferred loader + if (this.resolver.hasDeferredFile(fileUrl)) { + this.resolver.resolveDeferredFile(fileUrl, file); + } + + // Propagate the file so that the stream can continue + callback(null, file); + + // If the file is a fragment, begin analysis on its dependencies if (this.isFragment(file)) { - callback(null, null); - } else { - callback(null, file); + this._getDependencies(urlFromPath(this.root, filePath)) + .then((deps: DocumentDeps) => { + // Add all found dependencies to our index + this._addDependencies(filePath, deps); + this.allFragmentsToAnalyze.delete(filePath); + // If there are no more fragments to analyze, close the dependency stream + if (this.allFragmentsToAnalyze.size === 0) { + this._dependenciesStream.end(); + } + }); } } _flush(done: (error?: any) => void) { - this._getDepsToEntrypointIndex().then((depsIndex) => { - // push held back files - for (let fragment of this.allFragments) { - let url = urlFromPath(this.root, fragment); - let file = this.getUrl(url); - if (file == null) { - done(new Error(`no file found for fragment ${fragment}`)); - } - this.push(file); + // If stream finished with files that still needed to be loaded, error out + if (this.resolver.hasDeferredFiles()) { + for (let fileUrl of this.resolver.deferredFiles.keys()) { + logger.error(`${fileUrl} never loaded`); } - this._analyzeResolve(depsIndex); - done(); - }); + done(new Error(`${this.resolver.deferredFiles.size} deferred files were never loaded`)); + return; + } + // Resolve our dependency analysis promise now that we have seen all files + this._resolveDependencyAnalysis(this._dependencyAnalysis); + done(); + } + + getFile(filepath: string): File { + let url = urlFromPath(this.root, filepath); + return this.getFileByUrl(url); + } + + getFileByUrl(url: string): File { + if (url.startsWith('/')) { + url = url.substring(1); + } + return this.files.get(url); + } + + isFragment(file: File): boolean { + return this.allFragments.indexOf(file.path) !== -1; } /** @@ -115,158 +179,89 @@ export class StreamAnalyzer extends Transform { logger.debug(`addFile: ${file.path}`); // Badly-behaved upstream transformers (looking at you gulp-html-minifier) // may use posix path separators on Windows. 
- let filepath = osPath.normalize(file.path); + let filepath = path.normalize(file.path); // Store only root-relative paths, in URL/posix format this.files.set(urlFromPath(this.root, filepath), file); } - getFile(filepath: string): File { - return this.getUrl(urlFromPath(this.root, filepath)); + /** + * Attempts to retrieve document-order transitive dependencies for `url`. + */ + _getDependencies(url: string): Promise<DocumentDeps> { + let dir = path.posix.dirname(url); + return this.analyzer.metadataTree(url) + .then((tree) => getDependenciesFromDocument(tree, dir)); } - getUrl(url: string): File { - if (url.startsWith('/')) { - url = url.substring(1); + _addDependencies(filePath: string, deps: DocumentDeps) { + // Make sure function is being called properly + if (!this.allFragmentsToAnalyze.has(filePath)) { + throw new Error(`Dependency analysis incorrectly called for ${filePath}`); } - let file = this.files.get(url); - if (!file) { - logger.debug(`no file for ${url} :(`); - logger.debug(Array.from(this.files.values()).join(', ')); - try { - throw new Error(); - } catch (e) { - logger.error(e.stack); - } - } - return file; - } - - isFragment(file: File): boolean { - return this.allFragments.indexOf(file.path) !== -1; - } - - _getDepsToEntrypointIndex(): Promise<DepsIndex> { - let depsPromises = <Promise<DocumentDeps>[]>this.allFragments.map((f) => - this._getDependencies(urlFromPath(this.root, f))); - - return Promise.all(depsPromises).then((value: any) => { - // tsc was giving a spurious error with `allDeps` as the parameter - let allDeps: DocumentDeps[] = value; - - // An index of dependency -> fragments that depend on it - let depsToFragments = new Map<string, string[]>(); - - // An index of fragments -> dependencies - let fragmentToDeps = new Map<string, string[]>(); - - let fragmentToFullDeps = new Map<string, DocumentDeps>(); - console.assert(this.allFragments.length === allDeps.length); - - for (let i = 0; i < allDeps.length; i++) { - let fragment = this.allFragments[i]; - let deps: DocumentDeps = allDeps[i]; - console.assert(deps != null, `deps is null for ${fragment}`); - - fragmentToDeps.set(fragment, deps.imports); - fragmentToFullDeps.set(fragment, deps); - - for (let dep of deps.imports) { - let entrypointList: string[]; - if (!depsToFragments.has(dep)) { - entrypointList = []; - depsToFragments.set(dep, entrypointList); - } else { - entrypointList = depsToFragments.get(dep); - } - entrypointList.push(fragment); - } + // Add dependencies to _dependencyAnalysis object, and push them through + // the dependency stream. + this._dependencyAnalysis.fragmentToFullDeps.set(filePath, deps); + this._dependencyAnalysis.fragmentToDeps.set(filePath, deps.imports); + deps.scripts.forEach((url) => this.pushDependency(url)); + deps.styles.forEach((url) => this.pushDependency(url)); + deps.imports.forEach((url) => { + this.pushDependency(url); + + let entrypointList: string[] = this._dependencyAnalysis.depsToFragments.get(url); + if (entrypointList) { + entrypointList.push(filePath); + } else { + this._dependencyAnalysis.depsToFragments.set(url, [filePath]); } - return { - depsToFragments, - fragmentToDeps, - fragmentToFullDeps, - }; }); } + /** - * Attempts to retrieve document-order transitive dependencies for `url`. + * Process the given dependency before pushing it through the stream. + * Each dependency is only pushed through once to avoid duplicates.
*/ - _getDependencies(url: string): Promise<DocumentDeps> { - let dir = posixPath.dirname(url); - return this.analyzer.metadataTree(url) - .then((tree) => this._getDependenciesFromDescriptor(tree, dir)); - } - - _getDependenciesFromDescriptor(descriptor: DocumentDescriptor, dir: string): DocumentDeps { - let allHtmlDeps: string[] = []; - let allScriptDeps = new Set<string>(); - let allStyleDeps = new Set<string>(); - - let deps: DocumentDeps = this._collectScriptsAndStyles(descriptor); - deps.scripts.forEach((s) => allScriptDeps.add(posixPath.resolve(dir, s))); - deps.styles.forEach((s) => allStyleDeps.add(posixPath.resolve(dir, s))); - if (descriptor.imports) { - let queue = descriptor.imports.slice(); - while (queue.length > 0) { - let next = queue.shift(); - if (!next.href) { - continue; - } - allHtmlDeps.push(next.href); - let childDeps = this._getDependenciesFromDescriptor(next, posixPath.dirname(next.href)); - allHtmlDeps = allHtmlDeps.concat(childDeps.imports); - childDeps.scripts.forEach((s) => allScriptDeps.add(s)); - childDeps.styles.forEach((s) => allStyleDeps.add(s)); - } + pushDependency(dependencyUrl: string) { + if (this.getFileByUrl(dependencyUrl)) { + logger.debug('dependency has already been pushed, ignoring...', {dep: dependencyUrl}); + return; } - return { - scripts: Array.from(allScriptDeps), - styles: Array.from(allStyleDeps), - imports: allHtmlDeps, - }; - } + let dependencyFilePath = pathFromUrl(this.root, dependencyUrl); + if (minimatchAll(dependencyFilePath, this.sourceGlobs)) { + logger.debug('dependency is a source file, ignoring...', {dep: dependencyUrl}); + return; + } - _collectScriptsAndStyles(tree: DocumentDescriptor): DocumentDeps { - let scripts: string[] = []; - let styles: string[] = []; - tree.html.script.forEach((script: Node) => { - // TODO(justinfagnani): stop patching Nodes in Hydrolysis - let __hydrolysisInlined = (<any>script).__hydrolysisInlined; - if (__hydrolysisInlined) { - scripts.push(__hydrolysisInlined); - } - }); - tree.html.style.forEach((style: Node) => { - let href = getAttribute(style, 'href'); - if (href) { - styles.push(href); - } - }); - return { - scripts, - styles - }; + logger.debug('new dependency found, pushing into dependency stream...', dependencyFilePath); + this._dependenciesStream.push(dependencyFilePath); } } -export interface DepsIndex { - depsToFragments: Map<string, string[]>; - // TODO(garlicnation): Remove this map. - // A legacy map from fragments to html dependencies. - fragmentToDeps: Map<string, string[]>; - // A map from fragment urls to html, js, and css dependencies. - fragmentToFullDeps: Map<string, DocumentDeps>; -} class StreamResolver implements Resolver { + analyzer: StreamAnalyzer; + deferredFiles = new Map<string, Deferred<string>>(); constructor(analyzer: StreamAnalyzer) { this.analyzer = analyzer; } + hasDeferredFile(url: string): boolean { + return this.deferredFiles.has(url); + } + + hasDeferredFiles(): boolean { + return this.deferredFiles.size > 0; + } + + resolveDeferredFile(url: string, file: File): void { + let deferred = this.deferredFiles.get(url); + deferred.resolve(file.contents.toString()); + this.deferredFiles.delete(url); + } + accept(url: string, deferred: Deferred<string>): boolean { logger.debug(`accept: ${url}`); let urlObject = parseUrl(url); @@ -276,17 +271,15 @@ class StreamResolver implements Resolver { } let urlPath = decodeURIComponent(urlObject.pathname); - let file = this.analyzer.getUrl(urlPath); + let file = this.analyzer.getFileByUrl(urlPath); if (file) { deferred.resolve(file.contents.toString()); } else { - logger.debug(`No file found for ${urlPath}`); - // If you're tempted to do the next line, Loader does that for us, so - // don't double reject! - // deferred.reject(new Error(`No file found for ${urlPath}`)); - return false; + this.analyzer.pushDependency(urlPath); + this.deferredFiles.set(urlPath, deferred); } + return true; } -} +} \ No newline at end of file
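The reworked StreamAnalyzer above is stream-driven: fragments passing through `_transform` kick off `_getDependencies`, discovered dependency paths are pushed into `_dependenciesStream`, loaded by `VinylReaderTransform`, and exposed through the `dependencies` getter. A minimal consumption sketch follows — the project root and globs are hypothetical, and the merge pattern mirrors the tests later in this diff:

```ts
// Sketch only: wiring a StreamAnalyzer into a build pipeline.
// The root and globs are hypothetical; merge-stream and vinyl-fs are
// packages this repo already depends on.
import * as path from 'path';
import * as vfs from 'vinyl-fs';
import {StreamAnalyzer} from './analyzer';
const mergeStream = require('merge-stream');

const root = path.resolve('my-app');  // hypothetical project root
const sourceGlobs = [
  path.join(root, 'src/**/*'),
  path.join(root, 'index.html'),
];

const analyzer = new StreamAnalyzer(
    root,
    path.join(root, 'index.html'),  // entrypoint
    null,                           // no shell
    [],                             // no extra fragments
    sourceGlobs);

// Source files come from disk; dependency files are emitted by the analyzer
// itself as it discovers them. Both must flow through the analyzer so that
// every file is registered before downstream transforms see it.
mergeStream(vfs.src(sourceGlobs, {cwdbase: true}), analyzer.dependencies)
    .pipe(analyzer)
    .on('finish', () => {
      analyzer.analyzeDependencies.then((index) => {
        console.log(`${index.depsToFragments.size} dependencies indexed`);
      });
    });
```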
diff --git a/src/bundle.ts b/src/bundle.ts index b045e6642..a3eca736e 100644 --- a/src/bundle.ts +++ b/src/bundle.ts @@ -15,7 +15,7 @@ import {posix as posixPath} from 'path'; import {Transform} from 'stream'; import File = require('vinyl'); import * as logging from 'plylog'; -import urlFromPath from './url-from-path'; +import {urlFromPath} from './path-transformers'; import {StreamAnalyzer, DepsIndex} from './analyzer'; import {compose} from './streams'; @@ -254,7 +254,7 @@ export class Bundler extends Transform { } _getBundles() { - return this.analyzer.analyze.then((indexes) => { + return this.analyzer.analyzeDependencies.then((indexes) => { let depsToEntrypoints = indexes.depsToFragments; let fragmentToDeps = indexes.fragmentToDeps; let bundles = new Map<string, string[]>();
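For reference, the `DepsIndex` that `analyzeDependencies` resolves to — and that `_getBundles` consumes above — has roughly the following shape. This is a hand-written illustration with invented paths, not captured output:

```ts
// Illustration only: a plausible DepsIndex for a single shell fragment that
// imports two HTML files and one script. All keys and values are invented.
import {DepsIndex} from './analyzer';

const example: DepsIndex = {
  // dependency -> fragments that depend on it
  depsToFragments: new Map([
    ['shared-1.html', ['/app/shell.html']],
    ['shared-2.html', ['/app/shell.html']],
  ]),
  // fragment -> HTML imports only (the legacy map slated for removal)
  fragmentToDeps: new Map([
    ['/app/shell.html', ['shared-1.html', 'shared-2.html']],
  ]),
  // fragment -> full HTML/JS/CSS dependency record
  fragmentToFullDeps: new Map([
    ['/app/shell.html', {
      imports: ['shared-1.html', 'shared-2.html'],
      scripts: ['app.js'],
      styles: [],
    }],
  ]),
};
```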
diff --git a/src/get-dependencies-from-document.ts b/src/get-dependencies-from-document.ts new file mode 100644 index 000000000..5ac989f45 --- /dev/null +++ b/src/get-dependencies-from-document.ts @@ -0,0 +1,71 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +import {DocumentDescriptor} from 'hydrolysis'; +import {posix as posixPath} from 'path'; +import {Node, queryAll, predicates, getAttribute} from 'dom5'; + +export interface DocumentDeps { + imports?: Array<string>; + scripts?: Array<string>; + styles?: Array<string>; +} + +function collectScriptsAndStyles(tree: DocumentDescriptor): DocumentDeps { + let scripts: string[] = []; + let styles: string[] = []; + tree.html.script.forEach((script: Node) => { + // TODO(justinfagnani): stop patching Nodes in Hydrolysis + let __hydrolysisInlined = (<any>script).__hydrolysisInlined; + if (__hydrolysisInlined) { + scripts.push(__hydrolysisInlined); + } + }); + tree.html.style.forEach((style: Node) => { + let href = getAttribute(style, 'href'); + if (href) { + styles.push(href); + } + }); + return { + scripts, + styles + }; +} + +export function getDependenciesFromDocument(descriptor: DocumentDescriptor, dir: string): DocumentDeps { + let allHtmlDeps: string[] = []; + let allScriptDeps = new Set<string>(); + let allStyleDeps = new Set<string>(); + + let deps: DocumentDeps = collectScriptsAndStyles(descriptor); + deps.scripts.forEach((s) => allScriptDeps.add(posixPath.join(dir, s))); + deps.styles.forEach((s) => allStyleDeps.add(posixPath.join(dir, s))); + if (descriptor.imports) { + let queue = descriptor.imports.slice(); + let next; + while (next = queue.shift()) { + if (!next.href) { + continue; + } + allHtmlDeps.push(next.href); + let childDeps = getDependenciesFromDocument(next, posixPath.dirname(next.href)); + allHtmlDeps = allHtmlDeps.concat(childDeps.imports); + childDeps.scripts.forEach((s) => allScriptDeps.add(s)); + childDeps.styles.forEach((s) => allStyleDeps.add(s)); + } + } + + return { + scripts: Array.from(allScriptDeps), + styles: Array.from(allStyleDeps), + imports: allHtmlDeps, + }; +} diff --git a/src/url-from-path.ts b/src/path-transformers.ts similarity index 84% rename from src/url-from-path.ts rename to src/path-transformers.ts index 0c62d4101..d6a414c20 100644 --- a/src/url-from-path.ts +++ b/src/path-transformers.ts @@ -38,7 +38,7 @@ import * as path from 'path'; -export default function urlFromPath(root: string, filepath: string) { +export function urlFromPath(root: string, filepath: string) { if (!filepath.startsWith(root)) { throw new Error(`file path is not in root: ${filepath} (${root})`); } @@ -54,3 +54,18 @@ // Otherwise, just return the relative path between the two return path.relative(root, filepath); } + +export function pathFromUrl(root: string, url: string) { + let isPlatformWin = /^win/.test(process.platform); + let filepath: string; + + // On windows systems, convert URL to filesystem path by replacing slashes + if (isPlatformWin) { + filepath = url.replace(/\//g, '\\'); + } else { + filepath = url; + } + + // Join the root and the converted path to produce the full filesystem path + return path.join(root, filepath); +}
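The two exports of the renamed `path-transformers` module are intended to be rough inverses, mapping between filesystem paths under `root` and root-relative URLs. A quick sketch with hypothetical POSIX-style paths:

```ts
// Sketch: round-tripping a filesystem path through a root-relative URL.
// The paths below are hypothetical.
import {urlFromPath, pathFromUrl} from './path-transformers';

const root = '/home/dev/my-app';
const filepath = '/home/dev/my-app/src/my-app.html';

const url = urlFromPath(root, filepath);      // -> 'src/my-app.html'
const roundTripped = pathFromUrl(root, url);  // -> '/home/dev/my-app/src/my-app.html'

console.assert(roundTripped === filepath);
```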
diff --git a/src/polymer-project.ts b/src/polymer-project.ts index 98e909077..70d9fde1a 100644 --- a/src/polymer-project.ts +++ b/src/polymer-project.ts @@ -12,14 +12,15 @@ import * as dom5 from 'dom5'; import {posix as posixPath} from 'path'; import * as osPath from 'path'; import * as logging from 'plylog'; -import {Transform, Readable} from 'stream'; +import {Transform} from 'stream'; import File = require('vinyl'); import * as vfs from 'vinyl-fs'; - import {StreamAnalyzer} from './analyzer'; import {Bundler} from './bundle'; import {optimize, OptimizeOptions} from './optimize'; import {FileCB} from './streams'; +import {forkStream} from './fork-stream'; +const mergeStream = require('merge-stream'); const logger = logging.getLogger('polymer-project'); const pred = dom5.predicates; @@ -62,20 +63,17 @@ export interface ProjectOptions { sourceGlobs?: string[]; /** - * List of glob patterns, relative to root, of dependencies to read from the - * file system. For example node_modules\/**\/* and bower_components\/**\/* + * List of file paths, relative to the project directory, that should be + * included as dependencies in the build target. */ - dependencyGlobs?: string[]; + includeDependencies?: string[]; } export const defaultSourceGlobs = [ - '**/*', - '!build/**/*', -]; - -export const defaultDependencyGlobs = [ - 'bower_components/**/*', - 'node_modules/**/*', + 'src/**/*', + // NOTE(fks) 06-29-2016: `polymer-cli serve` uses a bower.json file to display + // information about the project. The file is included here by default. + 'bower.json', ]; function resolveGlob(fromPath: string, glob: string) { @@ -105,7 +103,7 @@ export class PolymerProject { shell: string; fragments: string[]; sourceGlobs: string[]; - dependencyGlobs: string[]; + includeDependencies: string[]; _splitFiles: Map<string, SplitFile> = new Map(); _parts: Map<string, SplitFile> = new Map(); @@ -119,15 +117,16 @@ this.fragments = (options.fragments || []) .map((f) => osPath.resolve(this.root, f)); this.sourceGlobs = (options.sourceGlobs || defaultSourceGlobs) - .map((g) => resolveGlob(this.root, g)); - this.dependencyGlobs = (options.dependencyGlobs || defaultDependencyGlobs) - .map((g) => resolveGlob(this.root, g)); + .map((glob) => resolveGlob(this.root, glob)); + this.includeDependencies = (options.includeDependencies || []) + .map((path) => osPath.resolve(this.root, path)); this._analyzer = new StreamAnalyzer( this.root, this.entrypoint, this.shell, - this.fragments); + this.fragments, + this.allSourceGlobs); this._bundler = new Bundler( this.root, @@ -141,12 +140,12 @@ logger.debug(`entrypoint: ${this.entrypoint}`); logger.debug(`fragments: ${this.fragments}`); logger.debug(`sources: ${this.sourceGlobs}`); - logger.debug(`dependencies: \n\t${this.dependencyGlobs}`); + logger.debug(`includeDependencies: ${this.includeDependencies}`); } /** * An array of globs composed of `entrypoint`, `shell`, `fragments`, - * `sourceGlobs`, and the inverted array of `dependencyGlobs`. + * and `sourceGlobs`.
*/ get allSourceGlobs(): string[] { let globs: string[] = []; @@ -158,11 +157,6 @@ if (this.sourceGlobs && this.sourceGlobs.length > 0) { globs = globs.concat(this.sourceGlobs); } - if (this.dependencyGlobs && this.dependencyGlobs.length > 0) { - let excludes = this.dependencyGlobs.map((g) => invertGlob(g)); - logger.debug(`excludes: \n\t${excludes.join('\n\t')}`); - globs = globs.concat(excludes); - } logger.debug(`sourceGlobs: \n\t${globs.join('\n\t')}`); return globs; } @@ -181,17 +175,25 @@ }); } - // TODO(justinfagnani): add options, pass to vfs.src() dependencies(): NodeJS.ReadableStream { - let deps = this.dependencyGlobs; - return vfs.src(deps, { - allowEmpty: true, - cwdbase: true, - nodir: true, - }); - } + let dependenciesStream: NodeJS.ReadableStream = forkStream( + this._analyzer.dependencies + ); + + // If we need to include additional dependencies, create a new vfs.src + // stream and pipe our default dependenciesStream through it to combine. + if (this.includeDependencies.length > 0) { + let includeStream = vfs.src(this.includeDependencies, { + allowEmpty: true, + cwdbase: true, + nodir: true, + passthrough: true, + }); + dependenciesStream = dependenciesStream.pipe(includeStream); + } - // TODO(justinfagnani): add allFiles() + return dependenciesStream; + } /** * Returns a new `Transform` that splits inline script into separate files. @@ -359,6 +361,7 @@ class HtmlSplitter extends Transform { } } + /** * Joins HTML files split by `Splitter`. */ diff --git a/src/prefetch.ts b/src/prefetch.ts index 995fb62d1..847d8d7e7 100644 --- a/src/prefetch.ts +++ b/src/prefetch.ts @@ -116,7 +116,7 @@ if (this.fileMap.size === 0) { return done(); } - this.analyzer.analyze.then((depsIndex: DepsIndex) => { + this.analyzer.analyzeDependencies.then((depsIndex: DepsIndex) => { let fragmentToDeps = new Map(depsIndex.fragmentToDeps); if (this.entrypoint && this.shell) { diff --git a/src/streams.ts b/src/streams.ts index 1d2fbe024..03bf256ed 100644 --- a/src/streams.ts +++ b/src/streams.ts @@ -10,6 +10,7 @@ import {PassThrough, Readable, Transform} from 'stream'; import File = require('vinyl'); +import * as fs from 'fs'; const multipipe = require('multipipe'); @@ -42,3 +43,32 @@ export function compose(streams: NodeJS.ReadWriteStream[]) { return new PassThrough({objectMode: true}); } } + +/** + * A stream that takes file path strings and outputs full Vinyl file objects + * for the file at each location.
+ */ +export class VinylReaderTransform extends Transform { + + constructor() { + super({ objectMode: true }); + } + + _transform( + filePath: string, + encoding: string, + callback: (error?: Error, data?: File) => void + ): void { + fs.readFile(filePath, (err?: Error, data?: Buffer) => { + if (err) { + callback(err); + return; + } + callback(null, new File({ + path: filePath, + contents: data + })); + }); + } + +} \ No newline at end of file diff --git a/test/analyzer_test.js b/test/analyzer_test.js index 8c7436e5b..5515aff16 100644 --- a/test/analyzer_test.js +++ b/test/analyzer_test.js @@ -13,6 +13,7 @@ const assert = require('chai').assert; const path = require('path'); const StreamAnalyzer = require('../lib/analyzer').StreamAnalyzer; +const mergeStream = require('merge-stream'); const vfs = require('vinyl-fs-fake'); suite('Analyzer', () => { @@ -21,14 +22,18 @@ suite('Analyzer', () => { test('fragment to deps list has only uniques', (done) => { let root = path.resolve('test/analyzer-data'); - let analyzer = new StreamAnalyzer(root, null, null, [ + let fragments = [ path.resolve(root, 'a.html'), path.resolve(root, 'b.html'), - ]); - vfs.src(path.join(root, '**'), {cwdbase: true}) + ]; + let analyzer = new StreamAnalyzer(root, null, null, fragments, fragments); + mergeStream( + vfs.src(path.join(root, '**'), {cwdbase: true}), + analyzer.dependencies + ) .pipe(analyzer) .on('finish', () => { - analyzer.analyze.then((depsIndex) => { + analyzer.analyzeDependencies.then((depsIndex) => { let ftd = depsIndex.fragmentToDeps; for (let frag of ftd.keys()) { assert.deepEqual(ftd.get(frag), ['shared-1.html', 'shared-2.html']); @@ -40,22 +45,104 @@ suite('Analyzer', () => { test("analyzing shell and entrypoint doesn't double load files", (done) => { let root = path.resolve('test/analyzer-data'); + let sourceGlobs = [ + path.resolve(root, 'a.html'), + path.resolve(root, 'b.html'), + ]; let analyzer = new StreamAnalyzer( root, path.resolve(root, 'entrypoint.html'), - path.resolve(root, 'shell.html')); - vfs.src(root + '/**', {cwdbase: true}) + path.resolve(root, 'shell.html'), + undefined, + sourceGlobs); + mergeStream( + vfs.src(path.join(root, '**'), {cwdbase: true}), + analyzer.dependencies + ) .pipe(analyzer) .on('finish', () => { - analyzer.analyze.then((depsIndex) => { + analyzer.analyzeDependencies.then((depsIndex) => { assert.isTrue(depsIndex.depsToFragments.has('shared-2.html')); assert.isFalse(depsIndex.depsToFragments.has('/shell.html')); assert.isFalse(depsIndex.depsToFragments.has('/shared-2.html')); done(); - }).catch((err) => done(err)); + }).catch(done); }); }); }); + suite('.dependencies', () => { + + test('outputs all dependencies needed by source', (done) => { + let root = path.resolve('test/analyzer-data'); + let shell = path.resolve(root, 'shell.html'); + let entrypoint = path.resolve(root, 'entrypoint.html'); + let sourceGlobs = [ + path.resolve(root, 'a.html'), + path.resolve(root, 'b.html'), + ]; + let analyzer = new StreamAnalyzer( + root, + entrypoint, + shell, + undefined, + sourceGlobs.concat(shell, entrypoint)); + + let foundDependencies = new Set(); + analyzer.dependencies.on('data', (file) => { + foundDependencies.add(file.path); + }); + + mergeStream( + vfs.src(sourceGlobs.concat(shell, entrypoint), {cwdbase: true}), + analyzer.dependencies + ) + .pipe(analyzer) + .on('finish', () => { + // shared-1 is never imported by shell/entrypoint, so it is not included as a dep. 
diff --git a/test/analyzer_test.js b/test/analyzer_test.js index 8c7436e5b..5515aff16 100644 --- a/test/analyzer_test.js +++ b/test/analyzer_test.js @@ -13,6 +13,7 @@ const assert = require('chai').assert; const path = require('path'); const StreamAnalyzer = require('../lib/analyzer').StreamAnalyzer; +const mergeStream = require('merge-stream'); const vfs = require('vinyl-fs-fake'); suite('Analyzer', () => { @@ -21,14 +22,18 @@ test('fragment to deps list has only uniques', (done) => { let root = path.resolve('test/analyzer-data'); - let analyzer = new StreamAnalyzer(root, null, null, [ + let fragments = [ path.resolve(root, 'a.html'), path.resolve(root, 'b.html'), - ]); - vfs.src(path.join(root, '**'), {cwdbase: true}) + ]; + let analyzer = new StreamAnalyzer(root, null, null, fragments, fragments); + mergeStream( + vfs.src(path.join(root, '**'), {cwdbase: true}), + analyzer.dependencies + ) .pipe(analyzer) .on('finish', () => { - analyzer.analyze.then((depsIndex) => { + analyzer.analyzeDependencies.then((depsIndex) => { let ftd = depsIndex.fragmentToDeps; for (let frag of ftd.keys()) { assert.deepEqual(ftd.get(frag), ['shared-1.html', 'shared-2.html']); } done(); @@ -40,22 +45,104 @@ test("analyzing shell and entrypoint doesn't double load files", (done) => { let root = path.resolve('test/analyzer-data'); + let sourceGlobs = [ + path.resolve(root, 'a.html'), + path.resolve(root, 'b.html'), + ]; let analyzer = new StreamAnalyzer( root, path.resolve(root, 'entrypoint.html'), - path.resolve(root, 'shell.html')); - vfs.src(root + '/**', {cwdbase: true}) + path.resolve(root, 'shell.html'), + undefined, + sourceGlobs); + mergeStream( + vfs.src(path.join(root, '**'), {cwdbase: true}), + analyzer.dependencies + ) .pipe(analyzer) .on('finish', () => { - analyzer.analyze.then((depsIndex) => { + analyzer.analyzeDependencies.then((depsIndex) => { assert.isTrue(depsIndex.depsToFragments.has('shared-2.html')); assert.isFalse(depsIndex.depsToFragments.has('/shell.html')); assert.isFalse(depsIndex.depsToFragments.has('/shared-2.html')); done(); - }).catch((err) => done(err)); + }).catch(done); }); }); }); + suite('.dependencies', () => { + + test('outputs all dependencies needed by source', (done) => { + let root = path.resolve('test/analyzer-data'); + let shell = path.resolve(root, 'shell.html'); + let entrypoint = path.resolve(root, 'entrypoint.html'); + let sourceGlobs = [ + path.resolve(root, 'a.html'), + path.resolve(root, 'b.html'), + ]; + let analyzer = new StreamAnalyzer( + root, + entrypoint, + shell, + undefined, + sourceGlobs.concat(shell, entrypoint)); + + let foundDependencies = new Set(); + analyzer.dependencies.on('data', (file) => { + foundDependencies.add(file.path); + }); + + mergeStream( + vfs.src(sourceGlobs.concat(shell, entrypoint), {cwdbase: true}), + analyzer.dependencies + ) + .pipe(analyzer) + .on('finish', () => { + // shared-1 is never imported by shell/entrypoint, so it is not included as a dep. + assert.isFalse(foundDependencies.has(path.resolve(root, 'shared-1.html'))); + // shared-2 is imported by shell, so it is included as a dep. + assert.isTrue(foundDependencies.has(path.resolve(root, 'shared-2.html'))); + done(); + }) + .on('error', done); + }); + + test('outputs all dependencies needed by source and given fragments', (done) => { + let root = path.resolve('test/analyzer-data'); + let shell = path.resolve(root, 'shell.html'); + let entrypoint = path.resolve(root, 'entrypoint.html'); + let sourceGlobs = [ + path.resolve(root, 'a.html'), + path.resolve(root, 'b.html'), + ]; + let analyzer = new StreamAnalyzer( + root, + entrypoint, + shell, + sourceGlobs, + sourceGlobs.concat(shell, entrypoint)); + + let foundDependencies = new Set(); + analyzer.dependencies.on('data', (file) => { + foundDependencies.add(file.path); + }); + + mergeStream( + vfs.src(sourceGlobs.concat(shell, entrypoint), {cwdbase: true}), + analyzer.dependencies + ) + .pipe(analyzer) + .on('finish', () => { + // shared-1 is imported by 'a' & 'b', so it is included as a dep. + assert.isTrue(foundDependencies.has(path.resolve(root, 'shared-1.html'))); + // shared-2 is imported by 'a' & 'b', so it is included as a dep. + assert.isTrue(foundDependencies.has(path.resolve(root, 'shared-2.html'))); + done(); + }) + .on('error', done); + }); + }); }); diff --git a/test/bundle_test.js b/test/bundle_test.js index ce0cfdd01..409ce4b90 100644 --- a/test/bundle_test.js +++ b/test/bundle_test.js @@ -15,6 +15,7 @@ const dom5 = require('dom5'); const File = require('vinyl'); const path = require('path'); const stream = require('stream'); +const mergeStream = require('merge-stream'); const analyzer = require('../lib/analyzer'); const bundle = require('../lib/bundle'); @@ -39,14 +40,29 @@ suite('Bundler', () => { && path.resolve(root, options.entrypoint); let shell = options.shell && path.resolve(root, options.shell); - let analyzer = new StreamAnalyzer(root, entrypoint, shell, fragments); - bundler = new Bundler(root, entrypoint, shell, fragments, analyzer); + let analyzer = new StreamAnalyzer( + root, + entrypoint, + shell, + fragments, + options.files.map((f) => f.path) + ); + bundler = new Bundler( + root, + entrypoint, + shell, + fragments, + analyzer + ); sourceStream = new stream.Readable({ objectMode: true, }); - bundledStream = sourceStream - .pipe(analyzer) - .pipe(bundler); + bundledStream = mergeStream( + sourceStream, + analyzer.dependencies + ) + .pipe(analyzer) + .pipe(bundler); files = new Map(); bundledStream.on('data', (file) => { files.set(file.path, file); diff --git a/test/polymer-project_test.js b/test/polymer-project_test.js index dced156c9..d4f180f22 100644 --- a/test/polymer-project_test.js +++ b/test/polymer-project_test.js @@ -14,47 +14,51 @@ const assert = require('chai').assert; const path = require('path'); const stream = require('stream'); const File = require('vinyl'); +const mergeStream = require('merge-stream'); const PolymerProject = require('../lib/polymer-project').PolymerProject; suite('PolymerProject', () => { - let project; + let defaultProject; let root = path.resolve(__dirname, 'test-project'); let unroot = (p) => p.substring(root.length + 1); setup(() => { - project = new PolymerProject({ + defaultProject = new PolymerProject({ root: path.resolve(__dirname, 'test-project'), entrypoint: 'index.html', shell: 'shell.html', + sourceGlobs: [ + 'source-dir/**', + ], }); }) test('reads sources', (done) => { let files = []; - project.sources() + defaultProject.sources() .on('data', (f) =>
files.push(f)) .on('end', () => { let names = files.map((f) => unroot(f.path)); let expected = [ 'index.html', 'shell.html', - // note, we'll probably want to exclude certain files by defult in - // the future - 'gulpfile.js', + 'source-dir/my-app.html', ]; assert.sameMembers(names, expected); done(); }); }); - test('reads dependencies', (done) => { - let files = []; - project.dependencies() - .on('data', (f) => files.push(f)) - .on('end', () => { + suite('.dependencies()', () => { + + test('reads dependencies', (done) => { + let files = []; + let dependencyStream = defaultProject.dependencies(); + dependencyStream.on('data', (f) => files.push(f)); + dependencyStream.on('end', () => { let names = files.map((f) => unroot(f.path)); let expected = [ 'bower_components/dep.html', @@ -62,27 +66,64 @@ suite('PolymerProject', () => { assert.sameMembers(names, expected); done(); }); + mergeStream( + defaultProject.sources(), + dependencyStream + ).pipe(defaultProject.analyze); + }); + + test('reads dependencies and includes additionally provided files', (done) => { + let files = []; + let projectWithIncludedDeps = new PolymerProject({ + root: path.resolve(__dirname, 'test-project'), + entrypoint: 'index.html', + shell: 'shell.html', + sourceGlobs: [ + 'source-dir/**', + ], + includeDependencies: [ + 'bower_components/unreachable*', + ], + }); + + let dependencyStream = projectWithIncludedDeps.dependencies(); + dependencyStream.on('data', (f) => files.push(f)); + dependencyStream.on('end', () => { + let names = files.map((f) => unroot(f.path)); + let expected = [ + 'bower_components/dep.html', + 'bower_components/unreachable-dep.html', + ]; + assert.sameMembers(names, expected); + done(); + }); + + mergeStream( + projectWithIncludedDeps.sources(), + dependencyStream + ).pipe(projectWithIncludedDeps.analyze); + }); }); test('splits and rejoins scripts', (done) => { let splitFiles = new Map(); let joinedFiles = new Map(); - project.sources() - .pipe(project.splitHtml()) + defaultProject.sources() + .pipe(defaultProject.splitHtml()) .on('data', (f) => splitFiles.set(unroot(f.path), f)) - .pipe(project.rejoinHtml()) + .pipe(defaultProject.rejoinHtml()) .on('data', (f) => joinedFiles.set(unroot(f.path), f)) .on('end', () => { let expectedSplitFiles = [ 'index.html', 'shell.html_script_0.js', 'shell.html', - 'gulpfile.js', + 'source-dir/my-app.html', ]; let expectedJoinedFiles = [ 'index.html', 'shell.html', - 'gulpfile.js', + 'source-dir/my-app.html', ]; assert.sameMembers(Array.from(splitFiles.keys()), expectedSplitFiles); assert.sameMembers(Array.from(joinedFiles.keys()), expectedJoinedFiles); @@ -115,14 +156,14 @@ suite('PolymerProject', () => { }); sourceStream - .pipe(project.splitHtml()) + .pipe(defaultProject.splitHtml()) .on('data', (file) => { // this is what gulp-html-minifier does... if (path.sep === '\\' && file.path.endsWith('.html')) { file.path = file.path.replace('\\', '/'); } }) - .pipe(project.rejoinHtml()) + .pipe(defaultProject.rejoinHtml()) .on('data', (file) => { let contents = file.contents.toString(); assert.equal(contents, source); diff --git a/test/test-project/bower_components/unreachable-dep.html b/test/test-project/bower_components/unreachable-dep.html new file mode 100644 index 000000000..d5283b26a --- /dev/null +++ b/test/test-project/bower_components/unreachable-dep.html @@ -0,0 +1 @@ +
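Taken together, the tests above all follow the same consumption pattern: merge `sources()` with `dependencies()` and pipe the result through the project's analyze transform, since the dependencies stream only starts emitting once source files are flowing through the analyzer. A sketch with a hypothetical root, globs, and include paths:

```ts
// Sketch of an end-to-end consumer, mirroring the merge pattern used in the
// tests above. Root, globs, and includeDependencies entries are hypothetical.
import File = require('vinyl');
import {PolymerProject} from './polymer-project';
const mergeStream = require('merge-stream');

const project = new PolymerProject({
  root: '/home/dev/my-app',
  entrypoint: 'index.html',
  shell: 'shell.html',
  sourceGlobs: ['src/**/*'],
  // Force-include files the analyzer cannot discover on its own:
  includeDependencies: ['bower_components/webcomponentsjs/*.js'],
});

mergeStream(project.sources(), project.dependencies())
    .pipe(project.analyze)  // the analyze transform, as used in the tests
    .on('data', (file: File) => console.log('build input:', file.path));
```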
diff --git a/test/test-project/index.html b/test/test-project/index.html index ed75d9235..81de2cdf5 100644 --- a/test/test-project/index.html +++ b/test/test-project/index.html @@ -1 +1,2 @@ + diff --git a/test/test-project/source-dir/my-app.html b/test/test-project/source-dir/my-app.html new file mode 100644 index 000000000..6c217c628 --- /dev/null +++ b/test/test-project/source-dir/my-app.html @@ -0,0 +1 @@ +
diff --git a/test/url-from-path_test.js b/test/url-from-path_test.js index acfc223c5..007bdc139 100644 --- a/test/url-from-path_test.js +++ b/test/url-from-path_test.js @@ -11,7 +11,7 @@ 'use strict'; const assert = require('chai').assert; -const urlFromPath = require('../lib/url-from-path').default; +const urlFromPath = require('../lib/path-transformers').urlFromPath; const WIN_ROOT_PATH = 'C:\\Users\\TEST_USER\\TEST_ROOT'; const MAC_ROOT_PATH = '/Users/TEST_USER/TEST_ROOT'; diff --git a/tsconfig.json b/tsconfig.json index 09cf71473..424c8fb2b 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -32,6 +32,7 @@ "src/analyzer.ts", "src/bundle.ts", "src/fork-stream.ts", + "src/get-dependencies-from-document.ts", "src/optimize.ts", "src/polymer-build.ts", "src/polymer-project.ts", @@ -39,7 +40,7 @@ "src/streams.ts", "src/sw-precache.ts", "src/uglify-transform.ts", - "src/url-from-path.ts", + "src/path-transformers.ts", "typings/index.d.ts" ], "atom": {