diff --git a/dbt/clients/jinja.py b/dbt/clients/jinja.py
index 2e89e0170f4..ba512d3c551 100644
--- a/dbt/clients/jinja.py
+++ b/dbt/clients/jinja.py
@@ -44,7 +44,11 @@ def call(*args, **kwargs):
         name = node.get('name')
         module = template.make_module(
             context, False, context)
-        macro = module.__dict__[dbt.utils.get_dbt_macro_name(name)]
+
+        if node['resource_type'] == NodeType.Operation:
+            macro = module.__dict__[dbt.utils.get_dbt_operation_name(name)]
+        else:
+            macro = module.__dict__[dbt.utils.get_dbt_macro_name(name)]
         module.__dict__.update(context)
 
         try:
diff --git a/dbt/compilation.py b/dbt/compilation.py
index e09ae71a857..0431c85a309 100644
--- a/dbt/compilation.py
+++ b/dbt/compilation.py
@@ -302,9 +302,6 @@ def compile(self):
 
         self._check_resource_uniqueness(flat_graph)
 
-        flat_graph = dbt.parser.process_refs(flat_graph,
-                                             root_project.get('name'))
-
         linked_graph = self.link_graph(linker, flat_graph)
 
         stats = defaultdict(int)
diff --git a/dbt/context/common.py b/dbt/context/common.py
index a1743690420..d2f23eda805 100644
--- a/dbt/context/common.py
+++ b/dbt/context/common.py
@@ -1,9 +1,8 @@
 import json
 import os
-import pytz
 
 from dbt.adapters.factory import get_adapter
-from dbt.compat import basestring, to_string
+from dbt.compat import basestring
 from dbt.node_types import NodeType
 from dbt.contracts.graph.parsed import ParsedMacro, ParsedNode
 
@@ -405,10 +404,14 @@ def generate(model, project_cfg, flat_graph, provider=None):
         "fromjson": fromjson,
         "tojson": tojson,
         "target": target,
-        "this": get_this_relation(db_wrapper, project_cfg, profile, model),
         "try_or_compiler_error": try_or_compiler_error(model)
     })
 
+    # Operations do not represent database relations, so 'this' does not apply
+    if model.get('resource_type') != NodeType.Operation:
+        context["this"] = get_this_relation(db_wrapper, project_cfg, profile,
+                                            model)
+
     context = _add_tracking(context)
     context = _add_validation(context)
     context = _add_sql_handlers(context)
diff --git a/dbt/loader.py b/dbt/loader.py
index 6e28147484f..2c87c9de4d3 100644
--- a/dbt/loader.py
+++ b/dbt/loader.py
@@ -17,7 +17,9 @@ def load_all(cls, root_project, all_projects):
         for loader in cls._LOADERS:
             nodes.update(loader.load_all(root_project, all_projects, macros))
 
-        return ParsedManifest(nodes=nodes, macros=macros)
+        manifest = ParsedManifest(nodes=nodes, macros=macros)
+        manifest = dbt.parser.process_refs(manifest, root_project)
+        return manifest
 
     @classmethod
     def register(cls, loader):
diff --git a/dbt/parser.py b/dbt/parser.py
index b82cb23832d..da9d27a271d 100644
--- a/dbt/parser.py
+++ b/dbt/parser.py
@@ -73,8 +73,9 @@ def resolve_ref(flat_graph, target_model_name, target_model_package,
                 None)
 
 
-def process_refs(flat_graph, current_project):
-    for _, node in flat_graph.get('nodes').items():
+def process_refs(manifest, current_project):
+    flat_graph = manifest.to_flat_graph()
+    for _, node in manifest.nodes.items():
         target_model = None
         target_model_name = None
         target_model_package = None
@@ -106,7 +107,7 @@ def process_refs(flat_graph, current_project):
         node['depends_on']['nodes'].append(target_model_id)
         flat_graph['nodes'][node['unique_id']] = node
 
-    return flat_graph
+    return manifest
 
 
 def get_fqn(path, package_project_config, extra=[]):
@@ -154,28 +155,38 @@ def parse_macro_file(macro_file_path,
         raise e
 
     for key, item in template.module.__dict__.items():
-        if type(item) == jinja2.runtime.Macro:
+        if type(item) != jinja2.runtime.Macro:
+            continue
+
+        node_type = None
+        if key.startswith(dbt.utils.MACRO_PREFIX):
+            node_type = NodeType.Macro
             name = key.replace(dbt.utils.MACRO_PREFIX, '')
-            unique_id = get_path(resource_type,
-                                 package_name,
-                                 name)
-
-            merged = dbt.utils.deep_merge(
-                base_node.serialize(),
-                {
-                    'name': name,
-                    'unique_id': unique_id,
-                    'tags': tags,
-                    'resource_type': resource_type,
-                    'depends_on': {'macros': []},
-                })
-
-            new_node = ParsedMacro(
-                template=template,
-                **merged)
-
-            to_return[unique_id] = new_node
+        elif key.startswith(dbt.utils.OPERATION_PREFIX):
+            node_type = NodeType.Operation
+            name = key.replace(dbt.utils.OPERATION_PREFIX, '')
+
+        if node_type != resource_type:
+            continue
+
+        unique_id = get_path(resource_type, package_name, name)
+
+        merged = dbt.utils.deep_merge(
+            base_node.serialize(),
+            {
+                'name': name,
+                'unique_id': unique_id,
+                'tags': tags,
+                'resource_type': resource_type,
+                'depends_on': {'macros': []},
+            })
+
+        new_node = ParsedMacro(
+            template=template,
+            **merged)
+
+        to_return[unique_id] = new_node
 
     return to_return
diff --git a/dbt/task/generate.py b/dbt/task/generate.py
index 6c99ae2239c..aa1a295c4c0 100644
--- a/dbt/task/generate.py
+++ b/dbt/task/generate.py
@@ -1,13 +1,13 @@
 import json
 import os
 
+from dbt.contracts.graph.parsed import ParsedManifest, ParsedNode, ParsedMacro
 from dbt.adapters.factory import get_adapter
 from dbt.clients.system import write_file
 from dbt.compat import bigint
-from dbt.include import GLOBAL_DBT_MODULES_PATH
-from dbt.node_types import NodeType
 import dbt.ui.printer
 import dbt.utils
+import dbt.compilation
 
 from dbt.task.base_task import BaseTask
 
@@ -92,42 +92,22 @@ def unflatten(columns):
 
 
 class GenerateTask(BaseTask):
-    def get_all_projects(self):
-        root_project = self.project.cfg
-        all_projects = {root_project.get('name'): root_project}
-
-        # we only need to load the global deps. We haven't compiled, so our
-        # project['module-path'] does not exist.
-        dependency_projects = dbt.utils.dependencies_for_path(
-            self.project, GLOBAL_DBT_MODULES_PATH
-        )
-
-        for project in dependency_projects:
-            name = project.cfg.get('name', 'unknown')
-            all_projects[name] = project.cfg
-
-        if dbt.flags.STRICT_MODE:
-            dbt.contracts.project.ProjectList(**all_projects)
-
-        return all_projects
+    def _get_manifest(self, project):
+        compiler = dbt.compilation.Compiler(project)
+        compiler.initialize()
 
-    def _get_manifest(self):
-        # TODO: I'd like to do this better. We can't use
-        # utils.dependency_projects because it assumes you have compiled your
-        # project (I think?) - it assumes that you have an existing and
-        # populated project['modules-path'], but 'catalog generate' shouldn't
-        # require that. It might be better to suppress the exception in
-        # dependency_projects if that's reasonable, or make it a flag.
-        root_project = self.project.cfg
-        all_projects = self.get_all_projects()
+        root_project = project.cfg
+        all_projects = compiler.get_all_projects()
 
         manifest = dbt.loader.GraphLoader.load_all(root_project, all_projects)
         return manifest
 
     def run(self):
-        manifest = self._get_manifest()
+        manifest = self._get_manifest(self.project)
         profile = self.project.run_environment()
         adapter = get_adapter(profile)
 
+        dbt.ui.printer.print_timestamped_line("Building catalog")
         results = adapter.get_catalog(profile, self.project.cfg, manifest)
+
         results = [
diff --git a/dbt/utils.py b/dbt/utils.py
index 311132480d8..c35115e0433 100644
--- a/dbt/utils.py
+++ b/dbt/utils.py
@@ -149,12 +149,17 @@ def find_in_subgraph_by_name(subgraph, target_name, target_package, nodetype):
 
 
 MACRO_PREFIX = 'dbt_macro__'
+OPERATION_PREFIX = 'dbt_operation__'
 
 
 def get_dbt_macro_name(name):
     return '{}{}'.format(MACRO_PREFIX, name)
 
 
+def get_dbt_operation_name(name):
+    return '{}{}'.format(OPERATION_PREFIX, name)
+
+
 def get_materialization_macro_name(materialization_name, adapter_type=None,
                                    with_prefix=True):
     if adapter_type is None:
@@ -193,7 +198,7 @@ def get_materialization_macro(flat_graph, materialization_name,
 
 def get_operation_macro_name(operation_name, with_prefix=True):
     if with_prefix:
-        return get_dbt_macro_name(operation_name)
+        return get_dbt_operation_name(operation_name)
     else:
         return operation_name
 
diff --git a/test/integration/029_docs_generate_tests/test_docs_generate.py b/test/integration/029_docs_generate_tests/test_docs_generate.py
index 4ee21304348..25bb443283b 100644
--- a/test/integration/029_docs_generate_tests/test_docs_generate.py
+++ b/test/integration/029_docs_generate_tests/test_docs_generate.py
@@ -29,6 +29,7 @@ def project_config(self):
 
     @attr(type='postgres')
     def test_simple_generate(self):
+        self.run_dbt(["deps"])
         self.run_dbt(["docs", "generate"])
         self.assertTrue(os.path.exists('./target/catalog.json'))
 
diff --git a/test/unit/test_parser.py b/test/unit/test_parser.py
index b6e31f82be3..0f1a9090f76 100644
--- a/test/unit/test_parser.py
+++ b/test/unit/test_parser.py
@@ -6,6 +6,7 @@
 import dbt.parser
 
 from dbt.node_types import NodeType
+from dbt.contracts.graph.parsed import ParsedManifest, ParsedNode, ParsedMacro
 
 def get_os_path(unix_path):
     return os.path.normpath(unix_path)
@@ -680,8 +681,14 @@ def test__process_refs__packages(self):
             }
         }
 
+        manifest = ParsedManifest(
+            nodes={k: ParsedNode(**v) for (k,v) in graph['nodes'].items()},
+            macros={k: ParsedMacro(**v) for (k,v) in graph['macros'].items()},
+        )
+
+        processed_manifest = dbt.parser.process_refs(manifest, 'root')
         self.assertEquals(
-            dbt.parser.process_refs(graph, 'root'),
+            processed_manifest.to_flat_graph(),
             {
                 'macros': {},
                 'nodes': {
@@ -703,7 +710,8 @@ def test__process_refs__packages(self):
                         'path': 'events.sql',
                         'original_file_path': 'events.sql',
                         'root_path': get_os_path('/usr/src/app'),
-                        'raw_sql': 'does not matter'
+                        'raw_sql': 'does not matter',
+                        'agate_table': None,
                     },
                     'model.root.events': {
                         'name': 'events',
@@ -723,7 +731,8 @@ def test__process_refs__packages(self):
                         'path': 'events.sql',
                         'original_file_path': 'events.sql',
                         'root_path': get_os_path('/usr/src/app'),
-                        'raw_sql': 'does not matter'
+                        'raw_sql': 'does not matter',
+                        'agate_table': None,
                     },
                     'model.root.dep': {
                         'name': 'dep',
@@ -743,7 +752,8 @@ def test__process_refs__packages(self):
                         'path': 'multi.sql',
                         'original_file_path': 'multi.sql',
                         'root_path': get_os_path('/usr/src/app'),
-                        'raw_sql': 'does not matter'
+                        'raw_sql': 'does not matter',
+                        'agate_table': None,
                     }
                 }
             }
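
Note (not part of the diff): the mechanism tying these changes together is that macros and operations compiled from macro files now carry distinct Jinja attribute prefixes (MACRO_PREFIX = 'dbt_macro__' and the new OPERATION_PREFIX = 'dbt_operation__'), and both parse_macro_file and the macro lookup in dbt/clients/jinja.py dispatch on that prefix. A minimal standalone sketch of that dispatch follows; only the two prefix constants come from dbt/utils.py, while the classify() helper and the sample keys are hypothetical illustrations:

    # Standalone illustration of the prefix dispatch; classify() is hypothetical.
    MACRO_PREFIX = 'dbt_macro__'
    OPERATION_PREFIX = 'dbt_operation__'


    def classify(key):
        # Map a rendered template attribute name to (node_type, bare_name),
        # or None when the attribute is neither a dbt macro nor an operation.
        if key.startswith(MACRO_PREFIX):
            return ('macro', key[len(MACRO_PREFIX):])
        if key.startswith(OPERATION_PREFIX):
            return ('operation', key[len(OPERATION_PREFIX):])
        return None


    assert classify('dbt_macro__my_macro') == ('macro', 'my_macro')
    assert classify('dbt_operation__build_catalog') == ('operation', 'build_catalog')
    assert classify('plain_attribute') is None

In the parser itself, items whose derived node type does not match the requested resource_type are skipped, which is what keeps operations out of ordinary macro parsing and vice versa.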