diff --git a/.changes/unreleased/Features-20230117-083848.yaml b/.changes/unreleased/Features-20230117-083848.yaml new file mode 100644 index 0000000..2a4672c --- /dev/null +++ b/.changes/unreleased/Features-20230117-083848.yaml @@ -0,0 +1,10 @@ +kind: Features +body: With this PR, dbt-server users/clients can pass a project path directly through + /parse rather than sending a dictionary of file contents to /push. If a user does + this, they can then call other endpoints such as /async/dbt and /compile without + a state_id, and the server will default to using that project path. +time: 2023-01-17T08:38:48.019196-06:00 +custom: + Author: racheldaniel + Issue: "155" + PR: "154" diff --git a/.changes/unreleased/Features-20230130-160207.yaml b/.changes/unreleased/Features-20230130-160207.yaml new file mode 100644 index 0000000..fa85418 --- /dev/null +++ b/.changes/unreleased/Features-20230130-160207.yaml @@ -0,0 +1,9 @@ +kind: Features +body: This PR adds a new synchronous endpoint, which will block and return command + results rather than return a task_id. These tasks are not added to the db, and do + not output logs +time: 2023-01-30T16:02:07.497161-06:00 +custom: + Author: racheldaniel + Issue: "162" + PR: "161" diff --git a/.changes/unreleased/Features-20230206-120426.yaml b/.changes/unreleased/Features-20230206-120426.yaml new file mode 100644 index 0000000..f36a1a4 --- /dev/null +++ b/.changes/unreleased/Features-20230206-120426.yaml @@ -0,0 +1,7 @@ +kind: Features +body: Add new task status callback functionality to the async dbt endpoint +time: 2023-02-06T12:04:26.954999-05:00 +custom: + Author: jp-dbt + Issue: "165" + PR: "164" diff --git a/.changes/unreleased/Features-20230221-183401.yaml b/.changes/unreleased/Features-20230221-183401.yaml new file mode 100644 index 0000000..1f4cd30 --- /dev/null +++ b/.changes/unreleased/Features-20230221-183401.yaml @@ -0,0 +1,7 @@ +kind: Features +body: Add smoke test. 
+time: 2023-02-21T18:34:01.400318-08:00 +custom: + Author: dichenqiandbt + Issue: "727" + PR: "170" diff --git a/.changes/unreleased/Under the Hood-20221221-112702.yaml b/.changes/unreleased/Under the Hood-20221221-112702.yaml new file mode 100644 index 0000000..fad591a --- /dev/null +++ b/.changes/unreleased/Under the Hood-20221221-112702.yaml @@ -0,0 +1,8 @@ +kind: Under the Hood +body: Upgrade FastAPI version in requirements.txt and add httpx to dev-requirements.txt + to resolve error handling issue with underlying FastAPI dependency +time: 2022-12-21T11:27:02.990803-08:00 +custom: + Author: jenniferjsmmiller + Issue: "599" + PR: "149" diff --git a/.changes/unreleased/Under the Hood-20230222-124703.yaml b/.changes/unreleased/Under the Hood-20230222-124703.yaml new file mode 100644 index 0000000..a5dc051 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20230222-124703.yaml @@ -0,0 +1,8 @@ +kind: Under the Hood +body: Update github actions to test by dbt branch and deploy different images based + on branch (0.1.latest) +time: 2023-02-22T12:47:03.781432-06:00 +custom: + Author: racheldaniel + Issue: "172" + PR: "171" diff --git a/.changes/unreleased/Under the Hood-20230227-113905.yaml b/.changes/unreleased/Under the Hood-20230227-113905.yaml new file mode 100644 index 0000000..849447f --- /dev/null +++ b/.changes/unreleased/Under the Hood-20230227-113905.yaml @@ -0,0 +1,7 @@ +kind: Under the Hood +body: Only run tests for github actions on appropriate branch +time: 2023-02-27T11:39:05.03617-06:00 +custom: + Author: racheldaniel + Issue: "182" + PR: "181" diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 3929d24..71c0b53 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -35,6 +35,13 @@ jobs: name: test code runs-on: ubuntu-latest timeout-minutes: 3 + strategy: + matrix: + dbt-core: + - version: "1.5.0-pre" + package: "dbt-core~=1.5.0b1" + prerelease: true + steps: - name: checkout repo uses: actions/checkout@v3 @@ -43,13 +50,19 @@ jobs: uses: actions/setup-python@v4 with: python-version: "3.8" - - - name: run tests + - name: run tests - releases run: | pip install -r requirements.txt -r dev-requirements.txt - pip install dbt-core dbt-postgres + pip install ${{ (matrix.dbt-core.prerelease && '--pre') || '' }} ${{ matrix.dbt-core.package }} dbt-postgres dbt-snowflake + pytest + + - name: run tests - head + run: | + pip install -r requirements.txt -r dev-requirements.txt + pip install "https://github.com/dbt-labs/dbt-core/archive/HEAD.tar.gz#egg=dbt-core&subdirectory=core" + pip install "https://github.com/dbt-labs/dbt-core/archive/HEAD.tar.gz#egg=dbt-postgres&subdirectory=plugins/postgres" + pip install "https://github.com/dbt-labs/dbt-snowflake/archive/HEAD.tar.gz#egg=dbt-snowflake" pytest - build-push: name: build and push dbt server images runs-on: ubuntu-latest @@ -57,29 +70,9 @@ jobs: fail-fast: false matrix: dbt-core: - - version: "1.0.0" - package: "dbt-core~=1.0.0" - - version: "1.0.1" - package: "dbt-core~=1.0.1" - - version: "1.1.0-pre" - package: "dbt-core~=1.1.0b1" - - version: "1.1.0-latest" - package: "dbt-core~=1.1.1" - - version: "1.2.0-pre" - package: "dbt-core~=1.2.0b1" - prerelease: true - - version: "1.2.0-latest" - package: "dbt-core~=1.2.0" - - version: "1.3.0-pre" - package: "dbt-core~=1.3.0b1" - prerelease: true - - version: "1.3.0-latest" - package: "dbt-core~=1.3.0" - - version: "1.4.0-pre" - package: "dbt-core~=1.4.0b1" + - version: "1.5.0-pre" + package: "dbt-core~=1.5.0b1" prerelease: true - - version: 
"1.4.0-latest" - package: "dbt-core~=1.4.0" dbt-database-adapter: - name: snowflake package: dbt-snowflake diff --git a/.gitignore b/.gitignore index a448976..ebce7e9 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,5 @@ sql_app.db /env /venv dbt-core-server-exploration/ +dbt.log +.DS_Store diff --git a/dbt_server/crud.py b/dbt_server/crud.py index 7dad04d..3f27c31 100644 --- a/dbt_server/crud.py +++ b/dbt_server/crud.py @@ -24,26 +24,15 @@ def create_task(db: Session, task: schemas.Task): return db_task -def set_task_running(db: Session, task: schemas.Task): +def set_task_state( + db: Session, task: schemas.Task, state: models.TaskState, error: str +): db_task = get_task(db, task.task_id) - db_task.state = models.TaskState.RUNNING - db.commit() - db.refresh(db_task) - return db_task - - -def set_task_done(db: Session, task: schemas.Task): - db_task = get_task(db, task.task_id) - db_task.state = models.TaskState.FINISHED - db.commit() - db.refresh(db_task) - return db_task + db_task.state = state + if error: + db_task.error = error -def set_task_errored(db: Session, task: schemas.Task, error: str): - db_task = get_task(db, task.task_id) - db_task.state = models.TaskState.ERROR - db_task.error = error db.commit() db.refresh(db_task) return db_task diff --git a/dbt_server/database.py b/dbt_server/database.py index b5f4d5e..cf9b877 100644 --- a/dbt_server/database.py +++ b/dbt_server/database.py @@ -1,8 +1,10 @@ from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker +from dbt_server.services.filesystem_service import get_db_path -SQLALCHEMY_DATABASE_URL = "sqlite:///./sql_app.db" + +SQLALCHEMY_DATABASE_URL = f"sqlite:///{get_db_path()}" engine = create_engine( SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False} diff --git a/dbt_server/helpers.py b/dbt_server/helpers.py index c281630..3449ece 100644 --- a/dbt_server/helpers.py +++ b/dbt_server/helpers.py @@ -1,10 +1,5 @@ import os from dbt_server.exceptions import InternalException -from pydantic import BaseModel - - -class Args(BaseModel): - profile: str = None def extract_compiled_code_from_node(result_node_dict): @@ -23,13 +18,12 @@ def extract_compiled_code_from_node(result_node_dict): return compiled_code -def set_profile_name(args=None): - # If no profile name is passed in args, we will attempt to set it from env vars - # If no profile is set, dbt will default to reading from dbt_project.yml +def get_profile_name(args=None): + # If no profile name is passed in args, we will attempt to get it from env vars + # If profile is None, dbt will default to reading from dbt_project.yml if args and hasattr(args, "profile") and args.profile: - return args - if os.getenv("DBT_PROFILE_NAME"): - if args is None: - args = Args() - args.profile = os.getenv("DBT_PROFILE_NAME") - return args + return args.profile + env_profile_name = os.getenv("DBT_PROFILE_NAME") + if env_profile_name: + return env_profile_name + return None diff --git a/dbt_server/logging.py b/dbt_server/logging.py index d37fc1b..4ad11bb 100644 --- a/dbt_server/logging.py +++ b/dbt_server/logging.py @@ -5,20 +5,24 @@ from datetime import datetime from typing import Optional -try: - from dbt.events.functions import STDOUT_LOG, FILE_LOG -except (ModuleNotFoundError, ImportError): - STDOUT_LOG = None - FILE_LOG = None - +from dbt.events.eventmgr import EventLevel +from dbt.events.base_types import BaseEvent from pythonjsonlogger import jsonlogger -from dbt_server.models import TaskState +from 
dbt_server.models import TaskState ACCOUNT_ID = os.environ.get("ACCOUNT_ID") ENVIRONMENT_ID = os.environ.get("ENVIRONMENT_ID") WORKSPACE_ID = os.environ.get("WORKSPACE_ID") +dbt_event_to_python_root_log = { + EventLevel.DEBUG: logging.root.debug, + EventLevel.TEST: logging.root.debug, + EventLevel.INFO: logging.root.info, + EventLevel.WARN: logging.root.warning, + EventLevel.ERROR: logging.root.error, +} + class CustomJsonFormatter(jsonlogger.JsonFormatter): def add_fields(self, log_record, record, message_dict): @@ -37,9 +41,9 @@ def add_fields(self, log_record, record, message_dict): log_record["workspaceID"] = WORKSPACE_ID -# setup json logging +# set up json logging for stdout and datadog logger = logging.getLogger() -logger.setLevel(logging.INFO) +logger.setLevel(logging.DEBUG) stdout = logging.StreamHandler() if os.environ.get("APPLICATION_ENVIRONMENT") in ("dev", None): formatter = logging.Formatter( @@ -55,16 +59,11 @@ def add_fields(self, log_record, record, message_dict): ) stdout.setFormatter(formatter) logger.addHandler(stdout) -dbt_server_logger = logging.getLogger("dbt-server") -dbt_server_logger.setLevel(logging.DEBUG) -GLOBAL_LOGGER = dbt_server_logger -# remove handlers from these loggers, so -# that they propagate up to the root logger -# for json formatting -if STDOUT_LOG and FILE_LOG: - STDOUT_LOG.handlers = [] - FILE_LOG.handlers = [] +# Use standard python logger for all dbt-server logs-- these will be sent to +# stdout but will not be written to task log files +DBT_SERVER_LOGGER = logging.getLogger("dbt-server") +DBT_SERVER_LOGGER.setLevel(logging.DEBUG) # make sure uvicorn is deferring to the root # logger to format logs @@ -91,6 +90,21 @@ def configure_uvicorn_access_log(): ual.handlers = [] +# Push event messages to root python logger for formatting +def log_event_to_console(event: BaseEvent): + logging_method = dbt_event_to_python_root_log[event.log_level()] + if logging_method == logging.root.debug: + # Skip debug-level core events; only dbt-server's own logs are emitted at debug + return + logging_method(event.info.msg) + + +# TODO: Core is still working on a way to add a callback to the eventlogger using the +# newer format. We will still need to do this for events emitted by core +# EVENT_MANAGER.callbacks.append(log_event_to_console) + + +# TODO: This should be some type of event. We may also choose to send events for all task state updates.
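+# For the eventlogger callback TODO above, a sketch of what registration might look like once core ships the hook (assuming EVENT_MANAGER remains the module-level event manager in dbt.events.functions): +# +# from dbt.events.functions import EVENT_MANAGER +# EVENT_MANAGER.callbacks.append(log_event_to_console)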
@dataclass class ServerLog: state: TaskState diff --git a/dbt_server/models.py b/dbt_server/models.py index 050a25a..51bbe44 100644 --- a/dbt_server/models.py +++ b/dbt_server/models.py @@ -1,6 +1,5 @@ from sqlalchemy import Column, String from enum import Enum - from .database import Base diff --git a/dbt_server/server.py b/dbt_server/server.py index cc4127b..e83eb46 100644 --- a/dbt_server/server.py +++ b/dbt_server/server.py @@ -8,12 +8,18 @@ from dbt_server.database import engine from dbt_server.services import dbt_service, filesystem_service from dbt_server.views import app -from dbt_server.logging import GLOBAL_LOGGER as logger, configure_uvicorn_access_log +from dbt_server.logging import DBT_SERVER_LOGGER as logger, configure_uvicorn_access_log from dbt_server.state import LAST_PARSED from dbt_server.exceptions import StateNotFoundException +from sqlalchemy.exc import OperationalError +# The default checkfirst=True should handle this; however, we still +# see a table exists error from time to time +try: + models.Base.metadata.create_all(bind=engine, checkfirst=True) +except OperationalError as err: + logger.debug(f"Handled error when creating database: {str(err)}") -models.Base.metadata.create_all(bind=engine) dbt_service.disable_tracking() @@ -25,23 +31,25 @@ class ConfigArgs(BaseModel): def startup_cache_initialize(): """ Initialize the manifest cache at startup. The cache will only be populated if there is - a latest-state-id.txt file pointing to a state folder with a pre-compiled manifest. - If any step fails (the latest-state-id.txt file is missing, there's no compiled manifest, - or it can't be deserialized) then continue without caching. + a latest-state-id.txt file or latest-project-path.txt file pointing to a state or project folder + with a pre-compiled manifest. If any step fails (the latest-state-id.txt file is missing, + there's no compiled manifest, or it can't be deserialized) then continue without caching. """ # If an exception is raised in this method, the dbt-server will fail to start up.
# Be careful here :) - latest_state_id = filesystem_service.get_latest_state_id(None) - if latest_state_id is None: - logger.info("[STARTUP] No latest state found - not loading manifest into cache") + latest_state_id = filesystem_service.get_latest_state_id(None) + latest_project_path = filesystem_service.get_latest_project_path() + root_path = filesystem_service.get_root_path(latest_state_id, latest_project_path) + + if root_path is None: + logger.info( + "[STARTUP] No latest state or project found - not loading manifest into cache" + ) return - manifest_path = filesystem_service.get_path(latest_state_id, "manifest.msgpack") - logger.info( - f"[STARTUP] Loading manifest from file system (state_id={latest_state_id})" - ) + manifest_path = filesystem_service.get_path(root_path, "manifest.msgpack") + logger.info(f"[STARTUP] Loading manifest from file system (path={root_path})") try: manifest = dbt_service.deserialize_manifest(manifest_path) @@ -50,7 +58,7 @@ def startup_cache_initialize(): return except (StateNotFoundException): logger.error( - f"[STARTUP] Specified latest state not found - not loading manifest (state_id={latest_state_id})" + f"[STARTUP] Specified root path not found - not loading manifest (path={root_path})" ) return @@ -59,15 +67,14 @@ def startup_cache_initialize(): target_name = None config_args = ConfigArgs(target=target_name) - source_path = filesystem_service.get_root_path(latest_state_id) manifest_size = filesystem_service.get_size(manifest_path) - config = dbt_service.create_dbt_config(source_path, config_args) + config = dbt_service.create_dbt_config(root_path, config_args) LAST_PARSED.set_last_parsed_manifest( - latest_state_id, manifest, manifest_size, config + latest_state_id, latest_project_path, root_path, manifest, manifest_size, config ) - logger.info(f"[STARTUP] Cached manifest in memory (state_id={latest_state_id})") + logger.info(f"[STARTUP] Cached manifest in memory (path={root_path})") @tracer.wrap diff --git a/dbt_server/services/dbt_service.py b/dbt_server/services/dbt_service.py index a6c6916..3a5f027 100644 --- a/dbt_server/services/dbt_service.py +++ b/dbt_server/services/dbt_service.py @@ -2,12 +2,17 @@ import threading import uuid from inspect import getmembers, isfunction +from typing import List, Optional, Any # dbt Core imports import dbt.tracking import dbt.lib import dbt.adapters.factory +import requests +from requests.adapters import HTTPAdapter +from sqlalchemy.orm import Session +from urllib3 import Retry # These exceptions were removed in v1.4 try: @@ -24,7 +29,6 @@ ) from dbt.lib import ( - create_task, get_dbt_config as dbt_get_dbt_config, parse_to_manifest as dbt_parse_to_manifest, execute_sql as dbt_execute_sql, @@ -44,8 +48,12 @@ # dbt Server imports from dbt_server.services import filesystem_service -from dbt_server import tracer -from dbt_server.logging import GLOBAL_LOGGER as logger +from dbt_server.logging import DBT_SERVER_LOGGER as logger +from dbt_server.helpers import get_profile_name +from dbt_server import crud, tracer, models +from dbt.lib import load_profile_project +from dbt.cli.main import dbtRunner + from dbt_server.exceptions import ( InvalidConfigurationException, @@ -53,7 +61,7 @@ dbtCoreCompilationException, UnsupportedQueryException, ) -from dbt_server.helpers import set_profile_name +from pydantic import BaseModel ALLOW_INTROSPECTION = str(os.environ.get("__DBT_ALLOW_INTROSPECTION", "1")).lower() in ( "true", @@ -64,6 +72,10 @@ CONFIG_GLOBAL_LOCK = threading.Lock() +class Args(BaseModel): + profile: str = None + + def inject_dd_trace_into_core_lib(): for attr_name, attr in
getmembers(dbt.lib): @@ -124,7 +136,10 @@ def get_sql_parser(config, manifest): @tracer.wrap def create_dbt_config(project_path, args=None): try: - args = set_profile_name(args) + if not args: + args = Args() + if hasattr(args, "profile"): + args.profile = get_profile_name(args) # This needs a lock to prevent two threads from mutating an adapter concurrently with CONFIG_GLOBAL_LOCK: return dbt_get_dbt_config(project_path, args) @@ -159,9 +174,10 @@ def parse_to_manifest(project_path, args): @tracer.wrap -def serialize_manifest(manifest, serialize_path): +def serialize_manifest(manifest, serialize_path, partial_parse_path): manifest_msgpack = dbt_serialize_manifest(manifest) filesystem_service.write_file(serialize_path, manifest_msgpack) + filesystem_service.write_file(partial_parse_path, manifest_msgpack) @tracer.wrap @@ -170,48 +186,6 @@ def deserialize_manifest(serialize_path): return dbt_deserialize_manifest(manifest_packed) -def dbt_run(project_path, args, manifest): - config = create_dbt_config(project_path, args) - task = create_task("run", args, manifest, config) - return task.run() - - -def dbt_test(project_path, args, manifest): - config = create_dbt_config(project_path, args) - task = create_task("test", args, manifest, config) - return task.run() - - -def dbt_list(project_path, args, manifest): - config = create_dbt_config(project_path, args) - task = create_task("list", args, manifest, config) - return task.run() - - -def dbt_seed(project_path, args, manifest): - config = create_dbt_config(project_path, args) - task = create_task("seed", args, manifest, config) - return task.run() - - -def dbt_build(project_path, args, manifest): - config = create_dbt_config(project_path, args) - task = create_task("build", args, manifest, config) - return task.run() - - -def dbt_run_operation(project_path, args, manifest): - config = create_dbt_config(project_path, args) - task = create_task("run_operation", args, manifest, config) - return task.run() - - -def dbt_snapshot(project_path, args, manifest): - config = create_dbt_config(project_path, args) - task = create_task("snapshot", args, manifest, config) - return task.run() - - @handle_dbt_compilation_error @tracer.wrap def execute_sql(manifest, project_path, sql): @@ -266,3 +240,100 @@ def compile_sql(manifest, config, parser, sql): ) return result.to_dict() + + +def execute_async_command( + command: List, + task_id: str, + root_path: str, + manifest: Any, + db: Session, + state_id: Optional[str] = None, + callback_url: Optional[str] = None, +) -> None: + db_task = crud.get_task(db, task_id) + # For commands, only the log file destination directory is sent to --log-path + log_dir_path = filesystem_service.get_task_artifacts_path(task_id, state_id) + + # Temporary solution for structured log formatting until core adds a cleaner interface + new_command = [] + new_command.append("--log-format") + new_command.append("json") + new_command.append("--log-path") + new_command.append(log_dir_path) + new_command += command + + logger.info( + f"Running dbt ({task_id}) - deserializing manifest found at {root_path}" ) + + # TODO: this is a tmp solution to set profile_dir to global flags + # we should provide a better programmatic interface in core to sort out + # the creation of project and profile + from dbt.flags import set_from_args + from argparse import Namespace + from dbt.cli.resolvers import default_profiles_dir + + if os.getenv("DBT_PROFILES_DIR"): + profiles_dir = os.getenv("DBT_PROFILES_DIR") + else: + profiles_dir = default_profiles_dir() + 
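+ # set_from_args below pushes profiles_dir into dbt's global flags, so the profile and project loading that follows picks it up -- an assumption based on observed core behavior rather than a documented core interface. +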
set_from_args(Namespace(profiles_dir=profiles_dir), None) + + # TODO: If a command contains a --profile flag, how should we access/pass it? + profile_name = get_profile_name() + profile, project = load_profile_project(root_path, profile_name) + + update_task_status(db, db_task, callback_url, models.TaskState.RUNNING, None) + + logger.info(f"Running dbt ({task_id}) - kicking off task") + + # Passing a custom target path is not currently working through the + # core API. As a result, the target is defaulting to a relative `./dbt_packages` + # This chdir action is taken in core for several commands, but not for others, + # which can result in a packages dir creation at the app root. + # Until custom target paths are supported, this will ensure package folders are created + # at the project root. + dbt_server_root = os.getcwd() + try: + os.chdir(root_path) + dbt = dbtRunner(project, profile, manifest) + _, _ = dbt.invoke(new_command) + except Exception as e: + update_task_status(db, db_task, callback_url, models.TaskState.ERROR, str(e)) + raise e + finally: + # Return to dbt server root + os.chdir(dbt_server_root) + + logger.info(f"Running dbt ({task_id}) - done") + + update_task_status(db, db_task, callback_url, models.TaskState.FINISHED, None) + + +@tracer.wrap +def execute_sync_command(command: List, root_path: str, manifest: Any): + str_command = (" ").join(str(param) for param in command) + logger.info( + f"Running dbt ({str_command}) - deserializing manifest found at {root_path}" + ) + + # TODO: If a command contains a --profile flag, how should we access/pass it? + profile_name = get_profile_name() + profile, project = load_profile_project(root_path, profile_name) + + logger.info(f"Running dbt ({str_command})") + + dbt = dbtRunner(project, profile, manifest) + return dbt.invoke(command) + + +def update_task_status(db, db_task, callback_url, status, error): + crud.set_task_state(db, db_task, status, error) + + if callback_url: + retries = Retry(total=5, allowed_methods=frozenset(["POST"])) + + session = requests.Session() + session.mount("http://", HTTPAdapter(max_retries=retries)) + session.post(callback_url, json={"task_id": db_task.task_id, "status": status}) diff --git a/dbt_server/services/filesystem_service.py b/dbt_server/services/filesystem_service.py index 80c0b50..73b6c37 100644 --- a/dbt_server/services/filesystem_service.py +++ b/dbt_server/services/filesystem_service.py @@ -1,26 +1,73 @@ import os import shutil -from dbt_server.logging import GLOBAL_LOGGER as logger from dbt_server.exceptions import StateNotFoundException from dbt_server import tracer +PARTIAL_PARSE_FILE = "partial_parse.msgpack" +DEFAULT_WORKING_DIR = "./working-dir" +DEFAULT_TARGET_DIR = "./target" +DATABASE_FILE_NAME = "sql_app.db" +# This is defined in dbt-core-- dir path is configurable but not filename +DBT_LOG_FILE_NAME = "dbt.log" + + def get_working_dir(): - return os.environ.get("__DBT_WORKING_DIR", "./working-dir") + return os.environ.get("__DBT_WORKING_DIR", DEFAULT_WORKING_DIR) + + +def get_target_path(): + # TODO: The --target-path flag should override this, but doesn't + # appear to be working on invoke. 
When it does, need to revisit + # how partial parsing is working + return os.environ.get("DBT_TARGET_PATH", DEFAULT_TARGET_DIR) -def get_root_path(state_id): +def get_root_path(state_id=None, project_path=None): + if project_path is not None: + return os.path.abspath(project_path) + if state_id is None: + return None working_dir = get_working_dir() return os.path.join(working_dir, f"state-{state_id}") +def get_task_artifacts_path(task_id, state_id=None): + working_dir = get_working_dir() + if state_id is None: + return os.path.join(working_dir, task_id) + return os.path.join(working_dir, f"state-{state_id}", task_id) + + +def get_log_path(task_id, state_id=None): + artifacts_path = get_task_artifacts_path(task_id, state_id) + return os.path.join(artifacts_path, DBT_LOG_FILE_NAME) + + +def get_partial_parse_path(): + target_path = get_target_path() + return os.path.join(target_path, PARTIAL_PARSE_FILE) + + +def get_db_path(): + working_dir = get_working_dir() + path = os.path.join(working_dir, DATABASE_FILE_NAME) + ensure_dir_exists(path) + return path + + def get_latest_state_file_path(): working_dir = get_working_dir() return os.path.join(working_dir, "latest-state-id.txt") -def get_path(state_id, *path_parts): - return os.path.join(get_root_path(state_id), *path_parts) +def get_latest_project_path_file_path(): + working_dir = get_working_dir() + return os.path.join(working_dir, "latest-project-path.txt") + + +def get_path(*path_parts): + return os.path.join(*path_parts) @tracer.wrap @@ -45,6 +92,12 @@ def write_file(path, contents): fh.write(contents) +@tracer.wrap +def copy_file(source_path, dest_path): + ensure_dir_exists(dest_path) + shutil.copyfile(source_path, dest_path) + + @tracer.wrap def read_serialized_manifest(path): try: @@ -55,30 +108,60 @@ def read_serialized_manifest(path): @tracer.wrap -def write_unparsed_manifest_to_disk(state_id, filedict): +def write_unparsed_manifest_to_disk(state_id, previous_state_id, filedict): root_path = get_root_path(state_id) if os.path.exists(root_path): shutil.rmtree(root_path) for filename, file_info in filedict.items(): - path = get_path(state_id, filename) + path = get_path(root_path, filename) write_file(path, file_info.contents) + if previous_state_id and state_id != previous_state_id: + # TODO: The target folder is usually created during command runs and won't exist on push/parse + # of a new state. It can also be named by env var or flag -- hardcoding as this will change + # with the click API work. This bypasses the DBT_TARGET_PATH env var. 
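+ # For illustration, with hypothetical ids previous_state_id="abc" and state_id="def", the copy below takes working-dir/state-abc/target/partial_parse.msgpack and re-creates it at working-dir/state-def/target/partial_parse.msgpack, so parsing the new state can reuse the previous partial parse result.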
+ previous_partial_parse_path = get_path( + get_root_path(previous_state_id), "target", PARTIAL_PARSE_FILE + ) + new_partial_parse_path = get_path(root_path, "target", PARTIAL_PARSE_FILE) + if not os.path.exists(previous_partial_parse_path): + return + copy_file(previous_partial_parse_path, new_partial_parse_path) + @tracer.wrap def get_latest_state_id(state_id): if not state_id: path = os.path.abspath(get_latest_state_file_path()) if not os.path.exists(path): - logger.error("No state id included in request, no previous state id found.") return None with open(path, "r") as latest_path_file: state_id = latest_path_file.read().strip() return state_id +@tracer.wrap +def get_latest_project_path(): + path = os.path.abspath(get_latest_project_path_file_path()) + if not os.path.exists(path): + return None + with open(path, "r") as latest_path_file: + project_path = latest_path_file.read().strip() + return project_path + + @tracer.wrap def update_state_id(state_id): path = os.path.abspath(get_latest_state_file_path()) + ensure_dir_exists(path) with open(path, "w+") as latest_path_file: latest_path_file.write(state_id) + + +@tracer.wrap +def update_project_path(project_path): + path = os.path.abspath(get_latest_project_path_file_path()) + ensure_dir_exists(path) + with open(path, "w+") as latest_path_file: + latest_path_file.write(project_path) diff --git a/dbt_server/services/task_service.py b/dbt_server/services/task_service.py deleted file mode 100644 index cd17896..0000000 --- a/dbt_server/services/task_service.py +++ /dev/null @@ -1,214 +0,0 @@ -import uuid - -try: - from dbt.exceptions import RuntimeException -except (ModuleNotFoundError, ImportError): - from dbt.exceptions import DbtRuntimeError as RuntimeException - - -from dbt_server import crud, schemas -from dbt_server.services import dbt_service, filesystem_service -from dbt_server.logging import GLOBAL_LOGGER as logger, ServerLog -from dbt_server.models import TaskState - -from fastapi import HTTPException -import asyncio -import io - - -def run_task(task_name, task_id, args, db): - db_task = crud.get_task(db, task_id) - - path = filesystem_service.get_root_path(args.state_id) - serialize_path = filesystem_service.get_path(args.state_id, "manifest.msgpack") - # log_path = filesystem_service.get_path(args.state_id, task_id, "logs.stdout") - - # log_manager = LogManager(log_path) - - # TODO: Structured logging doesn't have the concept of custom log lines like this, - # need to follow up with core about a way to do this - logger.info(f"Running dbt ({task_id}) - deserializing manifest {serialize_path}") - - manifest = dbt_service.deserialize_manifest(serialize_path) - - crud.set_task_running(db, db_task) - - logger.info(f"Running dbt ({task_id}) - kicking off task") - - try: - if task_name == "run": - dbt_service.dbt_run(path, args, manifest) - elif task_name == "seed": - dbt_service.dbt_seed(path, args, manifest) - elif task_name == "test": - dbt_service.dbt_test(path, args, manifest) - elif task_name == "build": - dbt_service.dbt_build(path, args, manifest) - elif task_name == "snapshot": - dbt_service.dbt_snapshot(path, args, manifest) - elif task_name == "run_operation": - dbt_service.dbt_run_operation(path, args, manifest) - else: - raise RuntimeException("Not an actual task") - except RuntimeException as e: - crud.set_task_errored(db, db_task, str(e)) - # log_manager.cleanup() - raise e - - logger.info(f"Running dbt ({task_id}) - done") - - # log_manager.cleanup() - - crud.set_task_done(db, db_task) - - -def 
run_async(background_tasks, db, args): - task_id = str(uuid.uuid4()) - log_path = filesystem_service.get_path(args.state_id, task_id, "logs.stdout") - - task = schemas.Task( - task_id=task_id, state=TaskState.PENDING, command="dbt run", log_path=log_path - ) - - db_task = crud.get_task(db, task_id) - if db_task: - raise HTTPException(status_code=400, detail="Task already registered") - - background_tasks.add_task(run_task, "run", task_id, args, db) - return crud.create_task(db, task) - - -def test_async(background_tasks, db, args): - task_id = str(uuid.uuid4()) - log_path = filesystem_service.get_path(args.state_id, task_id, "logs.stdout") - - task = schemas.Task( - task_id=task_id, state=TaskState.PENDING, command="dbt test", log_path=log_path - ) - - db_task = crud.get_task(db, task_id) - if db_task: - raise HTTPException(status_code=400, detail="Task already registered") - - background_tasks.add_task(run_task, "test", task_id, args, db) - return crud.create_task(db, task) - - -def seed_async(background_tasks, db, args): - task_id = str(uuid.uuid4()) - log_path = filesystem_service.get_path(args.state_id, task_id, "logs.stdout") - - task = schemas.Task( - task_id=task_id, state=TaskState.PENDING, command="dbt seed", log_path=log_path - ) - - db_task = crud.get_task(db, task_id) - if db_task: - raise HTTPException(status_code=400, detail="Task already registered") - - background_tasks.add_task(run_task, "seed", task_id, args, db) - return crud.create_task(db, task) - - -def build_async(background_tasks, db, args): - task_id = str(uuid.uuid4()) - log_path = filesystem_service.get_path(args.state_id, task_id, "logs.stdout") - - task = schemas.Task( - task_id=task_id, state=TaskState.PENDING, command="dbt build", log_path=log_path - ) - - db_task = crud.get_task(db, task_id) - if db_task: - raise HTTPException(status_code=400, detail="Task already registered") - - background_tasks.add_task(run_task, "build", task_id, args, db) - return crud.create_task(db, task) - - -def run_operation_async(background_tasks, db, args): - task_id = str(uuid.uuid4()) - log_path = filesystem_service.get_path(args.state_id, task_id, "logs.stdout") - - task = schemas.Task( - task_id=task_id, - state=TaskState.PENDING, - command="dbt run-operation", - log_path=log_path, - ) - - db_task = crud.get_task(db, task_id) - if db_task: - raise HTTPException(status_code=400, detail="Task already registered") - - background_tasks.add_task(run_task, "run_operation", task_id, args, db) - return crud.create_task(db, task) - - -def snapshot_async(background_tasks, db, args): - task_id = str(uuid.uuid4()) - log_path = filesystem_service.get_path(args.state_id, task_id, "logs.stdout") - - task = schemas.Task( - task_id=task_id, - state=TaskState.PENDING, - command="dbt snapshot", - log_path=log_path, - ) - - db_task = crud.get_task(db, task_id) - if db_task: - raise HTTPException(status_code=400, detail="Task already registered") - - background_tasks.add_task(run_task, "snapshot", task_id, args, db) - return crud.create_task(db, task) - - -async def _wait_for_file(path): - for _ in range(10): - try: - return open(path) - except FileNotFoundError: - # TODO : Remove / debugging - logger.info(f"Waiting for file handle @ {path}") - await asyncio.sleep(0.5) - continue - else: - raise RuntimeError("No log file appeared in designated timeout") - - -async def _read_until_empty(fh): - while True: - line = fh.readline() - if len(line) == 0: - break - else: - yield line - - -async def tail_logs_for_path(db, task_id, request, live=True): - 
db_task = crud.get_task(db, task_id) - logger.info(f"Waiting for file @ {db_task.log_path}") - fh = await _wait_for_file(db_task.log_path) - - if live: - fh.seek(0, io.SEEK_END) - try: - while db_task.state not in (TaskState.ERROR, TaskState.FINISHED): - if await request.is_disconnected(): - logger.debug("Log request disconnected") - break - async for log in _read_until_empty(fh): - yield log - await asyncio.sleep(0.5) - db.refresh(db_task) - - # Drain any lines accumulated after end of task - # If we didn't do this, some lines could be omitted - logger.info("Draining logs from file") - async for log in _read_until_empty(fh): - yield log - - finally: - yield ServerLog(state=db_task.state, error=db_task.error).to_json() - fh.close() diff --git a/dbt_server/state.py b/dbt_server/state.py index cf600b3..32792e3 100644 --- a/dbt_server/state.py +++ b/dbt_server/state.py @@ -1,10 +1,11 @@ from dbt_server.services import filesystem_service, dbt_service from dbt_server.exceptions import StateNotFoundException -from dbt_server.logging import GLOBAL_LOGGER as logger +from dbt_server.logging import DBT_SERVER_LOGGER as logger from dbt_server import tracer + from dataclasses import dataclass -from typing import Optional, Any +from typing import Optional, Any, Tuple import threading @@ -14,15 +15,21 @@ @dataclass class CachedManifest: state_id: Optional[str] = None + project_path: Optional[str] = None + root_path: Optional[str] = None manifest: Optional[Any] = None manifest_size: Optional[int] = None config: Optional[Any] = None parser: Optional[Any] = None - def set_last_parsed_manifest(self, state_id, manifest, manifest_size, config): + def set_last_parsed_manifest( + self, state_id, project_path, root_path, manifest, manifest_size, config + ): with MANIFEST_LOCK: self.state_id = state_id + self.project_path = project_path + self.root_path = root_path self.manifest = manifest self.manifest_size = manifest_size self.config = config @@ -42,6 +49,8 @@ def lookup(self, state_id): def reset(self): with MANIFEST_LOCK: self.state_id = None + self.project_path = None + self.root_path = None self.manifest = None self.manifest_size = None self.config = None @@ -53,26 +62,39 @@ def reset(self): class StateController(object): def __init__( - self, state_id, manifest, config, parser, manifest_size, is_manifest_cached + self, + state_id, + project_path, + root_path, + manifest, + config, + parser, + manifest_size, + is_manifest_cached, ): self.state_id = state_id + self.project_path = project_path + self.root_path = root_path self.manifest = manifest self.config = config self.parser = parser self.manifest_size = manifest_size self.is_manifest_cached = is_manifest_cached - self.root_path = filesystem_service.get_root_path(state_id) - self.serialize_path = filesystem_service.get_path(state_id, "manifest.msgpack") + self.serialize_path = filesystem_service.get_path(root_path, "manifest.msgpack") @classmethod @tracer.wrap - def from_parts(cls, state_id, manifest, source_path, manifest_size, args=None): - config = dbt_service.create_dbt_config(source_path, args) + def from_parts( + cls, state_id, project_path, manifest, root_path, manifest_size, args=None + ): + config = dbt_service.create_dbt_config(root_path, args) parser = dbt_service.get_sql_parser(config, manifest) return cls( state_id=state_id, + project_path=project_path, + root_path=root_path, manifest=manifest, config=config, parser=parser, @@ -85,6 +107,8 @@ def from_parts(cls, state_id, manifest, source_path, manifest_size, args=None): def from_cached(cls, 
cached): return cls( state_id=cached.state_id, + project_path=cached.project_path, + root_path=cached.root_path, manifest=cached.manifest, config=cached.config, parser=cached.parser, @@ -94,22 +118,33 @@ def from_cached(cls, cached): @classmethod @tracer.wrap - def parse_from_source(cls, state_id, parse_args=None): + def parse_from_source(cls, parse_args=None): """ Loads a manifest from source code in a specified directory based on the provided state_id. This method will cache the parsed manifest in memory before returning. """ - source_path = filesystem_service.get_root_path(state_id) - logger.info(f"Parsing manifest from filetree (state_id={state_id})") - manifest = dbt_service.parse_to_manifest(source_path, parse_args) + root_path = filesystem_service.get_root_path( + parse_args.state_id, parse_args.project_path + ) + log_details = generate_log_details(parse_args.state_id, parse_args.project_path) + logger.info(f"Parsing manifest from filetree ({log_details})") - logger.info(f"Done parsing from source (state_id={state_id})") - return cls.from_parts(state_id, manifest, source_path, 0, parse_args) + manifest = dbt_service.parse_to_manifest(root_path, parse_args) + + logger.info(f"Done parsing from source {log_details}") + return cls.from_parts( + parse_args.state_id, + parse_args.project_path, + manifest, + root_path, + 0, + parse_args, + ) @classmethod @tracer.wrap - def load_state(cls, state_id, args=None): + def load_state(cls, args=None): """ Loads a manifest given a state_id from an in-memory cache if present, or from disk at a location specified by the state_id argument. The @@ -117,40 +152,74 @@ def load_state(cls, state_id, args=None): state_ids which are None (ie. "latest") or exactly matching the latest parsed state_id will be cache hits. 
""" - cached = LAST_PARSED.lookup(state_id) + cached = LAST_PARSED.lookup(args.state_id) if cached: - logger.info(f"Loading manifest from cache ({cached.state_id})") + logger.info( + f"Loading manifest from cache {generate_log_details(cached.state_id, cached.root_path)}" + ) return cls.from_cached(cached) - # Not in cache - need to go to filesystem to deserialize it - logger.info(f"Manifest cache miss (state_id={state_id})") - state_id = filesystem_service.get_latest_state_id(state_id) + state_id = filesystem_service.get_latest_state_id(args.state_id) + project_path = filesystem_service.get_latest_project_path() + + logger.info( + f"Manifest cache miss ({generate_log_details(state_id, project_path)})" + ) # No state_id provided, and no latest-state-id.txt found - if state_id is None: + if state_id is None and project_path is None: raise StateNotFoundException( - f"Provided state_id does not exist or is not found ({state_id})" + f"Provided state_id does not exist, no previous state_id or project_path found {generate_log_details(state_id, project_path)}" ) + root_path = filesystem_service.get_root_path(state_id, project_path) + # Don't cache on deserialize - that's only for /parse - manifest_path = filesystem_service.get_path(state_id, "manifest.msgpack") + manifest_path = filesystem_service.get_path(root_path, "manifest.msgpack") logger.info(f"Loading manifest from file system ({manifest_path})") manifest = dbt_service.deserialize_manifest(manifest_path) manifest_size = filesystem_service.get_size(manifest_path) - source_path = filesystem_service.get_root_path(state_id) - return cls.from_parts(state_id, manifest, source_path, manifest_size, args) + return cls.from_parts( + state_id, project_path, manifest, root_path, manifest_size, args + ) @tracer.wrap def serialize_manifest(self): logger.info(f"Serializing manifest to file system ({self.serialize_path})") - dbt_service.serialize_manifest(self.manifest, self.serialize_path) + partial_parse_path = filesystem_service.get_partial_parse_path() + dbt_service.serialize_manifest( + self.manifest, self.serialize_path, partial_parse_path + ) self.manifest_size = filesystem_service.get_size(self.serialize_path) @tracer.wrap def update_state_id(self): - logger.info(f"Updating latest state id ({self.state_id})") - filesystem_service.update_state_id(self.state_id) + if self.state_id is not None: + logger.info(f"Updating latest state id ({self.state_id})") + filesystem_service.update_state_id(self.state_id) + + @tracer.wrap + def update_project_path(self): + if self.project_path is not None: + logger.info(f"Updating latest project path ({self.project_path})") + filesystem_service.update_project_path(self.project_path) + + @tracer.wrap + def update_cache(self): + logger.info( + f"Updating cache {generate_log_details(self.state_id, self.project_path)}" + ) + self.update_state_id() + self.update_project_path() + LAST_PARSED.set_last_parsed_manifest( + self.state_id, + self.project_path, + self.root_path, + self.manifest, + self.manifest_size, + self.config, + ) @tracer.wrap def compile_query(self, query): @@ -166,8 +235,25 @@ def execute_query(self, query): return dbt_service.execute_sql(self.manifest, self.root_path, query) @tracer.wrap - def update_cache(self): - logger.info(f"Updating cache (state_id={self.state_id})") - LAST_PARSED.set_last_parsed_manifest( - self.state_id, self.manifest, self.manifest_size, self.config + def execute_async_command(self, task_id, command, db, callback_url) -> None: + return dbt_service.execute_async_command( + command, 
+ task_id, + self.root_path, + self.manifest, + db, + self.state_id, + callback_url, ) + + @tracer.wrap + def execute_sync_command(self, command) -> Tuple: + return dbt_service.execute_sync_command(command, self.root_path, self.manifest) + + +def generate_log_details(state_id, project_path): + if state_id is None and project_path: + return f"(project_path={project_path})" + elif state_id: + return f"(state_id={state_id})" + return "" diff --git a/dbt_server/views.py b/dbt_server/views.py index 6c18a70..7a4ee73 100644 --- a/dbt_server/views.py +++ b/dbt_server/views.py @@ -2,25 +2,21 @@ import dbt.events.functions import os import signal +import uuid -from sse_starlette.sse import EventSourceResponse -from fastapi import FastAPI, BackgroundTasks, Depends, status +from fastapi import FastAPI, BackgroundTasks, Depends, status, HTTPException from fastapi.exceptions import RequestValidationError from starlette.requests import Request -from pydantic import BaseModel, Field +from pydantic import BaseModel from fastapi.encoders import jsonable_encoder from fastapi.responses import JSONResponse -from typing import List, Optional, Union, Dict +from typing import List, Optional, Dict, Any +from dbt_server import crud, schemas, tracer, helpers +from dbt_server.services import filesystem_service +from dbt_server.logging import DBT_SERVER_LOGGER as logger +from dbt_server.models import TaskState from dbt_server.state import StateController -from dbt_server import crud, schemas, helpers -from dbt_server import tracer - -from dbt_server.services import ( - filesystem_service, - dbt_service, - task_service, -) from dbt_server.exceptions import ( InvalidConfigurationException, @@ -28,7 +24,6 @@ InternalException, StateNotFoundException, ) -from dbt_server.logging import GLOBAL_LOGGER as logger # ORM stuff from sqlalchemy.orm import Session @@ -48,13 +43,6 @@ app = FastAPI() -@app.middleware("http") -async def log_request_start(request: Request, call_next): - logger.debug(f"Received request: {request.method} {request.url.path}") - response = await call_next(request) - return response - - class FileInfo(BaseModel): contents: str hash: str @@ -67,124 +55,11 @@ class PushProjectArgs(BaseModel): class ParseArgs(BaseModel): - state_id: str - version_check: Optional[bool] = None - profile: Optional[str] = None - target: Optional[str] = None - - -class BuildArgs(BaseModel): - state_id: str - profile: Optional[str] = None - target: Optional[str] = None - single_threaded: Optional[bool] = None - resource_types: Optional[List[str]] = None - select: Union[None, str, List[str]] = None - threads: Optional[int] = None - exclude: Union[None, str, List[str]] = None - selector_name: Optional[str] = None - state: Optional[str] = None - defer: Optional[bool] = None - fail_fast: Optional[bool] = None - full_refresh: Optional[bool] = None - store_failures: Optional[bool] = None - indirect_selection: str = "" - version_check: Optional[bool] = None - - -class RunArgs(BaseModel): - state_id: str - profile: Optional[str] = None - target: Optional[str] = None - single_threaded: Optional[bool] = None - threads: Optional[int] = None - models: Union[None, str, List[str]] = None - select: Union[None, str, List[str]] = None - exclude: Union[None, str, List[str]] = None - selector_name: Optional[str] = None - state: Optional[str] = None - defer: Optional[bool] = None - fail_fast: Optional[bool] = None - full_refresh: Optional[bool] = None - version_check: Optional[bool] = None - - -class TestArgs(BaseModel): - state_id: str - profile: 
Optional[str] = None - target: Optional[str] = None - single_threaded: Optional[bool] = None - threads: Optional[int] = None - data_type: bool = Field(False, alias="data") - schema_type: bool = Field(False, alias="schema") - models: Union[None, str, List[str]] = None - select: Union[None, str, List[str]] = None - exclude: Union[None, str, List[str]] = None - selector_name: Optional[str] = None - state: Optional[str] = None - defer: Optional[bool] = None - fail_fast: Optional[bool] = None - store_failures: Optional[bool] = None - full_refresh: Optional[bool] = None - indirect_selection: str = "" - version_check: Optional[bool] = None - - -class SeedArgs(BaseModel): - state_id: str - profile: Optional[str] = None - target: Optional[str] = None - single_threaded: Optional[bool] = None - threads: Optional[int] = None - models: Union[None, str, List[str]] = None - select: Union[None, str, List[str]] = None - exclude: Union[None, str, List[str]] = None - selector_name: Optional[str] = None - show: Optional[bool] = None - state: Optional[str] = None - selector_name: Optional[str] = None - full_refresh: Optional[bool] = None + state_id: Optional[str] = None + project_path: Optional[str] = None version_check: Optional[bool] = None - - -class ListArgs(BaseModel): - state_id: str - profile: Optional[str] = None - target: Optional[str] = None - single_threaded: Optional[bool] = None - resource_types: Optional[List[str]] = None - models: Union[None, str, List[str]] = None - exclude: Union[None, str, List[str]] = None - select: Union[None, str, List[str]] = None - selector_name: Optional[str] = None - output: Optional[str] = "name" - output_keys: Union[None, str, List[str]] = None - state: Optional[str] = None - indirect_selection: str = "eager" - - -class SnapshotArgs(BaseModel): - state_id: str - profile: Optional[str] = None - target: Optional[str] = None - single_threaded: Optional[bool] = None - threads: Optional[int] = None - resource_types: Optional[List[str]] = None - models: Union[None, str, List[str]] = None - select: Union[None, str, List[str]] = None - exclude: Union[None, str, List[str]] = None - selector_name: Optional[str] = None - state: Optional[str] = None - defer: Optional[bool] = None - - -class RunOperationArgs(BaseModel): - state_id: str profile: Optional[str] = None target: Optional[str] = None - macro: str - single_threaded: Optional[bool] = None - args: str = Field(default="{}") class SQLConfig(BaseModel): @@ -194,6 +69,15 @@ class SQLConfig(BaseModel): profile: Optional[str] = None +class DbtCommandArgs(BaseModel): + command: List[Any] + state_id: Optional[str] + # TODO: Need to handle this differently + profile: Optional[str] + callback_url: Optional[str] + task_id: Optional[str] + + @app.exception_handler(InvalidConfigurationException) async def configuration_exception_handler( request: Request, exc: InvalidConfigurationException @@ -242,13 +126,14 @@ async def handled_dbt_error(request: Request, exc: InvalidRequestException): if ALLOW_ORCHESTRATED_SHUTDOWN: @app.post("/shutdown") - async def shutdown(): - # raise 2 SIGTERM signals, just to - # make sure this really shuts down. 
+ def shutdown(): # raising a SIGKILL logs some - # warnings about leaked semaphores - signal.raise_signal(signal.SIGTERM) - signal.raise_signal(signal.SIGTERM) + # warnings about leaked semaphores-- + # appears this is a known issue that should be + # solved once we move to python 3.9: + # https://bugs.python.org/issue45209 + signal.raise_signal(signal.SIGKILL) + signal.raise_signal(signal.SIGKILL) return JSONResponse( status_code=200, content={}, @@ -263,6 +148,7 @@ async def ready(): @app.post("/push") def push_unparsed_manifest(args: PushProjectArgs): # Parse / validate it + previous_state_id = filesystem_service.get_latest_state_id(None) state_id = filesystem_service.get_latest_state_id(args.state_id) size_in_files = len(args.body) @@ -275,7 +161,9 @@ def push_unparsed_manifest(args: PushProjectArgs): # Stupid example of reusing an existing manifest if not os.path.exists(path): reuse = False - filesystem_service.write_unparsed_manifest_to_disk(state_id, args.body) + filesystem_service.write_unparsed_manifest_to_disk( + state_id, previous_state_id, args.body + ) # Write messagepack repr to disk # Return a key that the client can use to operate on it? @@ -292,122 +180,88 @@ def push_unparsed_manifest(args: PushProjectArgs): @app.post("/parse") def parse_project(args: ParseArgs): - state = StateController.parse_from_source(args.state_id, args) + state = StateController.parse_from_source(args) state.serialize_manifest() - state.update_state_id() state.update_cache() tracer.add_tags_to_current_span({"manifest_size": state.manifest_size}) return JSONResponse( status_code=200, - content={"parsing": args.state_id, "path": state.serialize_path}, + content={ + "parsing": state.state_id or state.project_path, + "path": state.serialize_path, + }, ) -@app.post("/run") -async def run_models(args: RunArgs): - state_id = filesystem_service.get_latest_state_id(args.state_id) - path = filesystem_service.get_root_path(state_id) - serialize_path = filesystem_service.get_path(state_id, "manifest.msgpack") +@app.post("/async/dbt") +async def dbt_entry_async( + args: DbtCommandArgs, + background_tasks: BackgroundTasks, + db: Session = Depends(crud.get_db), +): + # example body: {"state_id": "123", "command":["run", "--threads", 1]} + state = StateController.load_state(args) - manifest = dbt_service.deserialize_manifest(serialize_path) - results = dbt_service.dbt_run(path, args, manifest) - encoded_results = jsonable_encoder(results.to_dict()) + if args.task_id: + task_id = args.task_id + else: + task_id = str(uuid.uuid4()) + + log_path = filesystem_service.get_log_path(task_id, state.state_id) + + task = schemas.Task( + task_id=task_id, + state=TaskState.PENDING, + command=(" ").join(str(param) for param in args.command), + log_path=log_path, + ) + + db_task = crud.get_task(db, task_id) + if db_task: + raise HTTPException(status_code=400, detail="Task already registered") + + background_tasks.add_task( + state.execute_async_command, task_id, args.command, db, args.callback_url + ) + created_task = crud.create_task(db, task) return JSONResponse( status_code=200, content={ - "parsing": args.state_id, - "path": serialize_path, - "res": encoded_results, + "task_id": created_task.task_id, + "state_id": state.state_id, + "state": created_task.state, + "command": created_task.command, + "log_path": created_task.log_path, }, ) -@app.post("/list") -async def list_resources(args: ListArgs): - state_id = filesystem_service.get_latest_state_id(args.state_id) - path = filesystem_service.get_root_path(state_id) - 
serialize_path = filesystem_service.get_path(state_id, "manifest.msgpack") - - manifest = dbt_service.deserialize_manifest(serialize_path) - results = dbt_service.dbt_list(path, args, manifest) - - encoded_results = jsonable_encoder(results) - +@app.post("/sync/dbt") +async def dbt_entry_sync(args: DbtCommandArgs): + # example body: {"command":["list", "--output", "json"]} + state = StateController.load_state(args) + # TODO: See what if any useful info is returned when there's no success + results, _ = state.execute_sync_command(args.command) + try: + encoded_results = jsonable_encoder(results.to_dict()) + except AttributeError: + encoded_results = jsonable_encoder(results) return JSONResponse( status_code=200, content={ - "parsing": args.state_id, - "path": serialize_path, + "parsing": state.state_id or state.project_path, + "path": state.serialize_path, + "command": (" ").join(str(param) for param in args.command), "res": encoded_results, }, ) -@app.post("/run-async") -async def run_models_async( - args: RunArgs, - background_tasks: BackgroundTasks, - response_model=schemas.Task, - db: Session = Depends(crud.get_db), -): - return task_service.run_async(background_tasks, db, args) - - -@app.post("/test-async") -async def test_async( - args: TestArgs, - background_tasks: BackgroundTasks, - response_model=schemas.Task, - db: Session = Depends(crud.get_db), -): - return task_service.test_async(background_tasks, db, args) - - -@app.post("/seed-async") -async def seed_async( - args: SeedArgs, - background_tasks: BackgroundTasks, - response_model=schemas.Task, - db: Session = Depends(crud.get_db), -): - return task_service.seed_async(background_tasks, db, args) - - -@app.post("/build-async") -async def build_async( - args: BuildArgs, - background_tasks: BackgroundTasks, - response_model=schemas.Task, - db: Session = Depends(crud.get_db), -): - return task_service.build_async(background_tasks, db, args) - - -@app.post("/snapshot-async") -async def snapshot_async( - args: SnapshotArgs, - background_tasks: BackgroundTasks, - response_model=schemas.Task, - db: Session = Depends(crud.get_db), -): - return task_service.snapshot_async(background_tasks, db, args) - - -@app.post("/run-operation-async") -async def run_operation_async( - args: RunOperationArgs, - background_tasks: BackgroundTasks, - response_model=schemas.Task, - db: Session = Depends(crud.get_db), -): - return task_service.run_operation_async(background_tasks, db, args) - - @app.post("/preview") async def preview_sql(sql: SQLConfig): - state = StateController.load_state(sql.state_id, sql) + state = StateController.load_state(sql) result = state.execute_query(sql.sql) compiled_code = helpers.extract_compiled_code_from_node(result) @@ -425,7 +279,7 @@ async def preview_sql(sql: SQLConfig): @app.post("/compile") def compile_sql(sql: SQLConfig): - state = StateController.load_state(sql.state_id, sql) + state = StateController.load_state(sql) result = state.compile_query(sql.sql) compiled_code = helpers.extract_compiled_code_from_node(result) @@ -454,15 +308,10 @@ def get_manifest_metadata(state): } -class Task(BaseModel): - task_id: str - - -@app.get("/stream-logs/{task_id}") -async def log_endpoint( +@app.get("/status/{task_id}") +def get_task_status( task_id: str, - request: Request, db: Session = Depends(crud.get_db), ): - event_generator = task_service.tail_logs_for_path(db, task_id, request) - return EventSourceResponse(event_generator, ping=2) + task = crud.get_task(db, task_id) + return JSONResponse(status_code=200, content={"status": 
task.state}) diff --git a/dev-requirements.txt b/dev-requirements.txt index 1a323be..23b9273 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,5 +1,7 @@ +absl-py==1.4.0 black==22.6.0 flake8==5.0.0 +httpx==0.23.0 ipdb==0.13.9 pytest==7.1.2 pre-commit==2.20.0 diff --git a/requirements.txt b/requirements.txt index c1d4f33..6f2bb15 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,10 @@ SQLAlchemy==1.4.23 diff-match-patch==20200713 -fastapi==0.68.1 +fastapi==0.88.0 python-json-logger==2.0.4 sse-starlette==0.9.0 gunicorn==20.1.0 uvicorn[standard]==0.18.3 websockets==10.0 -psutil==5.9.2 \ No newline at end of file +psutil==5.9.2 +requests==2.26.0 diff --git a/tests/e2e/fixtures/__init__.py b/tests/e2e/fixtures/__init__.py index 458884b..ee2cc68 100644 --- a/tests/e2e/fixtures/__init__.py +++ b/tests/e2e/fixtures/__init__.py @@ -4,7 +4,9 @@ this_dir = os.path.dirname(this_file) PROFILES_YML_POSTGRES = os.path.join(this_dir, "profiles", "postgres") +PROFILES_YML_SNOWFLAKE = os.path.join(this_dir, "profiles", "snowflake") class Profiles: Postgres = PROFILES_YML_POSTGRES + Snowflake = PROFILES_YML_SNOWFLAKE diff --git a/tests/e2e/fixtures/jaffle_shop/README.md b/tests/e2e/fixtures/jaffle_shop/README.md new file mode 100644 index 0000000..f1ef940 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/README.md @@ -0,0 +1,7 @@ +# What's this? + +This project is cloned at commit b0b77aac70f490770a1e77c02bb0a2b8771d3203, with some test contents added. + +The unmodified manifest.json is stored under the init_target folder; it was generated right after cloning and can be used for state and defer testing. + +We also add macros, a selector, a model, and a variable for testing purposes. diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/.gitattributes b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/.gitattributes new file mode 100644 index 0000000..89aced6 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/.gitattributes @@ -0,0 +1,2 @@ +*.sql linguist-detectable +*.sql linguist-language=sql diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/.github/workflows/integration_tests.yml b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/.github/workflows/integration_tests.yml new file mode 100644 index 0000000..34caf1f --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/.github/workflows/integration_tests.yml @@ -0,0 +1,36 @@ +name: Integration Tests +on: + # Triggers the workflow on push or pull request events but only for the master branch + pull_request: + branches: [ master ] + +jobs: + ci: + runs-on: ubuntu-latest + env: + DBT_PROFILES_DIR: .
# Use integration_tests/profiles.yml + + steps: + - name: Checkout Branch + uses: actions/checkout@v2 + + - name: Install Python + uses: actions/setup-python@v2 + with: + python-version: 3.9.6 + + - name: Install dbt + run: pip install -r requirements.txt + + - name: Run and Test + env: + CI_SNOWFLAKE_DBT_ACCOUNT: ${{ secrets.CI_SNOWFLAKE_DBT_ACCOUNT }} + CI_SNOWFLAKE_DBT_USER: ${{ secrets.CI_SNOWFLAKE_DBT_USER }} + CI_SNOWFLAKE_DBT_PASS: ${{ secrets.CI_SNOWFLAKE_DBT_PASS }} + CI_SNOWFLAKE_DBT_ROLE: ${{ secrets.CI_SNOWFLAKE_DBT_ROLE }} + CI_SNOWFLAKE_DBT_DATABASE: ${{ secrets.CI_SNOWFLAKE_DBT_DATABASE }} + CI_SNOWFLAKE_DBT_WAREHOUSE: ${{ secrets.CI_SNOWFLAKE_DBT_WAREHOUSE }} + run: | + cd integration_tests + dbt deps + dbt test -s test_type:singular diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/.gitignore b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/.gitignore new file mode 100644 index 0000000..58aa1f0 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/.gitignore @@ -0,0 +1,8 @@ + +target/ +dbt_modules/ +logs/ +.env +.user.yml +.vscode +.dbt_env diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/LICENSE b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/LICENSE new file mode 100644 index 0000000..f288702 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. 
+ + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. 
+ + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. 
Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+
+                       END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/README.md b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/README.md
new file mode 100644
index 0000000..a4f8d8e
--- /dev/null
+++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/README.md
@@ -0,0 +1,301 @@
+![buildstatus](https://github.com/tnightengale/dbt-meta-testing/workflows/Integration%20Tests/badge.svg)
+
+# dbt Meta Testing
+This [dbt](https://docs.getdbt.com/docs/introduction) package contains macros to assert test and documentation coverage from
+`dbt_project.yml` configuration settings.
+ +## Table of Contents + - [Install](#install) + - [Configurations](#configurations) + - [Required Tests](#required-tests) + - [Required Docs](#required-docs) + - [Usage](#usage) + - [required_tests (source)](#required_tests-source) + - [required_docs (source)](#required_docs-source) + - [Contributions](#contributions) + - [Testing](#testing) + - [Verified Data Warehouses](#verified-data-warehouses) + +## Install +Include in `packages.yml`: + +```yaml +packages: + - package: tnightengale/dbt_meta_testing + version: 0.3.5 +``` +For latest release, see +https://github.com/tnightengale/dbt-meta-testing/releases. + +## Configurations +This package features two meta configs that can be applied to a dbt project: + +1. `+required_tests` +2. `+required_docs` + +Read the dbt documentation +[here](https://docs.getdbt.com/reference/model-configs) to learn more about +model configurations in dbt. + +### Required Tests +To require test coverage, define the `+required_tests` configuration on a model +path in `dbt_project.yml`: +```yaml +# dbt_project.yml +... +models: + project: + +required_docs: true + marts: + +required_tests: {"unique.*|not_null": 1} + model_2: + +required_tests: + "mocker.*|unique": 1 + "mock_schema_test": 1 + ".*data_test": 1 +``` + +The `+required_tests` config must be `None` or a `dict` with `str` keys and `int` +values. YAML dictionaries are accepted. + +All the regular +dbt configuration hierarchy rules apply. For example, individual model configs +will override configs from the `dbt_project.yml`: +```sql +# /models/marts/core/your_model.sql + +-- This overrides the config in dbt_project.yml, and this model will not require tests +{{ config(required_tests=None) }} + +SELECT +... +``` +> **_New in Version 0.3.3_** + +The keys of the config are evaluated against both data and schema tests +(including any custom tests) using the +[re.fullmatch](https://docs.python.org/3/library/re.html#re.fullmatch) function. + +Therefore, any test restriction which can be expressed in regex can be +evaluated. + +For example: +```yaml +# dbt_project.yml +... +models: + project: + +required_docs: true + # The following configuration on the `marts` model path requires + # each model in that path to have at least one test that either: + # + # 1. starts with "unique" (note the ".*" regex suffix) OR (note the "|" regex) + # 2. is an exact match for the "not_null" test. + + marts: + +required_tests: {"unique.*|not_null": 1} +``` + +Schema tests are matched against their common names, (eg. `not_null`, +`accepted_values`). + +Data tests are matched against their macro name. + +Custom schema tests are matched against their name, eg. `mock_schema_test`: + +```yaml +# models/schema.yml +... 
+  - name: model_2
+    description: ""
+    tests:
+      - equal_rowcount:
+          compare_model: ref('model_1')
+    columns:
+      - name: id
+        description: "The primary key for this table"
+        tests:
+          - unique
+          - not_null
+          - mock_schema_test
+```
+
+Models that do not meet their configured test minimums will be listed in the
+error raised when validated via a `run-operation`:
+```
+usr@home dbt-meta-testing $ dbt run-operation required_tests
+Running with dbt=0.20.0
+Encountered an error while running operation: Compilation Error in macro required_tests (macros/required_tests.sql)
+  Insufficient test coverage from the 'required_tests' config on the following models:
+  Model: 'model_1' Test: 'not_null' Got: 1 Expected: 2
+  Model: 'model_1' Test: 'mock_schema_test' Got: 0 Expected: 1
+
+  > in macro _evaluate_required_tests (macros/utils/required_tests/evaluate_required_tests.sql)
+  > called by macro required_tests (macros/required_tests.sql)
+  > called by macro required_tests (macros/required_tests.sql)
+usr@home dbt-meta-testing $
+```
+
+### Required Docs
+To require documentation coverage, define the `+required_docs` configuration on
+a model path in `dbt_project.yml`:
+```yaml
+# dbt_project.yml
+...
+models:
+  project:
+    +required_docs: true
+```
+The `+required_docs` config must be a `bool`.
+
+It **does not check ephemeral models**: the `adapter.get_columns_in_relation()`
+macro, which the package uses to fetch columns from the data warehouse and
+detect columns without documentation, cannot be run against ephemeral models.
+
+When applied to a non-ephemeral model, this config will ensure three things:
+1. The _model_ has a non-empty description
+2. The _columns_ in the model are specified in the model `.yml`
+3. The _columns_ specified in the model `.yml` have non-empty descriptions
+
+For example, the following configurations:
+```yaml
+# models/schema.yml
+version: 2
+
+models:
+  - name: model_1
+    description: "A starter dbt model"
+    columns:
+      - name: id
+        description: ""
+        tests:
+          - unique
+          - not_null
+
+  - name: model_2
+    description: ""
+    tests:
+      - equal_rowcount:
+          compare_model: ref('model_1')
+    columns:
+      - name: id
+        description: "The primary key for this table"
+        tests:
+          - unique
+          - not_null
+
+```
+
+Where `model_2` has a column `new` which is not defined in the `.yml` above:
+```sql
+-- models/example/model_2.sql
+select
+    *,
+    'new' as new
+from {{ ref('model_1') }}
+where id = 1
+```
+
+And all models in the example path require docs:
+```yaml
+# dbt_project.yml
+...
+models:
+  project:
+    example:
+      +required_docs: true
+```
+
+Would result in the following error when validated via a `run-operation`:
+```
+usr@home dbt-meta-testing $ dbt run-operation required_docs
+Running with dbt=0.20.0
+Encountered an error while running operation: Compilation Error in macro required_docs (macros/required_docs.sql)
+  The following models are missing descriptions:
+   - model_2
+  The following columns are missing from the model yml:
+   - model_2.new
+  The following columns are present in the model yml, but have blank descriptions:
+   - model_1.id
+
+  > in macro _evaluate_required_docs (macros/utils/required_docs/evaluate_required_docs.sql)
+  > called by macro required_docs (macros/required_docs.sql)
+  > called by macro required_docs (macros/required_docs.sql)
+usr@home dbt-meta-testing $
+```
+
+## Usage
+To assert either the `+required_tests` or `+required_docs` configuration, run
+the corresponding macro as a `run-operation` within the dbt CLI.
+
+By default the macro will check all models with the corresponding configuration.
+If any model does not meet the configuration, the `run-operation` will fail
+(non-zero) and display an appropriate error message.
+
+To assert the configuration for only a subset of the configured models (e.g.
+only new models in CI), pass an argument, `models`, to the macro as a space
+delimited string of model names to use.
+
+It's also possible to pass in the result of a `dbt ls -m <selector>`
+command, in order to make use of [dbt node selection
+syntax](https://docs.getdbt.com/reference/node-selection/syntax). Use shell
+substitution in a dictionary representation.
+
+For example, to run only changed models using dbt's Slim CI feature:
+```bash
+dbt run-operation required_tests --args "{'models':'$(dbt list -m state:modified --state <state_directory>)'}"
+```
+
+Alternatively, a space
+delimited string of model names will work as well:
+```bash
+dbt run-operation required_tests --args "{'models':'model1 model2 model3'}"
+```
+
+### required_tests ([source](macros/required_tests.sql))
+Validates that models meet the `+required_tests` configurations applied in
+`dbt_project.yml`. Typically used only as a `run-operation` in a CI pipeline.
+
+Usage:
+```
+dbt run-operation required_tests [--args "{'models': '<models>'}"]
+```
+
+### required_docs ([source](macros/required_docs.sql))
+Validates that models meet the `+required_docs` configurations applied in
+`dbt_project.yml`. Typically used only as a `run-operation` in a CI pipeline.
+
+Usage:
+```
+dbt run-operation required_docs [--args "{'models': '<models>'}"]
+```
+**Note:** Run this command _after_ `dbt run`: only models that already exist in
+the warehouse can be validated for columns that are missing from the model `.yml`.
+By default, column names are assumed to be lower case in the dbt documentation;
+if that is not the case in your project, set the variable
+`convert_column_names_to_lower_case` to `false` in `dbt_project.yml` to compare
+column names in the case they appear.
+
+## Contributions
+Feedback on this project is welcome and encouraged. Please open an issue or
+start a discussion if you would like to request a feature change or contribute
+to this project.
+
+## Testing
+The integration tests for this package are located at
+[./integration_tests/tests/](integration_tests/tests/).
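+The cases exercise, among other things, the `re.fullmatch` rule described in
+[Required Tests](#required-tests). A minimal, illustrative Python sketch of that
+rule (the test names below are assumed for illustration, not taken from the package):
+
+```python
+import re
+
+# Hypothetical test names attached to a model.
+test_names = ["unique", "unique_where", "not_null", "accepted_values"]
+
+# A +required_tests key from dbt_project.yml.
+pattern = "unique.*|not_null"
+
+# re.fullmatch only counts a test when the entire name matches the pattern,
+# so "accepted_values" matches neither branch and is excluded.
+matched = [name for name in test_names if re.fullmatch(pattern, name)]
+print(matched)  # ['unique', 'unique_where', 'not_null']
+```
+
+A single key such as `"unique.*|not_null"` can therefore cover a whole family of
+related tests as well as one exact test name.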
+ +To run the tests locally, ensure you have the correct environment variables set +according to the targets in +[./integration_tests/profiles.yml](integration_tests/profiles.yml) and use: +```bash +cd integration_tests +dbt test --data +``` + +### Verified Data Warehouses +This package has been tested for the following data warehouses: +- Snowflake diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/dbt_project.yml b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/dbt_project.yml new file mode 100644 index 0000000..ba912c0 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/dbt_project.yml @@ -0,0 +1,26 @@ + +# Project name. +name: 'dbt_meta_testing' +version: '0.3.5' +config-version: 2 +require-dbt-version: ">=1.0.0" + +# The "profile" dbt uses for this project. +profile: 'dbt_meta_testing' + +# Configuration paths. +model-paths: ["models"] +analysis-paths: ["analysis"] +test-paths: ["tests"] +seed-paths: ["data"] +macro-paths: ["macros"] +snapshot-paths: ["snapshots"] + +target-path: "target" +clean-targets: + - "target" + - "dbt_modules" + +# Configured for the dbt_meta_testing.logger macro. +vars: + logging_level: INFO diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/.gitignore b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/.gitignore new file mode 100644 index 0000000..dad33a4 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/.gitignore @@ -0,0 +1,4 @@ + +target/ +dbt_modules/ +logs/ diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/dbt_project.yml b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/dbt_project.yml new file mode 100644 index 0000000..428dba8 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/dbt_project.yml @@ -0,0 +1,36 @@ + +# Project Name +name: dbt_meta_testing_integration_tests +version: '1.0.0' + +# This setting configures which "profile" dbt uses for this project. +profile: 'integration_tests' + +config-version: 2 + +model-paths: ["models"] +macro-paths: ["macros"] +test-paths: ["tests"] + +target-path: "target" +clean-targets: ["target", "dbt_modules"] + +models: + dbt_meta_testing_integration_tests: + +required_docs: true + staging: + +required_tests: true + marts: + +required_tests: {"unique": 1} + model_2: + +required_tests: + "mocker.*|unique": 1 + "mock_schema_test": 1 + ".*data_test": 1 + +vars: + running_intergration_tests: true + +dispatch: + - macro_namespace: dbt_meta_testing + search_order: ['dbt_meta_testing'] # enable override diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/equal_rowcount.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/equal_rowcount.sql new file mode 100644 index 0000000..93b6a14 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/equal_rowcount.sql @@ -0,0 +1,34 @@ +{% test equal_rowcount(model, compare_model) %} + +{#-- Needs to be set at parse time, before we return '' below --#} +{{ config(fail_calc = 'coalesce(diff_count, 0)') }} + +{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
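(At parse time `execute` is False, so the early return below yields an empty
test body and no query is ever sent to the warehouse.)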
#} +{%- if not execute -%} + {{ return('') }} +{% endif %} + +with a as ( + + select count(*) as count_a from {{ model }} + +), +b as ( + + select count(*) as count_b from {{ compare_model }} + +), +final as ( + + select + count_a, + count_b, + abs(count_a - count_b) as diff_count + from a + cross join b + +) + +select * from final + +{% endtest %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/integration_tests/evaluate_case.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/integration_tests/evaluate_case.sql new file mode 100644 index 0000000..cc61c8d --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/integration_tests/evaluate_case.sql @@ -0,0 +1,14 @@ +{% macro evaluate_case() %} + + {{ log("Actual Output: " ~ varargs[0], info=true) }} + + {{ log("Expected Output: " ~ varargs[1], info=true) }} + + {% set output_equals_expected = varargs[0] == varargs[1] %} + + select 'integration_test_failed' as errors + {# /* Filter to 0 records if test passes. */ #} + {% if output_equals_expected %} limit 0 {% endif %} + + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/integration_tests/refs_block.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/integration_tests/refs_block.sql new file mode 100644 index 0000000..aa6f7a6 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/integration_tests/refs_block.sql @@ -0,0 +1,7 @@ +{% macro refs_block() %} + + -- depends_on: {{ ref('dbt_meta_testing_integration_tests', 'model_1') }} + -- depends_on: {{ ref('dbt_meta_testing_integration_tests', 'model_2') }} + -- depends_on: {{ ref('dbt_meta_testing_integration_tests', 'model_3') }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/mock_schema_test.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/mock_schema_test.sql new file mode 100644 index 0000000..658e99b --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/macros/mock_schema_test.sql @@ -0,0 +1,6 @@ +{% test mock_schema_test(model, column_name) %} + +SELECT + 1 AS p + +{% endtest %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/marts/model_2.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/marts/model_2.sql new file mode 100644 index 0000000..557c173 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/marts/model_2.sql @@ -0,0 +1,8 @@ + +-- Use the `ref` function to select from other models + +select + *, + 'a' as new +from {{ ref('model_1') }} +where id = 1 diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/marts/model_3.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/marts/model_3.sql new file mode 100644 index 0000000..557c173 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/marts/model_3.sql @@ -0,0 +1,8 @@ + +-- Use the `ref` function to select from other models + +select + *, + 'a' as new +from {{ ref('model_1') }} +where id = 1 diff --git 
a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/marts/model_4.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/marts/model_4.sql new file mode 100644 index 0000000..febebaa --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/marts/model_4.sql @@ -0,0 +1,9 @@ + +-- Use the `ref` function to select from other models + +{{ config(required_tests=None, required_docs=None) }} +select + *, + 'a' as new +from {{ ref('model_1') }} +where id = 1 diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/schema.yml b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/schema.yml new file mode 100644 index 0000000..45d471b --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/schema.yml @@ -0,0 +1,30 @@ + +version: 2 + +models: + # dbt_meta_testing_integration_tests: + # +required_docs: true + # staging: + # +required_tests: {"unique": 1} + - name: model_1 + description: "A starter dbt model" + columns: + - name: id + description: "" + tests: + - unique + - not_null + # marts: + # +required_tests: true + - name: model_2 + description: "" + tests: + - equal_rowcount: + compare_model: ref('model_1') + columns: + - name: id + description: "The primary key for this table" + tests: + - unique + - not_null + - mock_schema_test diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/staging/model_1.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/staging/model_1.sql new file mode 100644 index 0000000..e3aa2ed --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/models/staging/model_1.sql @@ -0,0 +1,25 @@ + +/* + Welcome to your first dbt model! + Did you know that you can also configure models directly within SQL files? + This will override configurations stated in dbt_project.yml + + Try changing "table" to "view" below +*/ + +with source_data as ( + + select 1 as id + union all + select null as id + +) + +select * +from source_data + +/* + Uncomment the line below to remove records with null `id` values +*/ + +-- where id is not null diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/packages.yml b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/packages.yml new file mode 100644 index 0000000..4a6b9c1 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/packages.yml @@ -0,0 +1,2 @@ +packages: + - local: ../ diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/profiles.yml b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/profiles.yml new file mode 100644 index 0000000..7b9e00b --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/profiles.yml @@ -0,0 +1,48 @@ +# HEY! This file is borrowed from the dbt-expectations package. +# You should __NEVER__ check credentials into version control. 
Thanks for reading :) + +config: + send_anonymous_usage_stats: False + use_colors: True + +integration_tests: + target: snowflake + outputs: + # postgres: + # type: postgres + # host: localhost + # user: "{{ env_var('CI_DBT_USER') }}" + # pass: "{{ env_var('CI_DBT_PASS') }}" + # port: "{{ env_var('CI_DBT_PORT') }}" + # dbname: "{{ env_var('CI_DBT_DBNAME') }}" + # schema: dbt_expectations_integration_tests + # threads: 1 + + # redshift: + # type: redshift + # host: "{{ env_var('CI_REDSHIFT_DBT_HOST') }}" + # user: "{{ env_var('CI_REDSHIFT_DBT_USER') }}" + # pass: "{{ env_var('CI_REDSHIFT_DBT_PASS') }}" + # dbname: "{{ env_var('CI_REDSHIFT_DBT_DBNAME') }}" + # port: 5439 + # schema: dbt_expectations_integration_tests + # threads: 1 + + # bigquery: + # type: bigquery + # method: service-account + # keyfile: "{{ env_var('GCLOUD_SERVICE_KEY_PATH') }}" + # project: 'dbt-integration-tests' + # schema: dbt_expectations_integration_tests + # threads: 1 + + snowflake: + type: snowflake + account: "{{ env_var('CI_SNOWFLAKE_DBT_ACCOUNT') }}" + user: "{{ env_var('CI_SNOWFLAKE_DBT_USER') }}" + password: "{{ env_var('CI_SNOWFLAKE_DBT_PASS') }}" + role: "{{ env_var('CI_SNOWFLAKE_DBT_ROLE') }}" + database: "{{ env_var('CI_SNOWFLAKE_DBT_DATABASE') }}" + warehouse: "{{ env_var('CI_SNOWFLAKE_DBT_WAREHOUSE') }}" + schema: dbt_meta_testing_integration_tests + threads: 1 diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/scripts/cases/case_1.yml b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/scripts/cases/case_1.yml new file mode 100644 index 0000000..0f5627b --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/scripts/cases/case_1.yml @@ -0,0 +1,3 @@ +models_missing_descriptions: [model_1, model_2] +models_missing_columns: [[model_3, id], [model_3, new], [model_2, new]] +columns_missing_descriptions: [[model_1, id]] diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/scripts/cases/dbt_project.yml b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/scripts/cases/dbt_project.yml new file mode 100644 index 0000000..cf1c979 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/scripts/cases/dbt_project.yml @@ -0,0 +1,10 @@ + +config-version: 2 + +models: + dbt_meta_testing_integration_tests: + +required_docs: true + staging: + +required_tests: {"unique": 1} + marts: + +required_tests: true diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/case_1.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/case_1.sql new file mode 100644 index 0000000..0e94d9b --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/case_1.sql @@ -0,0 +1,21 @@ +/* +Test case for full required_docs run. 
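+
+The case compares the actual output of dbt_meta_testing.required_docs() with
+the expected error message built by dbt_meta_testing.error_required_docs();
+evaluate_case() then emits a failing row only when the two differ.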
+*/ + +{{ refs_block() }} +{% if execute %} + + {% set missing_model_errors = ["model_3", "model_2"] %} + {% set missing_columns_errors = [["model_3", "id"], ["model_3", "new"], ["model_2", "new"]] %} + {% set missing_description_errors = [["model_1", "id"]] %} + + {% set actual_output = dbt_meta_testing.required_docs() %} + + {% set expected_output = dbt_meta_testing.error_required_docs( + missing_model_errors, + missing_columns_errors, + missing_description_errors) %} + + {{ evaluate_case(actual_output, expected_output) }} + +{% endif %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/case_2.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/case_2.sql new file mode 100644 index 0000000..96a6546 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/case_2.sql @@ -0,0 +1,14 @@ +/* +Test case for full required_tests run. +*/ + +{{ refs_block() }} +{% if execute %} + + {% set actual_output = dbt_meta_testing.required_tests() %} + + {% set expected_output = dbt_meta_testing.errors_invalid_config_tests(true, "model_1") %} + + {{ evaluate_case(actual_output, expected_output) }} + +{% endif %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/case_3.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/case_3.sql new file mode 100644 index 0000000..fc2b2ae --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/case_3.sql @@ -0,0 +1,14 @@ +/* +Test case for full required_tests on model_3. +*/ + +{{ refs_block() }} +{% if execute %} + + {% set actual_output = dbt_meta_testing.required_tests("model_3") %} + + {% set expected_output = dbt_meta_testing.error_required_tests([["model_3", "unique", 0, 1]]) %} + + {{ evaluate_case(actual_output, expected_output) }} + +{% endif %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/mocker_data_test.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/mocker_data_test.sql new file mode 100644 index 0000000..942bddd --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/integration_tests/tests/mocker_data_test.sql @@ -0,0 +1,6 @@ + + +select + * +from {{ ref("model_2") }} +where new != 'a' diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/fetch_configured_models.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/fetch_configured_models.sql new file mode 100644 index 0000000..beaacf6 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/fetch_configured_models.sql @@ -0,0 +1,71 @@ +{% macro fetch_configured_models(meta_config, models=none, resource_type="model") %} + {{ return(adapter.dispatch("fetch_configured_models", "dbt_meta_testing")(meta_config, models, resource_type)) }} +{% endmacro %} + +{% macro default__fetch_configured_models(meta_config, models, resource_type) %} + + {% set configured_models = [] %} + + {{ dbt_meta_testing.logger("var `models` is: " ~ models) }} + + {% for node in graph.nodes.values() | selectattr("resource_type", "equalto", resource_type) %} + + {% if meta_config in node.config.keys() %} + + {% do configured_models.append(node) %} + + {% endif %} + + {% endfor %} + + /* + If arg `models` is provided, filter fetched models to only those + provided, either in a space-delimited string or via `dbt list -m `. + + See documentation here for more details: https://github.com/tnightengale/quality-assurance-dbt. + */
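+    {# Illustrative walk-through (not executed), assuming
+       models = "dbt_meta_testing.example.model_1 model_2":
+           models.split(" ")  ->  ["dbt_meta_testing.example.model_1", "model_2"]
+           "dbt_meta_testing.example.model_1".split(".")[-1]  ->  "model_1"
+       so filtered_models_list == ["model_1", "model_2"], and only configured
+       nodes whose .name appears in that list are kept below. #}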
+ {% if models is not none and resource_type == "model" %} + + {% set filtered_models_list = [] %} + {% set final_models_list = [] %} + {% set models_list = models.split(" ") %} + + {{ dbt_meta_testing.logger("Building `filtered_models_list`:") }} + {% for m in models_list %} + + /* + Assumes a "." delimited string is output from `dbt list`, where the last + segment is the model name, e.g. dbt_meta_testing.example.model_1 + */ + {% if "." in m %} {% set m = m.split(".")[-1] %} {% endif %} + + {% do filtered_models_list.append(m) %} + {{ dbt_meta_testing.logger("Appended to `filtered_models_list`: " ~ m) }} + + {% endfor %} + + {{ dbt_meta_testing.logger("`filtered_models_list` is: " ~ filtered_models_list) }} + {% for m in configured_models %} + + {{ dbt_meta_testing.logger("filtered_models loop: " ~ loop.index ~ " " ~ (m.name in filtered_models_list)) }} + {% if m.name in filtered_models_list %} + + {% do final_models_list.append(m) %} + {{ dbt_meta_testing.logger("m is: " ~ m) }} + + {% endif %} + + {% endfor %} + + {% else %} + + {{ dbt_meta_testing.logger("no `models` filter provided; using all configured models: " ~ configured_models) }} + {% set final_models_list = configured_models %} + + {% endif %} + + {{ dbt_meta_testing.logger("`final_models_list` is: " ~ final_models_list) }} + {{ return(final_models_list) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/logger.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/logger.sql new file mode 100644 index 0000000..263d8d3 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/logger.sql @@ -0,0 +1,29 @@ +{% macro logger(log_message, log_level="DEBUG", format='%H:%M:%S') %} + {{ return(adapter.dispatch("logger", "dbt_meta_testing")(log_message, log_level, format)) }} +{% endmacro %} + +{% macro default__logger(log_message, log_level="DEBUG", format='%H:%M:%S') %} + + {% set log_levels = { + "DEBUG": 0, + "INFO": 1, + "WARNING": 2, + "ERROR": 3, + "CRITICAL": 4 + } %} + + {% set setting_level = var("logging_level", "INFO") %} + + {% if log_level not in log_levels.keys() %} + {% set formatted_keys = log_levels.keys() | list | join(',') %} + {{ + exceptions.raise_compiler_error( + "Invalid logging level. Got '" ~ log_level ~ "'. Expected one of: '" + ~ formatted_keys ~ "'") + }} + {% endif %} + + {% set log_bool = log_levels[log_level] >= log_levels[setting_level] %} + {{ log(modules.datetime.datetime.now() ~ ' ' ~ log_level ~ ':' ~ this ~ ':' ~ log_message, info=log_bool) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/required_docs.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/required_docs.sql new file mode 100644 index 0000000..b358625 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/required_docs.sql @@ -0,0 +1,37 @@ +{% macro required_docs(models=none) %} + {{ return(adapter.dispatch("required_docs", "dbt_meta_testing")(models))}} +{% endmacro %} + +{% macro default__required_docs(models) %} + + -- Start + {% set start_msg = "Checking `required_docs` config..."
%} + {% if not var("running_intergration_tests", false) is true %}{{ log(start_msg, info=true) }}{% endif %} + + -- Fetch models based on config and `models` var + {% set filtered_models = dbt_meta_testing.fetch_configured_models('required_docs', models) %} + + -- Validate configuration + {% set any_error = dbt_meta_testing.validate_required_docs(filtered_models) %} + {% if any_error is not none %} + + {{ return(dbt_meta_testing.format_raise_error(any_error)) }} + + {% endif %} + + -- Evaluate configuration + {% set any_error = dbt_meta_testing.evaluate_required_docs(filtered_models) %} + {% if any_error is not none %} + + {% set result = dbt_meta_testing.format_raise_error(any_error) %} + + {% else %} + + {% set result = "Success: `required_docs` passed." %} + {% if not var("running_intergration_tests", false) is true %}{{ log(result, info=true) }}{% endif %} + + {% endif %} + + {{ return(result) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/required_tests.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/required_tests.sql new file mode 100644 index 0000000..9f54ce1 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/required_tests.sql @@ -0,0 +1,37 @@ +{% macro required_tests(models=none) %} + {{ return(adapter.dispatch("required_tests", "dbt_meta_testing")(models)) }} +{% endmacro %} + +{% macro default__required_tests(models) %} + + -- Start + {% set start_msg = "Checking `required_tests` config..." %} + {% if not var("running_intergration_tests", false) is true %}{{ log(start_msg, info=true) }}{% endif %} + + -- Fetch models based on config and `models` var + {% set filtered_models = dbt_meta_testing.fetch_configured_models('required_tests', models) %} + + -- Validate configuration + {% set any_error = dbt_meta_testing.validate_required_tests(filtered_models) %} + {% if any_error is not none %} + + {{ return(dbt_meta_testing.format_raise_error(any_error)) }} + + {% endif %} + + -- Evaluate configuration + {% set any_error = dbt_meta_testing.evaluate_required_tests(filtered_models) %} + {% if any_error is not none %} + + {% set result = dbt_meta_testing.format_raise_error(any_error) %} + + {% else %} + + {% set result = "Success. `required_tests` passed."
%} + {% if not var("running_intergration_tests", false) is true %}{{ log(result, info=true) }}{% endif %} + + {% endif %} + + {{ return(result) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/_get_meta_tests_namespace.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/_get_meta_tests_namespace.sql new file mode 100644 index 0000000..a343a87 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/_get_meta_tests_namespace.sql @@ -0,0 +1,4 @@ +{% macro _get_meta_test_namespaces() %} + {% set override_namespaces = var('dbt_meta_test_dispatch_list', []) %} + {% do return(override_namespaces + ['dbt_meta_testing']) %} +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_invalid_config_docs.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_invalid_config_docs.sql new file mode 100644 index 0000000..353a668 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_invalid_config_docs.sql @@ -0,0 +1,15 @@ +{% macro error_invalid_config_docs() %} + {{ return(adapter.dispatch("error_invalid_config_docs", "dbt_meta_testing")(varargs))}} +{% endmacro %} + +{% macro default__error_invalid_config_docs(varargs) %} + + {% set error %} + Invalid 'required_docs' configuration. + Expected boolean. Received: '{{ varargs[0] }}' + on model '{{ varargs[1] }}' + {% endset %} + + {{ return(error) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_invalid_config_tests.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_invalid_config_tests.sql new file mode 100644 index 0000000..3cd9a45 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_invalid_config_tests.sql @@ -0,0 +1,15 @@ +{% macro errors_invalid_config_tests() %} + {{ return(adapter.dispatch("errors_invalid_config_tests", "dbt_meta_testing")(varargs))}} +{% endmacro %} + +{% macro default__errors_invalid_config_tests(varargs) %} + + {% set error %} + Invalid 'required_tests' configuration. + Expected dict or None. 
Received: '{{ varargs[0] }}' + on model '{{ varargs[1] }}' + {% endset %} + + {{ return(error) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_required_docs.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_required_docs.sql new file mode 100644 index 0000000..1b7bf53 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_required_docs.sql @@ -0,0 +1,36 @@ +{% macro error_required_docs(missing_model_errors, missing_columns_errors, missing_description_errors) %} + {{ return( + adapter.dispatch("error_required_docs", + "dbt_meta_testing")( + missing_model_errors, + missing_columns_errors, + missing_description_errors + ) + ) }} +{% endmacro %} + +{% macro default__error_required_docs( + missing_model_errors, + missing_columns_errors, + missing_description_errors + ) %} + + {% set all_errors = [] %} + {% if missing_model_errors | length > 0 %} + + {% do all_errors.append("The following models are missing descriptions:") %} + {% do all_errors.append(dbt_meta_testing.format_error_docs(missing_model_errors)) %}{% endif %} + + {% if missing_columns_errors | length > 0 %} + + {% do all_errors.append("The following columns are missing from the model yml:") %} + {% do all_errors.append(dbt_meta_testing.format_error_docs(missing_columns_errors)) %}{% endif %} + + {% if missing_description_errors | length > 0 %} + + {% do all_errors.append("The following columns are missing descriptions:") %} + {% do all_errors.append(dbt_meta_testing.format_error_docs(missing_description_errors)) %}{% endif %} + + {{ return(all_errors | join("\n")) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_required_tests.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_required_tests.sql new file mode 100644 index 0000000..5f93d7f --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/errors/error_required_tests.sql @@ -0,0 +1,12 @@ +{% macro error_required_tests() %} + {{ return(adapter.dispatch("error_required_tests", "dbt_meta_testing")(varargs))}} +{% endmacro %} + +{% macro default__error_required_tests(varargs) %} + + {% set all_errors = ["Insufficient test coverage from the 'required_tests' config on the following models:"] + + dbt_meta_testing.format_error_tests(varargs[0]) %} + + {{ return(all_errors | join('\n')) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/formatters/format_error_docs.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/formatters/format_error_docs.sql new file mode 100644 index 0000000..33a0808 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/formatters/format_error_docs.sql @@ -0,0 +1,33 @@ +{% macro format_error_docs(error_list) %} + {{ return(adapter.dispatch("format_error_docs", "dbt_meta_testing")(error_list))}} +{% endmacro %} + +{% macro default__format_error_docs(error_list) %} + +{# /* +Formats a list of either strings or tuples into a bulleted list for error output in error_required_docs. +*/ #} + + {% set output_list = [] %} + + {% for obj in error_list %} + + {% if obj is string %} + + {% do output_list.append(" - " ~ obj) %} + + {% elif obj is iterable %} + + {% do output_list.append(" - " ~ obj[0] ~ "." 
~ obj[1]) %} + + {% else %} + + {{ exceptions.raise_compiler_error("List elements must be string or tuple.") }} + + {% endif %} + + {% endfor %} + + {{ return(output_list | sort | join("\n")) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/formatters/format_error_tests.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/formatters/format_error_tests.sql new file mode 100644 index 0000000..dfa31c9 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/formatters/format_error_tests.sql @@ -0,0 +1,29 @@ +{% macro format_error_tests(error_list) %} + {{ return(adapter.dispatch("format_error_tests", "dbt_meta_testing")(error_list))}} +{% endmacro %} + +{% macro default__format_error_tests(error_list) %} + +{# /* +Formats a list of tuples into a bulleted list for error output in error_required_tests. +*/ #} + + {% set output_list = [] %} + + {% for obj in error_list %} + + {% if obj is iterable %} + + {% do output_list.append("- Model: '" ~ obj[0] ~ "' Test: '" ~ obj[1] ~ "' Got: " ~ obj[2] ~ " Expected: " ~ obj[3]) %} + + {% else %} + + {{ exceptions.raise_compiler_error("List elements must be ordered tuple of (model, test, required, provided).") }} + + {% endif %} + + {% endfor %} + + {{ return(output_list) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/formatters/format_raise_error.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/formatters/format_raise_error.sql new file mode 100644 index 0000000..5b2c4de --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/formatters/format_raise_error.sql @@ -0,0 +1,17 @@ +{% macro format_raise_error(error_to_raise) %} + {{ return(adapter.dispatch("format_raise_error", "dbt_meta_testing")(error_to_raise))}} +{% endmacro %} + +{% macro default__format_raise_error(error_to_raise) %} + + {% if var("running_intergration_tests", false) is true %} + + {{ return(error_to_raise) }} + + {% else %} + + {{ exceptions.raise_compiler_error(error_to_raise) }} + + {% endif %} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_docs/evaluate_required_docs.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_docs/evaluate_required_docs.sql new file mode 100644 index 0000000..f89f05b --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_docs/evaluate_required_docs.sql @@ -0,0 +1,78 @@ +{% macro evaluate_required_docs(models_to_evaluate) %} + {{ return(adapter.dispatch("evaluate_required_docs", "dbt_meta_testing")(models_to_evaluate))}} +{% endmacro %} + +{% macro default__evaluate_required_docs(models_to_evaluate) %} + + {# /* + Evaluate if each model meets +required_docs config. 
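+
+    Flow: for each model with required_docs=true whose materialization builds a
+    relation, the live column list is fetched from the warehouse via
+    adapter.get_columns_in_relation and compared against the model's yml entry.
+    Illustrative shapes of the three error lists, assuming a model_1 whose id
+    column is documented without text and whose new column is absent from yml:
+
+        missing_model_errors       = ["model_1"]            (empty model description)
+        missing_columns_errors     = [("model_1", "new")]   (column missing from yml)
+        missing_description_errors = [("model_1", "id")]    (empty column description)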
+ */ #} + + {% set missing_model_errors = [] %} + {% set missing_columns_errors = [] %} + {% set missing_description_errors = [] %} + + {% for model in models_to_evaluate %} + + {% if model.config.required_docs == True and model.config.get("materialized", "") not in ("", "ephemeral") %} + + {% set model_columns = adapter.get_columns_in_relation(ref(model.package_name, model.name)) + | map(attribute="column") | list %} + {{ dbt_meta_testing.logger(model_columns) }} + + {% if model.description == "" %} + + {% do missing_model_errors.append(model.name) %} + + {% endif %} + + {% for column in model_columns %} + + {% if var("convert_column_names_to_lower_case", true) %} + {% set column = column | lower %} + {% endif %} + + {% if column in model.columns.keys() %} + + {{ dbt_meta_testing.logger(column ~ " is in " ~ model.columns.keys()) }} + {% if model.columns[column].description == "" %} + + {% do missing_description_errors.append((model.name, column)) %} + + {% endif %} + + {% else %} + + {% do missing_columns_errors.append((model.name, column)) %} + + {% endif %} + + {% endfor %} + + {% endif %} + + {% endfor %} + + {% set errors = missing_model_errors + missing_columns_errors + missing_description_errors %} + {% if errors | length > 0 %} + + {{ dbt_meta_testing.logger(missing_model_errors) }} + {{ dbt_meta_testing.logger(missing_columns_errors) }} + {{ dbt_meta_testing.logger(missing_description_errors) }} + + {% set result = dbt_meta_testing.error_required_docs( + missing_model_errors, + missing_columns_errors, + missing_description_errors + ) + %} + + {% else %} + + {% set result = none %} + + {% endif %} + + {{ return(result) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_docs/validate_required_docs.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_docs/validate_required_docs.sql new file mode 100644 index 0000000..1f88ca1 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_docs/validate_required_docs.sql @@ -0,0 +1,25 @@ +{% macro validate_required_docs(models_to_validate) %} + {{ return(adapter.dispatch("validate_required_docs", "dbt_meta_testing")(models_to_validate))}} +{% endmacro %} + +{% macro default__validate_required_docs(models_to_validate) %} + + {# /* + Validate that all +required_docs configs are bool. + */ #} + + {{ dbt_meta_testing.logger('models to validate are ' ~ models_to_validate) }} + + {% for model in models_to_validate %} + + {% if not model.config.required_docs is boolean %} + + {{ return(dbt_meta_testing.error_invalid_config_docs(model.config.required_docs, model.name)) }} + + {% endif %} + + {% endfor %} + + {{ return(none) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/evaluate_required_tests.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/evaluate_required_tests.sql new file mode 100644 index 0000000..da6a102 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/evaluate_required_tests.sql @@ -0,0 +1,38 @@ +{% macro evaluate_required_tests(models_to_evaluate) %} + {{ return(adapter.dispatch("evaluate_required_tests", "dbt_meta_testing")(models_to_evaluate))}} +{% endmacro %} + +{% macro default__evaluate_required_tests(models_to_evaluate) %} + + {# /* + Evaluate if each model meets +required_tests minimum.
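+
+    Flow: tests_per_model() maps each enabled model's unique_id to the names of
+    the tests attached to it; each key of a model's required_tests dict is then
+    treated as a regex and counted against that list via fullmatch, recording
+    any shortfall as (model, test_key, got, required). Illustrative, assuming a
+    model_3 that requires one "unique" test but only has a not_null test:
+
+        tests_per_model  ->  {"model.my_project.model_3": ["not_null"]}
+        get_regex_match_count(["not_null"], "unique")  ->  0
+        =>  test_errors == [("model_3", "unique", 0, 1)]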
+ */ #} + + {% set tests_per_model = dbt_meta_testing.tests_per_model() %} + {% set test_errors = [] %} + + {% for model in models_to_evaluate %}{% if model.config.required_tests is mapping %} + {% for test_key in model.config.required_tests.keys() %} + + {% set provided_test_list = tests_per_model[model.unique_id] %} + + {% set required_test_count = model.config.required_tests[test_key] %} + {% set matching_test_count = dbt_meta_testing.get_regex_match_count(provided_test_list, test_key) %} + + {% if matching_test_count < required_test_count %} + {% do test_errors.append((model.name, test_key, matching_test_count, required_test_count)) %} + {% endif %} + + {% endfor %}{% endif %} + {% endfor %} + + + {% if test_errors | length > 0 %} + {% set result = dbt_meta_testing.error_required_tests(test_errors) %} + {% else %} + {% set result = none %} + {% endif %} + + {{ return(result) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/get_regex_match_count.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/get_regex_match_count.sql new file mode 100644 index 0000000..acf915c --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/get_regex_match_count.sql @@ -0,0 +1,16 @@ +{% macro get_regex_match_count(list_of_strings, regex_to_check) %} + {{ return(adapter.dispatch("get_regex_match_count", "dbt_meta_testing")(list_of_strings, regex_to_check))}} +{% endmacro %} + +{% macro default__get_regex_match_count(list_of_strings, regex_to_check) %} + + {# Return count of strings in list_of_strings that match regex_to_check #} + {% set matches = [] %} + {% for string in list_of_strings %} + {% set match = modules.re.fullmatch(regex_to_check, string) %} + {% if match %}{% do matches.append(match) %}{% endif %} + {% endfor %} + + {% do return(matches | length) %} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/tests_per_model.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/tests_per_model.sql new file mode 100644 index 0000000..b88b705 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/tests_per_model.sql @@ -0,0 +1,31 @@ + +{% macro tests_per_model() %} + {{ return(adapter.dispatch("tests_per_model", "dbt_meta_testing")())}} +{% endmacro %} + +{% macro default__tests_per_model() %} + + {# /* + Construct a dict of all models and their schema tests in the current project. + */ #} + + {% set enabled_model_names = dbt_meta_testing.fetch_configured_models("enabled", resource_type="model") | map(attribute="unique_id") | list %} + {% set enabled_test_nodes = dbt_meta_testing.fetch_configured_models("enabled", resource_type="test") %} + + -- Create `result` dict with all enabled models unique_id's as keys and empty lists as values + {% set result = {} %} + {% for id in enabled_model_names %}{% do result.update({id: []}) %}{% endfor %} + + {% for test_node in enabled_test_nodes %} + {% for dependent_node in test_node.depends_on.nodes %} + {% if dependent_node.startswith('model.') %} + -- Use common names for schema tests, (e.g. 
"unique") under the "test_metadata" key + {% set test_identifier = test_node.get("test_metadata",{}).get("name") or test_node["name"] %} + {% do result[dependent_node].append(test_identifier) %} + {% endif %} + {% endfor %} + {% endfor %} + + {% do return(result) %} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/validate_required_tests.sql b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/validate_required_tests.sql new file mode 100644 index 0000000..8a43360 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/macros/utils/required_tests/validate_required_tests.sql @@ -0,0 +1,70 @@ +{% macro validate_required_tests(models_to_validate) %} + {{ return(adapter.dispatch("validate_required_tests", "dbt_meta_testing")(models_to_validate))}} +{% endmacro %} + +{% macro default__validate_required_tests(models_to_validate) %} + + {# /* + Validate that all +required_tests configs are either dict or None + and that all keys in a dict are defined tests. + */ #} + + {{ dbt_meta_testing.logger('models to validate are ' ~ models_to_validate) }} + + -- # TO DO: break out into function that asserts against a contract + -- Fetch unique tests from +required_tests config + {% set all_required_tests = [] %} + + {% for model in models_to_validate %} + + {% set config = model.config.required_tests %} + + {{ dbt_meta_testing.logger('config is: ' ~ config) }} + + -- Validate that config is dict or none + {% if config is mapping %} + + {% for k in config.keys() %} + + {% do all_required_tests.append(k) %} + + {% endfor %} + + {% elif config is none %} + + -- Pass + {{ dbt_meta_testing.logger("model '" ~ model.name ~ "' has required_tests=null") }} + + {% else %} + + {{ return(dbt_meta_testing.errors_invalid_config_tests(config, model.name)) }} + + {% endif %} + + {% endfor %} + + + {% set unique_required_tests = all_required_tests | unique | list %} + + {{ dbt_meta_testing.logger('unique_required_tests: ' ~ unique_required_tests) }} + + + -- Fetch unique defined tests from graph + {% set unique_defined_tests = [] %} + + {% for test_name in graph.nodes.values() + | selectattr("resource_type", "equalto", "test") + | selectattr("test_metadata", "defined") + | map(attribute="test_metadata") + | map(attribute="name") + | unique %} + + {{ dbt_meta_testing.logger('test name ' ~ loop.index ~ ' ' ~ test_name) }} + + {% do unique_defined_tests.append(test_name) %} + + {% endfor %} + + {{ return(none) }} + +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/requirements.txt b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/requirements.txt new file mode 100644 index 0000000..ca506cd --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_packages/dbt_meta_testing/requirements.txt @@ -0,0 +1 @@ +dbt-snowflake==1.0.0 diff --git a/tests/e2e/fixtures/jaffle_shop/dbt_project.yml b/tests/e2e/fixtures/jaffle_shop/dbt_project.yml new file mode 100644 index 0000000..48bc7e6 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/dbt_project.yml @@ -0,0 +1,30 @@ +name: 'jaffle_shop' + +config-version: 2 +version: '0.1' + +profile: 'jaffle_shop' + +model-paths: ["models"] +seed-paths: ["seeds"] +test-paths: ["tests"] +analysis-paths: ["analysis"] +macro-paths: ["macros"] + +target-path: "target" +clean-targets: + - "target" + - "dbt_modules" + - "logs" + +require-dbt-version: [">=1.0.0", "<2.0.0"] + +models: + jaffle_shop: + materialized: table + 
staging: + materialized: view + +# Testing purpose. +vars: + test_var: 1 diff --git a/tests/e2e/fixtures/jaffle_shop/init_target/manifest.json b/tests/e2e/fixtures/jaffle_shop/init_target/manifest.json new file mode 100644 index 0000000..fb891c8 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/init_target/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v8.json", "dbt_version": "1.5.0b1", "generated_at": "2023-02-21T21:20:46.342944Z", "invocation_id": "d6ee5b09-1824-425b-8f9c-61a496b9de4a", "env": {}, "project_id": "06e5b98c2db46f8a72cc4f66410e9b3b", "user_id": "c68be00d-31d4-4eb1-8424-e96f3d9d2c4d", "send_anonymous_usage_stats": true, "adapter_type": "snowflake"}, "nodes": {"model.jaffle_shop.customers": {"database": "AD_HOC", "schema": "semantic_layer", "name": "customers", "resource_type": "model", "package_name": "jaffle_shop", "path": "customers.sql", "original_file_path": "models/customers.sql", "unique_id": "model.jaffle_shop.customers", "fqn": ["jaffle_shop", "customers"], "alias": "customers", "checksum": {"name": "sha256", "checksum": "455b90a31f418ae776213ad9932c7cb72d19a5269a8c722bd9f4e44957313ce8"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "constraints_enabled": false, "post-hook": [], "pre-hook": []}, "tags": [], "description": "This table has basic information about a customer, as well as some derived facts based on a customer's orders", "columns": {"customer_id": {"name": "customer_id", "description": "This is a unique identifier for a customer", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "first_name": {"name": "first_name", "description": "Customer's first name. PII.", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "last_name": {"name": "last_name", "description": "Customer's last name. 
PII.", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "first_order": {"name": "first_order", "description": "Date (UTC) of a customer's first order", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "most_recent_order": {"name": "most_recent_order", "description": "Date (UTC) of a customer's most recent order", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "number_of_orders": {"name": "number_of_orders", "description": "Count of the number of orders a customer has placed", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "total_order_amount": {"name": "total_order_amount", "description": "Total value (AUD) of a customer's orders", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "jaffle_shop://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table"}, "created_at": 1677002900.463537, "relation_name": "AD_HOC.semantic_layer.customers", "raw_code": "with customers as (\n\n select * from {{ ref('stg_customers') }}\n\n),\n\norders as (\n\n select * from {{ ref('stg_orders') }}\n\n),\n\npayments as (\n\n select * from {{ ref('stg_payments') }}\n\n),\n\ncustomer_orders as (\n\n select\n customer_id,\n\n min(order_date) as first_order,\n max(order_date) as most_recent_order,\n count(order_id) as number_of_orders\n from orders\n\n group by customer_id\n\n),\n\ncustomer_payments as (\n\n select\n orders.customer_id,\n sum(amount) as total_amount\n\n from payments\n\n left join orders on\n payments.order_id = orders.order_id\n\n group by orders.customer_id\n\n),\n\nfinal as (\n\n select\n customers.customer_id,\n customers.first_name,\n customers.last_name,\n customer_orders.first_order,\n customer_orders.most_recent_order,\n customer_orders.number_of_orders,\n customer_payments.total_amount as customer_lifetime_value\n\n from customers\n\n left join customer_orders\n on customers.customer_id = customer_orders.customer_id\n\n left join customer_payments\n on customers.customer_id = customer_payments.customer_id\n\n)\n\nselect * from final", "language": "sql", "refs": [["stg_customers"], ["stg_orders"], ["stg_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.jaffle_shop.stg_customers", "model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"]}, "compiled_path": null, "constraints_enabled": false}, "model.jaffle_shop.orders": {"database": "AD_HOC", "schema": "semantic_layer", "name": "orders", "resource_type": "model", "package_name": "jaffle_shop", "path": "orders.sql", "original_file_path": "models/orders.sql", "unique_id": "model.jaffle_shop.orders", "fqn": ["jaffle_shop", "orders"], "alias": "orders", "checksum": {"name": "sha256", "checksum": "53950235d8e29690d259e95ee49bda6a5b7911b44c739b738a646dc6014bcfcd"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "constraints_enabled": false, "post-hook": [], "pre-hook": []}, "tags": [], 
"description": "This table has basic information about orders, as well as some derived facts based on payments", "columns": {"order_id": {"name": "order_id", "description": "This is a unique identifier for an order", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "customer_id": {"name": "customer_id", "description": "Foreign key to the customers table", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "order_date": {"name": "order_date", "description": "Date (UTC) that the order was placed", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "status": {"name": "status", "description": "Orders can be one of the following statuses:\n\n| status | description |\n|----------------|------------------------------------------------------------------------------------------------------------------------|\n| placed | The order has been placed but has not yet left the warehouse |\n| shipped | The order has ben shipped to the customer and is currently in transit |\n| completed | The order has been received by the customer |\n| return_pending | The customer has indicated that they would like to return the order, but it has not yet been received at the warehouse |\n| returned | The order has been returned by the customer and received at the warehouse |", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "amount": {"name": "amount", "description": "Total amount (AUD) of the order", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "credit_card_amount": {"name": "credit_card_amount", "description": "Amount of the order (AUD) paid for by credit card", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "coupon_amount": {"name": "coupon_amount", "description": "Amount of the order (AUD) paid for by coupon", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "bank_transfer_amount": {"name": "bank_transfer_amount", "description": "Amount of the order (AUD) paid for by bank transfer", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "gift_card_amount": {"name": "gift_card_amount", "description": "Amount of the order (AUD) paid for by gift card", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "jaffle_shop://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table"}, "created_at": 1677002900.4654431, "relation_name": "AD_HOC.semantic_layer.orders", "raw_code": "{% set payment_methods = ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] %}\n\nwith orders as (\n\n select * from {{ ref('stg_orders') }}\n\n),\n\npayments as (\n\n select * from {{ ref('stg_payments') }}\n\n),\n\norder_payments as (\n\n select\n order_id,\n\n {% for payment_method in payment_methods -%}\n sum(case when payment_method = '{{ payment_method }}' then amount else 0 end) as {{ payment_method }}_amount,\n {% endfor -%}\n\n sum(amount) as total_amount\n\n from payments\n\n group by order_id\n\n),\n\nfinal as (\n\n select\n orders.order_id,\n orders.customer_id,\n orders.order_date,\n orders.status,\n\n {% for 
payment_method in payment_methods -%}\n\n order_payments.{{ payment_method }}_amount,\n\n {% endfor -%}\n\n order_payments.total_amount as amount\n\n from orders\n\n\n left join order_payments\n on orders.order_id = order_payments.order_id\n\n)\n\nselect * from final", "language": "sql", "refs": [["stg_orders"], ["stg_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"]}, "compiled_path": null, "constraints_enabled": false}, "model.jaffle_shop.stg_customers": {"database": "AD_HOC", "schema": "semantic_layer", "name": "stg_customers", "resource_type": "model", "package_name": "jaffle_shop", "path": "staging/stg_customers.sql", "original_file_path": "models/staging/stg_customers.sql", "unique_id": "model.jaffle_shop.stg_customers", "fqn": ["jaffle_shop", "staging", "stg_customers"], "alias": "stg_customers", "checksum": {"name": "sha256", "checksum": "6f18a29204dad1de6dbb0c288144c4990742e0a1e065c3b2a67b5f98334c22ba"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "constraints_enabled": false, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {"customer_id": {"name": "customer_id", "description": "", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "jaffle_shop://models/staging/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view"}, "created_at": 1677002900.483946, "relation_name": "AD_HOC.semantic_layer.stg_customers", "raw_code": "with source as (\n\n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_customers') }}\n\n),\n\nrenamed as (\n\n select\n id as customer_id,\n first_name,\n last_name\n\n from source\n\n)\n\nselect * from renamed", "language": "sql", "refs": [["raw_customers"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.jaffle_shop.raw_customers"]}, "compiled_path": null, "constraints_enabled": false}, "model.jaffle_shop.stg_payments": {"database": "AD_HOC", "schema": "semantic_layer", "name": "stg_payments", "resource_type": "model", "package_name": "jaffle_shop", "path": "staging/stg_payments.sql", "original_file_path": "models/staging/stg_payments.sql", "unique_id": "model.jaffle_shop.stg_payments", "fqn": ["jaffle_shop", "staging", "stg_payments"], "alias": "stg_payments", "checksum": {"name": "sha256", "checksum": "eb899938258d1fba27fca716a7c334119912a2f9601282026097a7b6ce8cfcd2"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "constraints_enabled": false, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {"payment_id": {"name": "payment_id", "description": "", "meta": {}, "data_type": null, "constraints": null, "constraints_check": 
null, "quote": null, "tags": []}, "payment_method": {"name": "payment_method", "description": "", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "jaffle_shop://models/staging/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view"}, "created_at": 1677002900.4853601, "relation_name": "AD_HOC.semantic_layer.stg_payments", "raw_code": "with source as (\n \n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_payments') }}\n\n),\n\nrenamed as (\n\n select\n id as payment_id,\n order_id,\n payment_method,\n\n -- `amount` is currently stored in cents, so we convert it to dollars\n amount / 100 as amount\n\n from source\n\n)\n\nselect * from renamed", "language": "sql", "refs": [["raw_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.jaffle_shop.raw_payments"]}, "compiled_path": null, "constraints_enabled": false}, "model.jaffle_shop.stg_orders": {"database": "AD_HOC", "schema": "semantic_layer", "name": "stg_orders", "resource_type": "model", "package_name": "jaffle_shop", "path": "staging/stg_orders.sql", "original_file_path": "models/staging/stg_orders.sql", "unique_id": "model.jaffle_shop.stg_orders", "fqn": ["jaffle_shop", "staging", "stg_orders"], "alias": "stg_orders", "checksum": {"name": "sha256", "checksum": "afffa9cbc57e5fd2cf5898ebf571d444a62c9d6d7929d8133d30567fb9a2ce97"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "constraints_enabled": false, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {"order_id": {"name": "order_id", "description": "", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}, "status": {"name": "status", "description": "", "meta": {}, "data_type": null, "constraints": null, "constraints_check": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "jaffle_shop://models/staging/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view"}, "created_at": 1677002900.484761, "relation_name": "AD_HOC.semantic_layer.stg_orders", "raw_code": "with source as (\n\n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_orders') }}\n\n),\n\nrenamed as (\n\n select\n id as order_id,\n user_id as customer_id,\n order_date,\n status\n\n from source\n\n)\n\nselect * from renamed", "language": "sql", "refs": [["raw_orders"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.jaffle_shop.raw_orders"]}, "compiled_path": null, "constraints_enabled": false}, "seed.jaffle_shop.raw_customers": {"database": "AD_HOC", "schema": "semantic_layer", "name": "raw_customers", "resource_type": "seed", "package_name": "jaffle_shop", "path": "raw_customers.csv", "original_file_path": "seeds/raw_customers.csv", "unique_id": "seed.jaffle_shop.raw_customers", "fqn": ["jaffle_shop", "raw_customers"], "alias": 
"raw_customers", "checksum": {"name": "sha256", "checksum": "24579b4b26098d43265376f3c50be8b10faf8e8fd95f5508074f10f76a12671d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "constraints_enabled": false, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.45019, "relation_name": "AD_HOC.semantic_layer.raw_customers", "raw_code": "", "root_path": "/Users/dichenqian/Documents/code/dbt-server/tests/e2e/fixtures/jaffle_shop/testing", "depends_on": {"macros": []}}, "seed.jaffle_shop.raw_orders": {"database": "AD_HOC", "schema": "semantic_layer", "name": "raw_orders", "resource_type": "seed", "package_name": "jaffle_shop", "path": "raw_orders.csv", "original_file_path": "seeds/raw_orders.csv", "unique_id": "seed.jaffle_shop.raw_orders", "fqn": ["jaffle_shop", "raw_orders"], "alias": "raw_orders", "checksum": {"name": "sha256", "checksum": "ee6c68d1639ec2b23a4495ec12475e09b8ed4b61e23ab0411ea7ec76648356f7"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "constraints_enabled": false, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.451699, "relation_name": "AD_HOC.semantic_layer.raw_orders", "raw_code": "", "root_path": "/Users/dichenqian/Documents/code/dbt-server/tests/e2e/fixtures/jaffle_shop/testing", "depends_on": {"macros": []}}, "seed.jaffle_shop.raw_payments": {"database": "AD_HOC", "schema": "semantic_layer", "name": "raw_payments", "resource_type": "seed", "package_name": "jaffle_shop", "path": "raw_payments.csv", "original_file_path": "seeds/raw_payments.csv", "unique_id": "seed.jaffle_shop.raw_payments", "fqn": ["jaffle_shop", "raw_payments"], "alias": "raw_payments", "checksum": {"name": "sha256", "checksum": "03fd407f3135f84456431a923f22fc185a2154079e210c20b690e3ab11687d11"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "constraints_enabled": false, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.4528792, "relation_name": "AD_HOC.semantic_layer.raw_payments", "raw_code": "", "root_path": 
"/Users/dichenqian/Documents/code/dbt-server/tests/e2e/fixtures/jaffle_shop/testing", "depends_on": {"macros": []}}, "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('customers')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "unique_customers_customer_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "unique_customers_customer_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1", "fqn": ["jaffle_shop", "unique_customers_customer_id"], "alias": "unique_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.4659488, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["customers"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.jaffle_shop.customers"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "customer_id", "file_key_name": "models.customers"}, "test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('customers')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "not_null_customers_customer_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_customers_customer_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d", "fqn": ["jaffle_shop", "not_null_customers_customer_id"], "alias": "not_null_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.4672792, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["customers"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.customers"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "customer_id", "file_key_name": "models.customers"}, "test.jaffle_shop.unique_orders_order_id.fed79b3a6e": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "unique_orders_order_id", 
"resource_type": "test", "package_name": "jaffle_shop", "path": "unique_orders_order_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.unique_orders_order_id.fed79b3a6e", "fqn": ["jaffle_shop", "unique_orders_order_id"], "alias": "unique_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.468373, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "order_id", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_order_id.cf6c17daed": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "not_null_orders_order_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_order_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_order_id.cf6c17daed", "fqn": ["jaffle_shop", "not_null_orders_order_id"], "alias": "not_null_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.469328, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "order_id", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_customer_id.c5f02694af": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "not_null_orders_customer_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_customer_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_customer_id.c5f02694af", "fqn": ["jaffle_shop", "not_null_orders_customer_id"], "alias": "not_null_orders_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", 
"store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.470248, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "customer_id", "file_key_name": "models.orders"}, "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2": {"test_metadata": {"name": "relationships", "kwargs": {"to": "ref('customers')", "field": "customer_id", "column_name": "customer_id", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "relationships_orders_customer_id__customer_id__ref_customers_", "resource_type": "test", "package_name": "jaffle_shop", "path": "relationships_orders_customer_id__customer_id__ref_customers_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2", "fqn": ["jaffle_shop", "relationships_orders_customer_id__customer_id__ref_customers_"], "alias": "relationships_orders_customer_id__customer_id__ref_customers_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.471182, "relation_name": null, "raw_code": "{{ test_relationships(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["customers"], ["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_relationships", "macro.dbt.get_where_subquery"], "nodes": ["model.jaffle_shop.customers", "model.jaffle_shop.orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "customer_id", "file_key_name": "models.orders"}, "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3": {"test_metadata": {"name": "accepted_values", "kwargs": {"values": ["placed", "shipped", "completed", "return_pending", "returned"], "column_name": "status", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "accepted_values_orders_status__placed__shipped__completed__return_pending__returned", "resource_type": "test", "package_name": "jaffle_shop", "path": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3", "fqn": ["jaffle_shop", "accepted_values_orders_status__placed__shipped__completed__return_pending__returned"], "alias": 
"accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"alias": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758"}, "created_at": 1677002900.4740689, "relation_name": null, "raw_code": "{{ test_accepted_values(**_dbt_generic_test_kwargs) }}{{ config(alias=\"accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758\") }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_accepted_values", "macro.dbt.get_where_subquery"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "status", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_amount.106140f9fd": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "not_null_orders_amount", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_amount.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_amount.106140f9fd", "fqn": ["jaffle_shop", "not_null_orders_amount"], "alias": "not_null_orders_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.4776058, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "amount", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_credit_card_amount.d3ca593b59": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "credit_card_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "not_null_orders_credit_card_amount", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_credit_card_amount.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_credit_card_amount.d3ca593b59", "fqn": ["jaffle_shop", "not_null_orders_credit_card_amount"], "alias": "not_null_orders_credit_card_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, 
"materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.47938, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "credit_card_amount", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_coupon_amount.ab90c90625": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "coupon_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "not_null_orders_coupon_amount", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_coupon_amount.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_coupon_amount.ab90c90625", "fqn": ["jaffle_shop", "not_null_orders_coupon_amount"], "alias": "not_null_orders_coupon_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.480363, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "coupon_amount", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_bank_transfer_amount.7743500c49": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "bank_transfer_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "not_null_orders_bank_transfer_amount", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_bank_transfer_amount.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_bank_transfer_amount.7743500c49", "fqn": ["jaffle_shop", "not_null_orders_bank_transfer_amount"], "alias": "not_null_orders_bank_transfer_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.481288, 
"relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "bank_transfer_amount", "file_key_name": "models.orders"}, "test.jaffle_shop.not_null_orders_gift_card_amount.413a0d2d7a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "gift_card_amount", "model": "{{ get_where_subquery(ref('orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "not_null_orders_gift_card_amount", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_orders_gift_card_amount.sql", "original_file_path": "models/schema.yml", "unique_id": "test.jaffle_shop.not_null_orders_gift_card_amount.413a0d2d7a", "fqn": ["jaffle_shop", "not_null_orders_gift_card_amount"], "alias": "not_null_orders_gift_card_amount", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.482346, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "gift_card_amount", "file_key_name": "models.orders"}, "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('stg_customers')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "unique_stg_customers_customer_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "unique_stg_customers_customer_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada", "fqn": ["jaffle_shop", "staging", "unique_stg_customers_customer_id"], "alias": "unique_stg_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.4858398, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_customers"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.jaffle_shop.stg_customers"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "customer_id", "file_key_name": 
"models.stg_customers"}, "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "customer_id", "model": "{{ get_where_subquery(ref('stg_customers')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "not_null_stg_customers_customer_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_stg_customers_customer_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa", "fqn": ["jaffle_shop", "staging", "not_null_stg_customers_customer_id"], "alias": "not_null_stg_customers_customer_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.486965, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_customers"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.stg_customers"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "customer_id", "file_key_name": "models.stg_customers"}, "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('stg_orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "unique_stg_orders_order_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "unique_stg_orders_order_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a", "fqn": ["jaffle_shop", "staging", "unique_stg_orders_order_id"], "alias": "unique_stg_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.488058, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.jaffle_shop.stg_orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "order_id", "file_key_name": "models.stg_orders"}, "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "order_id", "model": "{{ get_where_subquery(ref('stg_orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "not_null_stg_orders_order_id", "resource_type": "test", 
"package_name": "jaffle_shop", "path": "not_null_stg_orders_order_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64", "fqn": ["jaffle_shop", "staging", "not_null_stg_orders_order_id"], "alias": "not_null_stg_orders_order_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.489154, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.stg_orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "order_id", "file_key_name": "models.stg_orders"}, "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad": {"test_metadata": {"name": "accepted_values", "kwargs": {"values": ["placed", "shipped", "completed", "return_pending", "returned"], "column_name": "status", "model": "{{ get_where_subquery(ref('stg_orders')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned", "resource_type": "test", "package_name": "jaffle_shop", "path": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad", "fqn": ["jaffle_shop", "staging", "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned"], "alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58"}, "created_at": 1677002900.4901052, "relation_name": null, "raw_code": "{{ test_accepted_values(**_dbt_generic_test_kwargs) }}{{ config(alias=\"accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58\") }}", "language": "sql", "refs": [["stg_orders"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_accepted_values", "macro.dbt.get_where_subquery"], "nodes": ["model.jaffle_shop.stg_orders"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "status", "file_key_name": "models.stg_orders"}, "test.jaffle_shop.unique_stg_payments_payment_id.3744510712": {"test_metadata": {"name": "unique", "kwargs": 
{"column_name": "payment_id", "model": "{{ get_where_subquery(ref('stg_payments')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "unique_stg_payments_payment_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "unique_stg_payments_payment_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.unique_stg_payments_payment_id.3744510712", "fqn": ["jaffle_shop", "staging", "unique_stg_payments_payment_id"], "alias": "unique_stg_payments_payment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.493682, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique"], "nodes": ["model.jaffle_shop.stg_payments"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "payment_id", "file_key_name": "models.stg_payments"}, "test.jaffle_shop.not_null_stg_payments_payment_id.c19cc50075": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "payment_id", "model": "{{ get_where_subquery(ref('stg_payments')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": "not_null_stg_payments_payment_id", "resource_type": "test", "package_name": "jaffle_shop", "path": "not_null_stg_payments_payment_id.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.not_null_stg_payments_payment_id.c19cc50075", "fqn": ["jaffle_shop", "staging", "not_null_stg_payments_payment_id"], "alias": "not_null_stg_payments_payment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1677002900.4946141, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["stg_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.jaffle_shop.stg_payments"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "payment_id", "file_key_name": "models.stg_payments"}, "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278": {"test_metadata": {"name": "accepted_values", "kwargs": {"values": ["credit_card", "coupon", "bank_transfer", "gift_card"], "column_name": "payment_method", "model": "{{ get_where_subquery(ref('stg_payments')) }}"}, "namespace": null}, "database": "AD_HOC", "schema": "semantic_layer_dbt_test__audit", "name": 
"accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card", "resource_type": "test", "package_name": "jaffle_shop", "path": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef.sql", "original_file_path": "models/staging/schema.yml", "unique_id": "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278", "fqn": ["jaffle_shop", "staging", "accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card"], "alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef"}, "created_at": 1677002900.495552, "relation_name": null, "raw_code": "{{ test_accepted_values(**_dbt_generic_test_kwargs) }}{{ config(alias=\"accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef\") }}", "language": "sql", "refs": [["stg_payments"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_accepted_values", "macro.dbt.get_where_subquery"], "nodes": ["model.jaffle_shop.stg_payments"]}, "compiled_path": null, "constraints_enabled": false, "column_name": "payment_method", "file_key_name": "models.stg_payments"}}, "sources": {}, "macros": {"macro.dbt_snowflake.snowflake__get_catalog": {"name": "snowflake__get_catalog", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_snowflake.snowflake__get_catalog", "macro_sql": "{% macro snowflake__get_catalog(information_schema, schemas) -%}\n {% set query %}\n with tables as (\n\n select\n table_catalog as \"table_database\",\n table_schema as \"table_schema\",\n table_name as \"table_name\",\n table_type as \"table_type\",\n comment as \"table_comment\",\n\n -- note: this is the _role_ that owns the table\n table_owner as \"table_owner\",\n\n 'Clustering Key' as \"stats:clustering_key:label\",\n clustering_key as \"stats:clustering_key:value\",\n 'The key used to cluster this table' as \"stats:clustering_key:description\",\n (clustering_key is not null) as \"stats:clustering_key:include\",\n\n 'Row Count' as \"stats:row_count:label\",\n row_count as \"stats:row_count:value\",\n 'An approximate count of rows in this table' as \"stats:row_count:description\",\n (row_count is not null) as \"stats:row_count:include\",\n\n 'Approximate Size' as \"stats:bytes:label\",\n bytes as \"stats:bytes:value\",\n 'Approximate size of the table as reported by Snowflake' as \"stats:bytes:description\",\n (bytes is not null) as \"stats:bytes:include\",\n\n 'Last Modified' as \"stats:last_modified:label\",\n to_varchar(convert_timezone('UTC', last_altered), 'yyyy-mm-dd HH24:MI'||'UTC') as \"stats:last_modified:value\",\n 'The timestamp for last update/change' as \"stats:last_modified:description\",\n (last_altered is not null and table_type='BASE TABLE') as \"stats:last_modified:include\"\n\n from 
{{ information_schema }}.tables\n\n ),\n\n columns as (\n\n select\n table_catalog as \"table_database\",\n table_schema as \"table_schema\",\n table_name as \"table_name\",\n\n column_name as \"column_name\",\n ordinal_position as \"column_index\",\n data_type as \"column_type\",\n comment as \"column_comment\"\n\n from {{ information_schema }}.columns\n )\n\n select *\n from tables\n join columns using (\"table_database\", \"table_schema\", \"table_name\")\n where (\n {%- for schema in schemas -%}\n upper(\"table_schema\") = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n order by \"column_index\"\n {%- endset -%}\n\n {{ return(run_query(query)) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.11482, "supported_languages": null}, "macro.dbt_snowflake.snowflake__create_table_as": {"name": "snowflake__create_table_as", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__create_table_as", "macro_sql": "{% macro snowflake__create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {%- if language == 'sql' -%}\n {%- set transient = config.get('transient', default=true) -%}\n {%- set cluster_by_keys = config.get('cluster_by', default=none) -%}\n {%- set enable_automatic_clustering = config.get('automatic_clustering', default=false) -%}\n {%- set copy_grants = config.get('copy_grants', default=false) -%}\n\n {%- if cluster_by_keys is not none and cluster_by_keys is string -%}\n {%- set cluster_by_keys = [cluster_by_keys] -%}\n {%- endif -%}\n {%- if cluster_by_keys is not none -%}\n {%- set cluster_by_string = cluster_by_keys|join(\", \")-%}\n {% else %}\n {%- set cluster_by_string = none -%}\n {%- endif -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create or replace {% if temporary -%}\n temporary\n {%- elif transient -%}\n transient\n {%- endif %} table {{ relation }}\n {% if config.get('constraints_enabled', False) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_columns_spec_ddl() }}\n {% endif %}\n {% if copy_grants and not temporary -%} copy grants {%- endif %} as\n (\n {%- if cluster_by_string is not none -%}\n select * from(\n {{ compiled_code }}\n ) order by ({{ cluster_by_string }})\n {%- else -%}\n {{ compiled_code }}\n {%- endif %}\n );\n {% if cluster_by_string is not none and not temporary -%}\n alter table {{relation}} cluster by ({{cluster_by_string}});\n {%- endif -%}\n {% if enable_automatic_clustering and cluster_by_string is not none and not temporary -%}\n alter table {{relation}} resume recluster;\n {%- endif -%}\n\n {%- elif language == 'python' -%}\n {{ py_write_table(compiled_code=compiled_code, target_relation=relation, temporary=temporary) }}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"snowflake__create_table_as macro didn't get supported language, it got %s\" % language) %}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_columns_spec_ddl", "macro.dbt_snowflake.py_write_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1319182, "supported_languages": null}, 
"macro.dbt_snowflake.get_column_comment_sql": {"name": "get_column_comment_sql", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.get_column_comment_sql", "macro_sql": "{% macro get_column_comment_sql(column_name, column_dict) -%}\n {% if (column_name|upper in column_dict) -%}\n {% set matched_column = column_name|upper -%}\n {% elif (column_name|lower in column_dict) -%}\n {% set matched_column = column_name|lower -%}\n {% elif (column_name in column_dict) -%}\n {% set matched_column = column_name -%}\n {% else -%}\n {% set matched_column = None -%}\n {% endif -%}\n {% if matched_column -%}\n {{ adapter.quote(column_name) }} COMMENT $${{ column_dict[matched_column]['description'] | replace('$', '[$]') }}$$\n {%- else -%}\n {{ adapter.quote(column_name) }} COMMENT $$$$\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.132638, "supported_languages": null}, "macro.dbt_snowflake.get_persist_docs_column_list": {"name": "get_persist_docs_column_list", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.get_persist_docs_column_list", "macro_sql": "{% macro get_persist_docs_column_list(model_columns, query_columns) %}\n(\n {% for column_name in query_columns %}\n {{ get_column_comment_sql(column_name, model_columns) }}\n {{- \", \" if not loop.last else \"\" }}\n {% endfor %}\n)\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.get_column_comment_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1329222, "supported_languages": null}, "macro.dbt_snowflake.snowflake__create_view_as_with_temp_flag": {"name": "snowflake__create_view_as_with_temp_flag", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__create_view_as_with_temp_flag", "macro_sql": "{% macro snowflake__create_view_as_with_temp_flag(relation, sql, is_temporary=False) -%}\n {%- set secure = config.get('secure', default=false) -%}\n {%- set copy_grants = config.get('copy_grants', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create or replace {% if secure -%}\n secure\n {%- endif %} {% if is_temporary -%}\n temporary\n {%- endif %} view {{ relation }}\n {% if config.persist_column_docs() -%}\n {% set model_columns = model.columns %}\n {% set query_columns = get_columns_in_query(sql) %}\n {{ get_persist_docs_column_list(model_columns, query_columns) }}\n\n {%- endif %}\n {% if copy_grants -%} copy grants {%- endif %} as (\n {{ sql }}\n );\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query", "macro.dbt_snowflake.get_persist_docs_column_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1337469, "supported_languages": null}, "macro.dbt_snowflake.snowflake__create_view_as": {"name": "snowflake__create_view_as", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": 
"macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__create_view_as", "macro_sql": "{% macro snowflake__create_view_as(relation, sql) -%}\n {{ snowflake__create_view_as_with_temp_flag(relation, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__create_view_as_with_temp_flag"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.133904, "supported_languages": null}, "macro.dbt_snowflake.snowflake__get_columns_in_relation": {"name": "snowflake__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__get_columns_in_relation", "macro_sql": "{% macro snowflake__get_columns_in_relation(relation) -%}\n {%- set sql -%}\n describe table {{ relation }}\n {%- endset -%}\n {%- set result = run_query(sql) -%}\n\n {% set maximum = 10000 %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many columns in relation {{ relation }}! dbt can only get\n information about relations with fewer than {{ maximum }} columns.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n\n {% set columns = [] %}\n {% for row in result %}\n {% do columns.append(api.Column.from_description(row['name'], row['type'])) %}\n {% endfor %}\n {% do return(columns) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.134679, "supported_languages": null}, "macro.dbt_snowflake.snowflake__list_schemas": {"name": "snowflake__list_schemas", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__list_schemas", "macro_sql": "{% macro snowflake__list_schemas(database) -%}\n {# 10k limit from here: https://docs.snowflake.net/manuals/sql-reference/sql/show-schemas.html#usage-notes #}\n {% set maximum = 10000 %}\n {% set sql -%}\n show terse schemas in database {{ database }}\n limit {{ maximum }}\n {%- endset %}\n {% set result = run_query(sql) %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many schemas in database {{ database }}! dbt can only get\n information about databases with fewer than {{ maximum }} schemas.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n {{ return(result) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.135235, "supported_languages": null}, "macro.dbt_snowflake.snowflake__list_relations_without_caching": {"name": "snowflake__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__list_relations_without_caching", "macro_sql": "{% macro snowflake__list_relations_without_caching(schema_relation) %}\n {%- set sql -%}\n show terse objects in {{ schema_relation }}\n {%- endset -%}\n\n {%- set result = run_query(sql) -%}\n {% set maximum = 10000 %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many objects in schema {{ schema_relation }}! 
dbt can only get\n information about schemas with fewer than {{ maximum }} objects.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n {%- do return(result) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1357398, "supported_languages": null}, "macro.dbt_snowflake.snowflake__check_schema_exists": {"name": "snowflake__check_schema_exists", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__check_schema_exists", "macro_sql": "{% macro snowflake__check_schema_exists(information_schema, schema) -%}\n {% call statement('check_schema_exists', fetch_result=True) -%}\n select count(*)\n from {{ information_schema }}.schemata\n where upper(schema_name) = upper('{{ schema }}')\n and upper(catalog_name) = upper('{{ information_schema.database }}')\n {%- endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.136064, "supported_languages": null}, "macro.dbt_snowflake.snowflake__rename_relation": {"name": "snowflake__rename_relation", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__rename_relation", "macro_sql": "{% macro snowflake__rename_relation(from_relation, to_relation) -%}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ to_relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1362581, "supported_languages": null}, "macro.dbt_snowflake.snowflake__alter_column_type": {"name": "snowflake__alter_column_type", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__alter_column_type", "macro_sql": "{% macro snowflake__alter_column_type(relation, column_name, new_column_type) -%}\n {% call statement('alter_column_type') %}\n alter table {{ relation }} alter {{ adapter.quote(column_name) }} set data type {{ new_column_type }};\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.136513, "supported_languages": null}, "macro.dbt_snowflake.snowflake__alter_relation_comment": {"name": "snowflake__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__alter_relation_comment", "macro_sql": "{% macro snowflake__alter_relation_comment(relation, relation_comment) -%}\n comment on {{ relation.type }} {{ relation }} IS $${{ relation_comment | replace('$', '[$]') }}$$;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1677002900.1367161, "supported_languages": null}, "macro.dbt_snowflake.snowflake__alter_column_comment": {"name": "snowflake__alter_column_comment", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__alter_column_comment", "macro_sql": "{% macro snowflake__alter_column_comment(relation, column_dict) -%}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n alter {{ relation.type }} {{ relation }} alter\n {% for column_name in existing_columns if (column_name in existing_columns) or (column_name|lower in existing_columns) %}\n {{ get_column_comment_sql(column_name, column_dict) }} {{- ',' if not loop.last else ';' }}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.get_column_comment_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.137235, "supported_languages": null}, "macro.dbt_snowflake.get_current_query_tag": {"name": "get_current_query_tag", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.get_current_query_tag", "macro_sql": "{% macro get_current_query_tag() -%}\n {{ return(run_query(\"show parameters like 'query_tag' in session\").rows[0]['value']) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.13742, "supported_languages": null}, "macro.dbt_snowflake.set_query_tag": {"name": "set_query_tag", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.set_query_tag", "macro_sql": "{% macro set_query_tag() -%}\n {{ return(adapter.dispatch('set_query_tag', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__set_query_tag"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1375842, "supported_languages": null}, "macro.dbt_snowflake.snowflake__set_query_tag": {"name": "snowflake__set_query_tag", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__set_query_tag", "macro_sql": "{% macro snowflake__set_query_tag() -%}\n {% set new_query_tag = config.get('query_tag') %}\n {% if new_query_tag %}\n {% set original_query_tag = get_current_query_tag() %}\n {{ log(\"Setting query_tag to '\" ~ new_query_tag ~ \"'. 
Will reset to '\" ~ original_query_tag ~ \"' after materialization.\") }}\n {% do run_query(\"alter session set query_tag = '{}'\".format(new_query_tag)) %}\n {{ return(original_query_tag)}}\n {% endif %}\n {{ return(none)}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.get_current_query_tag", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.138064, "supported_languages": null}, "macro.dbt_snowflake.unset_query_tag": {"name": "unset_query_tag", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.unset_query_tag", "macro_sql": "{% macro unset_query_tag(original_query_tag) -%}\n {{ return(adapter.dispatch('unset_query_tag', 'dbt')(original_query_tag)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__unset_query_tag"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.138258, "supported_languages": null}, "macro.dbt_snowflake.snowflake__unset_query_tag": {"name": "snowflake__unset_query_tag", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__unset_query_tag", "macro_sql": "{% macro snowflake__unset_query_tag(original_query_tag) -%}\n {% set new_query_tag = config.get('query_tag') %}\n {% if new_query_tag %}\n {% if original_query_tag %}\n {{ log(\"Resetting query_tag to '\" ~ original_query_tag ~ \"'.\") }}\n {% do run_query(\"alter session set query_tag = '{}'\".format(original_query_tag)) %}\n {% else %}\n {{ log(\"No original query_tag, unsetting parameter.\") }}\n {% do run_query(\"alter session unset query_tag\") %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.138744, "supported_languages": null}, "macro.dbt_snowflake.snowflake__alter_relation_add_remove_columns": {"name": "snowflake__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__alter_relation_add_remove_columns", "macro_sql": "{% macro snowflake__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns %}\n\n {% set sql -%}\n alter {{ relation.type }} {{ relation }} add column\n {% for column in add_columns %}\n {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n {% endif %}\n\n {% if remove_columns %}\n\n {% set sql -%}\n alter {{ relation.type }} {{ relation }} drop column\n {% for column in remove_columns %}\n {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1395829, "supported_languages": null}, "macro.dbt_snowflake.snowflake_dml_explicit_transaction": {"name": "snowflake_dml_explicit_transaction", 
"resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake_dml_explicit_transaction", "macro_sql": "{% macro snowflake_dml_explicit_transaction(dml) %}\n {#\n Use this macro to wrap all INSERT, MERGE, UPDATE, DELETE, and TRUNCATE\n statements before passing them into run_query(), or calling in the 'main' statement\n of a materialization\n #}\n {% set dml_transaction -%}\n begin;\n {{ dml }};\n commit;\n {%- endset %}\n\n {% do return(dml_transaction) %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1397948, "supported_languages": null}, "macro.dbt_snowflake.snowflake__truncate_relation": {"name": "snowflake__truncate_relation", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_snowflake.snowflake__truncate_relation", "macro_sql": "{% macro snowflake__truncate_relation(relation) -%}\n {% set truncate_dml %}\n truncate table {{ relation }}\n {% endset %}\n {% call statement('truncate_relation') -%}\n {{ snowflake_dml_explicit_transaction(truncate_dml) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_snowflake.snowflake_dml_explicit_transaction"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.140039, "supported_languages": null}, "macro.dbt_snowflake.snowflake__copy_grants": {"name": "snowflake__copy_grants", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/apply_grants.sql", "original_file_path": "macros/apply_grants.sql", "unique_id": "macro.dbt_snowflake.snowflake__copy_grants", "macro_sql": "{% macro snowflake__copy_grants() %}\n {% set copy_grants = config.get('copy_grants', False) %}\n {{ return(copy_grants) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.140336, "supported_languages": null}, "macro.dbt_snowflake.snowflake__support_multiple_grantees_per_dcl_statement": {"name": "snowflake__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/apply_grants.sql", "original_file_path": "macros/apply_grants.sql", "unique_id": "macro.dbt_snowflake.snowflake__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro snowflake__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(False) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.140451, "supported_languages": null}, "macro.dbt_snowflake.materialization_test_snowflake": {"name": "materialization_test_snowflake", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "unique_id": "macro.dbt_snowflake.materialization_test_snowflake", "macro_sql": "{%- materialization test, adapter='snowflake' -%}\n\n {% set original_query_tag = set_query_tag() %}\n {% set relations = materialization_test_default() %}\n {% do unset_query_tag(original_query_tag) 
%}\n {{ return(relations) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.materialization_test_default", "macro.dbt_snowflake.unset_query_tag"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1408021, "supported_languages": ["sql"]}, "macro.dbt_snowflake.snowflake__get_merge_sql": {"name": "snowflake__get_merge_sql", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/merge.sql", "original_file_path": "macros/materializations/merge.sql", "unique_id": "macro.dbt_snowflake.snowflake__get_merge_sql", "macro_sql": "{% macro snowflake__get_merge_sql(target, source_sql, unique_key, dest_columns, incremental_predicates) -%}\n\n {#\n Workaround for Snowflake not being happy with a merge on a constant-false predicate.\n When no unique_key is provided, this macro will do a regular insert. If a unique_key\n is provided, then this macro will do a proper merge instead.\n #}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute='name')) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {%- set dml -%}\n {%- if unique_key is none -%}\n\n {{ sql_header if sql_header is not none }}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source_sql }}\n )\n\n {%- else -%}\n\n {{ default__get_merge_sql(target, source_sql, unique_key, dest_columns, incremental_predicates) }}\n\n {%- endif -%}\n {%- endset -%}\n\n {% do return(snowflake_dml_explicit_transaction(dml)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.default__get_merge_sql", "macro.dbt_snowflake.snowflake_dml_explicit_transaction"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1420438, "supported_languages": null}, "macro.dbt_snowflake.snowflake__get_delete_insert_merge_sql": {"name": "snowflake__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/merge.sql", "original_file_path": "macros/materializations/merge.sql", "unique_id": "macro.dbt_snowflake.snowflake__get_delete_insert_merge_sql", "macro_sql": "{% macro snowflake__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) %}\n {% set dml = default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) %}\n {% do return(snowflake_dml_explicit_transaction(dml)) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql", "macro.dbt_snowflake.snowflake_dml_explicit_transaction"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.142355, "supported_languages": null}, "macro.dbt_snowflake.snowflake__snapshot_merge_sql": {"name": "snowflake__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/merge.sql", "original_file_path": "macros/materializations/merge.sql", "unique_id": "macro.dbt_snowflake.snowflake__snapshot_merge_sql", "macro_sql": "{% macro snowflake__snapshot_merge_sql(target, source, insert_cols) %}\n {% set dml = default__snapshot_merge_sql(target, source, insert_cols) %}\n {% do return(snowflake_dml_explicit_transaction(dml)) %}\n{% endmacro %}", "depends_on": 
{"macros": ["macro.dbt.default__snapshot_merge_sql", "macro.dbt_snowflake.snowflake_dml_explicit_transaction"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.142612, "supported_languages": null}, "macro.dbt_snowflake.snowflake__load_csv_rows": {"name": "snowflake__load_csv_rows", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/seed.sql", "original_file_path": "macros/materializations/seed.sql", "unique_id": "macro.dbt_snowflake.snowflake__load_csv_rows", "macro_sql": "{% macro snowflake__load_csv_rows(model, agate_table) %}\n {% set batch_size = get_batch_size() %}\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n %s\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query('BEGIN', auto_begin=False) %}\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n {% do adapter.add_query('COMMIT', auto_begin=False) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1448598, "supported_languages": null}, "macro.dbt_snowflake.materialization_seed_snowflake": {"name": "materialization_seed_snowflake", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/seed.sql", "original_file_path": "macros/materializations/seed.sql", "unique_id": "macro.dbt_snowflake.materialization_seed_snowflake", "macro_sql": "{% materialization seed, adapter='snowflake' %}\n {% set original_query_tag = set_query_tag() %}\n\n {% set relations = materialization_seed_default() %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return(relations) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.materialization_seed_default", "macro.dbt_snowflake.unset_query_tag"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1451762, "supported_languages": ["sql"]}, "macro.dbt_snowflake.materialization_view_snowflake": {"name": "materialization_view_snowflake", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/view.sql", "original_file_path": "macros/materializations/view.sql", "unique_id": "macro.dbt_snowflake.materialization_view_snowflake", "macro_sql": "{% materialization view, adapter='snowflake' -%}\n\n {% set original_query_tag = set_query_tag() %}\n {% set to_return = create_or_replace_view() %}\n\n {% set target_relation = this.incorporate(type='view') %}\n\n {% do persist_docs(target_relation, model, for_columns=false) %}\n\n {% do return(to_return) %}\n\n {% do 
unset_query_tag(original_query_tag) %}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.create_or_replace_view", "macro.dbt.persist_docs", "macro.dbt_snowflake.unset_query_tag"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1457431, "supported_languages": ["sql"]}, "macro.dbt_snowflake.materialization_table_snowflake": {"name": "materialization_table_snowflake", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/table.sql", "original_file_path": "macros/materializations/table.sql", "unique_id": "macro.dbt_snowflake.materialization_table_snowflake", "macro_sql": "{% materialization table, adapter='snowflake', supported_languages=['sql', 'python']%}\n\n {% set original_query_tag = set_query_tag() %}\n\n {%- set identifier = model['alias'] -%}\n {%- set language = model['language'] -%}\n\n {% set grant_config = config.get('grants') %}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database, type='table') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n {#-- Drop the relation if it was a view to \"convert\" it in a table. This may lead to\n -- downtime, but it should be a relatively infrequent occurrence #}\n {% if old_relation is not none and not old_relation.is_table %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ drop_relation_if_exists(old_relation) }}\n {% endif %}\n\n {% call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks) }}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt_snowflake.unset_query_tag"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1482618, "supported_languages": ["sql", "python"]}, "macro.dbt_snowflake.py_write_table": {"name": "py_write_table", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/table.sql", "original_file_path": "macros/materializations/table.sql", "unique_id": "macro.dbt_snowflake.py_write_table", "macro_sql": "{% macro py_write_table(compiled_code, target_relation, temporary=False) %}\n{{ compiled_code }}\ndef materialize(session, df, target_relation):\n # make sure pandas exists\n import importlib.util\n package_name = 'pandas'\n if importlib.util.find_spec(package_name):\n import pandas\n if isinstance(df, pandas.core.frame.DataFrame):\n # session.write_pandas does not have overwrite function\n df = session.createDataFrame(df)\n {% set target_relation_name = target_relation | string | replace('\"', '\\\\\"') %}\n df.write.mode(\"overwrite\").save_as_table(\"{{ 
target_relation_name }}\", create_temp_table={{temporary}})\n\ndef main(session):\n dbt = dbtObj(session.table)\n df = model(dbt, session)\n materialize(session, df, dbt.this)\n return \"OK\"\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1485791, "supported_languages": null}, "macro.dbt_snowflake.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/table.sql", "original_file_path": "macros/materializations/table.sql", "unique_id": "macro.dbt_snowflake.py_script_comment", "macro_sql": "{% macro py_script_comment()%}\n# To run this in snowsight, you need to select entry point to be main\n# And you may have to modify the return type to text to get the result back\n# def main(session):\n# dbt = dbtObj(session.table)\n# df = model(dbt, session)\n# return df.collect()\n\n# to run this in local notebook, you need to create a session following examples https://github.com/Snowflake-Labs/sfguide-getting-started-snowpark-python\n# then you can do the following to run model\n# dbt = dbtObj(session.table)\n# df = model(dbt, session)\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1486819, "supported_languages": null}, "macro.dbt_snowflake.dbt_snowflake_get_tmp_relation_type": {"name": "dbt_snowflake_get_tmp_relation_type", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/incremental.sql", "original_file_path": "macros/materializations/incremental.sql", "unique_id": "macro.dbt_snowflake.dbt_snowflake_get_tmp_relation_type", "macro_sql": "{% macro dbt_snowflake_get_tmp_relation_type(strategy, unique_key, language) %}\n{%- set tmp_relation_type = config.get('tmp_relation_type') -%}\n /* {#\n High-level principles:\n If we are running multiple statements (DELETE + INSERT),\n and we want to guarantee identical inputs to both statements,\n then we must first save the model query results as a temporary table\n (which presumably comes with a performance cost).\n If we are running a single statement (MERGE or INSERT alone),\n we _may_ save the model query definition as a view instead,\n for (presumably) faster overall incremental processing.\n\n Low-level specifics:\n If an invalid option is specified, then we will raise an\n exception with a corresponding message.\n\n Languages other than SQL (like Python) will use a temporary table.\n With the default strategy of merge, the user may choose between a temporary\n table and view (defaulting to view).\n\n The append strategy can use a view because it will run a single INSERT statement.\n\n When unique_key is none, the delete+insert strategy can use a view because a\n single INSERT statement is run with no DELETES as part of the statement.\n Otherwise, play it safe by using a temporary table.\n #} */\n\n {% if language == \"python\" and tmp_relation_type is not none %}\n {% do exceptions.raise_compiler_error(\n \"Python models currently only support 'table' for tmp_relation_type but \"\n ~ tmp_relation_type ~ \" was specified.\"\n ) %}\n {% endif %}\n\n {% if strategy == \"delete+insert\" and tmp_relation_type is not none and tmp_relation_type != \"table\" and unique_key is not none %}\n {% do exceptions.raise_compiler_error(\n \"In order to maintain consistent 
results when `unique_key` is not none,\n the `delete+insert` strategy only supports `table` for `tmp_relation_type` but \"\n ~ tmp_relation_type ~ \" was specified.\"\n )\n %}\n {% endif %}\n\n {% if language != \"sql\" %}\n {{ return(\"table\") }}\n {% elif tmp_relation_type == \"table\" %}\n {{ return(\"table\") }}\n {% elif tmp_relation_type == \"view\" %}\n {{ return(\"view\") }}\n {% elif strategy in (\"default\", \"merge\", \"append\") %}\n {{ return(\"view\") }}\n {% elif strategy == \"delete+insert\" and unique_key is none %}\n {{ return(\"view\") }}\n {% else %}\n {{ return(\"table\") }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.151378, "supported_languages": null}, "macro.dbt_snowflake.materialization_incremental_snowflake": {"name": "materialization_incremental_snowflake", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/incremental.sql", "original_file_path": "macros/materializations/incremental.sql", "unique_id": "macro.dbt_snowflake.materialization_incremental_snowflake", "macro_sql": "{% materialization incremental, adapter='snowflake', supported_languages=['sql', 'python'] -%}\n\n {% set original_query_tag = set_query_tag() %}\n\n {#-- Set vars --#}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n {%- set language = model['language'] -%}\n {% set target_relation = this %}\n {% set existing_relation = load_relation(this) %}\n\n {#-- The temp relation will be a view (faster) or temp table, depending on upsert/merge strategy --#}\n {%- set unique_key = config.get('unique_key') -%}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set tmp_relation_type = dbt_snowflake_get_tmp_relation_type(incremental_strategy, unique_key, language) %}\n {% set tmp_relation = make_temp_relation(this).incorporate(type=tmp_relation_type) %}\n\n {% set grant_config = config.get('grants') %}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n {{ run_hooks(pre_hooks) }}\n\n {% if existing_relation is none %}\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {% elif existing_relation.is_view %}\n {#-- Can't overwrite a view with a table - we must drop --#}\n {{ log(\"Dropping relation \" ~ target_relation ~ \" because it is a view and this model is a table.\") }}\n {% do adapter.drop_relation(existing_relation) %}\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n {% elif full_refresh_mode %}\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {% else %}\n {#-- Create the temp relation, either as a view or as a temp table --#}\n {% if tmp_relation_type == 'view' %}\n {%- call statement('create_tmp_relation') -%}\n {{ snowflake__create_view_as_with_temp_flag(tmp_relation, compiled_code, True) }}\n {%- endcall -%}\n {% else %}\n {%- call statement('create_tmp_relation', language=language) -%}\n {{ create_table_as(True, tmp_relation, compiled_code, language) }}\n {%- endcall -%}\n {% endif %}\n\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {#-- 
Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': tmp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n\n {%- call statement('main') -%}\n {{ strategy_sql_macro_func(strategy_arg_dict) }}\n {%- endcall -%}\n {% endif %}\n\n {% do drop_relation_if_exists(tmp_relation) %}\n\n {{ run_hooks(post_hooks) }}\n\n {% set target_relation = target_relation.incorporate(type='table') %}\n\n {% set should_revoke =\n should_revoke(existing_relation.is_table, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.should_full_refresh", "macro.dbt.load_relation", "macro.dbt_snowflake.dbt_snowflake_get_tmp_relation_type", "macro.dbt.make_temp_relation", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt_snowflake.snowflake__create_view_as_with_temp_flag", "macro.dbt.process_schema_changes", "macro.dbt.drop_relation_if_exists", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt_snowflake.unset_query_tag"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.154806, "supported_languages": ["sql", "python"]}, "macro.dbt_snowflake.snowflake__get_incremental_default_sql": {"name": "snowflake__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/incremental.sql", "original_file_path": "macros/materializations/incremental.sql", "unique_id": "macro.dbt_snowflake.snowflake__get_incremental_default_sql", "macro_sql": "{% macro snowflake__get_incremental_default_sql(arg_dict) %}\n {{ return(get_incremental_merge_sql(arg_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.154979, "supported_languages": null}, "macro.dbt_snowflake.materialization_snapshot_snowflake": {"name": "materialization_snapshot_snowflake", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/materializations/snapshot.sql", "original_file_path": "macros/materializations/snapshot.sql", "unique_id": "macro.dbt_snowflake.materialization_snapshot_snowflake", "macro_sql": "{% materialization snapshot, adapter='snowflake' %}\n {% set original_query_tag = set_query_tag() %}\n {% set relations = materialization_snapshot_default() %}\n\n {% do 
unset_query_tag(original_query_tag) %}\n\n {{ return(relations) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.materialization_snapshot_default", "macro.dbt_snowflake.unset_query_tag"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.155338, "supported_languages": ["sql"]}, "macro.dbt_snowflake.snowflake__current_timestamp": {"name": "snowflake__current_timestamp", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/utils/timestamps.sql", "original_file_path": "macros/utils/timestamps.sql", "unique_id": "macro.dbt_snowflake.snowflake__current_timestamp", "macro_sql": "{% macro snowflake__current_timestamp() -%}\n convert_timezone('UTC', current_timestamp())\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.155634, "supported_languages": null}, "macro.dbt_snowflake.snowflake__snapshot_string_as_time": {"name": "snowflake__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/utils/timestamps.sql", "original_file_path": "macros/utils/timestamps.sql", "unique_id": "macro.dbt_snowflake.snowflake__snapshot_string_as_time", "macro_sql": "{% macro snowflake__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"to_timestamp_ntz('\" ~ timestamp ~ \"')\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.155812, "supported_languages": null}, "macro.dbt_snowflake.snowflake__snapshot_get_time": {"name": "snowflake__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/utils/timestamps.sql", "original_file_path": "macros/utils/timestamps.sql", "unique_id": "macro.dbt_snowflake.snowflake__snapshot_get_time", "macro_sql": "{% macro snowflake__snapshot_get_time() -%}\n to_timestamp_ntz({{ current_timestamp() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.155916, "supported_languages": null}, "macro.dbt_snowflake.snowflake__current_timestamp_backcompat": {"name": "snowflake__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/utils/timestamps.sql", "original_file_path": "macros/utils/timestamps.sql", "unique_id": "macro.dbt_snowflake.snowflake__current_timestamp_backcompat", "macro_sql": "{% macro snowflake__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1560168, "supported_languages": null}, "macro.dbt_snowflake.snowflake__current_timestamp_in_utc_backcompat": {"name": "snowflake__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/utils/timestamps.sql", "original_file_path": "macros/utils/timestamps.sql", "unique_id": "macro.dbt_snowflake.snowflake__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro 
snowflake__current_timestamp_in_utc_backcompat() %}\n convert_timezone('UTC', {{ snowflake__current_timestamp_backcompat() }})::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__current_timestamp_backcompat", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.156158, "supported_languages": null}, "macro.dbt_snowflake.snowflake__escape_single_quotes": {"name": "snowflake__escape_single_quotes", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt_snowflake.snowflake__escape_single_quotes", "macro_sql": "{% macro snowflake__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\", \"\\\\'\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.156348, "supported_languages": null}, "macro.dbt_snowflake.snowflake__get_columns_spec_ddl": {"name": "snowflake__get_columns_spec_ddl", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/utils/get_columns_spec_ddl.sql", "original_file_path": "macros/utils/get_columns_spec_ddl.sql", "unique_id": "macro.dbt_snowflake.snowflake__get_columns_spec_ddl", "macro_sql": "{% macro snowflake__get_columns_spec_ddl() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set ns = namespace(at_least_one_check=False) -%}\n {%- set user_provided_columns = model['columns'] -%}\n (\n {% for i in user_provided_columns -%}\n {%- set col = user_provided_columns[i] -%}\n {% set constraints = col['constraints'] -%}\n {%- set ns.at_least_one_check = ns.at_least_one_check or col['constraints_check'] %}\n {{ col['name'] }} {{ col['data_type'] }} {% for x in constraints %} {{ x or \"\" }} {% endfor %} {{ \",\" if not loop.last }}\n {%- endfor %}\n)\n {%- if ns.at_least_one_check -%}\n {{exceptions.warn(\"We noticed you have `constraints_check` configs, these are NOT compatible with Snowflake and will be ignored\")}}\n {%- endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.15738, "supported_languages": null}, "macro.dbt_snowflake.snowflake__right": {"name": "snowflake__right", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt_snowflake.snowflake__right", "macro_sql": "{% macro snowflake__right(string_text, length_expression) %}\n\n case when {{ length_expression }} = 0\n then ''\n else\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n end\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1576152, "supported_languages": null}, "macro.dbt_snowflake.snowflake__safe_cast": {"name": "snowflake__safe_cast", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt_snowflake.snowflake__safe_cast", "macro_sql": "{% macro snowflake__safe_cast(field, type) %}\n 
try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1577759, "supported_languages": null}, "macro.dbt_snowflake.snowflake__bool_or": {"name": "snowflake__bool_or", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt_snowflake.snowflake__bool_or", "macro_sql": "{% macro snowflake__bool_or(expression) -%}\n\n boolor_agg({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1579032, "supported_languages": null}, "macro.dbt_snowflake.snowflake__array_construct": {"name": "snowflake__array_construct", "resource_type": "macro", "package_name": "dbt_snowflake", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt_snowflake.snowflake__array_construct", "macro_sql": "{% macro snowflake__array_construct(inputs, data_type) -%}\n array_construct( {{ inputs|join(' , ') }} )\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1580749, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.159183, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.15938, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.159521, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.159669, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.15981, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1601999, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.160565, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1608698, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.161333, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.161671, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' 
in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.165409, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.165587, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.165813, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1665668, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.166733, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1669068, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n select {{ check_cols_config | join(', ') }} from ({{ node['compiled_code'] }}) subq\n 
{%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.168322, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1697412, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1677002900.174044, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.174342, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.174509, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.174597, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.17474, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.174855, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": 
"macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.175064, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.175976, "supported_languages": null}, 
"macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1761742, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1764328, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.176872, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do 
exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1833339, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- 
materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1854699, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.185932, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.186239, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1866398, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1870239, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.188704, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.189253, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': 
tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.189929, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.190165, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.1908858, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.197883, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- 
endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.19984, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.200109, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.201206, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.201494, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2021499, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.202801, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.203739, "supported_languages": null}, 
"macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.20399, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.204183, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.204489, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.204683, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": 
"macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.204984, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.205177, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.205514, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.205724, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro 
default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.205889, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.206192, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.211481, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.217536, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2188249, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.220059, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.220927, "supported_languages": null}, "macro.dbt.get_columns_spec_ddl": {"name": "get_columns_spec_ddl", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_columns_spec_ddl", "macro_sql": "{%- macro get_columns_spec_ddl() -%}\n {{ adapter.dispatch('get_columns_spec_ddl', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__get_columns_spec_ddl"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.221768, "supported_languages": null}, "macro.dbt.default__get_columns_spec_ddl": {"name": "default__get_columns_spec_ddl", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_columns_spec_ddl", "macro_sql": "{% macro default__get_columns_spec_ddl() -%}\n {{ return(columns_spec_ddl()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.columns_spec_ddl"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.221889, "supported_languages": null}, "macro.dbt.columns_spec_ddl": {"name": "columns_spec_ddl", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.columns_spec_ddl", "macro_sql": "{% macro columns_spec_ddl() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set user_provided_columns = model['columns'] -%}\n (\n {% for i in user_provided_columns %}\n {% set col = user_provided_columns[i] %}\n {% set constraints = col['constraints'] %}\n {% set constraints_check = col['constraints_check'] %}\n {{ col['name'] }} {{ col['data_type'] }} {% for x in constraints %} {{ x or \"\" }} {% endfor %} {% if constraints_check -%} check {{ constraints_check or \"\" }} {%- endif %} {{ \",\" if not loop.last }}\n {% endfor %}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2225878, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1677002900.2227561, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.222889, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- set column_names_config_only = [] -%}\n {%- for i in user_provided_columns -%}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = col['name'] -%}\n {%- set column_names_config_only = column_names_config_only.append(col_name) -%}\n {%- endfor -%}\n {%- set sql_file_provided_columns = get_columns_in_query(sql) -%}\n\n {#- uppercase both schema and sql file columns -#}\n {%- set column_names_config_upper= column_names_config_only|map('upper')|join(',') -%}\n {%- set column_names_config_formatted = column_names_config_upper.split(',') -%}\n {%- set sql_file_provided_columns_upper = sql_file_provided_columns|map('upper')|join(',') -%}\n {%- set sql_file_provided_columns_formatted = sql_file_provided_columns_upper.split(',') -%}\n\n {%- if column_names_config_formatted != sql_file_provided_columns_formatted -%}\n {%- do exceptions.raise_compiler_error('Please ensure the name, order, and number of columns in your `yml` file match the columns in your SQL file.\\nSchema File Columns: ' ~ column_names_config_formatted ~ '\\nSQL File Columns: ' ~ sql_file_provided_columns_formatted ~ ' ' ) %}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.223806, "supported_languages": null}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.226531, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.227159, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", 
"original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.227343, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2277792, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% if config.get('constraints_enabled', False) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_columns_spec_ddl() }}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_columns_spec_ddl"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.228388, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.23123, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, 
old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2315829, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2318249, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=True) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.23338, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2337549, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.233919, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.234099, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = 
config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.234365, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.237803, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": 
"macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2432761, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.244194, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.244431, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.244926, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.245127, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.245261, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.24541, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2455292, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.245701, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2458282, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2463412, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.246547, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.247892, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": 
"macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.248306, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2485352, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.249068, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2493281, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1677002900.2496169, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2500238, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.250278, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.250622, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.250924, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": 
"macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2511759, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.251755, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? 
--#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.253286, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.253868, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.254175, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.256124, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is 
after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2574122, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.258185, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2585092, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.258771, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.258852, "supported_languages": null}, "macro.dbt.replace": 
{"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2591941, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.259362, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2596118, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.259744, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.259997, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.260111, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": 
"macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2604601, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2606301, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.260865, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.260942, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2612119, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.261361, "supported_languages": null}, 
"macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2616692, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.261817, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.262508, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.263047, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.263419, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": 
"dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2635899, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.263886, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.264023, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.264278, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.264452, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.264703, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": 
"default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2648609, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.265106, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.265208, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.265495, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.26563, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.26587, 
"supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2659688, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2669282, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.267082, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.267243, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.267393, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.267617, "supported_languages": null}, 
"macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.267772, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2679338, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.268121, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2682931, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.268445, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.268607, "supported_languages": null}, "macro.dbt.default__type_int": {"name": 
"default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.268749, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2689059, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.269048, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.269357, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.269492, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.269737, "supported_languages": null}, 
"macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.269838, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.270175, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2704482, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.270599, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.271134, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": 
[]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.271298, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.271519, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.271803, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.271936, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2723808, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2726269, "supported_languages": null}, "macro.dbt.array_append": {"name": 
"array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.272908, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.273037, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2734132, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.273599, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.273768, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.273948, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2744472, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.274602, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.274743, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.274845, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.275011, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": 
"default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2750862, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.275248, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_snowflake.snowflake__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.275413, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.275879, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.276014, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", 
"original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.276177, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.276597, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.280242, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2805219, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.280751, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2810612, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2813199, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.281665, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.281858, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.282078, 
"supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.282258, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.282429, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.282634, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.282936, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.283187, "supported_languages": null}, 
"macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2837708, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.284, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.284142, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.28434, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.284789, "supported_languages": null}, 
"macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.285202, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.286962, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.287074, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.28724, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1677002900.287416, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.287756, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2879412, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.288037, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.288287, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1677002900.288476, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.288707, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.288893, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.289122, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.289823, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2900128, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.290272, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.290514, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in 
place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2916732, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2923892, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2925591, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.292765, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% 
macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2929332, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.293209, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.293775, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2955048, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2957911, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": 
"information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.296, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.296171, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.296364, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.296623, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.296833, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", 
"path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.297151, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2973351, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.2975008, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.299587, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1677002900.299766, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.30009, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.300289, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.30065, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.300891, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.301802, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_snowflake.snowflake__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.302069, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.302892, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {%- set resolved = ref(*_ref) -%}\n {%- do ref_dict.update({_ref | join(\".\"): resolved | string | replace('\"', '\\\"')}) -%}\n {%- endfor -%}\n\ndef ref(*args,dbt_load_df_function):\n refs = {{ ref_dict | tojson }}\n key = \".\".join(args)\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": []}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.3045278, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join(\".\"): resolved | string | replace('\"', '\\\"')}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = \".\".join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.305057, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.30571, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = this | string | replace('\"', '\\\\\"') %}\n def __repr__(self):\n return \"{{ this_relation_name }}\"\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args: ref(*args, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% 
endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.306247, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.3063328, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.3068721, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.307104, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.307402, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1677002900.3076842, "supported_languages": null}}, 
"docs": {"doc.jaffle_shop.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "jaffle_shop", "path": "overview.md", "original_file_path": "models/overview.md", "unique_id": "doc.jaffle_shop.__overview__", "block_contents": "## Data Documentation for Jaffle Shop\n\n`jaffle_shop` is a fictional ecommerce store.\n\nThis [dbt](https://www.getdbt.com/) project is for testing out code.\n\nThe source code can be found [here](https://github.com/clrcrl/jaffle_shop)."}, "doc.jaffle_shop.orders_status": {"name": "orders_status", "resource_type": "doc", "package_name": "jaffle_shop", "path": "docs.md", "original_file_path": "models/docs.md", "unique_id": "doc.jaffle_shop.orders_status", "block_contents": "Orders can be one of the following statuses:\n\n| status | description |\n|----------------|------------------------------------------------------------------------------------------------------------------------|\n| placed | The order has been placed but has not yet left the warehouse |\n| shipped | The order has ben shipped to the customer and is currently in transit |\n| completed | The order has been received by the customer |\n| return_pending | The customer has indicated that they would like to return the order, but it has not yet been received at the warehouse |\n| returned | The order has been returned by the customer and received at the warehouse |"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. 
For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {}, "groups": {}, "selectors": {}, "disabled": {}, "parent_map": {"model.jaffle_shop.customers": ["model.jaffle_shop.stg_customers", "model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"], "model.jaffle_shop.orders": ["model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"], "model.jaffle_shop.stg_customers": ["seed.jaffle_shop.raw_customers"], "model.jaffle_shop.stg_payments": ["seed.jaffle_shop.raw_payments"], "model.jaffle_shop.stg_orders": ["seed.jaffle_shop.raw_orders"], "seed.jaffle_shop.raw_customers": [], "seed.jaffle_shop.raw_orders": [], "seed.jaffle_shop.raw_payments": [], "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1": ["model.jaffle_shop.customers"], "test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d": ["model.jaffle_shop.customers"], "test.jaffle_shop.unique_orders_order_id.fed79b3a6e": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_order_id.cf6c17daed": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_customer_id.c5f02694af": ["model.jaffle_shop.orders"], "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2": ["model.jaffle_shop.customers", "model.jaffle_shop.orders"], "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_amount.106140f9fd": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_credit_card_amount.d3ca593b59": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_coupon_amount.ab90c90625": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_bank_transfer_amount.7743500c49": ["model.jaffle_shop.orders"], "test.jaffle_shop.not_null_orders_gift_card_amount.413a0d2d7a": ["model.jaffle_shop.orders"], "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada": ["model.jaffle_shop.stg_customers"], "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa": ["model.jaffle_shop.stg_customers"], "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a": ["model.jaffle_shop.stg_orders"], "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64": ["model.jaffle_shop.stg_orders"], "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad": ["model.jaffle_shop.stg_orders"], "test.jaffle_shop.unique_stg_payments_payment_id.3744510712": ["model.jaffle_shop.stg_payments"], "test.jaffle_shop.not_null_stg_payments_payment_id.c19cc50075": ["model.jaffle_shop.stg_payments"], "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278": ["model.jaffle_shop.stg_payments"]}, "child_map": {"model.jaffle_shop.customers": ["test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d", "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2", "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1"], 
"model.jaffle_shop.orders": ["test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3", "test.jaffle_shop.not_null_orders_amount.106140f9fd", "test.jaffle_shop.not_null_orders_bank_transfer_amount.7743500c49", "test.jaffle_shop.not_null_orders_coupon_amount.ab90c90625", "test.jaffle_shop.not_null_orders_credit_card_amount.d3ca593b59", "test.jaffle_shop.not_null_orders_customer_id.c5f02694af", "test.jaffle_shop.not_null_orders_gift_card_amount.413a0d2d7a", "test.jaffle_shop.not_null_orders_order_id.cf6c17daed", "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2", "test.jaffle_shop.unique_orders_order_id.fed79b3a6e"], "model.jaffle_shop.stg_customers": ["model.jaffle_shop.customers", "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa", "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada"], "model.jaffle_shop.stg_payments": ["model.jaffle_shop.customers", "model.jaffle_shop.orders", "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278", "test.jaffle_shop.not_null_stg_payments_payment_id.c19cc50075", "test.jaffle_shop.unique_stg_payments_payment_id.3744510712"], "model.jaffle_shop.stg_orders": ["model.jaffle_shop.customers", "model.jaffle_shop.orders", "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad", "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64", "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a"], "seed.jaffle_shop.raw_customers": ["model.jaffle_shop.stg_customers"], "seed.jaffle_shop.raw_orders": ["model.jaffle_shop.stg_orders"], "seed.jaffle_shop.raw_payments": ["model.jaffle_shop.stg_payments"], "test.jaffle_shop.unique_customers_customer_id.c5af1ff4b1": [], "test.jaffle_shop.not_null_customers_customer_id.5c9bf9911d": [], "test.jaffle_shop.unique_orders_order_id.fed79b3a6e": [], "test.jaffle_shop.not_null_orders_order_id.cf6c17daed": [], "test.jaffle_shop.not_null_orders_customer_id.c5f02694af": [], "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.c6ec7f58f2": [], "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.be6b5b5ec3": [], "test.jaffle_shop.not_null_orders_amount.106140f9fd": [], "test.jaffle_shop.not_null_orders_credit_card_amount.d3ca593b59": [], "test.jaffle_shop.not_null_orders_coupon_amount.ab90c90625": [], "test.jaffle_shop.not_null_orders_bank_transfer_amount.7743500c49": [], "test.jaffle_shop.not_null_orders_gift_card_amount.413a0d2d7a": [], "test.jaffle_shop.unique_stg_customers_customer_id.c7614daada": [], "test.jaffle_shop.not_null_stg_customers_customer_id.e2cfb1f9aa": [], "test.jaffle_shop.unique_stg_orders_order_id.e3b841c71a": [], "test.jaffle_shop.not_null_stg_orders_order_id.81cfe2fe64": [], "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.080fb20aad": [], "test.jaffle_shop.unique_stg_payments_payment_id.3744510712": [], "test.jaffle_shop.not_null_stg_payments_payment_id.c19cc50075": [], "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.3c3820f278": []}} \ No newline at end of file diff --git a/tests/e2e/fixtures/jaffle_shop/init_target/partial_parse.msgpack b/tests/e2e/fixtures/jaffle_shop/init_target/partial_parse.msgpack new file mode 100644 index 0000000..178bcc4 Binary files /dev/null and 
b/tests/e2e/fixtures/jaffle_shop/init_target/partial_parse.msgpack differ diff --git a/tests/e2e/fixtures/jaffle_shop/macros/test_macros.sql b/tests/e2e/fixtures/jaffle_shop/macros/test_macros.sql new file mode 100644 index 0000000..d383e09 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/macros/test_macros.sql @@ -0,0 +1,4 @@ +-- Testing purpose +{% macro test_macro(int_value=2) %} + {{ int_value }} + {{var("test_var")}} +{% endmacro %} diff --git a/tests/e2e/fixtures/jaffle_shop/models/customers.sql b/tests/e2e/fixtures/jaffle_shop/models/customers.sql new file mode 100644 index 0000000..4ef7b36 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/models/customers.sql @@ -0,0 +1,69 @@ +with customers as ( + + select * from {{ ref('stg_customers') }} + +), + +orders as ( + + select * from {{ ref('stg_orders') }} + +), + +payments as ( + + select * from {{ ref('stg_payments') }} + +), + +customer_orders as ( + + select + customer_id, + + min(order_date) + {{ test_macro(2) }} as first_order, + max(order_date) as most_recent_order, + count(order_id) as number_of_orders + from orders + + group by customer_id + +), + +customer_payments as ( + + select + orders.customer_id, + sum(amount) as total_amount + + from payments + + left join orders on + payments.order_id = orders.order_id + + group by orders.customer_id + +), + +final as ( + + select + customers.customer_id, + customers.first_name, + customers.last_name, + customer_orders.first_order, + customer_orders.most_recent_order, + customer_orders.number_of_orders, + customer_payments.total_amount as customer_lifetime_value + + from customers + + left join customer_orders + on customers.customer_id = customer_orders.customer_id + + left join customer_payments + on customers.customer_id = customer_payments.customer_id + +) + +select * from final diff --git a/tests/e2e/fixtures/jaffle_shop/models/docs.md b/tests/e2e/fixtures/jaffle_shop/models/docs.md new file mode 100644 index 0000000..c6ae93b --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/models/docs.md @@ -0,0 +1,14 @@ +{% docs orders_status %} + +Orders can be one of the following statuses: + +| status | description | +|----------------|------------------------------------------------------------------------------------------------------------------------| +| placed | The order has been placed but has not yet left the warehouse | +| shipped | The order has ben shipped to the customer and is currently in transit | +| completed | The order has been received by the customer | +| return_pending | The customer has indicated that they would like to return the order, but it has not yet been received at the warehouse | +| returned | The order has been returned by the customer and received at the warehouse | + + +{% enddocs %} diff --git a/tests/e2e/fixtures/jaffle_shop/models/orders.sql b/tests/e2e/fixtures/jaffle_shop/models/orders.sql new file mode 100644 index 0000000..cbb2934 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/models/orders.sql @@ -0,0 +1,56 @@ +{% set payment_methods = ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] %} + +with orders as ( + + select * from {{ ref('stg_orders') }} + +), + +payments as ( + + select * from {{ ref('stg_payments') }} + +), + +order_payments as ( + + select + order_id, + + {% for payment_method in payment_methods -%} + sum(case when payment_method = '{{ payment_method }}' then amount else 0 end) as {{ payment_method }}_amount, + {% endfor -%} + + sum(amount) as total_amount + + from payments + + group by order_id + +), + +final as ( + + 
select + orders.order_id, + orders.customer_id, + orders.order_date, + orders.status, + + {% for payment_method in payment_methods -%} + + order_payments.{{ payment_method }}_amount, + + {% endfor -%} + + order_payments.total_amount as amount + + from orders + + + left join order_payments + on orders.order_id = order_payments.order_id + +) + +select * from final diff --git a/tests/e2e/fixtures/jaffle_shop/models/overview.md b/tests/e2e/fixtures/jaffle_shop/models/overview.md new file mode 100644 index 0000000..0544c42 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/models/overview.md @@ -0,0 +1,11 @@ +{% docs __overview__ %} + +## Data Documentation for Jaffle Shop + +`jaffle_shop` is a fictional ecommerce store. + +This [dbt](https://www.getdbt.com/) project is for testing out code. + +The source code can be found [here](https://github.com/clrcrl/jaffle_shop). + +{% enddocs %} diff --git a/tests/e2e/fixtures/jaffle_shop/models/schema.yml b/tests/e2e/fixtures/jaffle_shop/models/schema.yml new file mode 100644 index 0000000..381349c --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/models/schema.yml @@ -0,0 +1,82 @@ +version: 2 + +models: + - name: customers + description: This table has basic information about a customer, as well as some derived facts based on a customer's orders + + columns: + - name: customer_id + description: This is a unique identifier for a customer + tests: + - unique + - not_null + + - name: first_name + description: Customer's first name. PII. + + - name: last_name + description: Customer's last name. PII. + + - name: first_order + description: Date (UTC) of a customer's first order + + - name: most_recent_order + description: Date (UTC) of a customer's most recent order + + - name: number_of_orders + description: Count of the number of orders a customer has placed + + - name: total_order_amount + description: Total value (AUD) of a customer's orders + + - name: orders + description: This table has basic information about orders, as well as some derived facts based on payments + + columns: + - name: order_id + tests: + - unique + - not_null + description: This is a unique identifier for an order + + - name: customer_id + description: Foreign key to the customers table + tests: + - not_null + - relationships: + to: ref('customers') + field: customer_id + + - name: order_date + description: Date (UTC) that the order was placed + + - name: status + description: '{{ doc("orders_status") }}' + tests: + - accepted_values: + values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] + + - name: amount + description: Total amount (AUD) of the order + tests: + - not_null + + - name: credit_card_amount + description: Amount of the order (AUD) paid for by credit card + tests: + - not_null + + - name: coupon_amount + description: Amount of the order (AUD) paid for by coupon + tests: + - not_null + + - name: bank_transfer_amount + description: Amount of the order (AUD) paid for by bank transfer + tests: + - not_null + + - name: gift_card_amount + description: Amount of the order (AUD) paid for by gift card + tests: + - not_null diff --git a/tests/e2e/fixtures/jaffle_shop/models/staging/schema.yml b/tests/e2e/fixtures/jaffle_shop/models/staging/schema.yml new file mode 100644 index 0000000..c207e4c --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/models/staging/schema.yml @@ -0,0 +1,31 @@ +version: 2 + +models: + - name: stg_customers + columns: + - name: customer_id + tests: + - unique + - not_null + + - name: stg_orders + columns: + - name: order_id + 
tests: + - unique + - not_null + - name: status + tests: + - accepted_values: + values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] + + - name: stg_payments + columns: + - name: payment_id + tests: + - unique + - not_null + - name: payment_method + tests: + - accepted_values: + values: ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] diff --git a/tests/e2e/fixtures/jaffle_shop/models/staging/stg_customers.sql b/tests/e2e/fixtures/jaffle_shop/models/staging/stg_customers.sql new file mode 100644 index 0000000..cad0472 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/models/staging/stg_customers.sql @@ -0,0 +1,22 @@ +with source as ( + + {#- + Normally we would select from the table here, but we are using seeds to load + our data in this project + #} + select * from {{ ref('raw_customers') }} + +), + +renamed as ( + + select + id as customer_id, + first_name, + last_name + + from source + +) + +select * from renamed diff --git a/tests/e2e/fixtures/jaffle_shop/models/staging/stg_orders.sql b/tests/e2e/fixtures/jaffle_shop/models/staging/stg_orders.sql new file mode 100644 index 0000000..a654dcb --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/models/staging/stg_orders.sql @@ -0,0 +1,23 @@ +with source as ( + + {#- + Normally we would select from the table here, but we are using seeds to load + our data in this project + #} + select * from {{ ref('raw_orders') }} + +), + +renamed as ( + + select + id as order_id, + user_id as customer_id, + order_date, + status + + from source + +) + +select * from renamed diff --git a/tests/e2e/fixtures/jaffle_shop/models/staging/stg_payments.sql b/tests/e2e/fixtures/jaffle_shop/models/staging/stg_payments.sql new file mode 100644 index 0000000..700cf7f --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/models/staging/stg_payments.sql @@ -0,0 +1,25 @@ +with source as ( + + {#- + Normally we would select from the table here, but we are using seeds to load + our data in this project + #} + select * from {{ ref('raw_payments') }} + +), + +renamed as ( + + select + id as payment_id, + order_id, + payment_method, + + -- `amount` is currently stored in cents, so we convert it to dollars + amount / 100 as amount + + from source + +) + +select * from renamed diff --git a/tests/e2e/fixtures/jaffle_shop/packages.yml b/tests/e2e/fixtures/jaffle_shop/packages.yml new file mode 100644 index 0000000..13605a3 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/packages.yml @@ -0,0 +1,4 @@ +# Testing purpose. +packages: + - package: tnightengale/dbt_meta_testing + version: 0.3.6 diff --git a/tests/e2e/fixtures/jaffle_shop/seeds/raw_customers.csv b/tests/e2e/fixtures/jaffle_shop/seeds/raw_customers.csv new file mode 100644 index 0000000..b3e6747 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/seeds/raw_customers.csv @@ -0,0 +1,101 @@ +id,first_name,last_name +1,Michael,P. +2,Shawn,M. +3,Kathleen,P. +4,Jimmy,C. +5,Katherine,R. +6,Sarah,R. +7,Martin,M. +8,Frank,R. +9,Jennifer,F. +10,Henry,W. +11,Fred,S. +12,Amy,D. +13,Kathleen,M. +14,Steve,F. +15,Teresa,H. +16,Amanda,H. +17,Kimberly,R. +18,Johnny,K. +19,Virginia,F. +20,Anna,A. +21,Willie,H. +22,Sean,H. +23,Mildred,A. +24,David,G. +25,Victor,H. +26,Aaron,R. +27,Benjamin,B. +28,Lisa,W. +29,Benjamin,K. +30,Christina,W. +31,Jane,G. +32,Thomas,O. +33,Katherine,M. +34,Jennifer,S. +35,Sara,T. +36,Harold,O. +37,Shirley,J. +38,Dennis,J. +39,Louise,W. +40,Maria,A. +41,Gloria,C. +42,Diana,S. +43,Kelly,N. +44,Jane,R. +45,Scott,B. +46,Norma,C. +47,Marie,P. +48,Lillian,C. +49,Judy,N. +50,Billy,L. 
+51,Howard,R. +52,Laura,F. +53,Anne,B. +54,Rose,M. +55,Nicholas,R. +56,Joshua,K. +57,Paul,W. +58,Kathryn,K. +59,Adam,A. +60,Norma,W. +61,Timothy,R. +62,Elizabeth,P. +63,Edward,G. +64,David,C. +65,Brenda,W. +66,Adam,W. +67,Michael,H. +68,Jesse,E. +69,Janet,P. +70,Helen,F. +71,Gerald,C. +72,Kathryn,O. +73,Alan,B. +74,Harry,A. +75,Andrea,H. +76,Barbara,W. +77,Anne,W. +78,Harry,H. +79,Jack,R. +80,Phillip,H. +81,Shirley,H. +82,Arthur,D. +83,Virginia,R. +84,Christina,R. +85,Theresa,M. +86,Jason,C. +87,Phillip,B. +88,Adam,T. +89,Margaret,J. +90,Paul,P. +91,Todd,W. +92,Willie,O. +93,Frances,R. +94,Gregory,H. +95,Lisa,P. +96,Jacqueline,A. +97,Shirley,D. +98,Nicole,M. +99,Mary,G. +100,Jean,M. diff --git a/tests/e2e/fixtures/jaffle_shop/seeds/raw_orders.csv b/tests/e2e/fixtures/jaffle_shop/seeds/raw_orders.csv new file mode 100644 index 0000000..c487062 --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/seeds/raw_orders.csv @@ -0,0 +1,100 @@ +id,user_id,order_date,status +1,1,2018-01-01,returned +2,3,2018-01-02,completed +3,94,2018-01-04,completed +4,50,2018-01-05,completed +5,64,2018-01-05,completed +6,54,2018-01-07,completed +7,88,2018-01-09,completed +8,2,2018-01-11,returned +9,53,2018-01-12,completed +10,7,2018-01-14,completed +11,99,2018-01-14,completed +12,59,2018-01-15,completed +13,84,2018-01-17,completed +14,40,2018-01-17,returned +15,25,2018-01-17,completed +16,39,2018-01-18,completed +17,71,2018-01-18,completed +18,64,2018-01-20,returned +19,54,2018-01-22,completed +20,20,2018-01-23,completed +21,71,2018-01-23,completed +22,86,2018-01-24,completed +23,22,2018-01-26,return_pending +24,3,2018-01-27,completed +25,51,2018-01-28,completed +26,32,2018-01-28,completed +27,94,2018-01-29,completed +28,8,2018-01-29,completed +29,57,2018-01-31,completed +30,69,2018-02-02,completed +31,16,2018-02-02,completed +32,28,2018-02-04,completed +33,42,2018-02-04,completed +34,38,2018-02-06,completed +35,80,2018-02-08,completed +36,85,2018-02-10,completed +37,1,2018-02-10,completed +38,51,2018-02-10,completed +39,26,2018-02-11,completed +40,33,2018-02-13,completed +41,99,2018-02-14,completed +42,92,2018-02-16,completed +43,31,2018-02-17,completed +44,66,2018-02-17,completed +45,22,2018-02-17,completed +46,6,2018-02-19,completed +47,50,2018-02-20,completed +48,27,2018-02-21,completed +49,35,2018-02-21,completed +50,51,2018-02-23,completed +51,71,2018-02-24,completed +52,54,2018-02-25,return_pending +53,34,2018-02-26,completed +54,54,2018-02-26,completed +55,18,2018-02-27,completed +56,79,2018-02-28,completed +57,93,2018-03-01,completed +58,22,2018-03-01,completed +59,30,2018-03-02,completed +60,12,2018-03-03,completed +61,63,2018-03-03,completed +62,57,2018-03-05,completed +63,70,2018-03-06,completed +64,13,2018-03-07,completed +65,26,2018-03-08,completed +66,36,2018-03-10,completed +67,79,2018-03-11,completed +68,53,2018-03-11,completed +69,3,2018-03-11,completed +70,8,2018-03-12,completed +71,42,2018-03-12,shipped +72,30,2018-03-14,shipped +73,19,2018-03-16,completed +74,9,2018-03-17,shipped +75,69,2018-03-18,completed +76,25,2018-03-20,completed +77,35,2018-03-21,shipped +78,90,2018-03-23,shipped +79,52,2018-03-23,shipped +80,11,2018-03-23,shipped +81,76,2018-03-23,shipped +82,46,2018-03-24,shipped +83,54,2018-03-24,shipped +84,70,2018-03-26,placed +85,47,2018-03-26,shipped +86,68,2018-03-26,placed +87,46,2018-03-27,placed +88,91,2018-03-27,shipped +89,21,2018-03-28,placed +90,66,2018-03-30,shipped +91,47,2018-03-31,placed +92,84,2018-04-02,placed +93,66,2018-04-03,placed +94,63,2018-04-03,placed 
+95,27,2018-04-04,placed +96,90,2018-04-06,placed +97,89,2018-04-07,placed +98,41,2018-04-07,placed +99,85,2018-04-09,placed diff --git a/tests/e2e/fixtures/jaffle_shop/seeds/raw_payments.csv b/tests/e2e/fixtures/jaffle_shop/seeds/raw_payments.csv new file mode 100644 index 0000000..a587baa --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/seeds/raw_payments.csv @@ -0,0 +1,114 @@ +id,order_id,payment_method,amount +1,1,credit_card,1000 +2,2,credit_card,2000 +3,3,coupon,100 +4,4,coupon,2500 +5,5,bank_transfer,1700 +6,6,credit_card,600 +7,7,credit_card,1600 +8,8,credit_card,2300 +9,9,gift_card,2300 +10,9,bank_transfer,0 +11,10,bank_transfer,2600 +12,11,credit_card,2700 +13,12,credit_card,100 +14,13,credit_card,500 +15,13,bank_transfer,1400 +16,14,bank_transfer,300 +17,15,coupon,2200 +18,16,credit_card,1000 +19,17,bank_transfer,200 +20,18,credit_card,500 +21,18,credit_card,800 +22,19,gift_card,600 +23,20,bank_transfer,1500 +24,21,credit_card,1200 +25,22,bank_transfer,800 +26,23,gift_card,2300 +27,24,coupon,2600 +28,25,bank_transfer,2000 +29,25,credit_card,2200 +30,25,coupon,1600 +31,26,credit_card,3000 +32,27,credit_card,2300 +33,28,bank_transfer,1900 +34,29,bank_transfer,1200 +35,30,credit_card,1300 +36,31,credit_card,1200 +37,32,credit_card,300 +38,33,credit_card,2200 +39,34,bank_transfer,1500 +40,35,credit_card,2900 +41,36,bank_transfer,900 +42,37,credit_card,2300 +43,38,credit_card,1500 +44,39,bank_transfer,800 +45,40,credit_card,1400 +46,41,credit_card,1700 +47,42,coupon,1700 +48,43,gift_card,1800 +49,44,gift_card,1100 +50,45,bank_transfer,500 +51,46,bank_transfer,800 +52,47,credit_card,2200 +53,48,bank_transfer,300 +54,49,credit_card,600 +55,49,credit_card,900 +56,50,credit_card,2600 +57,51,credit_card,2900 +58,51,credit_card,100 +59,52,bank_transfer,1500 +60,53,credit_card,300 +61,54,credit_card,1800 +62,54,bank_transfer,1100 +63,55,credit_card,2900 +64,56,credit_card,400 +65,57,bank_transfer,200 +66,58,coupon,1800 +67,58,gift_card,600 +68,59,gift_card,2800 +69,60,credit_card,400 +70,61,bank_transfer,1600 +71,62,gift_card,1400 +72,63,credit_card,2900 +73,64,bank_transfer,2600 +74,65,credit_card,0 +75,66,credit_card,2800 +76,67,bank_transfer,400 +77,67,credit_card,1900 +78,68,credit_card,1600 +79,69,credit_card,1900 +80,70,credit_card,2600 +81,71,credit_card,500 +82,72,credit_card,2900 +83,73,bank_transfer,300 +84,74,credit_card,3000 +85,75,credit_card,1900 +86,76,coupon,200 +87,77,credit_card,0 +88,77,bank_transfer,1900 +89,78,bank_transfer,2600 +90,79,credit_card,1800 +91,79,credit_card,900 +92,80,gift_card,300 +93,81,coupon,200 +94,82,credit_card,800 +95,83,credit_card,100 +96,84,bank_transfer,2500 +97,85,bank_transfer,1700 +98,86,coupon,2300 +99,87,gift_card,3000 +100,87,credit_card,2600 +101,88,credit_card,2900 +102,89,bank_transfer,2200 +103,90,bank_transfer,200 +104,91,credit_card,1900 +105,92,bank_transfer,1500 +106,92,coupon,200 +107,93,gift_card,2600 +108,94,coupon,700 +109,95,coupon,2400 +110,96,gift_card,1700 +111,97,bank_transfer,1400 +112,98,bank_transfer,1000 +113,99,credit_card,2400 diff --git a/tests/e2e/fixtures/jaffle_shop/selectors.yml b/tests/e2e/fixtures/jaffle_shop/selectors.yml new file mode 100644 index 0000000..7228aad --- /dev/null +++ b/tests/e2e/fixtures/jaffle_shop/selectors.yml @@ -0,0 +1,5 @@ +# Testing purpose. 
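+# Referenced by the smoke tests through the %SELECTOR placeholder in smoke_test.py.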
+selectors: + - name: test_selector + definition: + 'stg_customers+' diff --git a/tests/e2e/fixtures/profiles/postgres/profiles_template.yml b/tests/e2e/fixtures/profiles/postgres/profiles_template.yml new file mode 100644 index 0000000..02f5735 --- /dev/null +++ b/tests/e2e/fixtures/profiles/postgres/profiles_template.yml @@ -0,0 +1,12 @@ +jaffle_shop: + target: postgres + outputs: + postgres: + type: postgres + database: postgres + host: localhost + user: %USER + password: %PASSWORD + port: %PORT + schema: analytics + threads: 4 diff --git a/tests/e2e/fixtures/profiles/snowflake/profiles.yml b/tests/e2e/fixtures/profiles/snowflake/profiles.yml new file mode 100644 index 0000000..8c32881 --- /dev/null +++ b/tests/e2e/fixtures/profiles/snowflake/profiles.yml @@ -0,0 +1,14 @@ +user: + outputs: + default: + account: testAccount + client_session_keep_alive: false + database: TEST + password: test-password + role: TEST + schema: test_schema + threads: 4 + type: snowflake + user: TEST + warehouse: TEST + target: default diff --git a/tests/e2e/fixtures/profiles/snowflake/profiles_template.yml b/tests/e2e/fixtures/profiles/snowflake/profiles_template.yml new file mode 100644 index 0000000..a749931 --- /dev/null +++ b/tests/e2e/fixtures/profiles/snowflake/profiles_template.yml @@ -0,0 +1,14 @@ +user: + outputs: + default: + account: %ACCOUNT + client_session_keep_alive: false + database: %DATABASE + password: %PASSWORD + role: %ROLE + schema: %SCHEMA + threads: 4 + type: snowflake + user: %USER + warehouse: %WAREHOUSE + target: default diff --git a/tests/e2e/fixtures/test-project/__init__.py b/tests/e2e/fixtures/test-project/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/e2e/fixtures/test-project/dbt_project.yml b/tests/e2e/fixtures/test-project/dbt_project.yml new file mode 100644 index 0000000..1fd5e40 --- /dev/null +++ b/tests/e2e/fixtures/test-project/dbt_project.yml @@ -0,0 +1,27 @@ + +name: 'jaffle_shop' +version: 2.0 + +config-version: 2 +profile: 'user' +source-paths: ["models"] +analysis-paths: ["analysis"] +test-paths: ["tests"] +data-paths: ["data"] +macro-paths: ["macros"] + + +target-path: "target" +clean-targets: + - "target" + - "dbt_modules" + - "logs" + +models: + jaffle_shop: + materialized: table + staging: + materialized: view + +vars: + test_var: '2022-03-15T10:00:00' diff --git a/tests/e2e/fixtures/test-project/manifest.msgpack b/tests/e2e/fixtures/test-project/manifest.msgpack new file mode 100644 index 0000000..ed9bc74 Binary files /dev/null and b/tests/e2e/fixtures/test-project/manifest.msgpack differ diff --git a/tests/e2e/helpers.py b/tests/e2e/helpers.py index db09735..45bd7de 100644 --- a/tests/e2e/helpers.py +++ b/tests/e2e/helpers.py @@ -1,17 +1,53 @@ -import contextlib import os -import dbt +from importlib import util +from unittest import TestCase +DBT_POSTGRES_PACKAGE_NAME = "dbt.adapters.postgres" +DBT_SNOWFLAKE_PACKAGE_NAME = "dbt.adapters.snowflake" -@contextlib.contextmanager -def profiles_dir(profiles_yml_dir): - original_value = dbt.flags.PROFILES_DIR - original_env_var = os.environ.get("DBT_PROFILES_DIR", "") - dbt.flags.PROFILES_DIR = profiles_yml_dir - os.environ["DBT_PROFILES_DIR"] = profiles_yml_dir +def set_dbt_working_dir_env(working_dir: str): + os.environ["__DBT_WORKING_DIR"] = working_dir - yield - dbt.flags.PROFILES_DIR = original_value - os.environ["DBT_PROFILES_DIR"] = original_env_var +def set_dbt_profiles_dir_env(profiles_dir: str): + os.environ["DBT_PROFILES_DIR"] = profiles_dir + + +class 
DbtCoreTestBase(TestCase): + """A base class to set up local dbt-core environments and clean them up after + tests. + Example: + from tests.e2e.fixtures import Profiles + class DerivedTest(DbtCoreTestBase): + def setUp(self) -> None: + self.set_envs("working_dir", Profiles.Postgres) + + def tearDown(self) -> None: + # If you don't explicitly define tearDown in the derived class, + # DbtCoreTestBase.tearDown() will be executed automatically. + super().tearDown() + """ + + def set_envs(self, working_dir, profiles_dir) -> None: + set_dbt_working_dir_env(working_dir) + set_dbt_profiles_dir_env(profiles_dir) + + def tearDown(self) -> None: + del os.environ["__DBT_WORKING_DIR"] + del os.environ["DBT_PROFILES_DIR"] + + +def _is_package_installed(package_name: str): + """Returns whether `package_name` is installed in the python env.""" + return util.find_spec(package_name) is not None + + +def miss_postgres_adaptor_package(): + """Returns true if the postgres adaptor isn't installed in the python env.""" + return not _is_package_installed(DBT_POSTGRES_PACKAGE_NAME) + + +def miss_snowflake_adaptor_package(): + """Returns true if the snowflake adaptor isn't installed in the python env.""" + return not _is_package_installed(DBT_SNOWFLAKE_PACKAGE_NAME) diff --git a/tests/e2e/local_run.py b/tests/e2e/local_run.py new file mode 100644 index 0000000..d050055 --- /dev/null +++ b/tests/e2e/local_run.py @@ -0,0 +1,67 @@ +# Similar to smoke_test.py, this script sends commands to your local +# dbt-server for testing purposes. +# +# Example run: +# # Under root directory. +# python3 -m tests.e2e.local_run --local_task_db_path=working-dir/sql_app.db \ +# --dbt_local_server_port=8580 --dbt_project_path=tests/e2e/fixtures/jaffle_shop +# +from absl import app +from absl import flags +import logging +from dbt_server.models import TaskState +from tests.e2e.smoke_test_utils import DbtServerSmokeTest +from tests.e2e.smoke_test_utils import read_testcase_file + +flags.DEFINE_integer( + "command_exec_timeout_seconds", + 60, + "How many seconds to wait after a command is issued. The test fails if " + "a timeout happens.", +) + +flags.DEFINE_integer( + "dbt_local_server_port", 8580, "Local dbt-server port for testing." +) + +flags.DEFINE_string( + "local_task_db_path", + None, + "A local path pointing to the dbt-server sqlite db file; task status will be " "checked.", +) + +flags.DEFINE_string( + "testcase_path", "tests/e2e/testcases/local_run.txt", "Testcase file path." +) + +flags.DEFINE_string("dbt_project_path", None, "Dbt project path.") + +flags.mark_flag_as_required("local_task_db_path") +flags.mark_flag_as_required("dbt_project_path") + + +def main(argv): + del argv # Unused. + dbt_local_server_port = flags.FLAGS.dbt_local_server_port + local_task_db_path = flags.FLAGS.local_task_db_path + dbt_project_path = flags.FLAGS.dbt_project_path + testcase_path = flags.FLAGS.testcase_path + command_exec_timeout_seconds = flags.FLAGS.command_exec_timeout_seconds + + smoke_test = DbtServerSmokeTest( + dbt_project_path, dbt_local_server_port, local_task_db_path + ) + # Manually parse the project first.
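+ # parse() posts the project path to the server's /parse endpoint, so the + # manifest is built before any async commands are issued.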
+ smoke_test.parse() + commands = read_testcase_file(testcase_path) + for command in commands: + try: + smoke_test.run_async_testcase( + command.split(), TaskState.FINISHED, command_exec_timeout_seconds + ) + except Exception as e: + logging.error(str(e)) + + +if __name__ == "__main__": + app.run(main) diff --git a/tests/e2e/smoke_test.py b/tests/e2e/smoke_test.py new file mode 100644 index 0000000..6de5909 --- /dev/null +++ b/tests/e2e/smoke_test.py @@ -0,0 +1,285 @@ +# Script that sends http requests to a local dbt-server for testing purposes. +# - It will set up the dbt-server and dbt project; the user needs to install the +# correct dbt-core package, adaptor packages and environment (e.g. a local postgres db). +# - Environment variables for the data warehouse connection are required to run +# the tests; if they are not specified, the tests will be skipped. See the test +# comments below for the required env vars. +# - Requests are sent one by one; concurrency is not exercised. +# TODO(dichen): switch to the async endpoint for parse. +# TODO(dichen): until the bug is fixed, we must run dbt deps before sending the +# init parse command; hence, for the short term, we checked in dbt_packages +# fixtures, but we should remove them later. +# What can be done in the future? +# - More commands for other use cases. +# - Switch the parse endpoint to the async endpoint. +# - Test other endpoints, e.g. the sync endpoint. +# +# Example run: +# # Under root directory. +# pytest -v tests/e2e/smoke_test.py + +from dbt_server.models import TaskState +import logging +import os +from os import path +import pytest +from shutil import rmtree +from tempfile import TemporaryDirectory +from tests.e2e.smoke_test_utils import copy_jaffle_shop_fixture +from tests.e2e.smoke_test_utils import DbtServerSmokeTest +from tests.e2e.smoke_test_utils import TESTING_FIXTURE +from tests.e2e.smoke_test_utils import parse_placeholder_string +from tests.e2e.smoke_test_utils import replace_placeholders +from tests.e2e.smoke_test_utils import read_testcase_file +from time import time +from time import sleep +from threading import Thread +from unittest import TestCase +import uvicorn + +# +# Required env vars for different adaptors. If env vars are missing for a specific +# adaptor, its tests will be skipped. +# + +POSTGRES_PROFILE_PLACEHOLDER_REFERENCE = { + "%USER": os.getenv("SMOKE_TEST_POSTGRES_USER", ""), + "%PASSWORD": os.getenv("SMOKE_TEST_POSTGRES_PASSWORD", ""), + "%PORT": os.getenv("SMOKE_TEST_POSTGRES_PORT", "5432"), +} + +SNOWFLAKE_PROFILE_PLACEHOLDER_REFERENCE = { + "%ACCOUNT": os.getenv("SMOKE_TEST_SNOWFLAKE_ACCOUNT", ""), + "%DATABASE": os.getenv("SMOKE_TEST_SNOWFLAKE_DATABASE", ""), + "%PASSWORD": os.getenv("SMOKE_TEST_SNOWFLAKE_PASSWORD", ""), + "%ROLE": os.getenv("SMOKE_TEST_SNOWFLAKE_ROLE", ""), + "%SCHEMA": os.getenv("SMOKE_TEST_SNOWFLAKE_SCHEMA", ""), + "%USER": os.getenv("SMOKE_TEST_SNOWFLAKE_USER", ""), + "%WAREHOUSE": os.getenv("SMOKE_TEST_SNOWFLAKE_WAREHOUSE", ""), +} + + +# The state directory holds a manifest.json file to be used as --state.
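+# (dbt's --state flag expects a directory of previously generated artifacts, +# such as manifest.json, which state-based selection compares against.)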
+JAFFLE_SHOP_STATE_DIR = "init_target" +PROFILE_YML = "profiles.yml" +POSTGRES_PROFILES_TEMPLATE_PATH = "profiles/postgres/profiles_template.yml" +SNOWFLAKE_PROFILES_TEMPLATE_PATH = "profiles/snowflake/profiles_template.yml" + +TEST_LOCAL_DBT_SERVER_PORT = 8580 +TEST_LOCAL_TASK_DB_PATH = "working-dir/sql_app.db" +TEST_LOCAL_STATE_ID_FILE = "working-dir/latest-state-id.txt" +TEST_LOCAL_PROJECT_PATH_FILE = "working-dir/latest-project-path.txt" + +DBT_SERVER_WAIT_SECONDS = 5 + + +dbt_server_start_timestamp_seconds = None + + +def _start_dbt_server(): + """Starts the dbt server locally.""" + # If the state file or project path file exists, dbt-server will try to + # initialize its local manifest cache, which may cause dbt-server to crash. + # We are not able to configure those file paths, hence we just delete them + # now. + # TODO(dichen): consider introducing a flag to control those file paths. + if os.path.isfile(TEST_LOCAL_STATE_ID_FILE): + os.remove(TEST_LOCAL_STATE_ID_FILE) + if os.path.isfile(TEST_LOCAL_PROJECT_PATH_FILE): + os.remove(TEST_LOCAL_PROJECT_PATH_FILE) + # Store the start timestamp to make sure the server is ready before sending + # out any commands. + global dbt_server_start_timestamp_seconds + dbt_server_start_timestamp_seconds = time() + logging.info(f"Starting dbt-server locally, port = {TEST_LOCAL_DBT_SERVER_PORT}") + uvicorn.run( + "dbt_server.server:app", port=TEST_LOCAL_DBT_SERVER_PORT, loop="asyncio" + ) + + +def start_dbt_server() -> None: + # Run the server in a daemon thread so it exits with the test process. + Thread(target=_start_dbt_server, daemon=True).start() + + +start_dbt_server() + + +class TestJaffleShopBase(TestCase): + def wait_dbt_server(self): + """Waits until the dbt server is ready. It blocks the current + thread.""" + logging.info("Waiting for dbt-server setup.") + while True: + now = time() + global dbt_server_start_timestamp_seconds + if ( + dbt_server_start_timestamp_seconds + and now > dbt_server_start_timestamp_seconds + DBT_SERVER_WAIT_SECONDS + ): + logging.info("dbt-server is ready.") + return + sleep(DBT_SERVER_WAIT_SECONDS) + + def materialize_profiles_yml(self) -> None: + """Materializes profiles.yml in the testing folder.
+ Derived classes should override this function.""" + raise NotImplementedError("Not implemented.") + + def setUp(self) -> None: + self.temp_dir = TemporaryDirectory().name + copy_jaffle_shop_fixture(self.temp_dir) + self.materialize_profiles_yml() + self.smoke_test = DbtServerSmokeTest( + self.temp_dir, TEST_LOCAL_DBT_SERVER_PORT, TEST_LOCAL_TASK_DB_PATH + ) + self.wait_dbt_server() + + def write_profile(self, profile_content: str): + """Writes `profile_content` to the profile path.""" + with open(path.join(self.temp_dir, PROFILE_YML), "w") as output_file: + output_file.write(profile_content) + + def tearDown(self) -> None: + rmtree(self.temp_dir) + + def get_jaffle_shop_override_placeholder_reference(self): + """Returns the common placeholder reference for the jaffle shop testing project.""" + return { + "%STATE_DIR": f"{self.temp_dir}/{JAFFLE_SHOP_STATE_DIR}", + "%SELECTOR": "test_selector", + "%VARIABLE": "test_var: 1", + "%MODEL": "customers", + "%MACRO_NAME": "test_macro", + "%MACRO_ARGS": "int_value: 1", + } + + +@pytest.mark.skipif( + "" in POSTGRES_PROFILE_PLACEHOLDER_REFERENCE.values(), + reason=f"""Smoke test for postgres adaptor requires env vars set for placeholders = { + str(list(POSTGRES_PROFILE_PLACEHOLDER_REFERENCE.keys()))}""", +) +class TestJaffleShopPostgresBase(TestJaffleShopBase): + def materialize_profiles_yml(self) -> None: + with open( + path.join(TESTING_FIXTURE, POSTGRES_PROFILES_TEMPLATE_PATH), "r" + ) as template_file: + self.write_profile( + parse_placeholder_string( + POSTGRES_PROFILE_PLACEHOLDER_REFERENCE, template_file.read() + ) + ) + + def setUp(self) -> None: + super().setUp() + # Trigger a parse command to set up the project path. + self.smoke_test.parse(self.temp_dir) + + def tearDown(self) -> None: + super().tearDown() + + +@pytest.mark.skipif( + "" in SNOWFLAKE_PROFILE_PLACEHOLDER_REFERENCE.values(), + reason=f"""Smoke test for snowflake adaptor requires env vars set for placeholders = { + str(list(SNOWFLAKE_PROFILE_PLACEHOLDER_REFERENCE.keys()))}""", +) +class TestJaffleShopSnowflakeBase(TestJaffleShopBase): + def materialize_profiles_yml(self) -> None: + with open( + path.join(TESTING_FIXTURE, SNOWFLAKE_PROFILES_TEMPLATE_PATH), "r" + ) as template_file: + self.write_profile( + parse_placeholder_string( + SNOWFLAKE_PROFILE_PLACEHOLDER_REFERENCE, template_file.read() + ) + ) + + def setUp(self) -> None: + super().setUp() + # Trigger a parse command to set up the project path. + self.smoke_test.parse(self.temp_dir) + + def tearDown(self) -> None: + super().tearDown() + + +@pytest.mark.skip("Test is slow. To run it manually, comment out this pytest " "skip mark.") +# TODO(dichen): Consider adding pytest marks to testcases so we can group them +# together.
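+# TestIde replays the IDE command testcases (success and failure lists) against +# the running server; the concrete subclasses below bind it to a warehouse profile.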
+class TestIde(TestJaffleShopBase): + IDE_COMMAND_SUCCESS_FILE = "tests/e2e/testcases/ide_commands.txt" + IDE_COMMAND_FAILURE_FILE = "tests/e2e/testcases/ide_commands_failure.txt" + + def _test_success(self) -> None: + success_commands = read_testcase_file(self.IDE_COMMAND_SUCCESS_FILE) + for command in success_commands: + command_list = command.split() + replace_placeholders( + self.get_jaffle_shop_override_placeholder_reference(), command_list + ) + self.smoke_test.run_async_testcase(command_list, TaskState.FINISHED) + + def _test_error(self) -> None: + failure_commands = read_testcase_file(self.IDE_COMMAND_FAILURE_FILE) + for command in failure_commands: + command_list = command.split() + replace_placeholders( + self.get_jaffle_shop_override_placeholder_reference(), command_list + ) + self.smoke_test.run_async_testcase(command_list, TaskState.ERROR) + + +class TestIdePostgres(TestJaffleShopPostgresBase, TestIde): + def test_success(self) -> None: + self._test_success() + + def test_error(self) -> None: + self._test_error() + + +class TestIdeSnowflake(TestJaffleShopSnowflakeBase, TestIde): + def test_success(self) -> None: + self._test_success() + + def test_error(self) -> None: + self._test_error() + + +class TestSimple(TestJaffleShopBase): + SIMPLE_COMMAND_SUCCESS_FILE = "tests/e2e/testcases/simple_commands.txt" + SIMPLE_COMMAND_FAILURE_FILE = "tests/e2e/testcases/simple_commands_failure.txt" + + def _test_success(self) -> None: + success_commands = read_testcase_file(self.SIMPLE_COMMAND_SUCCESS_FILE) + for command in success_commands: + command_list = command.split() + replace_placeholders( + self.get_jaffle_shop_override_placeholder_reference(), command_list + ) + self.smoke_test.run_async_testcase(command_list, TaskState.FINISHED) + + def _test_error(self) -> None: + failure_commands = read_testcase_file(self.SIMPLE_COMMAND_FAILURE_FILE) + for command in failure_commands: + command_list = command.split() + replace_placeholders( + self.get_jaffle_shop_override_placeholder_reference(), command_list + ) + self.smoke_test.run_async_testcase(command_list, TaskState.ERROR) + + +class TestSimplePostgres(TestJaffleShopPostgresBase, TestSimple): + def test_success(self) -> None: + self._test_success() + + def test_error(self) -> None: + self._test_error() + + +class TestSimpleSnowflake(TestJaffleShopSnowflakeBase, TestSimple): + def test_success(self) -> None: + self._test_success() + + def test_error(self) -> None: + self._test_error() diff --git a/tests/e2e/smoke_test_utils.py b/tests/e2e/smoke_test_utils.py new file mode 100644 index 0000000..a16a7a5 --- /dev/null +++ b/tests/e2e/smoke_test_utils.py @@ -0,0 +1,185 @@ +from dbt_server.models import Task +from dbt_server.models import TaskState +from sqlalchemy import create_engine +from sqlalchemy import select +import logging +from requests import post +from shutil import copytree +from os import path +from tests.e2e.helpers import set_dbt_profiles_dir_env +from time import time +from time import sleep +from typing import List + +LOCAL_URL = "http://127.0.0.1" +# How often we poll the task db for task status. +POLLING_SECONDS = 1 + +TESTING_FIXTURE = "tests/e2e/fixtures" +JAFFLE_SHOP_DIR = "jaffle_shop" + +ASYNC_DBT_URL = "async/dbt" + + +class DbtServerSmokeTest: + """DbtServerSmokeTest provides testing helpers for interacting with and + verifying dbt-server on behalf of the caller.
+ + This class is thread-safe.""" + + def __init__( + self, dbt_project_dir: str, dbt_local_server_port: int, local_task_db_path: str + ): + """Initializes the smoke test environment. + + Args: + dbt_project_dir: directory of the dbt project. + dbt_local_server_port: local dbt-server port. + local_task_db_path: path to the local task sqlite db file.""" + self.dbt_project_dir = dbt_project_dir + self.dbt_local_server_port = dbt_local_server_port + self.task_db_engine = create_engine( + f"sqlite:///{local_task_db_path}", + connect_args={"check_same_thread": False}, + ) + + def post_request(self, url_path: str, body_obj: dict) -> dict: + """Sends an HTTP POST request to the local dbt server with the given + `url_path` and JSON `body_obj`; the content type will always be JSON. + Returns the HTTP response JSON body. + + Args: + url_path: path on the local dbt server; DON'T add a leading /. + Correct example: async/get. + body_obj: dict of the json body. + + Raises: + Exception: if the response status code is not 200.""" + url = f"{LOCAL_URL}:{self.dbt_local_server_port}/{url_path}" + resp = post(url, json=body_obj) + if resp.status_code != 200: + logging.error(f"Request to {url_path} failed {resp.status_code}.") + logging.error(f"body = {str(body_obj)}") + raise Exception(f"Request failure with code = {resp.status_code}") + return resp.json() + + def parse(self, profile_dir: str = None) -> None: + """Sends an HTTP POST request to make the dbt server parse the dbt + project. + + Args: + profile_dir: directory of profiles.yml; if set, the env + var will be set and dbt-server will try to read profiles from + it. + + Raises: + Exception: if the response status code is not 200.""" + if profile_dir: + set_dbt_profiles_dir_env(profile_dir) + self.post_request("parse", {"project_path": self.dbt_project_dir}) + + def wait_async_exec( + self, task_id: str, command_exec_timeout_seconds: int = 60 + ) -> TaskState: + """Waits for the task with `task_id` to finish. Returns the task's + final state. Raises Exception if the task is not finished within the + given timeout. + + Args: + task_id: string task id; we look up the task's status in the task + db by this id. + command_exec_timeout_seconds: timeout seconds for each command.""" + start_timestamp_seconds = time() + while time() < start_timestamp_seconds + command_exec_timeout_seconds: + stmt = select(Task).where(Task.task_id == task_id) + with self.task_db_engine.connect() as conn: + tasks = list(conn.execute(stmt)) + if len(tasks) == 1 and tasks[0].state in [ + TaskState.FINISHED, + TaskState.ERROR, + ]: + return tasks[0].state + sleep(POLLING_SECONDS) + raise Exception( + f"Task {task_id} is not finished after {command_exec_timeout_seconds}s" + ) + + def run_async_testcase( + self, + command_list: List[str], + expected_db_task_status: TaskState, + command_exec_timeout_seconds: int = 60, + ) -> None: + """Sends a POST request to the async endpoint with `command_list` as + the request body. Raises an Exception if execution times out after + `command_exec_timeout_seconds` seconds, if the task status is not + `expected_db_task_status`, or if the task finished but the local log + file doesn't exist. + + Args: + command_list: list of command strings, e.g. [build, -h]. + expected_db_task_status: the task state we expect.
+    def run_async_testcase(
+        self,
+        command_list: List[str],
+        expected_db_task_status: TaskState,
+        command_exec_timeout_seconds: int = 60,
+    ) -> None:
+        """Sends a post request to the async endpoint with `command_list` as
+        the request body. Raises an Exception if execution times out after
+        `command_exec_timeout_seconds` seconds, if the task status is not
+        `expected_db_task_status`, or if the task finished but the local log
+        file doesn't exist.
+
+        Args:
+            command_list: list of command strings, e.g. [build, -h].
+            expected_db_task_status: expected task state.
+            command_exec_timeout_seconds: per-command timeout in seconds."""
+        logging.info(
+            f"Start async test case {str(command_list)}, expect {expected_db_task_status}"
+        )
+        resp = self.post_request(ASYNC_DBT_URL, {"command": command_list})
+        task_id = resp["task_id"]
+        task_status = self.wait_async_exec(task_id, command_exec_timeout_seconds)
+        if task_status != expected_db_task_status:
+            raise Exception(
+                f'Error task_id={task_id}, status != {expected_db_task_status}, error = {resp["error"]}'
+            )
+        # Ensure the log file is created when the task finished, but don't
+        # check its contents.
+        if task_status == TaskState.FINISHED and not path.isfile(
+            path.join(self.dbt_project_dir, resp["log_path"])
+        ):
+            raise Exception(f"Can't find log file for task_id={task_id}")
+
+
+def copy_jaffle_shop_fixture(dir: str):
+    """Copies the jaffle shop test fixture to `dir`."""
+    copytree(path.join(TESTING_FIXTURE, JAFFLE_SHOP_DIR), dir, dirs_exist_ok=True)
+
+
+def read_testcase_file(file_path: str) -> List[str]:
+    """Reads a testcase file, filtering out empty lines and comment lines
+    that start with #."""
+    with open(file_path, "r") as f:
+        return list(
+            filter(
+                lambda line: (not line.startswith("#")) and line,
+                [line.strip() for line in f.readlines()],
+            )
+        )
+
+
+def parse_placeholder_string(placeholder_reference: dict, input_string: str) -> str:
+    """Parses the input `input_string` based on `placeholder_reference` and
+    returns the parsed string.
+
+    Args:
+        placeholder_reference: maps placeholder name to replacement value;
+            occurrences of each key in the input string are replaced with
+            its value.
+        input_string: string that may contain placeholders.
+
+    Example:
+        parse_placeholder_string({"%key": "value"}, "command %key") will
+        return "command value"."""
+    for placeholder, value in placeholder_reference.items():
+        input_string = input_string.replace(placeholder, value)
+    return input_string
+
+
+def replace_placeholders(
+    placeholder_reference: dict, input_string_list: List[str]
+) -> None:
+    """Similar to parse_placeholder_string, but replaces placeholders in
+    `input_string_list` in place, based on `placeholder_reference`.
+
+    Example:
+        replace_placeholders({"%key": "value"}, ["command", "%key"]) will
+        make input_string_list = ["command", "value"]."""
+
+    for index in range(len(input_string_list)):
+        for placeholder, value in placeholder_reference.items():
+            input_string_list[index] = input_string_list[index].replace(
+                placeholder, value
+            )
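A quick round-trip of the file and placeholder helpers above; the testcase path and placeholder map here are illustrative, not files that ship with this PR:

# Hypothetical file tests/e2e/testcases/example.txt containing:
#   # a comment line, filtered out
#   build -s %MODEL
commands = read_testcase_file("tests/e2e/testcases/example.txt")  # ["build -s %MODEL"]
command_list = commands[0].split()
replace_placeholders({"%MODEL": "model_1"}, command_list)  # mutates the list in place
assert command_list == ["build", "-s", "model_1"]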
diff --git a/tests/e2e/test_compilation.py b/tests/e2e/test_compilation.py
index 5ca0579..702561e 100644
--- a/tests/e2e/test_compilation.py
+++ b/tests/e2e/test_compilation.py
@@ -1,31 +1,48 @@
 import re
 from fastapi.testclient import TestClient
-from unittest import TestCase
 from dbt_server.server import app
-from .helpers import profiles_dir
-from .fixtures import simple, simple2, invalid, Profiles
+from tests.e2e.fixtures import simple, simple2, invalid, Profiles
+from tests.e2e.helpers import DbtCoreTestBase
+from tests.e2e.helpers import miss_postgres_adaptor_package
 import hashlib
 import json
+import pytest
 import tempfile
-import os
 
 client = TestClient(app)
 
+# Profile name; must match the profiles file.
+TEST_PROFILE = "user"
 
-class ManifestBuildingTestCase(TestCase):
-    def setUp(self):
+
+@pytest.mark.skipif(
+    miss_postgres_adaptor_package(), reason="This test requires dbt-postgres installed."
+)
+class ManifestBuildingTestBase(DbtCoreTestBase):
+    """ManifestBuildingTestBase provides helpers for exercising the parse,
+    compile, and push endpoints with predefined profiles files in a real
+    environment.
+
+    Notice: the dbt-postgres package must be installed for these tests to
+    run successfully.
+    """
+
+    def setUp(self, profiles_dir):
         # Override working-dir path to keep things clean in dev...
         self.temp_dir = tempfile.TemporaryDirectory()
-        os.environ["__DBT_WORKING_DIR"] = self.temp_dir.name
+        self.set_envs(self.temp_dir.name, profiles_dir)
 
     def tearDown(self):
+        super().tearDown()
         self.temp_dir.cleanup()
-        del os.environ["__DBT_WORKING_DIR"]
 
-    @classmethod
-    def push_fixture_data(cls, file_dict):
+    def push_fixture_data(self, file_dict):
+        """Calls the dbt server push endpoint to push `file_dict`.
+
+        Args:
+            file_dict: key is the file path, value is the file content.
+        """
         manifest = {
             key: {
                 "contents": value,
@@ -42,20 +59,24 @@ def push_fixture_data(cls, file_dict):
 
         return response
 
-    @classmethod
-    def parse_fixture_data(cls, state_id):
+    def parse_fixture_data(self, profile, state_id):
+        """Calls the dbt server parse endpoint to parse previously pushed
+        fixture data identified by `state_id`, using `profile`.
+        """
         response = client.post(
             "/parse",
             json={
                 "state_id": state_id,
-                "profile": "user",
+                "profile": profile,
             },
         )
 
         return response
 
-    @classmethod
-    def compile_against_state(cls, state_id, sql):
+    def compile_against_state(self, state_id, sql):
+        """Calls the dbt server compile endpoint to compile the given `sql`
+        against the fixture specified by `state_id`.
+        """
         response = client.post(
             "/compile",
             json={
@@ -67,26 +88,24 @@
 
         return response
 
-class ValidManifestBuildingTestCase(ManifestBuildingTestCase):
+class TestManifestBuildingPostgres(ManifestBuildingTestBase):
     def setUp(self):
-        super().setUp()
-        # Stub out profiles.yml file
-        with profiles_dir(Profiles.Postgres):
-            # Push project code
-            resp_push = self.push_fixture_data(simple.FILES)
-            self.assertEqual(resp_push.status_code, 200)
-            data = resp_push.json()
-            self.state_id = data["state"]
-
-            # parse project code
-            resp_parse = self.parse_fixture_data(self.state_id)
-            self.assertEqual(resp_parse.status_code, 200)
+        super().setUp(Profiles.Postgres)
+
+        # Push project code
+        resp_push = self.push_fixture_data(simple.FILES)
+        self.assertEqual(resp_push.status_code, 200)
+        data = resp_push.json()
+        self.state_id = data["state"]
+
+        # parse project code
+        resp_parse = self.parse_fixture_data(TEST_PROFILE, self.state_id)
+        self.assertEqual(resp_parse.status_code, 200)
 
     def test_valid_query(self):
         # Compile a query with state
         valid_query = "select {{ 1 + 1 }}"
-        with profiles_dir(Profiles.Postgres):
-            resp = self.compile_against_state(self.state_id, valid_query)
+        resp = self.compile_against_state(self.state_id, valid_query)
         data = resp.json()
         self.assertEqual(resp.status_code, 200)
         self.assertEqual(data["compiled_code"], "select 2")
@@ -94,8 +113,7 @@ def test_valid_query(self):
     def test_valid_query_implicit_state(self):
         # Compile a query with implicit latest state
         valid_query = "select {{ 2 + 2 }}"
-        with profiles_dir(Profiles.Postgres):
-            resp = self.compile_against_state(None, valid_query)
+        resp = self.compile_against_state(None, valid_query)
         data = resp.json()
         self.assertEqual(resp.status_code, 200)
         self.assertEqual(data["compiled_code"], "select 4")
@@ -103,8 +121,7 @@ def test_valid_model_reference(self):
         # Compile a query which references an existing model
         valid_query = "select * from {{ ref('model_1') }}"
-        with profiles_dir(Profiles.Postgres):
-            resp = self.compile_against_state(self.state_id, valid_query)
+        resp = 
self.compile_against_state(self.state_id, valid_query) data = resp.json() self.assertEqual(resp.status_code, 200) compiled = 'select * from "analytics"."analytics"."model_1"' @@ -113,8 +130,7 @@ def test_valid_model_reference(self): def test_invalid_query_python_error(self): # Compile a query which results in a python error invalid_query = "select {{ 1 / 0 }}" - with profiles_dir(Profiles.Postgres): - resp = self.compile_against_state(self.state_id, invalid_query) + resp = self.compile_against_state(self.state_id, invalid_query) data = resp.json() self.assertEqual(resp.status_code, 400) self.assertEqual(data["message"], "division by zero") @@ -122,8 +138,7 @@ def test_invalid_query_python_error(self): def test_invalid_query_dbt_compilation_error(self): # Compile a query which results in a dbt compilation error invalid_query = "select * from {{ ref('not_a_model') }}" - with profiles_dir(Profiles.Postgres): - resp = self.compile_against_state(self.state_id, invalid_query) + resp = self.compile_against_state(self.state_id, invalid_query) data = resp.json() self.assertEqual(resp.status_code, 400) assert bool(re.match("compilation error", data["message"], re.I)) @@ -131,8 +146,7 @@ def test_invalid_query_dbt_compilation_error(self): def test_valid_query_call_macro(self): # Compile a query that calls a dbt user-space macro valid_macro_query = "select '{{ my_new_project.my_macro('josh wills') }}'" - with profiles_dir(Profiles.Postgres): - resp = self.compile_against_state(self.state_id, valid_macro_query) + resp = self.compile_against_state(self.state_id, valid_macro_query) self.assertEqual(resp.status_code, 200) data = resp.json() assert "compiled_code" in data @@ -140,8 +154,7 @@ def test_valid_query_call_macro(self): def test_invalid_query_call_macro(self): valid_macro_query = "select '{{ my_macro(unexpected=true) }}'" - with profiles_dir(Profiles.Postgres): - resp = self.compile_against_state(self.state_id, valid_macro_query) + resp = self.compile_against_state(self.state_id, valid_macro_query) self.assertEqual(resp.status_code, 400) data = resp.json() self.maxDiff = None @@ -158,24 +171,26 @@ def test_cached_compilation(self): # # This test ensures that this property is accessible on the cached # manifest - with profiles_dir(Profiles.Postgres): - resp_push = self.push_fixture_data(simple.FILES) - self.assertEqual(resp_push.status_code, 200) - data = resp_push.json() - state_id = data["state"] + resp_push = self.push_fixture_data(simple.FILES) + self.assertEqual(resp_push.status_code, 200) + data = resp_push.json() + state_id = data["state"] - resp_parse = self.parse_fixture_data(state_id) - self.assertEqual(resp_parse.status_code, 200) + resp_parse = self.parse_fixture_data(TEST_PROFILE, state_id) + self.assertEqual(resp_parse.status_code, 200) - valid_macro_query = "select '{{ graph.nodes.values() }}'" - resp = self.compile_against_state(state_id, valid_macro_query) + valid_macro_query = "select '{{ graph.nodes.values() }}'" + resp = self.compile_against_state(state_id, valid_macro_query) + + self.assertEqual(resp.status_code, 200) + data = resp.json() + assert "compiled_code" in data - self.assertEqual(resp.status_code, 200) - data = resp.json() - assert "compiled_code" in data +class CodeChangeTestCase(ManifestBuildingTestBase): + def setUp(self): + super().setUp(Profiles.Postgres) -class CodeChangeTestCase(ManifestBuildingTestCase): def test_changing_code(self): """ This test exists to ensure that manifest/config caching does not prevent callers @@ -183,81 +198,81 @@ def 
test_changing_code(self): While only one of these states will be cached in memory at each time, callers should be able to compile queries against a state of their choosing in arbitrary order. """ - with profiles_dir(Profiles.Postgres): - # Push project code (first project) - resp_push = self.push_fixture_data(simple.FILES) - self.assertEqual(resp_push.status_code, 200) - data = resp_push.json() - state_id_1 = data["state"] - - # parse project code - resp_parse = self.parse_fixture_data(state_id_1) - self.assertEqual(resp_parse.status_code, 200) - - # Compile a query with state - valid_query = "select * from {{ ref('model_1') }}" - resp = self.compile_against_state(state_id_1, valid_query) - data = resp.json() - self.assertEqual(resp.status_code, 200) - self.assertEqual( - data["compiled_code"], 'select * from "analytics"."analytics"."model_1"' - ) - - # ------- reparse with different code -------# - - # Push project code (second project) - resp_push = self.push_fixture_data(simple2.FILES) - self.assertEqual(resp_push.status_code, 200) - data = resp_push.json() - state_id_2 = data["state"] - - # parse project code - resp_parse = self.parse_fixture_data(state_id_2) - self.assertEqual(resp_parse.status_code, 200) - - # Compile a query with state - valid_query = "select * from {{ ref('model_1') }}" - resp = self.compile_against_state(state_id_2, valid_query) - data = resp.json() - self.assertEqual(resp.status_code, 200) - self.assertEqual( - data["compiled_code"], 'select * from "analytics"."analytics"."model_1"' - ) - - assert state_id_1 != state_id_2 - - # ------- compile with initial state-------# - - valid_query = "select * from {{ ref('model_1') }}" - resp = self.compile_against_state(state_id_1, valid_query) - data = resp.json() - self.assertEqual(resp.status_code, 200) - self.assertEqual( - data["compiled_code"], 'select * from "analytics"."analytics"."model_1"' - ) - - -class InvalidManifestBuildingTestCase(ManifestBuildingTestCase): + # Push project code (first project) + resp_push = self.push_fixture_data(simple.FILES) + self.assertEqual(resp_push.status_code, 200) + data = resp_push.json() + state_id_1 = data["state"] + + # parse project code + resp_parse = self.parse_fixture_data(TEST_PROFILE, state_id_1) + self.assertEqual(resp_parse.status_code, 200) + + # Compile a query with state + valid_query = "select * from {{ ref('model_1') }}" + resp = self.compile_against_state(state_id_1, valid_query) + data = resp.json() + self.assertEqual(resp.status_code, 200) + self.assertEqual( + data["compiled_code"], 'select * from "analytics"."analytics"."model_1"' + ) + + # ------- reparse with different code -------# + + # Push project code (second project) + resp_push = self.push_fixture_data(simple2.FILES) + self.assertEqual(resp_push.status_code, 200) + data = resp_push.json() + state_id_2 = data["state"] + + # parse project code + resp_parse = self.parse_fixture_data(TEST_PROFILE, state_id_2) + self.assertEqual(resp_parse.status_code, 200) + + # Compile a query with state + valid_query = "select * from {{ ref('model_1') }}" + resp = self.compile_against_state(state_id_2, valid_query) + data = resp.json() + self.assertEqual(resp.status_code, 200) + self.assertEqual( + data["compiled_code"], 'select * from "analytics"."analytics"."model_1"' + ) + + assert state_id_1 != state_id_2 + + # ------- compile with initial state-------# + + valid_query = "select * from {{ ref('model_1') }}" + resp = self.compile_against_state(state_id_1, valid_query) + data = resp.json() + 
self.assertEqual(resp.status_code, 200) + self.assertEqual( + data["compiled_code"], 'select * from "analytics"."analytics"."model_1"' + ) + + +class InvalidManifestBuildingTestCase(ManifestBuildingTestBase): + def setUp(self): + super().setUp(Profiles.Postgres) + def test_compilation_with_invalid_manifest(self): - # Stub out profiles.yml file - with profiles_dir(Profiles.Postgres): - # Push project code - resp_push = self.push_fixture_data(invalid.FILES) - self.assertEqual(resp_push.status_code, 200) - data = resp_push.json() - state_id = data["state"] - - # parse project code - resp_parse = self.parse_fixture_data(state_id) - - self.assertEqual(resp_parse.status_code, 400) - data = resp_parse.json() - self.assertTrue(bool(re.match("compilation error", data["message"], re.I))) - - valid_query = "select {{ 1 + 1 }}" - resp = self.compile_against_state(state_id, valid_query) - data = resp.json() - self.assertEqual(resp.status_code, 422) - self.assertTrue( - data["message"].startswith("[Errno 2] No such file or directory") - ) + # Push project code + resp_push = self.push_fixture_data(invalid.FILES) + self.assertEqual(resp_push.status_code, 200) + data = resp_push.json() + state_id = data["state"] + + # parse project code + resp_parse = self.parse_fixture_data(TEST_PROFILE, state_id) + + self.assertEqual(resp_parse.status_code, 400) + data = resp_parse.json() + self.assertTrue(bool(re.match("compilation error", data["message"], re.I))) + + valid_query = "select {{ 1 + 1 }}" + resp = self.compile_against_state(state_id, valid_query) + data = resp.json() + self.assertEqual(resp.status_code, 422) + self.assertTrue( + data["message"].startswith("[Errno 2] No such file or directory") + ) diff --git a/tests/e2e/test_state.py b/tests/e2e/test_state.py new file mode 100644 index 0000000..496a2e8 --- /dev/null +++ b/tests/e2e/test_state.py @@ -0,0 +1,46 @@ +import shutil +import tempfile + +from dbt_server.state import StateController, LAST_PARSED +from dbt_server.views import DbtCommandArgs +from tests.e2e.helpers import DbtCoreTestBase +from tests.e2e.fixtures import Profiles +import pytest +from tests.e2e.helpers import miss_snowflake_adaptor_package + + +@pytest.mark.skipif( + miss_snowflake_adaptor_package(), + reason="This test requires dbt-snowflake installed.", +) +class StateControllerTestCase(DbtCoreTestBase): + """ + Full functionality test class using a real dbt project manifest + """ + + def setUp(self): + self.temp_dir = tempfile.TemporaryDirectory() + self.set_envs(self.temp_dir.name, Profiles.Snowflake) + + self.state_id = "test123" + self.state_dir = f"{self.temp_dir.name}/state-{self.state_id}" + shutil.copytree("tests/e2e/fixtures/test-project", self.state_dir) + + def tearDown(self): + super().tearDown() + self.temp_dir.cleanup() + LAST_PARSED.reset() + + def test_load_state(self): + # CURRENTLY USING SNOWFLAKE DUE TO DBT VERSION MISMATCH WITH POSTGRES + args = DbtCommandArgs(command=["run"], state_id=self.state_id) + result = StateController.load_state(args) + + assert result.state_id == self.state_id + assert result.config.profile_name == "user" + assert result.config.target_name == "default" + assert result.config.user_config is not None + assert result.config.credentials is not None + + # Should not cache on load + assert LAST_PARSED.lookup(self.state_id) is None diff --git a/tests/e2e/test_views.py b/tests/e2e/test_views.py new file mode 100644 index 0000000..99eea18 --- /dev/null +++ b/tests/e2e/test_views.py @@ -0,0 +1,183 @@ +import json +import shutil +from unittest.mock 
import patch
+import uuid
+import tempfile
+from fastapi.testclient import TestClient
+
+from dbt_server import views, crud
+from dbt_server.state import LAST_PARSED, StateController
+from dbt_server.models import Task, Base
+from dbt_server.services.filesystem_service import DBT_LOG_FILE_NAME
+from dbt_server.views import app
+import pytest
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+from tests.e2e.helpers import DbtCoreTestBase
+from tests.e2e.helpers import miss_postgres_adaptor_package
+from tests.e2e.fixtures import Profiles
+
+
+@pytest.mark.skipif(
+    miss_postgres_adaptor_package(), reason="This test requires dbt-postgres installed."
+)
+class TestDbtEntryAsync(DbtCoreTestBase):
+    def setUp(self):
+        self.client = TestClient(app)
+        self.temp_dir = tempfile.TemporaryDirectory()
+        self.set_envs(self.temp_dir.name, Profiles.Postgres)
+
+        self.state_id = "test123"
+        self.state_dir = f"{self.temp_dir.name}/state-{self.state_id}"
+        shutil.copytree("tests/e2e/fixtures/test-project", self.state_dir)
+
+        self.engine = create_engine(
+            f"sqlite:///{self.temp_dir.name}/sql_app.db",
+            echo=True,
+            connect_args={"check_same_thread": False},
+        )
+        Base.metadata.create_all(bind=self.engine, tables=[Task.__table__])
+        self.SessionLocal = sessionmaker(bind=self.engine)
+        self.SessionLocal.configure(bind=self.engine, expire_on_commit=False)
+        self.db = self.SessionLocal()
+        app.dependency_overrides[crud.get_db] = self.mock_get_db
+
+    def tearDown(self):
+        super().tearDown()
+        self.db.close()
+        self.temp_dir.cleanup()
+        LAST_PARSED.reset()
+
+    def mock_get_db(self):
+        return self.SessionLocal()
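The setUp above swaps the server's crud.get_db dependency for a test-local session factory via app.dependency_overrides. A minimal, self-contained sketch of that FastAPI mechanism; the endpoint and names here are illustrative, not part of dbt-server:

from fastapi import Depends, FastAPI
from fastapi.testclient import TestClient

demo_app = FastAPI()

def get_db():
    return "real-db"  # stand-in for a real session factory

@demo_app.get("/ping")
def ping(db=Depends(get_db)):
    return {"db": db}

# Tests point the dependency at a fake; the route code stays untouched.
demo_app.dependency_overrides[get_db] = lambda: "test-db"
assert TestClient(demo_app).get("/ping").json() == {"db": "test-db"}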
+    @patch("dbt.parser.manifest.ManifestLoader.track_project_load")
+    def test_dbt_entry_async_project_path(self, mock_tracking):
+        """
+        Test that parse with a project_path results in manifest caching and
+        that a subsequent call to the async command endpoint pulls the
+        correct manifest.
+
+        Also tests that the expected log file is created and populated with
+        valid JSON logs.
+        """
+        args = views.ParseArgs(project_path=self.state_dir)
+        state = StateController.parse_from_source(args)
+        state.serialize_manifest()
+        state.update_cache()
+
+        args = views.DbtCommandArgs(command=["run", "--threads", 1])
+        response = self.client.post("/async/dbt", json=args.dict())
+
+        self.assertEqual(response.status_code, 200)
+        json_response = response.json()
+
+        # check that the task_id is a valid uuid
+        self.assertTrue(isinstance(uuid.UUID(json_response["task_id"]), uuid.UUID))
+
+        self.assertEqual(json_response["state"], "pending")
+        self.assertEqual(json_response["command"], "run --threads 1")
+
+        expected_log_path = (
+            f'{self.temp_dir.name}/{json_response.get("task_id")}/{DBT_LOG_FILE_NAME}'
+        )
+        self.assertEqual(json_response["log_path"], expected_log_path)
+
+        # check that the task is added to the database
+        task = self.db.query(Task).filter_by(task_id=json_response["task_id"]).first()
+        self.assertIsNotNone(task)
+        self.assertEqual(task.command, "run --threads 1")
+        self.assertEqual(task.log_path, expected_log_path)
+
+        # check that the log file is populated with valid json logs
+        data = []
+        with open(expected_log_path) as f:
+            for line in f:
+                data.append(json.loads(line))
+
+        self.assertTrue(data)
+
+    @patch("dbt_server.views.StateController.execute_async_command")
+    @patch("dbt.parser.manifest.ManifestLoader.track_project_load")
+    def test_dbt_entry_state_id(self, mock_tracking, mock_execute):
+        """
+        Test that parse with a state-id results in manifest caching and
+        that a subsequent call to the async command endpoint pulls the
+        correct manifest.
+
+        Mocks actual command execution to prevent log files from being
+        written to a permanent directory.
+        """
+        args = views.ParseArgs(state_id=self.state_id)
+        state = StateController.parse_from_source(args)
+        state.serialize_manifest()
+        state.update_cache()
+
+        args = views.DbtCommandArgs(command=["run", "--threads", 1])
+        response = self.client.post("/async/dbt", json=args.dict())
+
+        self.assertEqual(response.status_code, 200)
+        json_response = response.json()
+
+        # check that the task_id is a valid uuid
+        self.assertTrue(isinstance(uuid.UUID(json_response["task_id"]), uuid.UUID))
+
+        self.assertEqual(json_response["state"], "pending")
+        self.assertEqual(json_response["command"], "run --threads 1")
+
+        expected_log_path = f'{self.state_dir}/{json_response.get("task_id")}/dbt.log'
+        self.assertEqual(json_response["log_path"], expected_log_path)
+
+        # check that the task is added to the database
+        task = self.db.query(Task).filter_by(task_id=json_response["task_id"]).first()
+        self.assertIsNotNone(task)
+        self.assertEqual(task.command, "run --threads 1")
+        self.assertEqual(task.log_path, expected_log_path)
+
+    def test_dbt_entry_no_state_found(self):
+        """
+        Test that calling the async/dbt endpoint without first calling parse
+        results in a properly handled StateNotFoundException.
+        """
+        args = views.DbtCommandArgs(command=["run", "--threads", 1])
+        response = self.client.post("/async/dbt", json=args.dict())
+        self.assertEqual(response.status_code, 422)
+
+
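The log assertion in the first test relies on dbt's structured logging writing one JSON object per line; a helper equivalent to that loop might look like the following sketch (the helper name is ours, not from the PR):

import json

def read_json_log_lines(log_path: str) -> list:
    """Parse a JSON-lines dbt log file; raises if any line is not valid JSON."""
    with open(log_path) as f:
        return [json.loads(line) for line in f]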
+@pytest.mark.skipif(
+    miss_postgres_adaptor_package(), reason="This test requires dbt-postgres installed."
+)
+class TestDbtEntrySync(DbtCoreTestBase):
+    def setUp(self):
+        self.client = TestClient(app)
+        self.temp_dir = tempfile.TemporaryDirectory()
+        self.set_envs(self.temp_dir.name, Profiles.Postgres)
+
+        self.state_id = "test123"
+        self.state_dir = f"{self.temp_dir.name}/state-{self.state_id}"
+        shutil.copytree("tests/e2e/fixtures/test-project", self.state_dir)
+
+    def tearDown(self):
+        super().tearDown()
+        self.temp_dir.cleanup()
+        LAST_PARSED.reset()
+
+    @patch("dbt.parser.manifest.ManifestLoader.track_project_load")
+    def test_dbt_entry_sync_project_path(self, mock_tracking):
+        """
+        Test that parse with a project_path results in manifest caching and
+        that a subsequent call to the sync command endpoint pulls the correct
+        manifest and returns expected results.
+        """
+        args = views.ParseArgs(project_path=self.state_dir)
+        state = StateController.parse_from_source(args)
+        state.serialize_manifest()
+        state.update_cache()
+
+        args = views.DbtCommandArgs(command=["run", "--threads", 1])
+        response = self.client.post("/sync/dbt", json=args.dict())
+
+        self.assertEqual(response.status_code, 200)
+        json_response = response.json()
+
+        self.assertIsNotNone(json_response["res"])
+        self.assertEqual(json_response["command"], "run --threads 1")
+        self.assertEqual(json_response["parsing"], self.state_dir)
+        self.assertEqual(json_response["path"], f"{self.state_dir}/manifest.msgpack")
diff --git a/tests/e2e/testcases/ide_commands.txt b/tests/e2e/testcases/ide_commands.txt
new file mode 100644
index 0000000..e974993
--- /dev/null
+++ b/tests/e2e/testcases/ide_commands.txt
@@ -0,0 +1,80 @@
+# Commands for IDE use case.
+docs generate
+source freshness
+source freshness --vars %VARIABLE
+source freshness --selector %SELECTOR
+deps
+list
+build
+build --vars %VARIABLE
+build -x
+build --fail-fast
+build --store-failures
+build --resource-type test
+build --resource-type seed
+build --resource-type snapshot
+build --resource-type model
+build --resource-type all
+build -s %MODEL
+build --select %MODEL
+build --exclude %MODEL
+build --selector %SELECTOR
+build --threads 4
+build --state %STATE_DIR --defer
+build --no-defer
+build --full-refresh
+build -f
+snapshot
+snapshot --vars %VARIABLE
+snapshot --threads 4
+snapshot -m %MODEL
+snapshot --models %MODEL
+snapshot -s %MODEL
+snapshot --select %MODEL
+snapshot --exclude %MODEL
+snapshot --selector %SELECTOR
+snapshot --state %STATE_DIR --defer
+snapshot --no-defer
+run
+run --vars %VARIABLE
+run -x
+run --fail-fast
+run -s %MODEL
+run --select %MODEL
+run --exclude %MODEL
+run --selector %SELECTOR
+run --threads 4
+run --state %STATE_DIR --defer
+run --no-defer
+run --full-refresh
+run -f
+compile
+test
+test --vars %VARIABLE
+test -x
+test --fail-fast
+test --store-failures
+test --indirect-selection eager
+test --indirect-selection cautious
+test --threads 4
+test --no-version-check
+test -m %MODEL
+test --models %MODEL
+test -s %MODEL
+test --select %MODEL
+test --exclude %MODEL
+test --selector %SELECTOR
+test --state %STATE_DIR --defer
+test --no-defer
+seed
+seed --full-refresh
+seed --no-version-check
+seed -m %MODEL
+seed --models %MODEL
+seed -s %MODEL
+seed --select %MODEL
+seed --exclude %MODEL
+seed --selector %SELECTOR
+run-operation %MACRO_NAME
+run-operation %MACRO_NAME --vars %VARIABLE
+run-operation %MACRO_NAME --args %MACRO_ARGS
diff --git a/tests/e2e/testcases/ide_commands_failure.txt b/tests/e2e/testcases/ide_commands_failure.txt
new file mode 100644
index 0000000..6cc7ee8
--- /dev/null
+++ b/tests/e2e/testcases/ide_commands_failure.txt
@@ -0,0 +1,3 @@
+wrongcommand
+docs wrongcommand
+build --vars 'test_var: "string"'
diff --git a/tests/e2e/testcases/local_run.txt b/tests/e2e/testcases/local_run.txt
new file mode 100644
index 0000000..52d6fa5
--- /dev/null
+++ b/tests/e2e/testcases/local_run.txt
@@ -0,0 +1,2 @@
+# Intentionally left blank. Add test commands below and use
+# tests/e2e/local_run.py to test your dbt server.
diff --git a/tests/e2e/testcases/simple_commands.txt b/tests/e2e/testcases/simple_commands.txt
new file mode 100644
index 0000000..f7fe3bd
--- /dev/null
+++ b/tests/e2e/testcases/simple_commands.txt
@@ -0,0 +1,13 @@
+# Simple test cases.
+docs generate
+source freshness
+deps
+list
+build
+snapshot
+run
+compile
+test
+seed
+run-operation %MACRO_NAME
+# snapshot is not covered yet
diff --git a/tests/e2e/testcases/simple_commands_failure.txt b/tests/e2e/testcases/simple_commands_failure.txt
new file mode 100644
index 0000000..6cc7ee8
--- /dev/null
+++ b/tests/e2e/testcases/simple_commands_failure.txt
@@ -0,0 +1,3 @@
+wrongcommand
+docs wrongcommand
+build --vars 'test_var: "string"'
diff --git a/tests/integration/test_cache.py b/tests/integration/test_cache.py
index a6f0ec3..4cd64b8 100644
--- a/tests/integration/test_cache.py
+++ b/tests/integration/test_cache.py
@@ -1,10 +1,13 @@
 from fastapi.testclient import TestClient
 from unittest.mock import patch
-import unittest
 
 from dbt_server.server import app, startup_cache_initialize
 from dbt_server.state import LAST_PARSED
 from dbt_server.exceptions import StateNotFoundException
+from dbt_server.services.filesystem_service import DEFAULT_WORKING_DIR
+from tests.e2e.helpers import DbtCoreTestBase
+
+TEST_LATEST_STATE_ID = "abc123"
 
 
 class FakeManifest:
@@ -15,16 +18,22 @@ class FakeManifest:
 client = TestClient(app)
 
 
-class StartupCacheTest(unittest.TestCase):
+class StartupCacheTest(DbtCoreTestBase):
     def setUp(self):
+        self.set_envs(DEFAULT_WORKING_DIR, "")
         LAST_PARSED.reset()
 
     def tearDown(self):
+        super().tearDown()
         LAST_PARSED.reset()
 
     @patch(
         "dbt_server.services.filesystem_service.get_latest_state_id",
-        return_value="abc123",
+        return_value=TEST_LATEST_STATE_ID,
+    )
+    @patch(
+        "dbt_server.services.filesystem_service.get_latest_project_path",
+        return_value=None,
     )
     @patch("dbt_server.services.filesystem_service.get_size", return_value=1024)
     @patch(
@@ -39,6 +48,7 @@ def test_startup_cache_succeeds(
         create_dbt_config,
         mock_dbt,
         mock_fs_get_size,
+        mock_fs_get_latest_project_path,
         mock_fs_get_latest_state_id,
     ):
         # Make sure it's not errantly cached
@@ -47,12 +57,12 @@
         startup_cache_initialize()
 
         # Make sure manifest is now cached
-        expected_path = "./working-dir/state-abc123/manifest.msgpack"
+        expected_path = f"./working-dir/state-{TEST_LATEST_STATE_ID}/manifest.msgpack"
         mock_fs_get_latest_state_id.assert_called_once_with(None)
         mock_fs_get_size.assert_called_once_with(expected_path)
         mock_dbt.assert_called_once_with(expected_path)
         assert LAST_PARSED.manifest is fake_manifest
-        assert LAST_PARSED.state_id == "abc123"
+        assert LAST_PARSED.state_id == TEST_LATEST_STATE_ID
         assert LAST_PARSED.manifest_size == 1024
 
     @patch(
@@ -71,13 +81,19 @@ def test_startup_cache_fails_no_state(self, mock_fs):
 
     @patch(
         "dbt_server.services.filesystem_service.get_latest_state_id",
-        return_value="abc123",
+        return_value=TEST_LATEST_STATE_ID,
+    )
+    @patch(
+        "dbt_server.services.filesystem_service.get_latest_project_path",
+        return_value=None,
     )
     @patch(
         "dbt_server.services.dbt_service.deserialize_manifest",
side_effect=TypeError("bad"), ) - def test_startup_cache_fails_bad_manifest(self, mock_dbt, mock_fs): + def test_startup_cache_fails_bad_manifest( + self, mock_dbt, mock_get_latest_project_path, mock_fs + ): # Make sure it's not errantly cached assert LAST_PARSED.manifest is None @@ -85,19 +101,27 @@ def test_startup_cache_fails_bad_manifest(self, mock_dbt, mock_fs): # Make sure manifest is still not cached mock_fs.assert_called_once_with(None) - mock_dbt.assert_called_once_with("./working-dir/state-abc123/manifest.msgpack") + mock_dbt.assert_called_once_with( + f"./working-dir/state-{TEST_LATEST_STATE_ID}/manifest.msgpack" + ) assert LAST_PARSED.manifest is None assert LAST_PARSED.state_id is None @patch( "dbt_server.services.filesystem_service.get_latest_state_id", - return_value="abc123", + return_value=TEST_LATEST_STATE_ID, + ) + @patch( + "dbt_server.services.filesystem_service.get_latest_project_path", + return_value=None, ) @patch( "dbt_server.services.filesystem_service.read_serialized_manifest", side_effect=StateNotFoundException(), ) - def test_startup_cache_fails_specified_state_is_missing(self, mock_dbt, mock_fs): + def test_startup_cache_fails_specified_state_is_missing( + self, mock_dbt, mock_get_latest_project_path, mock_fs + ): # Make sure it's not errantly cached assert LAST_PARSED.manifest is None @@ -105,6 +129,8 @@ def test_startup_cache_fails_specified_state_is_missing(self, mock_dbt, mock_fs) # Make sure manifest is still not cached mock_fs.assert_called_once_with(None) - mock_dbt.assert_called_once_with("./working-dir/state-abc123/manifest.msgpack") + mock_dbt.assert_called_once_with( + f"./working-dir/state-{TEST_LATEST_STATE_ID}/manifest.msgpack" + ) assert LAST_PARSED.manifest is None assert LAST_PARSED.state_id is None diff --git a/tests/integration/test_compile.py b/tests/integration/test_compile.py index 6e1d3f4..9344a6c 100644 --- a/tests/integration/test_compile.py +++ b/tests/integration/test_compile.py @@ -45,6 +45,8 @@ def test_compilation_interface_valid_state_id(self): state_mock = Mock( return_value=StateController( state_id=state_id, + project_path=None, + root_path=f"./working-dir/state-{state_id}", manifest=None, config=None, parser=None, @@ -70,9 +72,8 @@ def test_compilation_interface_valid_state_id(self): ) state_mock.assert_called_once_with( - state_id, SQLConfig( - state_id="goodid", sql="select {{ 1 + 1 }}", target="new_target" + state_id=state_id, sql="select {{ 1 + 1 }}", target="new_target" ), ) query_mock.assert_called_once_with(source_query) @@ -103,7 +104,6 @@ def test_compilation_interface_compilation_error(self): ) state.assert_called_once_with( - state_id, SQLConfig( state_id="badid", sql="select {{ exceptions.raise_compiler_error('bad')}}", @@ -122,7 +122,8 @@ def test_compilation_interface_compilation_error(self): def test_compilation_interface_cache_hit(self): # Cache hit for load_state with patch("dbt_server.state.LAST_PARSED") as last_parsed: - state = StateController.load_state("abc123") + args = SQLConfig(state_id="abc123", sql="") + state = StateController.load_state(args) last_parsed.lookup.assert_called_once_with("abc123") assert state.manifest is not None @@ -132,7 +133,8 @@ def test_compilation_interface_cache_miss(self): # We expect this to raise because abc123 is not a real state... 
# that's fine for this test, we just want to make sure that we lookup abc123 with self.assertRaises(StateNotFoundException): - StateController.load_state("abc123") + args = SQLConfig(state_id="abc123", sql="") + StateController.load_state(args) lookup.assert_called_once_with("abc123") @@ -155,13 +157,22 @@ def test_compilation_interface_cache_mutation(self): "dbt_server.services.dbt_service", get_sql_parser=Mock(), ): - cached.set_last_parsed_manifest("abc123", manifest_mock, 512, config_mock) + cached.set_last_parsed_manifest( + "abc123", + None, + "./working_dir/state-abc123", + manifest_mock, + 512, + config_mock, + ) assert cached.state_id == "abc123" assert cached.manifest is not None assert cached.manifest_size == 512 assert cached.config == config_mock assert cached.parser is not None + assert cached.root_path == "./working_dir/state-abc123" + assert cached.project_path is None assert cached.lookup(None) is not None manifest_mock.reset_mock() @@ -180,14 +191,20 @@ def test_compilation_interface_cache_mutation(self): get_sql_parser=Mock(), ): cached.set_last_parsed_manifest( - "def456", new_manifest_mock, 1024, new_config_mock + None, + "../jaffle-shop", + "../jaffle-shop", + new_manifest_mock, + 1024, + new_config_mock, ) - assert cached.state_id == "def456" + assert cached.state_id is None assert cached.manifest is not None assert cached.manifest_size == 1024 assert cached.config == new_config_mock assert cached.parser is not None + assert cached.root_path == "../jaffle-shop" + assert cached.project_path == "../jaffle-shop" assert cached.lookup(None) is not None - assert cached.lookup("def456") is not None assert cached.lookup("abc123") is None
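The cache-mutation assertions above pin down the new lookup contract: a manifest parsed from a project_path caches with state_id None, lookup(None) always returns the most recently parsed state, and lookup(state_id) matches only the cached id. A toy model of that contract, for illustration only and not the dbt_server implementation:

class ToyLastParsed:
    """Toy model of the LAST_PARSED lookup contract exercised above."""

    def __init__(self):
        self.state_id = None
        self.manifest = None

    def set_last_parsed_manifest(self, state_id, manifest):
        self.state_id, self.manifest = state_id, manifest

    def lookup(self, state_id):
        # None means "latest parsed"; otherwise the ids must match exactly.
        if state_id is None or state_id == self.state_id:
            return self.manifest
        return None

cache = ToyLastParsed()
cache.set_last_parsed_manifest(None, "manifest-from-project-path")
assert cache.lookup(None) == "manifest-from-project-path"
assert cache.lookup("abc123") is None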