Skip to content

Commit

Permalink
merged development
Browse files Browse the repository at this point in the history
  • Loading branch information
cmcarthur committed Aug 9, 2018
2 parents 77eb04a + b4480cb commit 57eaa0c
Show file tree
Hide file tree
Showing 9 changed files with 141 additions and 19 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.10.2a4
current_version = 0.10.2
parse = (?P<major>\d+)
\.(?P<minor>\d+)
\.(?P<patch>\d+)
Expand Down
12 changes: 6 additions & 6 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
- Version 2 of schema.yml, which allows users to create table and column comments that end up in the manifest ([#880](https://github.com/fishtown-analytics/dbt/pull/880))
- Add `docs` blocks that users can put into `.md` files and `doc()` value for schema v2 description fields ([#888](https://github.com/fishtown-analytics/dbt/pull/888))

## dbt 0.10.2 (unreleased, codename: Betsy Ross)
## dbt 0.10.2 - Betsy Ross (August 3, 2018)

### Overview

Expand All @@ -29,11 +29,11 @@ We want to extend a big thank you to our outside contributors for this release!
### Features

- BigQuery
- Support incremental models ([#856](https://github.com/fishtown-analytics/dbt/pull/856))
- Support archival ([#856](https://github.com/fishtown-analytics/dbt/pull/856))
- Add pre/post hook support ([#836](https://github.com/fishtown-analytics/dbt/pull/836))
- Redshift: IAM Auth ([#818](https://github.com/fishtown-analytics/dbt/pull/818))
- Model aliases ([#800](https://github.com/fishtown-analytics/dbt/pull/800))
- Support incremental models ([#856](https://github.com/fishtown-analytics/dbt/pull/856)) ([docs](https://docs.getdbt.com/docs/configuring-models#section-configuring-incremental-models))
- Support archival ([#856](https://github.com/fishtown-analytics/dbt/pull/856)) ([docs](https://docs.getdbt.com/docs/archival))
- Add pre/post hook support ([#836](https://github.com/fishtown-analytics/dbt/pull/836)) ([docs](https://docs.getdbt.com/docs/using-hooks))
- Redshift: IAM Auth ([#818](https://github.com/fishtown-analytics/dbt/pull/818)) ([docs](https://docs.getdbt.com/docs/supported-databases#section-iam-authentication))
- Model aliases ([#800](https://github.com/fishtown-analytics/dbt/pull/800))([docs](https://docs.getdbt.com/docs/using-custom-aliases))
- Write JSON manifest file to disk during compilation ([#761](https://github.com/fishtown-analytics/dbt/pull/761))
- Add forward and backward graph edges to the JSON manifest file ([#762](https://github.com/fishtown-analytics/dbt/pull/762))
- Add a 'dbt docs generate' command to generate a JSON catalog file ([#774](https://github.com/fishtown-analytics/dbt/pull/774), [#808](https://github.com/fishtown-analytics/dbt/pull/808))
Expand Down
13 changes: 10 additions & 3 deletions dbt/include/global_project/macros/materializations/table/table.sql
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,13 @@
schema=schema, type='table') -%}
{%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,
schema=schema, type='table') -%}

/*
See ../view/view.sql for more information about this relation.
*/
{%- set backup_relation = api.Relation.create(identifier=backup_identifier,
schema=schema, type='table') -%}
schema=schema, type=(old_relation.type or 'table')) -%}

{%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}
{%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}
{%- set create_as_temporary = (exists_as_table and non_destructive_mode) -%}
Expand Down Expand Up @@ -64,8 +69,8 @@
{% if non_destructive_mode -%}
-- noop
{%- else -%}
{% if exists_as_table %}
-- move the existing table out of the way
{% if old_relation is not none %}
-- move the existing relation out of the way
{{ adapter.rename_relation(target_relation, backup_relation) }}
{% endif %}

Expand All @@ -74,6 +79,8 @@

-- `COMMIT` happens here
{{ adapter.commit() }}

-- finally, drop the existing/backup relation after the commit
{{ drop_relation_if_exists(backup_relation) }}

{{ run_hooks(post_hooks, inside_transaction=False) }}
Expand Down
23 changes: 20 additions & 3 deletions dbt/include/global_project/macros/materializations/view/view.sql
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,22 @@
type='view') -%}
{%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,
schema=schema, type='view') -%}

/*
This relation (probably) doesn't exist yet. If it does exist, it's a leftover from
a previous run, and we're going to try to drop it immediately. At the end of this
materialization, we're going to rename the "old_relation" to this identifier,
and then we're going to drop it. In order to make sure we run the correct one of:
- drop view ...
- drop table ...
We need to set the type of this relation to be the type of the old_relation, if it exists,
or else "view" as a sane default if it does not. Note that if the old_relation does not
exist, then there is nothing to move out of the way and subsequently drop. In that case,
this relation will be effectively unused.
*/
{%- set backup_relation = api.Relation.create(identifier=backup_identifier,
schema=schema, type='view') -%}
schema=schema, type=(old_relation.type or 'view')) -%}

{%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}

Expand Down Expand Up @@ -52,7 +66,7 @@
-- cleanup
{% if not should_ignore -%}
-- move the existing view out of the way
{% if exists_as_view %}
{% if old_relation is not none %}
{{ adapter.rename_relation(target_relation, backup_relation) }}
{% endif %}
{{ adapter.rename_relation(intermediate_relation, target_relation) }}
Expand All @@ -64,7 +78,10 @@
#}
{% if has_transactional_hooks or not should_ignore %}
{{ adapter.commit() }}
{{ drop_relation_if_exists(backup_relation) }}
{% endif %}

{% if not should_ignore %}
{{ drop_relation_if_exists(backup_relation) }}
{% endif %}

{{ run_hooks(post_hooks, inside_transaction=False) }}
Expand Down
17 changes: 13 additions & 4 deletions dbt/project.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,17 +155,26 @@ def compile_target(self, target_cfg):

def compile_and_update_target(self):
target = self.cfg['target']
self.cfg['outputs'][target].update(self.run_environment())
run_env = self.run_environment()
self.cfg['outputs'][target].update(run_env)

def run_environment(self):
target_name = self.cfg['target']
if target_name in self.cfg['outputs']:
target_cfg = self.cfg['outputs'][target_name]
return self.compile_target(target_cfg)
else:
raise DbtProfileError(
"'target' config was not found in profile entry for "
"'{}'".format(target_name), self)

outputs = self.cfg.get('outputs', {}).keys()
output_names = [" - {}".format(output) for output in outputs]

msg = ("The profile '{}' does not have a target named '{}'. The "
"valid target names for this profile are:\n{}".format(
self.profile_to_load,
target_name,
"\n".join(output_names)))

raise DbtProfileError(msg, self)

def get_target(self):
ctx = self.context().get('env').copy()
Expand Down
2 changes: 1 addition & 1 deletion dbt/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,6 @@ def get_version_information():
.format(version_msg))


__version__ = '0.10.2a4'
__version__ = '0.10.2'
installed = get_installed_version()
latest = get_latest_version()
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()

package_name = "dbt"
package_version = "0.10.2a4"
package_version = "0.10.2"
description = """dbt (data build tool) is a command line tool that helps \
analysts and engineers transform data in their warehouse more effectively"""

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@


{# Test fixture model: the materialization is driven entirely by the
   `materialized` var so the integration test can swap relation types
   (view/table/incremental) between runs.
   NOTE(review): sql_where='TRUE' presumably makes incremental runs
   re-select every row — confirm against the incremental materialization. #}
{{ config(materialized=var('materialized'), sql_where='TRUE') }}

select '{{ var("materialized") }}' as materialization
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
from nose.plugins.attrib import attr
from test.integration.base import DBTIntegrationTest


class TestChangingRelationType(DBTIntegrationTest):
    """Integration tests verifying dbt handles a model switching between
    materializations that create different relation types (view, table,
    incremental) without leaving stale relations behind.
    """

    @property
    def schema(self):
        # Unique schema suffix for this test suite.
        return "changing_relation_type_035"

    @staticmethod
    def dir(path):
        # Resolve a path relative to this test's fixture directory.
        return "test/integration/035_changing_relation_type_test/" + path.lstrip("/")

    @property
    def models(self):
        return self.dir("models")

    def _run_and_check(self, materialization, extra_args=None):
        # Run dbt with the model materialized as `materialization`, then
        # assert exactly one model ran and it used that materialization.
        # Length is asserted before indexing so an empty result list fails
        # with a clear message instead of an IndexError.
        args = ['run', '--vars', 'materialized: {}'.format(materialization)]
        if extra_args:
            args.extend(extra_args)
        results = self.run_dbt(args)
        self.assertEqual(len(results), 1)
        self.assertEqual(
            results[0].node['config']['materialized'], materialization)

    def swap_types_and_test(self):
        # test that dbt is able to do intelligent things when changing
        # between materializations that create tables and views.
        for materialization in ['view', 'table', 'view', 'incremental', 'view']:
            self._run_and_check(materialization)

    @attr(type="postgres")
    def test__postgres__switch_materialization(self):
        self.use_profile("postgres")
        self.swap_types_and_test()

    @attr(type="snowflake")
    def test__snowflake__switch_materialization(self):
        self.use_profile("snowflake")
        self.swap_types_and_test()

    @attr(type="redshift")
    def test__redshift__switch_materialization(self):
        self.use_profile("redshift")
        self.swap_types_and_test()

    @attr(type="bigquery")
    def test__bigquery__switch_materialization(self):
        # BQ has a weird check that prevents the dropping of tables in the
        # view materialization if --full-refresh is not provided. This is to
        # prevent the clobbering of a date-sharded table with a view if a
        # model config is accidentally changed. We should probably remove
        # that check and then remove these bq-specific tests.
        self.use_profile("bigquery")

        self._run_and_check('view')
        self._run_and_check('table')
        self._run_and_check('view', extra_args=["--full-refresh"])
        self._run_and_check('incremental')
        self._run_and_check('view', extra_args=["--full-refresh"])

0 comments on commit 57eaa0c

Please sign in to comment.