Skip to content

Commit

Permalink
Update dbt-semantic-interfaces to 0.5.1
Browse files Browse the repository at this point in the history
In order to proceed with development on using Metric inputs in
filter expressions, we need to update our dbt-semantic-interfaces
dependency to 0.5.1 or later.

This change carries some added complexity along with it, since our
dev dependencies specify dbt-core and dbt-<adapter> package
dependencies bound to the dbt-core 1.7 version range. This creates
an unresolvable dependency version conflict, since dbt-core depends
on dbt-semantic-interfaces 0.4.x.

For the time being we can get away with relying on the pre-installed
dbt core + adapter packages pulled in by dbt-metricflow, and
simply allow metricflow's specification for dbt-semantic-interfaces
to override the installed package.

Most of the changes in this commit are a result of needing to manage
the dbt-semantic-interfaces version discrepancies by relying solely
on the dbt-metricflow pre-install for dbt core + adapter dependencies.

The one additional tweak is the MetricFlow minor version, which was
improperly set to the currently available release of 0.205.0. Since
this is the top of a set of serious breaking changes, we need to ensure
we release as a full minor version, so the next version will be 0.206.0.
  • Loading branch information
tlento committed Mar 26, 2024
1 parent 986ab30 commit 294de35
Showing 1 changed file with 20 additions and 49 deletions.
69 changes: 20 additions & 49 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "hatchling.build"

[project]
name = "metricflow"
version = "0.205.0.dev0"
version = "0.206.0.dev0"
description = "Translates a simple metric definition into reusable SQL and executes it against the SQL engine of your choice."
readme = "README.md"
requires-python = ">=3.8,<3.12"
Expand All @@ -27,7 +27,7 @@ classifiers = [
dependencies = [
"Jinja2>=3.1.3",
"PyYAML>=6.0, <7.0.0",
"dbt-semantic-interfaces>=0.4.2, <0.5.0",
"dbt-semantic-interfaces>=0.5.1, <0.6.0",
"graphviz>=0.18.2, <0.21",
"more-itertools>=8.10.0, <10.2.0",
"pandas>=1.5.0, <1.6.0",
Expand Down Expand Up @@ -56,12 +56,18 @@ dev-packages = [
"types-tabulate",

# Test and CLI development dependencies.
# These should generally match what's in dbt-metricflow/pyproject.toml, but may
# diverge during upgrade phases or for other reasons
#
# Note we do not currently depend on dbt-core because updates across minor version
# boundaries cause problems with tests against future versions of dbt-semantic-interfaces.
# In future we will refine this packaging to depend on stable dbt-adapter interfaces in
# the development builds, but those packages do not yet support all of our needs so we
# hack around the problem with editable pre-installs in the various dev environments.
# Note this only works because we don't use the dbt packages for anything other than
# managing warehouse connections and dispatching queries, so this is not a pattern
# anyone else should be emulating even as a short-term hack.

# This should generally match what's in dbt-metricflow/pyproject.toml, but may
# diverge during upgrade phases
"dbt-core>=1.7.4, <1.8.0",

# These need to match the ones in dbt-metricflow/pyproject.toml
"halo>=0.0.31, <0.1.0",
"update-checker>=0.18.0, <0.19.0",

Expand All @@ -79,34 +85,6 @@ trino-sql-client-packages = [
"trino>=0.327.0, <0.328.0",
]

dbt-postgres = [
"dbt-postgres>=1.7.0, <1.8.0",
]

dbt-bigquery = [
"dbt-bigquery>=1.7.0, <1.8.0",
]

dbt-databricks = [
"dbt-databricks>=1.7.0, <1.8.0",
]

dbt-redshift = [
"dbt-redshift>=1.7.0, <1.8.0",
]

dbt-snowflake = [
"dbt-snowflake>=1.7.0, <1.8.0",
]

dbt-duckdb = [
"dbt-duckdb>=1.7.0, <1.8.0",
]

dbt-trino = [
"dbt-trino>=1.7.0, <1.8.0",
]

[tool.hatch.build.targets.sdist]
exclude = [
"/.github",
Expand Down Expand Up @@ -134,13 +112,12 @@ exclude = [
description = "Environment for development. Includes a DuckDB-backed client."

pre-install-commands = [
"pip install -e ./dbt-metricflow"
"pip install -e ./dbt-metricflow[duckdb]"
]

features = [
"dev-packages",
"sql-client-packages",
"dbt-duckdb",
]

[tool.hatch.envs.dev-env.env-vars]
Expand All @@ -156,12 +133,11 @@ MF_TEST_ADAPTER_TYPE="postgres"
description = "Dev environment for working with Postgres adapter"

pre-install-commands = [
"pip install -e ./dbt-metricflow"
"pip install -e ./dbt-metricflow[postgres]"
]

features = [
"dev-packages",
"dbt-postgres",
"sql-client-packages",
]

Expand All @@ -175,12 +151,11 @@ MF_TEST_ADAPTER_TYPE="bigquery"
description = "Dev environment for working with the BigQuery adapter"

pre-install-commands = [
"pip install -e ./dbt-metricflow"
"pip install -e ./dbt-metricflow[bigquery]"
]

features = [
"dev-packages",
"dbt-bigquery",
"sql-client-packages",
]

Expand All @@ -191,12 +166,11 @@ MF_TEST_ADAPTER_TYPE="databricks"
description = "Dev environment for working with the Databricks adapter"

pre-install-commands = [
"pip install -e ./dbt-metricflow"
"pip install -e ./dbt-metricflow[databricks]"
]

features = [
"dev-packages",
"dbt-databricks",
"sql-client-packages",
]

Expand All @@ -207,12 +181,11 @@ MF_TEST_ADAPTER_TYPE="redshift"
description = "Dev environment for working with the Redshift adapter"

pre-install-commands = [
"pip install -e ./dbt-metricflow"
"pip install -e ./dbt-metricflow[redshift]"
]

features = [
"dev-packages",
"dbt-redshift",
"sql-client-packages",
]

Expand All @@ -223,12 +196,11 @@ MF_TEST_ADAPTER_TYPE="snowflake"
description = "Dev environment for working with Snowflake adapter"

pre-install-commands = [
"pip install -e ./dbt-metricflow"
"pip install -e ./dbt-metricflow[snowflake]"
]

features = [
"dev-packages",
"dbt-snowflake",
"sql-client-packages",
]

Expand All @@ -241,12 +213,11 @@ DBT_ENV_SECRET_CATALOG="memory"
description = "Dev environment for working with the Trino adapter"

pre-install-commands = [
"pip install -e ./dbt-metricflow"
"pip install -e ./dbt-metricflow[trino]"
]

features = [
"dev-packages",
"dbt-trino",
"sql-client-packages",
"trino-sql-client-packages",
]
Expand Down

0 comments on commit 294de35

Please sign in to comment.