feature/decouple adapters from core #972

Merged · 112 commits · Jan 25, 2024

Changes from 100 commits

Commits
fd6f6f0
Add Github action for integration test
JCZuurmond Sep 29, 2023
795e40a
Update tox
JCZuurmond Sep 29, 2023
ff39c5d
Fetch spark from https link
JCZuurmond Sep 29, 2023
1505fc6
Use Spark version 3.1.2
JCZuurmond Sep 29, 2023
44fe33f
Separate running Spark session and thrift
JCZuurmond Sep 29, 2023
2655631
Use Spark 3.1.2 and Hadoop 3.2
JCZuurmond Sep 29, 2023
915f67e
Reset tox.ini
JCZuurmond Sep 29, 2023
f0ef215
Remove base pythons in tox.ini
JCZuurmond Sep 29, 2023
e8457df
Fix reference to Docker compose file
JCZuurmond Sep 29, 2023
842466a
Remove timeout
JCZuurmond Sep 29, 2023
0738f2d
Remove artifact steps
JCZuurmond Sep 29, 2023
277bef1
Bump Spark and Hadoop versions
JCZuurmond Sep 29, 2023
8d5853d
Reset Spark and Hadoop version
JCZuurmond Sep 29, 2023
919528a
Update comment
JCZuurmond Sep 29, 2023
15e48fd
Add changie
JCZuurmond Sep 29, 2023
ab90c4c
Merge branch 'main' into add-github-workflow-for-integration-tests
Fleid Oct 12, 2023
31cb05e
add databricks and PR execution protections
colin-rogers-dbt Oct 18, 2023
31eceb5
Merge branch 'main' into migrateOffCircleCI
colin-rogers-dbt Oct 18, 2023
fd54d7f
use single quotes
colin-rogers-dbt Oct 23, 2023
8de8339
remove `_target` suffix
colin-rogers-dbt Oct 23, 2023
e85232f
add comment to test
colin-rogers-dbt Oct 23, 2023
fe3300e
specify container user as root
colin-rogers-dbt Oct 23, 2023
b37e14b
formatting
colin-rogers-dbt Oct 23, 2023
51511ec
remove python setup for pre-existing container
colin-rogers-dbt Oct 23, 2023
98607b6
download simba
colin-rogers-dbt Oct 23, 2023
e6ec414
fix curl call
colin-rogers-dbt Oct 23, 2023
05a2c08
fix curl call
colin-rogers-dbt Oct 23, 2023
a89ec58
fix curl call
colin-rogers-dbt Oct 23, 2023
2a18fad
fix curl call
colin-rogers-dbt Oct 23, 2023
1481396
fix curl call
colin-rogers-dbt Oct 23, 2023
31b427c
fix curl call
colin-rogers-dbt Oct 23, 2023
15ba1da
fix db test naming
colin-rogers-dbt Oct 23, 2023
ca33a23
confirm ODBC driver installed
colin-rogers-dbt Oct 23, 2023
6274d77
add odbc driver env var
colin-rogers-dbt Oct 23, 2023
0ba91a2
add odbc driver env var
colin-rogers-dbt Oct 23, 2023
f092026
specify platform
colin-rogers-dbt Oct 23, 2023
b968985
check odbc driver integrity
colin-rogers-dbt Oct 23, 2023
8a49567
add dbt user env var
colin-rogers-dbt Oct 23, 2023
7723e8d
add dbt user env var
colin-rogers-dbt Oct 23, 2023
ea5ebfa
fix host_name env var
colin-rogers-dbt Oct 23, 2023
610e5e9
try removing architecture arg
colin-rogers-dbt Oct 24, 2023
b4411ab
swap back to pull_request_target
colin-rogers-dbt Oct 24, 2023
cae6c8a
try running on host instead of container
colin-rogers-dbt Oct 24, 2023
0c68972
Update .github/workflows/integration.yml
colin-rogers-dbt Oct 24, 2023
b2f63bd
try running odbcinst -j
colin-rogers-dbt Oct 24, 2023
80eb7e4
remove bash
colin-rogers-dbt Oct 24, 2023
4bbfa71
add sudo
colin-rogers-dbt Oct 24, 2023
b1d2020
add sudo
colin-rogers-dbt Oct 24, 2023
38fda3d
update odbc.ini
colin-rogers-dbt Oct 24, 2023
6b599a1
install libsasl2-modules-gssapi-mit
colin-rogers-dbt Oct 24, 2023
0976c4f
install libsasl2-modules-gssapi-mit
colin-rogers-dbt Oct 24, 2023
42f2784
set -e on odbc install
colin-rogers-dbt Oct 24, 2023
4f11291
set -e on odbc install
colin-rogers-dbt Oct 24, 2023
1384084
set -e on odbc install
colin-rogers-dbt Oct 24, 2023
543e321
sudo echo odbc.inst
colin-rogers-dbt Oct 24, 2023
307a9af
Merge branch 'main' into migrateOffCircleCI
mikealfare Oct 27, 2023
f380d46
remove postgres components
mikealfare Nov 2, 2023
c334f32
remove release related items
mikealfare Nov 2, 2023
19dcff3
remove irrelevant output
mikealfare Nov 2, 2023
01b0c0c
move long bash script into its own file
mikealfare Nov 2, 2023
d3d2844
update integration.yml to align with other adapters
mikealfare Nov 2, 2023
94af018
Merge branch 'main' into migrateOffCircleCI
mikealfare Nov 2, 2023
72daf90
revert name change
mikealfare Nov 2, 2023
4f63a3c
Merge remote-tracking branch 'origin/migrateOffCircleCI' into migrate…
mikealfare Nov 2, 2023
b43c9d1
revert name change
mikealfare Nov 2, 2023
91715d2
combine databricks and spark tests
mikealfare Nov 2, 2023
943a8dc
combine databricks and spark tests
mikealfare Nov 2, 2023
3d0dece
Add dagger
colin-rogers-dbt Nov 30, 2023
080b816
remove platform
colin-rogers-dbt Nov 30, 2023
c8477ce
add dagger setup
colin-rogers-dbt Jan 8, 2024
c0a37ae
add dagger setup
colin-rogers-dbt Jan 8, 2024
9b9dc79
Merge branch 'main' into migrateOffCircleCI
colin-rogers-dbt Jan 8, 2024
8c6a745
set env vars
colin-rogers-dbt Jan 8, 2024
6a6b4ce
Merge remote-tracking branch 'origin/migrateOffCircleCI' into migrate…
colin-rogers-dbt Jan 8, 2024
1ae321a
install requirements
colin-rogers-dbt Jan 8, 2024
6361429
install requirements
colin-rogers-dbt Jan 8, 2024
6bca5dc
add DEFAULT_ENV_VARS and test_path arg
colin-rogers-dbt Jan 8, 2024
f4293e0
remove circle ci
colin-rogers-dbt Jan 8, 2024
d398065
formatting
colin-rogers-dbt Jan 9, 2024
6108d44
update changie
colin-rogers-dbt Jan 9, 2024
d472f3b
Update .changes/unreleased/Under the Hood-20230929-161218.yaml
colin-rogers-dbt Jan 9, 2024
ce92bcf
formatting fixes and simplify env_var handling
colin-rogers-dbt Jan 9, 2024
0c4ed9e
Merge remote-tracking branch 'origin/migrateOffCircleCI' into migrate…
colin-rogers-dbt Jan 9, 2024
56b14bc
remove tox, update CONTRIBUTING.md and cleanup GHA workflows
colin-rogers-dbt Jan 9, 2024
9849c1c
remove tox, update CONTRIBUTING.md and cleanup GHA workflows
colin-rogers-dbt Jan 9, 2024
f9a4c58
install test reqs in main.yml
colin-rogers-dbt Jan 9, 2024
bbe17a8
install test reqs in main.yml
colin-rogers-dbt Jan 9, 2024
3f44e96
formatting
colin-rogers-dbt Jan 9, 2024
afd3866
remove tox from dev-requirements.txt and Makefile
colin-rogers-dbt Jan 10, 2024
259ebc7
clarify spark crt instantiation
colin-rogers-dbt Jan 10, 2024
a8a7010
add comments on python-version
colin-rogers-dbt Jan 10, 2024
fcf074b
initial migration changes
colin-rogers-dbt Jan 10, 2024
5da682a
Merge branch 'main' into feature/decouple-adapters-from-core
colin-rogers-dbt Jan 10, 2024
1b1fcec
unpin
colin-rogers-dbt Jan 10, 2024
0a2b73d
implement core / adapters decoupling
colin-rogers-dbt Jan 11, 2024
bd86ee1
fix list_relations
colin-rogers-dbt Jan 11, 2024
cb5e05c
fix typing and exception imports
colin-rogers-dbt Jan 11, 2024
fd7a22f
fix typing and exception imports
colin-rogers-dbt Jan 11, 2024
77df8b7
add changie
colin-rogers-dbt Jan 11, 2024
f216bb6
Merge branch 'main' into feature/decouple-adapters-from-core
colin-rogers-dbt Jan 11, 2024
dfd5885
replace dbt.common with dbt_common
colin-rogers-dbt Jan 12, 2024
3fc6d07
update setup.py
colin-rogers-dbt Jan 12, 2024
17607c1
add dbt-adapters
colin-rogers-dbt Jan 16, 2024
79d74aa
update setup.py
colin-rogers-dbt Jan 22, 2024
011c9b5
fix credentials import
colin-rogers-dbt Jan 22, 2024
a40b07c
fix dev-requirements.txt
colin-rogers-dbt Jan 22, 2024
8aac398
dagger improvements to caching and installing package under test
colin-rogers-dbt Jan 24, 2024
6edcdcf
update requirements
colin-rogers-dbt Jan 24, 2024
eeba17f
add cluster start fixture
colin-rogers-dbt Jan 24, 2024
f3a4c2d
update conftest.py
colin-rogers-dbt Jan 25, 2024
32c05bb
re-order dagger setup to reduce cache invalidation
colin-rogers-dbt Jan 25, 2024
e8e4543
remove dbt-core version dependency check
colin-rogers-dbt Jan 25, 2024
6 changes: 6 additions & 0 deletions .changes/unreleased/Under the Hood-20240111-114806.yaml
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Update import paths and list_relations to support decoupling adapters/core
time: 2024-01-11T11:48:06.120111-08:00
custom:
Author: colin-rogers-dbt
Issue: "972"
3 changes: 2 additions & 1 deletion dagger/run_dbt_spark_tests.py
@@ -2,6 +2,7 @@

import argparse
import sys
from typing import Dict

import anyio as anyio
import dagger as dagger
@@ -19,7 +20,7 @@
TESTING_ENV_VARS.update({"ODBC_DRIVER": "/opt/simba/spark/lib/64/libsparkodbc_sb64.so"})


def env_variables(envs: dict[str, str]):
def env_variables(envs: Dict[str, str]):
def env_variables_inner(ctr: dagger.Container):
for key, value in envs.items():
ctr = ctr.with_env_variable(key, value)
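A note on the one-line type change above: `dict[str, str]` is only valid as a runtime annotation on Python 3.9+, so importing `Dict` from `typing` keeps the Dagger script working on 3.8. Below is a minimal, Dagger-free sketch of the same curried-helper pattern; `FakeContainer` is an assumption standing in for `dagger.Container`:

from typing import Callable, Dict, Optional


class FakeContainer:
    """Stand-in for dagger.Container: each call returns a new container."""

    def __init__(self, env: Optional[Dict[str, str]] = None) -> None:
        self.env = dict(env or {})

    def with_env_variable(self, key: str, value: str) -> "FakeContainer":
        return FakeContainer({**self.env, key: value})


def env_variables(envs: Dict[str, str]) -> Callable[[FakeContainer], FakeContainer]:
    # Returning an inner function lets the whole mapping be applied as one
    # pipeline step, matching how the real script threads a container through.
    def env_variables_inner(ctr: FakeContainer) -> FakeContainer:
        for key, value in envs.items():
            ctr = ctr.with_env_variable(key, value)
        return ctr

    return env_variables_inner


apply_envs = env_variables({"ODBC_DRIVER": "/opt/simba/spark/lib/64/libsparkodbc_sb64.so"})
assert apply_envs(FakeContainer()).env["ODBC_DRIVER"].endswith(".so")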
2 changes: 1 addition & 1 deletion dbt/adapters/spark/column.py
@@ -2,7 +2,7 @@
from typing import Any, Dict, Optional, TypeVar, Union

from dbt.adapters.base.column import Column
from dbt.dataclass_schema import dbtClassMixin
from dbt.common.dataclass_schema import dbtClassMixin

Self = TypeVar("Self", bound="SparkColumn")

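The only change to column.py is the import path for `dbtClassMixin`. As a hedged aside (not part of this PR), an adapter that had to straddle both layouts during the transition could guard the import; this try/except fallback is an illustrative assumption, not dbt's documented practice:

try:
    # New home after the decoupling work in this PR.
    from dbt.common.dataclass_schema import dbtClassMixin
except ImportError:
    # Pre-decoupling dbt-core releases exposed it here.
    from dbt.dataclass_schema import dbtClassMixin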
51 changes: 24 additions & 27 deletions dbt/adapters/spark/connections.py
@@ -1,11 +1,13 @@
from contextlib import contextmanager

import dbt.exceptions
from dbt.adapters.base import Credentials
from dbt.adapters.contracts.connection import AdapterResponse, ConnectionState, Connection
from dbt.adapters.events.logging import AdapterLogger
from dbt.adapters.exceptions import FailedToConnectError
from dbt.adapters.sql import SQLConnectionManager
from dbt.contracts.connection import ConnectionState, AdapterResponse
from dbt.events import AdapterLogger
from dbt.utils import DECIMALS
from dbt.common.exceptions import DbtConfigError, DbtRuntimeError, DbtDatabaseError

from dbt.common.utils.encoding import DECIMALS
from dbt.adapters.spark import __version__

try:
@@ -22,8 +24,7 @@
pyodbc = None
from datetime import datetime
import sqlparams
from dbt.contracts.connection import Connection
from dbt.dataclass_schema import StrEnum
from dbt.common.dataclass_schema import StrEnum
from dataclasses import dataclass, field
from typing import Any, Dict, Optional, Union, Tuple, List, Generator, Iterable, Sequence

@@ -92,15 +93,15 @@ def cluster_id(self) -> Optional[str]:

def __post_init__(self) -> None:
if self.method is None:
raise dbt.exceptions.DbtRuntimeError("Must specify `method` in profile")
raise DbtRuntimeError("Must specify `method` in profile")
if self.host is None:
raise dbt.exceptions.DbtRuntimeError("Must specify `host` in profile")
raise DbtRuntimeError("Must specify `host` in profile")
if self.schema is None:
raise dbt.exceptions.DbtRuntimeError("Must specify `schema` in profile")
raise DbtRuntimeError("Must specify `schema` in profile")

# spark classifies database and schema as the same thing
if self.database is not None and self.database != self.schema:
raise dbt.exceptions.DbtRuntimeError(
raise DbtRuntimeError(
f" schema: {self.schema} \n"
f" database: {self.database} \n"
f"On Spark, database must be omitted or have the same value as"
@@ -112,7 +113,7 @@ def __post_init__(self) -> None:
try:
import pyodbc # noqa: F401
except ImportError as e:
raise dbt.exceptions.DbtRuntimeError(
raise DbtRuntimeError(
f"{self.method} connection method requires "
"additional dependencies. \n"
"Install the additional required dependencies with "
@@ -121,7 +122,7 @@ def __post_init__(self) -> None:
) from e

if self.method == SparkConnectionMethod.ODBC and self.cluster and self.endpoint:
raise dbt.exceptions.DbtRuntimeError(
raise DbtRuntimeError(
"`cluster` and `endpoint` cannot both be set when"
f" using {self.method} method to connect to Spark"
)
@@ -130,7 +131,7 @@ def __post_init__(self) -> None:
self.method == SparkConnectionMethod.HTTP
or self.method == SparkConnectionMethod.THRIFT
) and not (ThriftState and THttpClient and hive):
raise dbt.exceptions.DbtRuntimeError(
raise DbtRuntimeError(
f"{self.method} connection method requires "
"additional dependencies. \n"
"Install the additional required dependencies with "
@@ -141,7 +142,7 @@ def __post_init__(self) -> None:
try:
import pyspark # noqa: F401
except ImportError as e:
raise dbt.exceptions.DbtRuntimeError(
raise DbtRuntimeError(
f"{self.method} connection method requires "
"additional dependencies. \n"
"Install the additional required dependencies with "
@@ -291,13 +292,11 @@ def execute(self, sql: str, bindings: Optional[List[Any]] = None) -> None:
if poll_state.errorMessage:
logger.debug("Poll response: {}".format(poll_state))
logger.debug("Poll status: {}".format(state))
raise dbt.exceptions.DbtDatabaseError(poll_state.errorMessage)
raise DbtDatabaseError(poll_state.errorMessage)

elif state not in STATE_SUCCESS:
status_type = ThriftState._VALUES_TO_NAMES.get(state, "Unknown<{!r}>".format(state))
raise dbt.exceptions.DbtDatabaseError(
"Query failed with status: {}".format(status_type)
)
raise DbtDatabaseError("Query failed with status: {}".format(status_type))

logger.debug("Poll status: {}, query complete".format(state))

@@ -358,9 +357,9 @@ def exception_handler(self, sql: str) -> Generator[None, None, None]:
thrift_resp = exc.args[0]
if hasattr(thrift_resp, "status"):
msg = thrift_resp.status.errorMessage
raise dbt.exceptions.DbtRuntimeError(msg)
raise DbtRuntimeError(msg)
else:
raise dbt.exceptions.DbtRuntimeError(str(exc))
raise DbtRuntimeError(str(exc))

def cancel(self, connection: Connection) -> None:
connection.handle.cancel()
@@ -390,7 +389,7 @@ def validate_creds(cls, creds: Any, required: Iterable[str]) -> None:

for key in required:
if not hasattr(creds, key):
raise dbt.exceptions.DbtProfileError(
raise DbtConfigError(
"The config '{}' is required when using the {} method"
" to connect to Spark".format(key, method)
)
@@ -481,7 +480,7 @@ def open(cls, connection: Connection) -> Connection:
endpoint=creds.endpoint
)
else:
raise dbt.exceptions.DbtProfileError(
raise DbtConfigError(
"Either `cluster` or `endpoint` must set when"
" using the odbc method to connect to Spark"
)
@@ -525,9 +524,7 @@ def open(cls, connection: Connection) -> Connection:
Connection(server_side_parameters=creds.server_side_parameters)
)
else:
raise dbt.exceptions.DbtProfileError(
f"invalid credential method: {creds.method}"
)
raise DbtConfigError(f"invalid credential method: {creds.method}")
break
except Exception as e:
exc = e
@@ -537,7 +534,7 @@ def open(cls, connection: Connection) -> Connection:
msg = "Failed to connect"
if creds.token is not None:
msg += ", is your token valid?"
raise dbt.exceptions.FailedToConnectError(msg) from e
raise FailedToConnectError(msg) from e
retryable_message = _is_retryable_error(e)
if retryable_message and creds.connect_retries > 0:
msg = (
@@ -558,7 +555,7 @@ def open(cls, connection: Connection) -> Connection:
logger.warning(msg)
time.sleep(creds.connect_timeout)
else:
raise dbt.exceptions.FailedToConnectError("failed to connect") from e
raise FailedToConnectError("failed to connect") from e
else:
raise exc # type: ignore

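The connections.py hunks above replace every `dbt.exceptions.*` reference with classes from `dbt.common.exceptions` and `dbt.adapters.exceptions`, but the retry flow in `open` is untouched. Here is a self-contained sketch of that flow; `connect`, `is_retryable`, and the default counts are illustrative assumptions standing in for the credentials-driven logic and `_is_retryable_error`:

import time
from typing import Any, Callable, Optional


def open_with_retries(
    connect: Callable[[], Any],
    is_retryable: Callable[[Exception], Optional[str]],
    connect_retries: int = 2,
    connect_timeout: float = 1.0,
) -> Any:
    for attempt in range(1 + connect_retries):
        try:
            return connect()
        except Exception as e:
            if is_retryable(e) and attempt < connect_retries:
                # Transient failure (e.g. a cluster still starting): wait, retry.
                time.sleep(connect_timeout)
            else:
                # Out of attempts, or a non-retryable error: surface the cause.
                raise RuntimeError("failed to connect") from e
    raise AssertionError("unreachable")


attempts = {"n": 0}

def flaky() -> str:
    attempts["n"] += 1
    if attempts["n"] < 3:
        raise ConnectionError("cluster is pending")
    return "handle"

assert open_with_retries(flaky, lambda e: str(e) or None, connect_timeout=0.0) == "handle"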
63 changes: 40 additions & 23 deletions dbt/adapters/spark/impl.py
@@ -1,18 +1,31 @@
import os
import re
from concurrent.futures import Future
from dataclasses import dataclass
from typing import Any, Dict, Iterable, List, Optional, Union, Type, Tuple, Callable, Set
from typing import (
Any,
Dict,
Iterable,
List,
Optional,
Union,
Type,
Tuple,
Callable,
Set,
FrozenSet,
)

from dbt.adapters.base.relation import InformationSchema
from dbt.contracts.graph.manifest import Manifest
from dbt.adapters.contracts.connection import AdapterResponse
from dbt.adapters.events.logging import AdapterLogger
from dbt.common.exceptions import DbtRuntimeError, CompilationError
from dbt.common.utils import AttrDict, executor

from typing_extensions import TypeAlias

import agate

import dbt
import dbt.exceptions

from dbt.adapters.base import AdapterConfig, PythonJobHelper
from dbt.adapters.base.impl import catch_as_completed, ConstraintSupport
from dbt.adapters.sql import SQLAdapter
@@ -24,14 +37,16 @@
AllPurposeClusterPythonJobHelper,
)
from dbt.adapters.base import BaseRelation
from dbt.clients.agate_helper import DEFAULT_TYPE_TESTER
from dbt.contracts.connection import AdapterResponse
from dbt.contracts.graph.nodes import ConstraintType
from dbt.contracts.relation import RelationType
from dbt.events import AdapterLogger
from dbt.utils import executor, AttrDict
from dbt.adapters.contracts.relation import RelationType, RelationConfig
from dbt.common.clients.agate_helper import DEFAULT_TYPE_TESTER
from dbt.common.contracts.constraints import ConstraintType

logger = AdapterLogger("Spark")
packages = ["pyhive.hive", "thrift.transport", "thrift.protocol"]
log_level = os.getenv("DBT_SPARK_LOG_LEVEL", "ERROR")
for package in packages:
logger.debug(f"Setting {package} logging to {log_level}")
logger.set_adapter_dependency_log_level(package, log_level)

GET_COLUMNS_IN_RELATION_RAW_MACRO_NAME = "get_columns_in_relation_raw"
LIST_SCHEMAS_MACRO_NAME = "list_schemas"
@@ -144,7 +159,7 @@ def _get_relation_information(self, row: agate.Row) -> RelationInfo:
try:
_schema, name, _, information = row
except ValueError:
raise dbt.exceptions.DbtRuntimeError(
raise DbtRuntimeError(
f'Invalid value from "show tables extended ...", got {len(row)} values, expected 4'
)

@@ -155,7 +170,7 @@ def _get_relation_information_using_describe(self, row: agate.Row) -> RelationInfo:
try:
_schema, name, _ = row
except ValueError:
raise dbt.exceptions.DbtRuntimeError(
raise DbtRuntimeError(
f'Invalid value from "show tables ...", got {len(row)} values, expected 3'
)

@@ -164,7 +179,7 @@ def _get_relation_information_using_describe(self, row: agate.Row) -> RelationInfo:
table_results = self.execute_macro(
DESCRIBE_TABLE_EXTENDED_MACRO_NAME, kwargs={"table_name": table_name}
)
except dbt.exceptions.DbtRuntimeError as e:
except DbtRuntimeError as e:
logger.debug(f"Error while retrieving information about {table_name}: {e.msg}")
table_results = AttrDict()

@@ -219,7 +234,7 @@ def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[
row_list=show_table_extended_rows,
relation_info_func=self._get_relation_information,
)
except dbt.exceptions.DbtRuntimeError as e:
except DbtRuntimeError as e:
errmsg = getattr(e, "msg", "")
if f"Database '{schema_relation}' not found" in errmsg:
return []
@@ -236,7 +251,7 @@ def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[
row_list=show_table_rows,
relation_info_func=self._get_relation_information_using_describe,
)
except dbt.exceptions.DbtRuntimeError as e:
except DbtRuntimeError as e:
description = "Error while retrieving information about"
logger.debug(f"{description} {schema_relation}: {e.msg}")
return []
@@ -298,7 +313,7 @@ def get_columns_in_relation(self, relation: BaseRelation) -> List[SparkColumn]:
GET_COLUMNS_IN_RELATION_RAW_MACRO_NAME, kwargs={"relation": relation}
)
columns = self.parse_describe_extended(relation, rows)
except dbt.exceptions.DbtRuntimeError as e:
except DbtRuntimeError as e:
# spark would throw error when table doesn't exist, where other
# CDWs would just return an empty list; normalizing the behavior here
errmsg = getattr(e, "msg", "")
@@ -352,11 +367,13 @@ def _get_columns_for_catalog(self, relation: BaseRelation) -> Iterable[Dict[str,
yield as_dict

def get_catalog(
self, manifest: Manifest, selected_nodes: Optional[Set] = None
self,
relation_configs: Iterable[RelationConfig],
used_schemas: FrozenSet[Tuple[str, str]],
) -> Tuple[agate.Table, List[Exception]]:
schema_map = self._get_catalog_schemas(manifest)
schema_map = self._get_catalog_schemas(relation_configs)
if len(schema_map) > 1:
raise dbt.exceptions.CompilationError(
raise CompilationError(
f"Expected only one database in get_catalog, found " f"{list(schema_map)}"
)

Expand All @@ -371,7 +388,7 @@ def get_catalog(
self._get_one_catalog,
info,
[schema],
manifest,
relation_configs,
)
)
catalogs, exceptions = catch_as_completed(futures)
Expand All @@ -381,10 +398,10 @@ def _get_one_catalog(
self,
information_schema: InformationSchema,
schemas: Set[str],
manifest: Manifest,
used_schemas: FrozenSet[Tuple[str, str]],
) -> agate.Table:
if len(schemas) != 1:
raise dbt.exceptions.CompilationError(
raise CompilationError(
f"Expected only one schema in spark _get_one_catalog, found " f"{schemas}"
)

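The impl.py hunks end with `get_catalog` and `_get_one_catalog` taking `relation_configs` and `used_schemas` instead of a full `Manifest`, while keeping the one-future-per-schema fan-out through `catch_as_completed`. A standard-library-only sketch of that fan-out; `fetch_one_catalog` and the schema names are assumptions standing in for `_get_one_catalog` and dbt's executor plumbing:

from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Dict, List, Tuple


def fetch_one_catalog(schema: str) -> List[Dict[str, str]]:
    # Stand-in for _get_one_catalog: describe every relation in one schema.
    if schema == "broken":
        raise RuntimeError(f"cannot describe schema {schema}")
    return [{"table_schema": schema, "table_name": "example"}]


def get_catalog(schemas: List[str]) -> Tuple[List[Dict[str, str]], List[Exception]]:
    rows: List[Dict[str, str]] = []
    exceptions: List[Exception] = []
    with ThreadPoolExecutor() as pool:
        futures = {pool.submit(fetch_one_catalog, s): s for s in schemas}
        for future in as_completed(futures):
            try:
                rows.extend(future.result())
            except Exception as e:
                # Mirror catch_as_completed: collect failures, keep going.
                exceptions.append(e)
    return rows, exceptions


tables, errors = get_catalog(["default", "broken"])
assert len(tables) == 1 and len(errors) == 1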