convert to use struct logging for adapters (dbt-labs#251)
* convert to use struct logging for adapters

* add changelog

* Update integration tests

Co-authored-by: Jeremy Cohen <[email protected]>
2 people authored and Vinoth Govindarajan committed Nov 17, 2021
1 parent 38467e3 commit 0fae2da
Showing 4 changed files with 19 additions and 9 deletions.

CHANGELOG.md (3 changes: 3 additions & 0 deletions)
@@ -11,6 +11,9 @@
 - Add support for `on_schema_change` config in incremental models: `ignore`, `fail`, `append_new_columns`. For `sync_all_columns`, removing columns is not supported by Apache Spark or Delta Lake ([#198](https://github.com/dbt-labs/dbt-spark/issues/198), [#226](https://github.com/dbt-labs/dbt-spark/issues/226), [#229](https://github.com/dbt-labs/dbt-spark/pull/229))
 - Add `persist_docs` call to incremental model ([#224](https://github.com/dbt-labs/dbt-spark/issues/224), [#234](https://github.com/dbt-labs/dbt-spark/pull/234))
 
+### Under the hood
+- Add support for structured logging [#251](https://github.com/dbt-labs/dbt-spark/pull/251)
+
 ### Contributors
 - [@binhnefits](https://github.com/binhnefits) ([#234](https://github.com/dbt-labs/dbt-spark/pull/234))
 
dbt/adapters/spark/connections.py (4 changes: 3 additions & 1 deletion)
@@ -4,7 +4,7 @@
 from dbt.adapters.base import Credentials
 from dbt.adapters.sql import SQLConnectionManager
 from dbt.contracts.connection import ConnectionState
-from dbt.logger import GLOBAL_LOGGER as logger
+from dbt.events import AdapterLogger
 from dbt.utils import DECIMALS
 from dbt.adapters.spark import __version__

@@ -42,6 +42,8 @@
 import base64
 import time
 
+logger = AdapterLogger("Spark")
+
 NUMBERS = DECIMALS + (int, float)
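
The change in both hunks is the same: replace the process-wide `GLOBAL_LOGGER` with a named `AdapterLogger`, so every message this module emits is tagged with the adapter name in dbt's structured event stream. A minimal sketch of the resulting usage pattern; the `open_session` helper and its arguments are illustrative, not part of this diff:

from dbt.events import AdapterLogger

# One module-level logger per plugin; "Spark" is the adapter name
# attached to every event this logger fires.
logger = AdapterLogger("Spark")


def open_session(host: str, port: int) -> None:
    # Hypothetical helper, shown only to demonstrate the call style.
    logger.debug(f"Opening a session on {host}:{port}")
    try:
        ...  # establish the Thrift/HTTP connection here
    except Exception as exc:
        logger.error(f"Failed to open session: {exc}")
        raise

Because the adapter name travels with each event, dbt's log output (text or JSON) can attribute these messages to the Spark plugin without manual string prefixes.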

dbt/adapters/spark/impl.py (4 changes: 3 additions & 1 deletion)
@@ -16,9 +16,11 @@
 from dbt.adapters.spark import SparkColumn
 from dbt.adapters.base import BaseRelation
 from dbt.clients.agate_helper import DEFAULT_TYPE_TESTER
-from dbt.logger import GLOBAL_LOGGER as logger
+from dbt.events import AdapterLogger
 from dbt.utils import executor
 
+logger = AdapterLogger("Spark")
+
 GET_COLUMNS_IN_RELATION_MACRO_NAME = 'get_columns_in_relation'
 LIST_SCHEMAS_MACRO_NAME = 'list_schemas'
 LIST_RELATIONS_MACRO_NAME = 'list_relations_without_caching'

tests/integration/base.py (17 changes: 10 additions & 7 deletions)
@@ -23,9 +23,15 @@
 from dbt.clients.jinja import template_cache
 from dbt.config import RuntimeConfig
 from dbt.context import providers
-from dbt.logger import GLOBAL_LOGGER as logger, log_manager
+from dbt.logger import log_manager
+from dbt.events.functions import (
+    capture_stdout_logs, fire_event, setup_event_logger, stop_capture_stdout_logs
+)
+from dbt.events import AdapterLogger
 from dbt.contracts.graph.manifest import Manifest
 
+logger = AdapterLogger("Spark")
+
 INITIAL_ROOT = os.getcwd()

@@ -269,6 +275,7 @@ def setUp(self):
         os.chdir(self.initial_dir)
         # before we go anywhere, collect the initial path info
         self._logs_dir = os.path.join(self.initial_dir, 'logs', self.prefix)
+        setup_event_logger(self._logs_dir)
         _really_makedirs(self._logs_dir)
         self.test_original_source_path = _pytest_get_test_root()
         self.test_root_dir = self._generate_test_root_dir()

@@ -439,16 +446,12 @@ def run_dbt(self, args=None, expect_pass=True, profiles_dir=True):

     def run_dbt_and_capture(self, *args, **kwargs):
         try:
-            initial_stdout = log_manager.stdout
-            initial_stderr = log_manager.stderr
-            stringbuf = io.StringIO()
-            log_manager.set_output_stream(stringbuf)
-
+            stringbuf = capture_stdout_logs()
             res = self.run_dbt(*args, **kwargs)
             stdout = stringbuf.getvalue()
 
         finally:
-            log_manager.set_output_stream(initial_stdout, initial_stderr)
+            stop_capture_stdout_logs()
 
         return res, stdout
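
The test-harness change follows the same migration: instead of saving and restoring `log_manager` streams, stdout capture now goes through dbt's event subsystem. A condensed sketch of the new pattern as a standalone helper, where `run_fn` stands in for the harness's `run_dbt` call:

from dbt.events.functions import capture_stdout_logs, stop_capture_stdout_logs


def run_and_capture(run_fn):
    # capture_stdout_logs() hands back the buffer that dbt's event logger
    # writes to until stop_capture_stdout_logs() is called, so the
    # finally block must always undo the capture.
    try:
        stringbuf = capture_stdout_logs()
        result = run_fn()
        stdout = stringbuf.getvalue()
    finally:
        stop_capture_stdout_logs()
    return result, stdout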
