[DBMON-2435] serialize payload with type bytes in json dumps #15763

Merged (11 commits) on Sep 8, 2023
4 changes: 4 additions & 0 deletions datadog_checks_base/CHANGELOG.md
@@ -7,6 +7,10 @@
* Upgrade clickhouse-driver to 0.2.6 on Python 3 ([#15726](https://github.com/DataDog/integrations-core/pull/15726))
* Upgrade lz4 to 4.3.2 on Python 3 ([#15746](https://github.com/DataDog/integrations-core/pull/15746))

***Fixed***:

* Fix type `bytes` is not JSON serializable for dbm events ([#15763](https://github.com/DataDog/integrations-core/pull/15763))

## 33.1.0 / 2023-08-25

***Security***:
2 changes: 2 additions & 0 deletions datadog_checks_base/datadog_checks/base/utils/db/utils.py
@@ -186,6 +186,8 @@ def default_json_event_encoding(o):
        return o.isoformat()
    if isinstance(o, IPv4Address):
        return str(o)
    if isinstance(o, bytes):
        return o.decode('utf-8')
    raise TypeError


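For context, a minimal standalone sketch of the pattern this change relies on: `json.dumps` calls the `default=` hook for any value it cannot serialize natively, so adding a `bytes` branch lets dbm event payloads carry raw byte values. The `encode_event_value` helper below is hypothetical and mirrors only the branch added in this diff, not the full `default_json_event_encoding` function.

```python
import json  # sketch only; the check base uses datadog_checks.base.utils.serialization.json


def encode_event_value(o):
    # Hypothetical stand-in for the bytes branch added above: decode raw
    # bytes (e.g. hashes read from the database) into UTF-8 strings.
    if isinstance(o, bytes):
        return o.decode('utf-8')
    raise TypeError


payload = {"query_signature": "abc123", "query_hash": b"raw-bytes-value"}

# Without the default= hook this would raise
# "TypeError: Object of type bytes is not JSON serializable".
print(json.dumps(payload, default=encode_event_value))
```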
22 changes: 22 additions & 0 deletions datadog_checks_base/tests/base/utils/db/test_util.py
@@ -2,8 +2,11 @@
# (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import datetime
import decimal
import time
from concurrent.futures.thread import ThreadPoolExecutor
from ipaddress import IPv4Address

import mock
import pytest
@@ -14,6 +17,7 @@
    ConstantRateLimiter,
    DBMAsyncJob,
    RateLimitingTTLCache,
    default_json_event_encoding,
    obfuscate_sql_with_metadata,
    resolve_db_host,
)
@@ -254,3 +258,21 @@ def test_dbm_async_job_inactive_stop(aggregator):
    job.run_job_loop([])
    job._job_loop_future.result()
    aggregator.assert_metric("dd.test-dbms.async_job.inactive_stop", tags=['job:test-job'])


@pytest.mark.parametrize(
    "input",
    [
        pytest.param({"foo": "bar"}, id='dict'),
        pytest.param({"foo": "bar", "baz": 1}, id='dict-with-multiple-keys'),
        pytest.param({"foo": "bar", "baz": 1, "qux": {"quux": "corge"}}, id='nested-dict'),
        pytest.param({"foo": b'bar'}, id='dict-with-bytes'),
        pytest.param({"foo": decimal.Decimal("1.0")}, id='dict-with-decimal'),
        pytest.param({"foo": datetime.datetime(2020, 1, 1, 0, 0, 0)}, id='dict-with-datetime'),
        pytest.param({"foo": datetime.date(2020, 1, 1)}, id='dict-with-date'),
        pytest.param({"foo": IPv4Address(u"192.168.1.1")}, id='dict-with-IPv4Address'),
    ],
)
def test_default_json_event_encoding(input):
    # assert that the default json event encoding can handle all defined types without raising TypeError
    assert json.dumps(input, default=default_json_event_encoding)
1 change: 1 addition & 0 deletions sqlserver/CHANGELOG.md
@@ -12,6 +12,7 @@

***Fixed***:

* Fix type `bytes` is not JSON serializable for dbm events ([#15763](https://github.com/DataDog/integrations-core/pull/15763))
* Fix sqlserver file stats metrics for Azure SQL DB ([#15695](https://github.com/DataDog/integrations-core/pull/15695))

## 14.0.0 / 2023-08-18
5 changes: 2 additions & 3 deletions sqlserver/datadog_checks/sqlserver/activity.py
@@ -8,7 +8,6 @@
import time

from datadog_checks.base import is_affirmative
from datadog_checks.base.utils.common import to_native_string
from datadog_checks.base.utils.db.sql import compute_sql_signature
from datadog_checks.base.utils.db.utils import DBMAsyncJob, default_json_event_encoding, obfuscate_sql_with_metadata
from datadog_checks.base.utils.serialization import json
@@ -135,8 +134,8 @@
]


def _hash_to_hex(hash):
    return to_native_string(binascii.hexlify(hash))
def _hash_to_hex(hash) -> str:
    return binascii.hexlify(hash).decode("utf-8")
Contributor Author commented:

`to_native_string` converts the bytes value `b'xxx'` into the string `"b'xxx'"`. The correct `str` representation of the hash should instead be obtained by decoding it.

def agent_check_getter(self):
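As an illustration of the comment above, the standalone sketch below (assuming only the standard-library `binascii`, and reusing the `_hash_to_hex` body from this diff) shows that decoding the `hexlify` output yields a plain `str`, which serializes cleanly and matches the values expected by the new `test_hash_to_hex` test further down.

```python
import binascii


def _hash_to_hex(hash) -> str:
    # binascii.hexlify returns bytes (e.g. b'3078...'); decoding produces the
    # plain str that json.dumps can emit without any custom handling.
    return binascii.hexlify(hash).decode("utf-8")


# A query_hash as it comes back from SQL Server, as raw bytes:
print(_hash_to_hex(b'0xBA61D813C4878164'))
# -> '307842413631443831334334383738313634'
```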
20 changes: 19 additions & 1 deletion sqlserver/tests/test_activity.py
@@ -20,7 +20,7 @@

from datadog_checks.base.utils.db.utils import DBMAsyncJob, default_json_event_encoding
from datadog_checks.sqlserver import SQLServer
from datadog_checks.sqlserver.activity import DM_EXEC_REQUESTS_COLS
from datadog_checks.sqlserver.activity import DM_EXEC_REQUESTS_COLS, _hash_to_hex
from datadog_checks.sqlserver.utils import extract_sql_comments, is_statement_proc

from .common import CHECK_NAME
@@ -343,6 +343,11 @@ def run_queries(conn, queries):
    assert tx3["query_hash"]
    assert tx3["query_plan_hash"]

    assert isinstance(tx2["query_hash"], str)
    assert isinstance(tx2["query_plan_hash"], str)
    assert isinstance(tx3["query_hash"], str)
    assert isinstance(tx3["query_plan_hash"], str)

    for t in [t1, t2, t3]:
        t.join()

@@ -868,6 +873,19 @@ def test_async_job_enabled(dd_run_check, dbm_instance, activity_enabled):
    assert check.activity._job_loop_future is None


@pytest.mark.parametrize(
    "input,expected",
    [
        (b'0xBA61D813C4878164', '307842413631443831334334383738313634'),
        (b'0x0000000000000000', '307830303030303030303030303030303030'),
    ],
)
def test_hash_to_hex(input, expected):
    output = _hash_to_hex(input)
    assert output == expected
    assert type(output) == str


@pytest.mark.integration
@pytest.mark.usefixtures('dd_environment')
def test_async_job_inactive_stop(aggregator, dd_run_check, dbm_instance):