
Commit 749dfac

Merge branch '1.x' into yunkim/gunicorn-test-scenarios

Yun-Kim authored Jan 3, 2023
2 parents f17eb5e + 6de9b70
Showing 44 changed files with 715 additions and 351 deletions.
30 changes: 2 additions & 28 deletions .circleci/config.yml
@@ -191,20 +191,9 @@ commands:
parameters:
pattern:
type: string
wait:
type: string
default: ""
steps:
- checkout
- restore_tox_cache
- when:
condition:
<< parameters.wait >>
steps:
- setup_riot
- run:
name: "Waiting for << parameters.wait >>"
command: riot -v run 'wait' << parameters.wait >>
- start_docker_services:
env: SNAPSHOT_CI=1
services: memcached redis testagent
@@ -220,24 +209,13 @@ commands:
parameters:
pattern:
type: string
wait:
type: string
default: ""
store_coverage:
type: boolean
default: true
steps:
- checkout
- setup_tox
- restore_tox_cache
- when:
condition:
<< parameters.wait >>
steps:
- setup_riot
- run:
name: "Waiting for << parameters.wait >>"
command: riot -v run 'wait' << parameters.wait >>
- run:
name: "Run scripts/run-tox-scenario"
command: scripts/run-tox-scenario '<< parameters.pattern >>'
@@ -1076,9 +1054,9 @@ jobs:
- VP_TEST_PASSWORD=abc123
- VP_TEST_DATABASE=docker
steps:
- run_tox_scenario:
- run_test:
wait: vertica
pattern: '^vertica_contrib-'
pattern: 'vertica'

wsgi:
<<: *machine_executor
@@ -1336,7 +1314,3 @@ workflows:
# - profile-windows-311: *requires_pre_check
# Final reports
- coverage_report: *requires_tests

test_latest:
<<: *workflow_test

2 changes: 1 addition & 1 deletion .github/workflows/build_deploy.yml
@@ -106,7 +106,7 @@ jobs:
platforms: all

- name: Build wheels python 3.6 and above
uses: pypa/cibuildwheel@v2.11.3
uses: pypa/cibuildwheel@v2.11.4
env:
# configure cibuildwheel to build native archs ('auto'), and some
# emulated ones
2 changes: 1 addition & 1 deletion .github/workflows/stale.yml
@@ -12,7 +12,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v6
- uses: actions/stale@v7
with:
# Increase the total API operations from 30 to 200
# DEV: GitHub Actions have an API rate limit of 1000 operations per hour per repository
2 changes: 1 addition & 1 deletion .github/workflows/test_frameworks.yml
@@ -157,7 +157,7 @@ jobs:
repository: tiangolo/fastapi
ref: 0.75.0
path: fastapi
- uses: actions/cache@v3.0.11
- uses: actions/cache@v3.2.2
id: cache
with:
path: ${{ env.pythonLocation }}
7 changes: 5 additions & 2 deletions ddtrace/appsec/_remoteconfiguration.py
@@ -21,8 +21,11 @@
log = get_logger(__name__)


def enable_appsec_rc(tracer):
# type: (Tracer) -> None
def enable_appsec_rc():
# type: () -> None
# Import tracer here to avoid a circular import
from ddtrace import tracer

if _appsec_rc_features_is_enabled():
RemoteConfig.register(ASM_FEATURES_PRODUCT, appsec_rc_reload_features(tracer))
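The rewritten enable_appsec_rc no longer receives the tracer as an argument; it imports it inside the function body so that ddtrace can import this module during its own initialization without creating an import cycle. A minimal sketch of the deferred-import pattern (the body here is only a placeholder for the real registration logic):

def enable_appsec_rc():
    # type: () -> None
    # Deferred import: resolved only when the function runs, so importing this
    # module while ddtrace is still initializing does not complete an import cycle.
    from ddtrace import tracer

    return tracer  # the real function registers remote-config products with it

# Call sites no longer pass the tracer explicitly:
enable_appsec_rc()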

2 changes: 1 addition & 1 deletion ddtrace/appsec/utils.py
@@ -6,7 +6,7 @@

def _appsec_rc_features_is_enabled():
# type: () -> bool
if asbool(os.environ.get("DD_REMOTE_CONFIGURATION_ENABLED", "false")):
if asbool(os.environ.get("DD_REMOTE_CONFIGURATION_ENABLED", "true")):
return APPSEC_ENV not in os.environ
return False
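With the default flipped to "true", remote configuration is now considered enabled unless the environment variable explicitly opts out (and AppSec must still not be forced on via its own env var). A rough stand-in for the check, approximating the library's asbool helper with a plain string comparison:

import os

def remote_config_enabled():
    # Approximation of the new default: enabled unless explicitly disabled.
    return os.environ.get("DD_REMOTE_CONFIGURATION_ENABLED", "true").lower() in ("1", "true", "yes")

print(remote_config_enabled())   # True when the variable is unset
os.environ["DD_REMOTE_CONFIGURATION_ENABLED"] = "false"
print(remote_config_enabled())   # False: the explicit opt-out still works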

5 changes: 0 additions & 5 deletions ddtrace/bootstrap/sitecustomize.py
@@ -18,7 +18,6 @@


from ddtrace import config # noqa
from ddtrace import constants
from ddtrace.debugging._config import config as debugger_config
from ddtrace.internal.logger import get_logger # noqa
from ddtrace.internal.runtime.runtime_metrics import RuntimeWorker
@@ -114,10 +113,6 @@ def update_patched_modules():

patch_all(**EXTRA_PATCHED_MODULES)

dd_env = os.getenv("DD_ENV")
if dd_env:
tracer.set_tags({constants.ENV_KEY: dd_env})

if "DD_TRACE_GLOBAL_TAGS" in os.environ:
env_tags = os.getenv("DD_TRACE_GLOBAL_TAGS")
tracer.set_tags(parse_tags_str(env_tags))
25 changes: 21 additions & 4 deletions ddtrace/contrib/botocore/patch.py
@@ -10,6 +10,7 @@
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple

import botocore.client
import botocore.exceptions
@@ -45,6 +46,8 @@
MAX_KINESIS_DATA_SIZE = 1 << 20 # 1MB
MAX_EVENTBRIDGE_DETAIL_SIZE = 1 << 18 # 256KB

LINE_BREAK = "\n"

log = get_logger(__name__)


@@ -176,8 +179,17 @@ def inject_trace_to_eventbridge_detail(params, span):
entry["Detail"] = detail_json


def get_json_from_str(data_str):
# type: (str) -> Tuple[str, Optional[Dict[str, Any]]]
data_obj = json.loads(data_str)

if data_str.endswith(LINE_BREAK):
return LINE_BREAK, data_obj
return "", data_obj


def get_kinesis_data_object(data):
# type: (str) -> Optional[Dict[str, Any]]
# type: (str) -> Tuple[str, Optional[Dict[str, Any]]]
"""
:data: the data from a kinesis stream
@@ -190,13 +202,14 @@ def get_kinesis_data_object(data):

# check if data is a json string
try:
return json.loads(data)
return get_json_from_str(data)
except ValueError:
pass

# check if data is a base64 encoded json string
try:
return json.loads(base64.b64decode(data).decode("ascii"))
data_str = base64.b64decode(data).decode("ascii")
return get_json_from_str(data_str)
except ValueError:
raise TraceInjectionDecodingError("Unable to parse kinesis streams data string")

@@ -216,11 +229,15 @@ def inject_trace_to_kinesis_stream_data(record, span):
return

data = record["Data"]
data_obj = get_kinesis_data_object(data)
line_break, data_obj = get_kinesis_data_object(data)
data_obj["_datadog"] = {}
HTTPPropagator.inject(span.context, data_obj["_datadog"])
data_json = json.dumps(data_obj)

# if original string had a line break, add it back
if line_break:
data_json += line_break

# check if data size will exceed max size with headers
data_size = len(data_json)
if data_size >= MAX_KINESIS_DATA_SIZE:
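The net effect of the Kinesis changes: when the record's Data payload is a JSON string that ends in a newline, that newline is restored after the trace context is injected, so newline-delimited consumers keep working. A simplified sketch of the round trip (the function name is illustrative):

import json

def inject_trace_context(data_str, headers):
    # Reduced mirror of the behavior above.
    line_break = "\n" if data_str.endswith("\n") else ""
    data_obj = json.loads(data_str)
    data_obj["_datadog"] = headers
    return json.dumps(data_obj) + line_break

out = inject_trace_context('{"event": "purchase"}\n', {"x-datadog-trace-id": "123"})
print(repr(out))  # '{"event": "purchase", "_datadog": {"x-datadog-trace-id": "123"}}\n'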
2 changes: 0 additions & 2 deletions ddtrace/debugging/_config.py
@@ -5,5 +5,3 @@
log = get_logger(__name__)

config = DynamicInstrumentationConfig()

log.debug("Dynamic instrumentation configuration: %r", config.__dict__)
17 changes: 13 additions & 4 deletions ddtrace/debugging/_debugger.py
@@ -151,12 +151,17 @@ class Debugger(Service):
@classmethod
def enable(cls, run_module=False):
# type: (bool) -> None
"""Enable the debugger (idempotent)."""
"""Enable dynamic instrumentation
This class method is idempotent. Dynamic instrumentation will be
disabled automatically at exit.
"""
if sys.version_info >= (3, 11, 0):
raise RuntimeError(
"Dynamic Instrumentation is not yet compatible with Python 3.11. "
"See tracking issue for more details: https://github.com/DataDog/dd-trace-py/issues/4149"
)

if cls._instance is not None:
log.debug("%s already enabled", cls.__name__)
return
@@ -180,7 +185,11 @@ def enable(cls, run_module=False):
@classmethod
def disable(cls):
# type: () -> None
"""Disable the debugger (idempotent)."""
"""Disable dynamic instrumentation.
This class method is idempotent. Called automatically at exit, if
dynamic instrumentation was enabled.
"""
if cls._instance is None:
log.debug("%s not enabled", cls.__name__)
return
@@ -211,7 +220,7 @@ def __init__(self, tracer=None):
Snapshot: SnapshotJsonEncoder(service_name),
str: str,
},
on_full=self.on_encoder_buffer_full,
on_full=self._on_encoder_buffer_full,
)
self._probe_registry = ProbeRegistry(self.__logger__(service_name, self._encoder))
self._uploader = self.__uploader__(self._encoder)
@@ -233,7 +242,7 @@ def __init__(self, tracer=None):

log.debug("%s initialized (service name: %s)", self.__class__.__name__, service_name)

def on_encoder_buffer_full(self, item, encoded):
def _on_encoder_buffer_full(self, item, encoded):
# type (Any, bytes) -> None
# Send upload request
self._uploader.upload()
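The updated docstrings state that enable() and disable() are idempotent and that dynamic instrumentation is torn down automatically at exit. A generic sketch of that service pattern, using only the standard library (this is not the Debugger implementation itself):

import atexit

class Service:
    _instance = None

    @classmethod
    def enable(cls):
        if cls._instance is not None:   # idempotent: a second call is a no-op
            return
        cls._instance = cls()
        atexit.register(cls.disable)    # disabled automatically at exit

    @classmethod
    def disable(cls):
        if cls._instance is None:       # idempotent in the other direction too
            return
        cls._instance = None

Service.enable()
Service.enable()  # no-op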
19 changes: 17 additions & 2 deletions ddtrace/internal/agent.py
@@ -1,4 +1,5 @@
import os
import socket
from typing import TypeVar
from typing import Union

@@ -22,14 +23,28 @@
T = TypeVar("T")


# Return whether a hostname is an IPv6 address
def is_ipv6_hostname(hostname):
# type: (Union[T, str]) -> bool
if not isinstance(hostname, str):
return False
try:
socket.inet_pton(socket.AF_INET6, hostname)
return True
except socket.error: # not a valid address
return False


def get_trace_hostname(default=DEFAULT_HOSTNAME):
# type: (Union[T, str]) -> Union[T, str]
return os.environ.get("DD_AGENT_HOST", os.environ.get("DD_TRACE_AGENT_HOSTNAME", default))
hostname = os.environ.get("DD_AGENT_HOST", os.environ.get("DD_TRACE_AGENT_HOSTNAME", default))
return "[{}]".format(hostname) if is_ipv6_hostname(hostname) else hostname


def get_stats_hostname(default=DEFAULT_HOSTNAME):
# type: (Union[T, str]) -> Union[T, str]
return os.environ.get("DD_AGENT_HOST", os.environ.get("DD_DOGSTATSD_HOST", default))
hostname = os.environ.get("DD_AGENT_HOST", os.environ.get("DD_DOGSTATSD_HOST", default))
return "[{}]".format(hostname) if is_ipv6_hostname(hostname) else hostname


def get_trace_port(default=DEFAULT_TRACE_PORT):
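The new helper recognizes IPv6 literals so the hostname can be wrapped in brackets, keeping agent URLs such as http://[::1]:8126 well formed. A small demonstration (hostnames chosen for illustration):

import socket

def is_ipv6_hostname(hostname):
    # Same approach as above: a valid IPv6 literal parses with inet_pton.
    if not isinstance(hostname, str):
        return False
    try:
        socket.inet_pton(socket.AF_INET6, hostname)
        return True
    except socket.error:
        return False

for host in ("localhost", "192.168.0.10", "2001:db8::1"):
    wrapped = "[{}]".format(host) if is_ipv6_hostname(host) else host
    print("http://{}:8126".format(wrapped))
# http://localhost:8126
# http://192.168.0.10:8126
# http://[2001:db8::1]:8126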
25 changes: 21 additions & 4 deletions ddtrace/internal/telemetry/data.py
@@ -33,11 +33,28 @@ def _get_container_id():
def _get_os_version():
# type: () -> str
"""Returns the os version for applications running on Unix, Mac or Windows 32-bit"""
mver, _, _ = platform.mac_ver()
_, wver, _, _ = platform.win32_ver()
_, lver = platform.libc_ver()
try:
mver, _, _ = platform.mac_ver()
if mver:
return mver

_, wver, _, _ = platform.win32_ver()
if wver:
return wver

# This is the call which is more likely to fail
#
# https://docs.python.org/3/library/platform.html#unix-platforms
# Note that this function has intimate knowledge of how different libc versions add symbols
# to the executable is probably only usable for executables compiled using gcc.
_, lver = platform.libc_ver()
if lver:
return lver
except OSError:
# We were unable to lookup the proper version
pass

return mver or wver or lver or ""
return ""


@cached()
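Condensed, the new lookup tries the macOS version, then the Windows version, then the libc version, returning the first non-empty value and falling back to an empty string when every probe comes back empty or the lookup raises OSError. A compact equivalent for reference (not the exact code above):

import platform

def os_version():
    try:
        mac = platform.mac_ver()[0]     # macOS release, e.g. "13.1"
        if mac:
            return mac
        win = platform.win32_ver()[1]   # Windows version string
        if win:
            return win
        libc = platform.libc_ver()[1]   # libc version; the call most likely to fail
        if libc:
            return libc
    except OSError:
        pass  # unable to look up a version on this platform
    return ""

print(os_version() or "<unknown>")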
2 changes: 1 addition & 1 deletion ddtrace/internal/utils/http.py
@@ -171,7 +171,7 @@ def w3c_get_dd_list_member(context):
for k, v in context._meta.items():
if (
isinstance(k, six.string_types)
and k.startswith("_dd.p")
and k.startswith("_dd.p.")
# we've already added sampling decision and user id
and k not in [SAMPLING_DECISION_TRACE_TAG_KEY, USER_ID_KEY]
):
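The trailing dot narrows the filter to the propagated-tag namespace (_dd.p.*), so other internal keys that merely begin with "_dd.p" are no longer copied into the W3C tracestate list member. A toy illustration; the second key is hypothetical and only there to show the difference:

meta = {
    "_dd.p.dm": "-4",                # propagated-tag namespace: kept
    "_dd.propagation_style": "w3c",  # hypothetical key starting with "_dd.p": now skipped
}

old = {k for k in meta if k.startswith("_dd.p")}
new = {k for k in meta if k.startswith("_dd.p.")}
print(sorted(old))  # ['_dd.p.dm', '_dd.propagation_style']
print(sorted(new))  # ['_dd.p.dm']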
24 changes: 23 additions & 1 deletion ddtrace/internal/writer.py
@@ -16,6 +16,7 @@
import tenacity

import ddtrace
from ddtrace.appsec._remoteconfiguration import enable_appsec_rc
from ddtrace.vendor.dogstatsd import DogStatsd

from . import agent
@@ -278,9 +279,26 @@ def __init__(
if headers:
self._headers.update(headers)
self._timeout = timeout

# Default to v0.4 if we are on Windows since there is a known compatibility issue
# https://github.com/DataDog/dd-trace-py/issues/4829
# DEV: sys.platform on windows should be `win32` or `cygwin`, but using `startswith`
# as a safety precaution.
# https://docs.python.org/3/library/sys.html#sys.platform
is_windows = sys.platform.startswith("win") or sys.platform.startswith("cygwin")
default_api_version = "v0.4" if is_windows else "v0.5"

self._api_version = (
api_version or os.getenv("DD_TRACE_API_VERSION") or ("v0.5" if priority_sampler is not None else "v0.3")
api_version
or os.getenv("DD_TRACE_API_VERSION")
or (default_api_version if priority_sampler is not None else "v0.3")
)
if is_windows and self._api_version == "v0.5":
raise RuntimeError(
"There is a known compatibiltiy issue with v0.5 API and Windows, "
"please see https://github.com/DataDog/dd-trace-py/issues/4829 for more details."
)

try:
Encoder = MSGPACK_ENCODERS[self._api_version]
except KeyError:
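Boiled down, the constructor change makes v0.4 the default trace API version on Windows (v0.5 has the known issue linked in the code), an explicit DD_TRACE_API_VERSION still wins, and explicitly selecting v0.5 on Windows now fails fast. A simplified sketch of the decision, omitting the priority-sampler fallback to v0.3:

import os
import sys

is_windows = sys.platform.startswith("win") or sys.platform.startswith("cygwin")
default_api_version = "v0.4" if is_windows else "v0.5"
api_version = os.getenv("DD_TRACE_API_VERSION") or default_api_version

if is_windows and api_version == "v0.5":
    raise RuntimeError(
        "There is a known compatibility issue with the v0.5 API and Windows; "
        "see https://github.com/DataDog/dd-trace-py/issues/4829 for more details."
    )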
@@ -509,10 +527,14 @@ def write(self, spans=None):
try:
if self.status != service.ServiceStatus.RUNNING:
self.start()

# instrumentation telemetry writer should be enabled/started after the global tracer and configs
# are initialized
if asbool(os.getenv("DD_INSTRUMENTATION_TELEMETRY_ENABLED", True)):
telemetry_writer.enable()
# appsec remote config should be enabled/started after the global tracer and configs
# are initialized
enable_appsec_rc()
except service.ServiceStatusError:
pass
