diff --git a/.github/actions/node/latest/action.yml b/.github/actions/node/latest/action.yml index fd67f75fffe..74e5d531f94 100644 --- a/.github/actions/node/latest/action.yml +++ b/.github/actions/node/latest/action.yml @@ -4,4 +4,4 @@ runs: steps: - uses: actions/setup-node@v3 with: - node-version: '22.6' + node-version: 'latest' diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index 57f8ad88cf2..19470023010 100644 --- a/.github/workflows/appsec.yml +++ b/.github/workflows/appsec.yml @@ -208,7 +208,8 @@ jobs: matrix: version: - 18 - - 22.6 + - latest + range: ['9.5.0', '11.1.4', '13.2.0', '14.2.6'] runs-on: ubuntu-latest env: PLUGINS: next diff --git a/.github/workflows/debugger.yml b/.github/workflows/debugger.yml new file mode 100644 index 00000000000..b9543148382 --- /dev/null +++ b/.github/workflows/debugger.yml @@ -0,0 +1,33 @@ +name: Debugger + +on: + pull_request: + push: + branches: [master] + schedule: + - cron: '0 4 * * *' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref || github.run_id }} + cancel-in-progress: true + +jobs: + ubuntu: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/testagent/start + - uses: ./.github/actions/node/setup + - uses: ./.github/actions/install + - uses: ./.github/actions/node/18 + - run: yarn test:debugger:ci + - run: yarn test:integration:debugger + - uses: ./.github/actions/node/20 + - run: yarn test:debugger:ci + - run: yarn test:integration:debugger + - uses: ./.github/actions/node/latest + - run: yarn test:debugger:ci + - run: yarn test:integration:debugger + - if: always() + uses: ./.github/actions/testagent/logs + - uses: codecov/codecov-action@v3 diff --git a/.github/workflows/package-size.yml b/.github/workflows/package-size.yml index b96163fe2a7..628614c7dc5 100644 --- a/.github/workflows/package-size.yml +++ b/.github/workflows/package-size.yml @@ -12,6 +12,8 @@ concurrency: jobs: package-size-report: runs-on: ubuntu-latest + permissions: + pull-requests: write steps: - uses: actions/checkout@v4 - name: Setup Node.js diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 73f21a45700..405bc562f0e 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -96,7 +96,7 @@ jobs: uses: ./.github/actions/testagent/logs - uses: codecov/codecov-action@v3 - amqp10: # TODO: move rhea to its own job + amqp10: runs-on: ubuntu-latest services: qpid: @@ -107,7 +107,7 @@ jobs: ports: - 5673:5672 env: - PLUGINS: amqp10|rhea + PLUGINS: amqp10 SERVICES: qpid DD_DATA_STREAMS_ENABLED: true steps: @@ -139,7 +139,7 @@ jobs: aws-sdk: strategy: matrix: - node-version: ['18', '22.6'] + node-version: ['18', 'latest'] runs-on: ubuntu-latest services: localstack: @@ -245,7 +245,11 @@ jobs: couchbase: strategy: matrix: - range: ['^2.6.12', '^3.0.7', '>=4.2.0'] + node-version: [16] + range: ['^2.6.12', '^3.0.7', '>=4.0.0 <4.2.0'] + include: + - node-version: 18 + range: '>=4.2.0' runs-on: ubuntu-latest services: couchbase: @@ -262,8 +266,11 @@ jobs: - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - uses: ./.github/actions/install - - uses: ./.github/actions/node/oldest - - run: yarn test:plugins:ci + - uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + - run: yarn config set ignore-engines true + - run: yarn test:plugins:ci --ignore-engines - uses: codecov/codecov-action@v3 connect: @@ -412,7 +419,7 @@ jobs: http: strategy: matrix: - node-version: ['18', '20', '22.6'] + 
node-version: ['18', '20', 'latest'] runs-on: ubuntu-latest env: PLUGINS: http @@ -648,7 +655,8 @@ jobs: matrix: version: - 18 - - 22.6 + - latest + range: ['9.5.0', '11.1.4', '13.2.0', '14.2.6'] runs-on: ubuntu-latest env: PLUGINS: next @@ -825,6 +833,24 @@ jobs: - uses: actions/checkout@v4 - uses: ./.github/actions/plugins/test + rhea: + runs-on: ubuntu-latest + services: + qpid: + image: scholzj/qpid-cpp:1.38.0 + env: + QPIDD_ADMIN_USERNAME: admin + QPIDD_ADMIN_PASSWORD: admin + ports: + - 5673:5672 + env: + PLUGINS: rhea + SERVICES: qpid + DD_DATA_STREAMS_ENABLED: true + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/plugins/test-and-upstream + router: runs-on: ubuntu-latest env: @@ -852,7 +878,7 @@ jobs: runs-on: ubuntu-latest services: mssql: - image: mcr.microsoft.com/mssql/server:2017-latest-ubuntu + image: mcr.microsoft.com/mssql/server:2019-latest env: ACCEPT_EULA: 'Y' SA_PASSWORD: DD_HUNTER2 diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index 3f18d7ea8ad..499f94b5071 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -18,7 +18,7 @@ jobs: # setting fail-fast to false in an attempt to prevent this from happening fail-fast: false matrix: - version: [18, 20, 22.6] + version: [18, 20, latest] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -47,7 +47,7 @@ jobs: integration-ci: strategy: matrix: - version: [18, 22.6] + version: [18, latest] framework: [cucumber, playwright, selenium, jest, mocha] runs-on: ubuntu-latest env: @@ -89,7 +89,7 @@ jobs: # Important: This is outside the minimum supported version of dd-trace-js # Node > 16 does not work with Cypress@6.7.0 (not even without our plugin) # TODO: figure out what to do with this: we might have to deprecate support for cypress@6.7.0 - version: [16, 22.6] + version: [16, latest] # 6.7.0 is the minimum version we support cypress-version: [6.7.0, latest] module-type: ['commonJS', 'esm'] diff --git a/.github/workflows/system-tests.yml b/.github/workflows/system-tests.yml index 5542920b454..e1ce6f7d767 100644 --- a/.github/workflows/system-tests.yml +++ b/.github/workflows/system-tests.yml @@ -27,18 +27,12 @@ jobs: path: ./binaries/**/* get-essential-scenarios: - runs-on: ubuntu-latest - outputs: - scenario: ${{ steps.parse-yml.outputs.scenario }} - steps: - - name: Checkout system tests - uses: actions/checkout@v4 - with: - repository: 'DataDog/system-tests' - - name: Get Essential Scenarios - id: parse-yml - run: yq -o tsv .TRACER_ESSENTIAL_SCENARIOS ./scenario_groups.yml | xargs node -p "x=process.argv;x.shift();x.push('CROSSED_TRACING_LIBRARIES');'scenario='+JSON.stringify(x)" >> $GITHUB_OUTPUT - + name: Get parameters + uses: DataDog/system-tests/.github/workflows/compute-workflow-parameters.yml@main + with: + library: nodejs + scenarios: CROSSED_TRACING_LIBRARIES + scenarios_groups: essentials system-tests: runs-on: ${{ contains(fromJSON('["CROSSED_TRACING_LIBRARIES", "INTEGRATIONS"]'), matrix.scenario) && 'ubuntu-latest-16-cores' || 'ubuntu-latest' }} @@ -46,16 +40,17 @@ jobs: - get-essential-scenarios strategy: matrix: - weblog-variant: - - express4 - - express4-typescript - - nextjs - scenario: ${{fromJson(needs.get-essential-scenarios.outputs.scenario)}} + weblog-variant: ${{fromJson(needs.get-essential-scenarios.outputs.endtoend_weblogs)}} + scenario: ${{fromJson(needs.get-essential-scenarios.outputs.endtoend_scenarios)}} env: TEST_LIBRARY: nodejs WEBLOG_VARIANT: ${{ matrix.weblog-variant }} DD_API_KEY: ${{ secrets.DD_API_KEY }} + 
AWS_ACCESS_KEY_ID: ${{ secrets.IDM_AWS_ACCESS_KEY_ID }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.IDM_AWS_SECRET_ACCESS_KEY }}
+      AWS_REGION: us-east-1
+      AWS_DEFAULT_REGION: us-east-1 # AWS services should use `AWS_REGION`, but some still use the older `AWS_DEFAULT_REGION`
     steps:
       - name: Checkout system tests
         uses: actions/checkout@v4
         with:
           repository: 'DataDog/system-tests'
@@ -86,7 +81,7 @@ jobs:
         if: ${{ always() }}
         run: tar -czvf artifact.tar.gz $(ls | grep logs)
       - name: Upload artifact
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
         if: ${{ always() }}
         with:
           name: logs_${{ matrix.weblog-variant }}-${{ matrix.scenario }}
diff --git a/.vscode/launch.json b/.vscode/launch.json
index a5d0c61d976..3df35f8cbc1 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -8,15 +8,11 @@
       "type": "node",
       "request": "launch",
       "name": "Test Current File",
-      "runtimeExecutable": "yarn",
-      "runtimeArgs": [
-        "tdd",
-        "${file}",
-        "--inspect-brk=9229"
+      "skipFiles": [
+        "<node_internals>/**"
       ],
-      "port": 9229,
-      "console": "integratedTerminal",
-      "internalConsoleOptions": "neverOpen"
-    },
+      "program": "${file}",
+      "console": "integratedTerminal"
+    }
   ]
 }
diff --git a/.vscode/settings.json b/.vscode/settings.json
deleted file mode 100644
index 2ca4a2c7b59..00000000000
--- a/.vscode/settings.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "debug.node.autoAttach": "on"
-}
diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv
index 7c31335db0a..45f88f66cb4 100644
--- a/LICENSE-3rdparty.csv
+++ b/LICENSE-3rdparty.csv
@@ -14,6 +14,7 @@ require,import-in-the-middle,Apache license 2.0,Copyright 2021 Datadog Inc.
 require,int64-buffer,MIT,Copyright 2015-2016 Yusuke Kawasaki
 require,istanbul-lib-coverage,BSD-3-Clause,Copyright 2012-2015 Yahoo! Inc.
 require,jest-docblock,MIT,Copyright Meta Platforms, Inc. and affiliates.
+require,jsonpath-plus,MIT,Copyright (c) 2011-2019 Stefan Goessner, Subbu Allamaraju, Mike Brevoort, Robert Krahn, Brett Zamir, Richard Schneider
 require,koalas,MIT,Copyright 2013-2017 Brian Woodward
 require,limiter,MIT,Copyright 2011 John Hurliman
 require,lodash.sortby,MIT,Copyright JS Foundation and other contributors
@@ -26,6 +27,7 @@ require,pprof-format,MIT,Copyright 2022 Stephen Belanger
 require,protobufjs,BSD-3-Clause,Copyright 2016 Daniel Wirtz
 require,tlhunter-sorted-set,MIT,Copyright (c) 2023 Datadog Inc.
 require,retry,MIT,Copyright 2011 Tim Koschützki Felix Geisendörfer
+require,rfdc,MIT,Copyright 2019 David Mark Clements
 require,semver,ISC,Copyright Isaac Z. Schlueter and Contributors
 require,shell-quote,mit,Copyright (c) 2013 James Halliday
 dev,@types/node,MIT,Copyright Authors
diff --git a/docs/test.ts b/docs/test.ts
index 07b96a01673..e37177e0898 100644
--- a/docs/test.ts
+++ b/docs/test.ts
@@ -320,9 +320,6 @@ tracer.use('http', {
 tracer.use('http', {
   client: httpClientOptions
 });
-tracer.use('http', {
-  enablePropagationWithAmazonHeaders: true
-});
 tracer.use('http2');
 tracer.use('http2', {
   server: http2ServerOptions
diff --git a/index.d.ts b/index.d.ts
index 6d2b495d5da..02c84fb47d3 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -729,6 +729,26 @@ declare namespace tracer {
      * The selection and priority order of context propagation injection and extraction mechanisms.
      */
     propagationStyle?: string[] | PropagationStyle
+
+    /**
+     * Configuration for reporting cloud request and response payloads as span tags.
+     */
+    cloudPayloadTagging?: {
+      /**
+       * Additional JSONPath queries to replace with `redacted` in request payloads.
+       * Undefined or invalid JSONPath queries disable the feature for requests.
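+       * For example (an illustrative value, not taken from this PR): '$.Metadata.credentials'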
+       */
+      request?: string,
+      /**
+       * Additional JSONPath queries to replace with `redacted` in response payloads.
+       * Undefined or invalid JSONPath queries disable the feature for responses.
+       */
+      response?: string,
+      /**
+       * Maximum depth of payload traversal for tags.
+       */
+      maxDepth?: number
+    }
   }
 
   /**
@@ -1010,14 +1030,6 @@ declare namespace tracer {
      * @default code => code < 500
      */
     validateStatus?: (code: number) => boolean;
-
-    /**
-     * Enable injection of tracing headers into requests signed with AWS IAM headers.
-     * Disable this if you get AWS signature errors (HTTP 403).
-     *
-     * @default false
-     */
-    enablePropagationWithAmazonHeaders?: boolean;
   }
 
   /** @hidden */
diff --git a/integration-tests/ci-visibility/features-flaky/support/steps.js b/integration-tests/ci-visibility/features-flaky/support/steps.js
index 8a413576411..2e4a335cfb7 100644
--- a/integration-tests/ci-visibility/features-flaky/support/steps.js
+++ b/integration-tests/ci-visibility/features-flaky/support/steps.js
@@ -8,5 +8,11 @@ Then('I should have heard {string}', function (expectedResponse) {
 })
 
 When('the greeter says flaky', function () {
-  this.whatIHeard = globalCounter++ % 2 === 0 ? 'flaky' : 'not flaky'
+  // It's important that this fails on the first attempt, for the following reason:
+  // in `getWrappedRunTestCase` we were returning the first result from
+  // `runTestCaseFunction`, so if the first attempt passed, the EFD logic never
+  // kicked in. By making the first attempt fail, `runTestCaseResult` is false
+  // (fail), and the EFD logic is exercised correctly, i.e. the test passes as
+  // long as a single attempt has passed.
+  this.whatIHeard = globalCounter++ % 2 === 1 ? 'flaky' : 'not flaky'
 })
diff --git a/integration-tests/ci-visibility/test-api-manual/test.fake.js b/integration-tests/ci-visibility/test-api-manual/test.fake.js
index 11f35dd8e87..a3256bc6f42 100644
--- a/integration-tests/ci-visibility/test-api-manual/test.fake.js
+++ b/integration-tests/ci-visibility/test-api-manual/test.fake.js
@@ -31,7 +31,7 @@ describe('can run tests', () => {
   })
   test('integration test', () => {
     // Just for testing purposes, so we don't create a custom span
-    if (!process.env.DD_CIVISIBILITY_MANUAL_API_ENABLED) {
+    if (process.env.DD_CIVISIBILITY_MANUAL_API_ENABLED === 'false') {
       return Promise.resolve()
     }
     const testSpan = tracer.scope().active()
diff --git a/integration-tests/ci-visibility/vitest-tests/early-flake-detection.mjs b/integration-tests/ci-visibility/vitest-tests/early-flake-detection.mjs
new file mode 100644
index 00000000000..a85036dac8e
--- /dev/null
+++ b/integration-tests/ci-visibility/vitest-tests/early-flake-detection.mjs
@@ -0,0 +1,33 @@
+import { describe, test, expect } from 'vitest'
+import { sum } from './sum'
+
+let numAttempt = 0
+let numOtherAttempt = 0
+
+describe('early flake detection', () => {
+  test('can retry tests that eventually pass', { repeats: process.env.SHOULD_REPEAT && 2 }, () => {
+    expect(sum(1, 2)).to.equal(numAttempt++ > 1 ? 3 : 4)
+  })
+
+  test('can retry tests that always pass', { repeats: process.env.SHOULD_REPEAT && 2 }, () => {
+    if (process.env.ALWAYS_FAIL) {
+      expect(sum(1, 2)).to.equal(4)
+    } else {
+      expect(sum(1, 2)).to.equal(3)
+    }
+  })
+
+  test('does not retry if it is not new', () => {
+    expect(sum(1, 2)).to.equal(3)
+  })
+
+  test.skip('does not retry if the test is skipped', () => {
+    expect(sum(1, 2)).to.equal(3)
+  })
+
+  if (process.env.SHOULD_ADD_EVENTUALLY_FAIL) {
+    test('can retry tests that eventually fail', () => {
+      expect(sum(1, 2)).to.equal(numOtherAttempt++ < 3 ?
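+        // expects 3 (pass) for the first three attempts and 4 (fail) afterwards,
+        // so EFD observes a test that eventually fails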
3 : 4) + }) + } +}) diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index c6446bcbed6..35c4b3b2060 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -27,355 +27,432 @@ const { TEST_ITR_FORCED_RUN, TEST_ITR_UNSKIPPABLE, TEST_SOURCE_FILE, + TEST_SOURCE_START, TEST_EARLY_FLAKE_ENABLED, + TEST_EARLY_FLAKE_ABORT_REASON, TEST_IS_NEW, TEST_IS_RETRY, TEST_NAME, CUCUMBER_IS_PARALLEL, TEST_SUITE, - TEST_CODE_OWNERS + TEST_CODE_OWNERS, + TEST_SESSION_NAME, + TEST_LEVEL_EVENT_TYPES } = require('../../packages/dd-trace/src/plugins/util/test') +const { DD_HOST_CPU_COUNT } = require('../../packages/dd-trace/src/plugins/util/env') const isOldNode = semver.satisfies(process.version, '<=16') const versions = ['7.0.0', isOldNode ? '9' : 'latest'] -const moduleType = [ - { - type: 'commonJS', - runTestsCommand: './node_modules/.bin/cucumber-js ci-visibility/features/*.feature', - runTestsWithCoverageCommand: - './node_modules/nyc/bin/nyc.js -r=text-summary ' + - 'node ./node_modules/.bin/cucumber-js ci-visibility/features/*.feature', - parallelModeCommand: './node_modules/.bin/cucumber-js ' + - 'ci-visibility/features/*.feature --parallel 2', - featuresPath: 'ci-visibility/features/', - fileExtension: 'js' - } -] +const runTestsCommand = './node_modules/.bin/cucumber-js ci-visibility/features/*.feature' +const runTestsWithCoverageCommand = './node_modules/nyc/bin/nyc.js -r=text-summary ' + + 'node ./node_modules/.bin/cucumber-js ci-visibility/features/*.feature' +const parallelModeCommand = './node_modules/.bin/cucumber-js ci-visibility/features/*.feature --parallel 2' +const featuresPath = 'ci-visibility/features/' +const fileExtension = 'js' versions.forEach(version => { - moduleType.forEach(({ - type, - runTestsCommand, - runTestsWithCoverageCommand, - parallelModeCommand, - featuresPath, - fileExtension - }) => { - // TODO: add esm tests - describe(`cucumber@${version} ${type}`, () => { - let sandbox, cwd, receiver, childProcess, testOutput - - before(async function () { - // add an explicit timeout to make tests less flaky - this.timeout(50000) - - sandbox = await createSandbox([`@cucumber/cucumber@${version}`, 'assert', 'nyc'], true) - cwd = sandbox.folder - }) + // TODO: add esm tests + describe(`cucumber@${version} commonJS`, () => { + let sandbox, cwd, receiver, childProcess, testOutput - after(async function () { - // add an explicit timeout to make tests less flaky - this.timeout(50000) + before(async function () { + // add an explicit timeout to make tests less flaky + this.timeout(50000) - await sandbox.remove() - }) + sandbox = await createSandbox([`@cucumber/cucumber@${version}`, 'assert', 'nyc'], true) + cwd = sandbox.folder + }) - beforeEach(async function () { - const port = await getPort() - receiver = await new FakeCiVisIntake(port).start() - }) + after(async function () { + // add an explicit timeout to make tests less flaky + this.timeout(50000) - afterEach(async () => { - testOutput = '' - childProcess.kill() - await receiver.stop() - }) + await sandbox.remove() + }) - const reportMethods = ['agentless', 'evp proxy'] + beforeEach(async function () { + const port = await getPort() + receiver = await new FakeCiVisIntake(port).start() + }) - reportMethods.forEach((reportMethod) => { - context(`reporting via ${reportMethod}`, () => { - let envVars, isAgentless - beforeEach(() => { - isAgentless = reportMethod === 'agentless' - envVars = isAgentless ? 
getCiVisAgentlessConfig(receiver.port) : getCiVisEvpProxyConfig(receiver.port) - }) - const runModes = ['serial'] + afterEach(async () => { + testOutput = '' + childProcess.kill() + await receiver.stop() + }) - if (version !== '7.0.0') { // only on latest or 9 if node is old - runModes.push('parallel') - } + const reportMethods = ['agentless', 'evp proxy'] - runModes.forEach((runMode) => { - it(`(${runMode}) can run and report tests`, (done) => { - const runCommand = runMode === 'parallel' ? parallelModeCommand : runTestsCommand + reportMethods.forEach((reportMethod) => { + context(`reporting via ${reportMethod}`, () => { + let envVars, isAgentless + beforeEach(() => { + isAgentless = reportMethod === 'agentless' + envVars = isAgentless ? getCiVisAgentlessConfig(receiver.port) : getCiVisEvpProxyConfig(receiver.port) + }) + const runModes = ['serial'] + + if (version !== '7.0.0') { // only on latest or 9 if node is old + runModes.push('parallel') + } + + runModes.forEach((runMode) => { + it(`(${runMode}) can run and report tests`, (done) => { + const runCommand = runMode === 'parallel' ? parallelModeCommand : runTestsCommand + + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { + const metadataDicts = payloads.flatMap(({ payload }) => payload.metadata) + metadataDicts.forEach(metadata => { + for (const testLevel of TEST_LEVEL_EVENT_TYPES) { + assert.equal(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session') + } + }) - const receiverPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { - const events = payloads.flatMap(({ payload }) => payload.events) + const events = payloads.flatMap(({ payload }) => payload.events) - const testSessionEvent = events.find(event => event.type === 'test_session_end') - const testModuleEvent = events.find(event => event.type === 'test_module_end') - const testSuiteEvents = events.filter(event => event.type === 'test_suite_end') - const testEvents = events.filter(event => event.type === 'test') + const testSessionEvent = events.find(event => event.type === 'test_session_end') + const testModuleEvent = events.find(event => event.type === 'test_module_end') + const testSuiteEvents = events.filter(event => event.type === 'test_suite_end') + const testEvents = events.filter(event => event.type === 'test') - const stepEvents = events.filter(event => event.type === 'span') + const stepEvents = events.filter(event => event.type === 'span') - const { content: testSessionEventContent } = testSessionEvent - const { content: testModuleEventContent } = testModuleEvent + const { content: testSessionEventContent } = testSessionEvent + const { content: testModuleEventContent } = testModuleEvent - if (runMode === 'parallel') { - assert.equal(testSessionEventContent.meta[CUCUMBER_IS_PARALLEL], 'true') - } + if (runMode === 'parallel') { + assert.equal(testSessionEventContent.meta[CUCUMBER_IS_PARALLEL], 'true') + } - assert.exists(testSessionEventContent.test_session_id) - assert.exists(testSessionEventContent.meta[TEST_COMMAND]) - assert.exists(testSessionEventContent.meta[TEST_TOOLCHAIN]) - assert.equal(testSessionEventContent.resource.startsWith('test_session.'), true) - assert.equal(testSessionEventContent.meta[TEST_STATUS], 'fail') - - assert.exists(testModuleEventContent.test_session_id) - assert.exists(testModuleEventContent.test_module_id) - assert.exists(testModuleEventContent.meta[TEST_COMMAND]) - 
assert.exists(testModuleEventContent.meta[TEST_MODULE]) - assert.equal(testModuleEventContent.resource.startsWith('test_module.'), true) - assert.equal(testModuleEventContent.meta[TEST_STATUS], 'fail') - assert.equal( - testModuleEventContent.test_session_id.toString(10), - testSessionEventContent.test_session_id.toString(10) - ) + assert.exists(testSessionEventContent.test_session_id) + assert.exists(testSessionEventContent.meta[TEST_COMMAND]) + assert.exists(testSessionEventContent.meta[TEST_TOOLCHAIN]) + assert.equal(testSessionEventContent.resource.startsWith('test_session.'), true) + assert.equal(testSessionEventContent.meta[TEST_STATUS], 'fail') + + assert.exists(testModuleEventContent.test_session_id) + assert.exists(testModuleEventContent.test_module_id) + assert.exists(testModuleEventContent.meta[TEST_COMMAND]) + assert.exists(testModuleEventContent.meta[TEST_MODULE]) + assert.equal(testModuleEventContent.resource.startsWith('test_module.'), true) + assert.equal(testModuleEventContent.meta[TEST_STATUS], 'fail') + assert.equal( + testModuleEventContent.test_session_id.toString(10), + testSessionEventContent.test_session_id.toString(10) + ) - assert.includeMembers(testSuiteEvents.map(suite => suite.content.resource), [ - `test_suite.${featuresPath}farewell.feature`, - `test_suite.${featuresPath}greetings.feature` - ]) - assert.includeMembers(testSuiteEvents.map(suite => suite.content.meta[TEST_STATUS]), [ - 'pass', - 'fail' - ]) - - testSuiteEvents.forEach(({ - content: { - meta, - test_suite_id: testSuiteId, - test_module_id: testModuleId, - test_session_id: testSessionId - } - }) => { - assert.exists(meta[TEST_COMMAND]) - assert.exists(meta[TEST_MODULE]) - assert.exists(testSuiteId) - assert.equal(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) - assert.equal(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) - }) + assert.includeMembers(testSuiteEvents.map(suite => suite.content.resource), [ + `test_suite.${featuresPath}farewell.feature`, + `test_suite.${featuresPath}greetings.feature` + ]) + assert.includeMembers(testSuiteEvents.map(suite => suite.content.meta[TEST_STATUS]), [ + 'pass', + 'fail' + ]) - assert.includeMembers(testEvents.map(test => test.content.resource), [ - `${featuresPath}farewell.feature.Say farewell`, - `${featuresPath}greetings.feature.Say greetings`, - `${featuresPath}greetings.feature.Say yeah`, - `${featuresPath}greetings.feature.Say yo`, - `${featuresPath}greetings.feature.Say skip` - ]) - assert.includeMembers(testEvents.map(test => test.content.meta[TEST_STATUS]), [ - 'pass', - 'pass', - 'pass', - 'fail', - 'skip' - ]) - - testEvents.forEach(({ - content: { - meta, - test_suite_id: testSuiteId, - test_module_id: testModuleId, - test_session_id: testSessionId - } - }) => { - assert.exists(meta[TEST_COMMAND]) - assert.exists(meta[TEST_MODULE]) - assert.exists(testSuiteId) - assert.equal(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) - assert.equal(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) - assert.equal(meta[TEST_SOURCE_FILE].startsWith('ci-visibility/features'), true) - // Can read DD_TAGS - assert.propertyVal(meta, 'test.customtag', 'customvalue') - assert.propertyVal(meta, 'test.customtag2', 'customvalue2') - if (runMode === 'parallel') { - assert.propertyVal(meta, CUCUMBER_IS_PARALLEL, 'true') - } - }) + testSuiteEvents.forEach(({ + content: { + meta, + metrics, + test_suite_id: testSuiteId, + test_module_id: 
testModuleId, + test_session_id: testSessionId + } + }) => { + assert.exists(meta[TEST_COMMAND]) + assert.exists(meta[TEST_MODULE]) + assert.exists(testSuiteId) + assert.equal(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) + assert.equal(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) + assert.isTrue(meta[TEST_SOURCE_FILE].startsWith(featuresPath)) + assert.equal(metrics[TEST_SOURCE_START], 1) + assert.exists(metrics[DD_HOST_CPU_COUNT]) + }) - stepEvents.forEach(stepEvent => { - assert.equal(stepEvent.content.name, 'cucumber.step') - assert.property(stepEvent.content.meta, 'cucumber.step') - }) - }, 5000) + assert.includeMembers(testEvents.map(test => test.content.resource), [ + `${featuresPath}farewell.feature.Say farewell`, + `${featuresPath}greetings.feature.Say greetings`, + `${featuresPath}greetings.feature.Say yeah`, + `${featuresPath}greetings.feature.Say yo`, + `${featuresPath}greetings.feature.Say skip` + ]) + assert.includeMembers(testEvents.map(test => test.content.meta[TEST_STATUS]), [ + 'pass', + 'pass', + 'pass', + 'fail', + 'skip' + ]) - childProcess = exec( - runCommand, - { - cwd, - env: { - ...envVars, - DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2' - }, - stdio: 'pipe' - } - ) + testEvents.forEach(({ + content: { + meta, + metrics, + test_suite_id: testSuiteId, + test_module_id: testModuleId, + test_session_id: testSessionId + } + }) => { + assert.exists(meta[TEST_COMMAND]) + assert.exists(meta[TEST_MODULE]) + assert.exists(testSuiteId) + assert.equal(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) + assert.equal(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) + assert.equal(meta[TEST_SOURCE_FILE].startsWith('ci-visibility/features'), true) + // Can read DD_TAGS + assert.propertyVal(meta, 'test.customtag', 'customvalue') + assert.propertyVal(meta, 'test.customtag2', 'customvalue2') + if (runMode === 'parallel') { + assert.propertyVal(meta, CUCUMBER_IS_PARALLEL, 'true') + } + assert.exists(metrics[DD_HOST_CPU_COUNT]) + }) - childProcess.on('exit', () => { - receiverPromise.then(() => done()).catch(done) - }) + stepEvents.forEach(stepEvent => { + assert.equal(stepEvent.content.name, 'cucumber.step') + assert.property(stepEvent.content.meta, 'cucumber.step') + }) + }, 5000) + + childProcess = exec( + runCommand, + { + cwd, + env: { + ...envVars, + DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2', + DD_TEST_SESSION_NAME: 'my-test-session' + }, + stdio: 'pipe' + } + ) + + childProcess.on('exit', () => { + receiverPromise.then(() => done()).catch(done) }) }) - context('intelligent test runner', () => { - it('can report git metadata', (done) => { - const searchCommitsRequestPromise = receiver.payloadReceived( - ({ url }) => url.endsWith('/api/v2/git/repository/search_commits') + }) + context('intelligent test runner', () => { + it('can report git metadata', (done) => { + const searchCommitsRequestPromise = receiver.payloadReceived( + ({ url }) => url.endsWith('/api/v2/git/repository/search_commits') + ) + const packfileRequestPromise = receiver + .payloadReceived(({ url }) => url.endsWith('/api/v2/git/repository/packfile')) + const eventsRequestPromise = receiver.payloadReceived(({ url }) => url.endsWith('/api/v2/citestcycle')) + + Promise.all([ + searchCommitsRequestPromise, + packfileRequestPromise, + eventsRequestPromise + ]).then(([searchCommitRequest, packfileRequest, eventsRequest]) => { + if 
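+                  // agentless reports hit the intake directly and authenticate with the
+                  // `dd-api-key` header; with the EVP proxy the request goes through the
+                  // agent instead, so the tracer sends no API key header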
(isAgentless) { + assert.propertyVal(searchCommitRequest.headers, 'dd-api-key', '1') + assert.propertyVal(packfileRequest.headers, 'dd-api-key', '1') + } else { + assert.notProperty(searchCommitRequest.headers, 'dd-api-key') + assert.notProperty(packfileRequest.headers, 'dd-api-key') + } + + const eventTypes = eventsRequest.payload.events.map(event => event.type) + assert.includeMembers(eventTypes, ['test', 'test_suite_end', 'test_module_end', 'test_session_end']) + const numSuites = eventTypes.reduce( + (acc, type) => type === 'test_suite_end' ? acc + 1 : acc, 0 ) - const packfileRequestPromise = receiver - .payloadReceived(({ url }) => url.endsWith('/api/v2/git/repository/packfile')) - const eventsRequestPromise = receiver.payloadReceived(({ url }) => url.endsWith('/api/v2/citestcycle')) + assert.equal(numSuites, 2) - Promise.all([ - searchCommitsRequestPromise, - packfileRequestPromise, - eventsRequestPromise - ]).then(([searchCommitRequest, packfileRequest, eventsRequest]) => { - if (isAgentless) { - assert.propertyVal(searchCommitRequest.headers, 'dd-api-key', '1') - assert.propertyVal(packfileRequest.headers, 'dd-api-key', '1') - } else { - assert.notProperty(searchCommitRequest.headers, 'dd-api-key') - assert.notProperty(packfileRequest.headers, 'dd-api-key') - } + done() + }).catch(done) - const eventTypes = eventsRequest.payload.events.map(event => event.type) - assert.includeMembers(eventTypes, ['test', 'test_suite_end', 'test_module_end', 'test_session_end']) - const numSuites = eventTypes.reduce( - (acc, type) => type === 'test_suite_end' ? acc + 1 : acc, 0 - ) - assert.equal(numSuites, 2) + childProcess = exec( + runTestsCommand, + { + cwd, + env: envVars, + stdio: 'pipe' + } + ) + }) + it('can report code coverage', (done) => { + const libraryConfigRequestPromise = receiver.payloadReceived( + ({ url }) => url.endsWith('/api/v2/libraries/tests/services/setting') + ) + const codeCovRequestPromise = receiver.payloadReceived(({ url }) => url.endsWith('/api/v2/citestcov')) + const eventsRequestPromise = receiver.payloadReceived(({ url }) => url.endsWith('/api/v2/citestcycle')) + + Promise.all([ + libraryConfigRequestPromise, + codeCovRequestPromise, + eventsRequestPromise + ]).then(([libraryConfigRequest, codeCovRequest, eventsRequest]) => { + const [coveragePayload] = codeCovRequest.payload + if (isAgentless) { + assert.propertyVal(libraryConfigRequest.headers, 'dd-api-key', '1') + assert.propertyVal(codeCovRequest.headers, 'dd-api-key', '1') + } else { + assert.notProperty(libraryConfigRequest.headers, 'dd-api-key') + assert.notProperty(codeCovRequest.headers, 'dd-api-key', '1') + } + + assert.propertyVal(coveragePayload, 'name', 'coverage1') + assert.propertyVal(coveragePayload, 'filename', 'coverage1.msgpack') + assert.propertyVal(coveragePayload, 'type', 'application/msgpack') + assert.include(coveragePayload.content, { + version: 2 + }) + const allCoverageFiles = codeCovRequest.payload + .flatMap(coverage => coverage.content.coverages) + .flatMap(file => file.files) + .map(file => file.filename) + + assert.includeMembers(allCoverageFiles, [ + `${featuresPath}support/steps.${fileExtension}`, + `${featuresPath}farewell.feature`, + `${featuresPath}greetings.feature` + ]) + // steps is twice because there are two suites using it + assert.equal( + allCoverageFiles.filter(file => file === `${featuresPath}support/steps.${fileExtension}`).length, + 2 + ) + assert.exists(coveragePayload.content.coverages[0].test_session_id) + 
assert.exists(coveragePayload.content.coverages[0].test_suite_id) + + const testSession = eventsRequest + .payload + .events + .find(event => event.type === 'test_session_end') + .content + assert.exists(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) + + const eventTypes = eventsRequest.payload.events.map(event => event.type) + assert.includeMembers(eventTypes, ['test', 'test_suite_end', 'test_module_end', 'test_session_end']) + const numSuites = eventTypes.reduce( + (acc, type) => type === 'test_suite_end' ? acc + 1 : acc, 0 + ) + assert.equal(numSuites, 2) + }).catch(done) - done() - }).catch(done) + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: envVars, + stdio: 'pipe' + } + ) + childProcess.stdout.on('data', (chunk) => { + testOutput += chunk.toString() + }) + childProcess.stderr.on('data', (chunk) => { + testOutput += chunk.toString() + }) + childProcess.on('exit', () => { + // check that reported coverage is still the same + assert.include(testOutput, 'Lines : 100%') + done() + }) + }) + it('does not report code coverage if disabled by the API', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false + }) - childProcess = exec( - runTestsCommand, - { - cwd, - env: envVars, - stdio: 'pipe' + receiver.assertPayloadReceived(() => { + const error = new Error('it should not report code coverage') + done(error) + }, ({ url }) => url.endsWith('/api/v2/citestcov')).catch(() => {}) + + receiver.assertPayloadReceived(({ payload }) => { + const eventTypes = payload.events.map(event => event.type) + assert.includeMembers(eventTypes, ['test', 'test_session_end', 'test_module_end', 'test_suite_end']) + const testSession = payload.events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_ITR_TESTS_SKIPPED, 'false') + assert.propertyVal(testSession.meta, TEST_CODE_COVERAGE_ENABLED, 'false') + assert.propertyVal(testSession.meta, TEST_ITR_SKIPPING_ENABLED, 'false') + assert.exists(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) + const testModule = payload.events.find(event => event.type === 'test_module_end').content + assert.propertyVal(testModule.meta, TEST_ITR_TESTS_SKIPPED, 'false') + assert.propertyVal(testModule.meta, TEST_CODE_COVERAGE_ENABLED, 'false') + assert.propertyVal(testModule.meta, TEST_ITR_SKIPPING_ENABLED, 'false') + }, ({ url }) => url.endsWith('/api/v2/citestcycle')).then(() => done()).catch(done) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: envVars, + stdio: 'inherit' + } + ) + }) + it('can skip suites received by the intelligent test runner API and still reports code coverage', + (done) => { + receiver.setSuitesToSkip([{ + type: 'suite', + attributes: { + suite: `${featuresPath}farewell.feature` } - ) - }) - it('can report code coverage', (done) => { - const libraryConfigRequestPromise = receiver.payloadReceived( - ({ url }) => url.endsWith('/api/v2/libraries/tests/services/setting') - ) - const codeCovRequestPromise = receiver.payloadReceived(({ url }) => url.endsWith('/api/v2/citestcov')) + }]) + + const skippableRequestPromise = receiver + .payloadReceived(({ url }) => url.endsWith('/api/v2/ci/tests/skippable')) + const coverageRequestPromise = receiver.payloadReceived(({ url }) => url.endsWith('/api/v2/citestcov')) const eventsRequestPromise = receiver.payloadReceived(({ url }) => url.endsWith('/api/v2/citestcycle')) Promise.all([ - libraryConfigRequestPromise, - codeCovRequestPromise, + skippableRequestPromise, + 
coverageRequestPromise, eventsRequestPromise - ]).then(([libraryConfigRequest, codeCovRequest, eventsRequest]) => { - const [coveragePayload] = codeCovRequest.payload + ]).then(([skippableRequest, coverageRequest, eventsRequest]) => { + const [coveragePayload] = coverageRequest.payload if (isAgentless) { - assert.propertyVal(libraryConfigRequest.headers, 'dd-api-key', '1') - assert.propertyVal(codeCovRequest.headers, 'dd-api-key', '1') + assert.propertyVal(skippableRequest.headers, 'dd-api-key', '1') + assert.propertyVal(coverageRequest.headers, 'dd-api-key', '1') + assert.propertyVal(eventsRequest.headers, 'dd-api-key', '1') } else { - assert.notProperty(libraryConfigRequest.headers, 'dd-api-key') - assert.notProperty(codeCovRequest.headers, 'dd-api-key', '1') + assert.notProperty(skippableRequest.headers, 'dd-api-key', '1') + assert.notProperty(coverageRequest.headers, 'dd-api-key', '1') + assert.notProperty(eventsRequest.headers, 'dd-api-key', '1') } - assert.propertyVal(coveragePayload, 'name', 'coverage1') assert.propertyVal(coveragePayload, 'filename', 'coverage1.msgpack') assert.propertyVal(coveragePayload, 'type', 'application/msgpack') - assert.include(coveragePayload.content, { - version: 2 - }) - const allCoverageFiles = codeCovRequest.payload - .flatMap(coverage => coverage.content.coverages) - .flatMap(file => file.files) - .map(file => file.filename) - - assert.includeMembers(allCoverageFiles, [ - `${featuresPath}support/steps.${fileExtension}`, - `${featuresPath}farewell.feature`, - `${featuresPath}greetings.feature` - ]) - // steps is twice because there are two suites using it - assert.equal( - allCoverageFiles.filter(file => file === `${featuresPath}support/steps.${fileExtension}`).length, - 2 - ) - assert.exists(coveragePayload.content.coverages[0].test_session_id) - assert.exists(coveragePayload.content.coverages[0].test_suite_id) - - const testSession = eventsRequest - .payload - .events - .find(event => event.type === 'test_session_end') - .content - assert.exists(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) const eventTypes = eventsRequest.payload.events.map(event => event.type) + + const skippedSuite = eventsRequest.payload.events.find(event => + event.content.resource === `test_suite.${featuresPath}farewell.feature` + ).content + assert.propertyVal(skippedSuite.meta, TEST_STATUS, 'skip') + assert.propertyVal(skippedSuite.meta, TEST_SKIPPED_BY_ITR, 'true') + assert.includeMembers(eventTypes, ['test', 'test_suite_end', 'test_module_end', 'test_session_end']) const numSuites = eventTypes.reduce( (acc, type) => type === 'test_suite_end' ? 
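+              // tally how many test_suite_end events were reported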
acc + 1 : acc, 0 ) assert.equal(numSuites, 2) - }).catch(done) + const testSession = eventsRequest + .payload.events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_ITR_TESTS_SKIPPED, 'true') + assert.propertyVal(testSession.meta, TEST_CODE_COVERAGE_ENABLED, 'true') + assert.propertyVal(testSession.meta, TEST_ITR_SKIPPING_ENABLED, 'true') + assert.propertyVal(testSession.meta, TEST_ITR_SKIPPING_TYPE, 'suite') + assert.propertyVal(testSession.metrics, TEST_ITR_SKIPPING_COUNT, 1) - childProcess = exec( - runTestsWithCoverageCommand, - { - cwd, - env: envVars, - stdio: 'pipe' - } - ) - childProcess.stdout.on('data', (chunk) => { - testOutput += chunk.toString() - }) - childProcess.stderr.on('data', (chunk) => { - testOutput += chunk.toString() - }) - childProcess.on('exit', () => { - // check that reported coverage is still the same - assert.include(testOutput, 'Lines : 100%') + const testModule = eventsRequest + .payload.events.find(event => event.type === 'test_module_end').content + assert.propertyVal(testModule.meta, TEST_ITR_TESTS_SKIPPED, 'true') + assert.propertyVal(testModule.meta, TEST_CODE_COVERAGE_ENABLED, 'true') + assert.propertyVal(testModule.meta, TEST_ITR_SKIPPING_ENABLED, 'true') + assert.propertyVal(testModule.meta, TEST_ITR_SKIPPING_TYPE, 'suite') + assert.propertyVal(testModule.metrics, TEST_ITR_SKIPPING_COUNT, 1) done() - }) - }) - it('does not report code coverage if disabled by the API', (done) => { - receiver.setSettings({ - itr_enabled: false, - code_coverage: false, - tests_skipping: false - }) - - receiver.assertPayloadReceived(() => { - const error = new Error('it should not report code coverage') - done(error) - }, ({ url }) => url.endsWith('/api/v2/citestcov')).catch(() => {}) - - receiver.assertPayloadReceived(({ payload }) => { - const eventTypes = payload.events.map(event => event.type) - assert.includeMembers(eventTypes, ['test', 'test_session_end', 'test_module_end', 'test_suite_end']) - const testSession = payload.events.find(event => event.type === 'test_session_end').content - assert.propertyVal(testSession.meta, TEST_ITR_TESTS_SKIPPED, 'false') - assert.propertyVal(testSession.meta, TEST_CODE_COVERAGE_ENABLED, 'false') - assert.propertyVal(testSession.meta, TEST_ITR_SKIPPING_ENABLED, 'false') - assert.exists(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) - const testModule = payload.events.find(event => event.type === 'test_module_end').content - assert.propertyVal(testModule.meta, TEST_ITR_TESTS_SKIPPED, 'false') - assert.propertyVal(testModule.meta, TEST_CODE_COVERAGE_ENABLED, 'false') - assert.propertyVal(testModule.meta, TEST_ITR_SKIPPING_ENABLED, 'false') - }, ({ url }) => url.endsWith('/api/v2/citestcycle')).then(() => done()).catch(done) + }).catch(done) childProcess = exec( runTestsWithCoverageCommand, @@ -386,692 +463,763 @@ versions.forEach(version => { } ) }) - it('can skip suites received by the intelligent test runner API and still reports code coverage', - (done) => { - receiver.setSuitesToSkip([{ - type: 'suite', - attributes: { - suite: `${featuresPath}farewell.feature` - } - }]) - - const skippableRequestPromise = receiver - .payloadReceived(({ url }) => url.endsWith('/api/v2/ci/tests/skippable')) - const coverageRequestPromise = receiver.payloadReceived(({ url }) => url.endsWith('/api/v2/citestcov')) - const eventsRequestPromise = receiver.payloadReceived(({ url }) => url.endsWith('/api/v2/citestcycle')) - - Promise.all([ - skippableRequestPromise, - coverageRequestPromise, - 
eventsRequestPromise - ]).then(([skippableRequest, coverageRequest, eventsRequest]) => { - const [coveragePayload] = coverageRequest.payload - if (isAgentless) { - assert.propertyVal(skippableRequest.headers, 'dd-api-key', '1') - assert.propertyVal(coverageRequest.headers, 'dd-api-key', '1') - assert.propertyVal(eventsRequest.headers, 'dd-api-key', '1') - } else { - assert.notProperty(skippableRequest.headers, 'dd-api-key', '1') - assert.notProperty(coverageRequest.headers, 'dd-api-key', '1') - assert.notProperty(eventsRequest.headers, 'dd-api-key', '1') - } - assert.propertyVal(coveragePayload, 'name', 'coverage1') - assert.propertyVal(coveragePayload, 'filename', 'coverage1.msgpack') - assert.propertyVal(coveragePayload, 'type', 'application/msgpack') + it('does not skip tests if git metadata upload fails', (done) => { + receiver.setSuitesToSkip([{ + type: 'suite', + attributes: { + suite: `${featuresPath}farewell.feature` + } + }]) + + receiver.setGitUploadStatus(404) + + receiver.assertPayloadReceived(() => { + const error = new Error('should not request skippable') + done(error) + }, ({ url }) => url.endsWith('/api/v2/ci/tests/skippable')) + + receiver.assertPayloadReceived(({ payload }) => { + const eventTypes = payload.events.map(event => event.type) + // because they are not skipped + assert.includeMembers(eventTypes, ['test', 'test_suite_end', 'test_module_end', 'test_session_end']) + const numSuites = eventTypes.reduce( + (acc, type) => type === 'test_suite_end' ? acc + 1 : acc, 0 + ) + assert.equal(numSuites, 2) + const testSession = payload.events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_ITR_TESTS_SKIPPED, 'false') + assert.propertyVal(testSession.meta, TEST_CODE_COVERAGE_ENABLED, 'true') + assert.propertyVal(testSession.meta, TEST_ITR_SKIPPING_ENABLED, 'true') + const testModule = payload.events.find(event => event.type === 'test_module_end').content + assert.propertyVal(testModule.meta, TEST_ITR_TESTS_SKIPPED, 'false') + assert.propertyVal(testModule.meta, TEST_CODE_COVERAGE_ENABLED, 'true') + assert.propertyVal(testModule.meta, TEST_ITR_SKIPPING_ENABLED, 'true') + }, ({ url }) => url.endsWith('/api/v2/citestcycle')).then(() => done()).catch(done) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: envVars, + stdio: 'inherit' + } + ) + }) + it('does not skip tests if test skipping is disabled by the API', (done) => { + receiver.setSettings({ + itr_enabled: true, + code_coverage: true, + tests_skipping: false + }) - const eventTypes = eventsRequest.payload.events.map(event => event.type) + receiver.setSuitesToSkip([{ + type: 'suite', + attributes: { + suite: `${featuresPath}farewell.feature` + } + }]) + + receiver.assertPayloadReceived(() => { + const error = new Error('should not request skippable') + done(error) + }, ({ url }) => url.endsWith('/api/v2/ci/tests/skippable')) + + receiver.assertPayloadReceived(({ payload }) => { + const eventTypes = payload.events.map(event => event.type) + // because they are not skipped + assert.includeMembers(eventTypes, ['test', 'test_suite_end', 'test_module_end', 'test_session_end']) + const numSuites = eventTypes.reduce( + (acc, type) => type === 'test_suite_end' ? 
acc + 1 : acc, 0 + ) + assert.equal(numSuites, 2) + }, ({ url }) => url.endsWith('/api/v2/citestcycle')).then(() => done()).catch(done) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: getCiVisAgentlessConfig(receiver.port), + stdio: 'inherit' + } + ) + }) + it('does not skip suites if suite is marked as unskippable', (done) => { + receiver.setSettings({ + itr_enabled: true, + code_coverage: true, + tests_skipping: true + }) - const skippedSuite = eventsRequest.payload.events.find(event => - event.content.resource === `test_suite.${featuresPath}farewell.feature` - ).content - assert.propertyVal(skippedSuite.meta, TEST_STATUS, 'skip') - assert.propertyVal(skippedSuite.meta, TEST_SKIPPED_BY_ITR, 'true') + receiver.setSuitesToSkip([ + { + type: 'suite', + attributes: { + suite: `${featuresPath}farewell.feature` + } + }, + { + type: 'suite', + attributes: { + suite: `${featuresPath}greetings.feature` + } + } + ]) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const suites = events.filter(event => event.type === 'test_suite_end') + + assert.equal(suites.length, 2) + + const testSession = events.find(event => event.type === 'test_session_end').content + const testModule = events.find(event => event.type === 'test_session_end').content + + assert.propertyVal(testSession.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.propertyVal(testSession.meta, TEST_ITR_FORCED_RUN, 'true') + assert.propertyVal(testModule.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.propertyVal(testModule.meta, TEST_ITR_FORCED_RUN, 'true') + + const skippedSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/features/farewell.feature' + ).content + const forcedToRunSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/features/greetings.feature' + ).content + + assert.propertyVal(skippedSuite.meta, TEST_STATUS, 'skip') + assert.notProperty(skippedSuite.meta, TEST_ITR_UNSKIPPABLE) + assert.notProperty(skippedSuite.meta, TEST_ITR_FORCED_RUN) + + assert.propertyVal(forcedToRunSuite.meta, TEST_STATUS, 'fail') + assert.propertyVal(forcedToRunSuite.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.propertyVal(forcedToRunSuite.meta, TEST_ITR_FORCED_RUN, 'true') + }, 25000) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: envVars, + stdio: 'inherit' + } + ) - assert.includeMembers(eventTypes, ['test', 'test_suite_end', 'test_module_end', 'test_session_end']) - const numSuites = eventTypes.reduce( - (acc, type) => type === 'test_suite_end' ? 
acc + 1 : acc, 0 - ) - assert.equal(numSuites, 2) - const testSession = eventsRequest - .payload.events.find(event => event.type === 'test_session_end').content - assert.propertyVal(testSession.meta, TEST_ITR_TESTS_SKIPPED, 'true') - assert.propertyVal(testSession.meta, TEST_CODE_COVERAGE_ENABLED, 'true') - assert.propertyVal(testSession.meta, TEST_ITR_SKIPPING_ENABLED, 'true') - assert.propertyVal(testSession.meta, TEST_ITR_SKIPPING_TYPE, 'suite') - assert.propertyVal(testSession.metrics, TEST_ITR_SKIPPING_COUNT, 1) - - const testModule = eventsRequest - .payload.events.find(event => event.type === 'test_module_end').content - assert.propertyVal(testModule.meta, TEST_ITR_TESTS_SKIPPED, 'true') - assert.propertyVal(testModule.meta, TEST_CODE_COVERAGE_ENABLED, 'true') - assert.propertyVal(testModule.meta, TEST_ITR_SKIPPING_ENABLED, 'true') - assert.propertyVal(testModule.meta, TEST_ITR_SKIPPING_TYPE, 'suite') - assert.propertyVal(testModule.metrics, TEST_ITR_SKIPPING_COUNT, 1) - done() - }).catch(done) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + it('only sets forced to run if suite was going to be skipped by ITR', (done) => { + receiver.setSettings({ + itr_enabled: true, + code_coverage: true, + tests_skipping: true + }) - childProcess = exec( - runTestsWithCoverageCommand, - { - cwd, - env: envVars, - stdio: 'inherit' - } - ) - }) - it('does not skip tests if git metadata upload fails', (done) => { - receiver.setSuitesToSkip([{ + receiver.setSuitesToSkip([ + { type: 'suite', attributes: { suite: `${featuresPath}farewell.feature` } - }]) + } + ]) - receiver.setGitUploadStatus(404) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const suites = events.filter(event => event.type === 'test_suite_end') - receiver.assertPayloadReceived(() => { - const error = new Error('should not request skippable') - done(error) - }, ({ url }) => url.endsWith('/api/v2/ci/tests/skippable')) + assert.equal(suites.length, 2) - receiver.assertPayloadReceived(({ payload }) => { - const eventTypes = payload.events.map(event => event.type) - // because they are not skipped - assert.includeMembers(eventTypes, ['test', 'test_suite_end', 'test_module_end', 'test_session_end']) - const numSuites = eventTypes.reduce( - (acc, type) => type === 'test_suite_end' ? 
acc + 1 : acc, 0 + const testSession = events.find(event => event.type === 'test_session_end').content + const testModule = events.find(event => event.type === 'test_session_end').content + + assert.propertyVal(testSession.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.notProperty(testSession.meta, TEST_ITR_FORCED_RUN) + assert.propertyVal(testModule.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.notProperty(testModule.meta, TEST_ITR_FORCED_RUN) + + const skippedSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/features/farewell.feature' ) - assert.equal(numSuites, 2) - const testSession = payload.events.find(event => event.type === 'test_session_end').content + const failedSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/features/greetings.feature' + ) + + assert.propertyVal(skippedSuite.content.meta, TEST_STATUS, 'skip') + assert.notProperty(skippedSuite.content.meta, TEST_ITR_UNSKIPPABLE) + assert.notProperty(skippedSuite.content.meta, TEST_ITR_FORCED_RUN) + + assert.propertyVal(failedSuite.content.meta, TEST_STATUS, 'fail') + assert.propertyVal(failedSuite.content.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.notProperty(failedSuite.content.meta, TEST_ITR_FORCED_RUN) + }, 25000) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: envVars, + stdio: 'inherit' + } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + it('sets _dd.ci.itr.tests_skipped to false if the received suite is not skipped', (done) => { + receiver.setSuitesToSkip([{ + type: 'suite', + attributes: { + suite: `${featuresPath}not-existing.feature` + } + }]) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content assert.propertyVal(testSession.meta, TEST_ITR_TESTS_SKIPPED, 'false') assert.propertyVal(testSession.meta, TEST_CODE_COVERAGE_ENABLED, 'true') assert.propertyVal(testSession.meta, TEST_ITR_SKIPPING_ENABLED, 'true') - const testModule = payload.events.find(event => event.type === 'test_module_end').content + assert.propertyVal(testSession.metrics, TEST_ITR_SKIPPING_COUNT, 0) + const testModule = events.find(event => event.type === 'test_module_end').content assert.propertyVal(testModule.meta, TEST_ITR_TESTS_SKIPPED, 'false') assert.propertyVal(testModule.meta, TEST_CODE_COVERAGE_ENABLED, 'true') assert.propertyVal(testModule.meta, TEST_ITR_SKIPPING_ENABLED, 'true') - }, ({ url }) => url.endsWith('/api/v2/citestcycle')).then(() => done()).catch(done) - - childProcess = exec( - runTestsWithCoverageCommand, - { - cwd, - env: envVars, - stdio: 'inherit' - } - ) + assert.propertyVal(testModule.metrics, TEST_ITR_SKIPPING_COUNT, 0) + }, 25000) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: envVars, + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) }) - it('does not skip tests if test skipping is disabled by the API', (done) => { - receiver.setSettings({ - itr_enabled: true, - code_coverage: true, - tests_skipping: false - }) - - receiver.setSuitesToSkip([{ - type: 'suite', - attributes: { - suite: `${featuresPath}farewell.feature` - } - }]) - - receiver.assertPayloadReceived(() => { - const error = new Error('should not request skippable') - done(error) - }, ({ url }) 
=> url.endsWith('/api/v2/ci/tests/skippable')) + }) + if (!isAgentless) { + context('if the agent is not event platform proxy compatible', () => { + it('does not do any intelligent test runner request', (done) => { + receiver.setInfoResponse({ endpoints: [] }) + + receiver.assertPayloadReceived(() => { + const error = new Error('should not request search_commits') + done(error) + }, ({ url }) => url === '/evp_proxy/v2/api/v2/git/repository/search_commits') + receiver.assertPayloadReceived(() => { + const error = new Error('should not request search_commits') + done(error) + }, ({ url }) => url === '/api/v2/git/repository/search_commits') + receiver.assertPayloadReceived(() => { + const error = new Error('should not request setting') + done(error) + }, ({ url }) => url === '/api/v2/libraries/tests/services/setting') + receiver.assertPayloadReceived(() => { + const error = new Error('should not request setting') + done(error) + }, ({ url }) => url === '/evp_proxy/v2/api/v2/libraries/tests/services/setting') + + receiver.assertPayloadReceived(({ payload }) => { + const testSpans = payload.flatMap(trace => trace) + const resourceNames = testSpans.map(span => span.resource) + + assert.includeMembers(resourceNames, + [ + `${featuresPath}farewell.feature.Say farewell`, + `${featuresPath}greetings.feature.Say greetings`, + `${featuresPath}greetings.feature.Say yeah`, + `${featuresPath}greetings.feature.Say yo`, + `${featuresPath}greetings.feature.Say skip` + ] + ) + }, ({ url }) => url === '/v0.4/traces').then(() => done()).catch(done) - receiver.assertPayloadReceived(({ payload }) => { - const eventTypes = payload.events.map(event => event.type) - // because they are not skipped - assert.includeMembers(eventTypes, ['test', 'test_suite_end', 'test_module_end', 'test_session_end']) - const numSuites = eventTypes.reduce( - (acc, type) => type === 'test_suite_end' ? 
acc + 1 : acc, 0 + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: getCiVisEvpProxyConfig(receiver.port), + stdio: 'inherit' + } ) - assert.equal(numSuites, 2) - }, ({ url }) => url.endsWith('/api/v2/citestcycle')).then(() => done()).catch(done) + }) + }) + } + it('reports itr_correlation_id in test suites', (done) => { + const itrCorrelationId = '4321' + receiver.setItrCorrelationId(itrCorrelationId) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSuites = events.filter(event => event.type === 'test_suite_end').map(event => event.content) + testSuites.forEach(testSuite => { + assert.equal(testSuite.itr_correlation_id, itrCorrelationId) + }) + }, 25000) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: envVars, + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + }) - childProcess = exec( - runTestsWithCoverageCommand, - { - cwd, - env: getCiVisAgentlessConfig(receiver.port), - stdio: 'inherit' + context('early flake detection', () => { + it('retries new tests', (done) => { + const NUM_RETRIES_EFD = 3 + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD } - ) + } }) - it('does not skip suites if suite is marked as unskippable', (done) => { - receiver.setSettings({ - itr_enabled: true, - code_coverage: true, - tests_skipping: true - }) - - receiver.setSuitesToSkip([ - { - type: 'suite', - attributes: { - suite: `${featuresPath}farewell.feature` - } - }, - { - type: 'suite', - attributes: { - suite: `${featuresPath}greetings.feature` - } + // cucumber.ci-visibility/features/farewell.feature.Say whatever will be considered new + receiver.setKnownTests( + { + cucumber: { + 'ci-visibility/features/farewell.feature': ['Say farewell'], + 'ci-visibility/features/greetings.feature': ['Say greetings', 'Say yeah', 'Say yo', 'Say skip'] } - ]) - - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { - const events = payloads.flatMap(({ payload }) => payload.events) - const suites = events.filter(event => event.type === 'test_suite_end') - - assert.equal(suites.length, 2) - - const testSession = events.find(event => event.type === 'test_session_end').content - const testModule = events.find(event => event.type === 'test_session_end').content - - assert.propertyVal(testSession.meta, TEST_ITR_UNSKIPPABLE, 'true') - assert.propertyVal(testSession.meta, TEST_ITR_FORCED_RUN, 'true') - assert.propertyVal(testModule.meta, TEST_ITR_UNSKIPPABLE, 'true') - assert.propertyVal(testModule.meta, TEST_ITR_FORCED_RUN, 'true') - - const skippedSuite = suites.find( - event => event.content.resource === 'test_suite.ci-visibility/features/farewell.feature' - ).content - const forcedToRunSuite = suites.find( - event => event.content.resource === 'test_suite.ci-visibility/features/greetings.feature' - ).content - - assert.propertyVal(skippedSuite.meta, TEST_STATUS, 'skip') - assert.notProperty(skippedSuite.meta, TEST_ITR_UNSKIPPABLE) - assert.notProperty(skippedSuite.meta, TEST_ITR_FORCED_RUN) - - assert.propertyVal(forcedToRunSuite.meta, TEST_STATUS, 'fail') - assert.propertyVal(forcedToRunSuite.meta, TEST_ITR_UNSKIPPABLE, 'true') - 
assert.propertyVal(forcedToRunSuite.meta, TEST_ITR_FORCED_RUN, 'true') - }, 25000) + } + ) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) - childProcess = exec( - runTestsWithCoverageCommand, - { - cwd, - env: envVars, - stdio: 'inherit' - } - ) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + const tests = events.filter(event => event.type === 'test').map(event => event.content) - childProcess.on('exit', () => { - eventsPromise.then(() => { - done() - }).catch(done) + const newTests = tests.filter(test => + test.resource === 'ci-visibility/features/farewell.feature.Say whatever' + ) + newTests.forEach(test => { + assert.propertyVal(test.meta, TEST_IS_NEW, 'true') + }) + const retriedTests = newTests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + // all but one has been retried + assert.equal( + newTests.length - 1, + retriedTests.length + ) + assert.equal(retriedTests.length, NUM_RETRIES_EFD) + // Test name does not change + newTests.forEach(test => { + assert.equal(test.meta[TEST_NAME], 'Say whatever') + }) }) + childProcess = exec( + runTestsCommand, + { + cwd, + env: envVars, + stdio: 'pipe' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) }) - it('only sets forced to run if suite was going to be skipped by ITR', (done) => { - receiver.setSettings({ - itr_enabled: true, - code_coverage: true, - tests_skipping: true - }) + }) - receiver.setSuitesToSkip([ - { - type: 'suite', - attributes: { - suite: `${featuresPath}farewell.feature` - } + it('is disabled if DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED is false', (done) => { + const NUM_RETRIES_EFD = 3 + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD } - ]) - - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { - const events = payloads.flatMap(({ payload }) => payload.events) - const suites = events.filter(event => event.type === 'test_suite_end') - - assert.equal(suites.length, 2) - - const testSession = events.find(event => event.type === 'test_session_end').content - const testModule = events.find(event => event.type === 'test_session_end').content - - assert.propertyVal(testSession.meta, TEST_ITR_UNSKIPPABLE, 'true') - assert.notProperty(testSession.meta, TEST_ITR_FORCED_RUN) - assert.propertyVal(testModule.meta, TEST_ITR_UNSKIPPABLE, 'true') - assert.notProperty(testModule.meta, TEST_ITR_FORCED_RUN) - - const skippedSuite = suites.find( - event => event.content.resource === 'test_suite.ci-visibility/features/farewell.feature' - ) - const failedSuite = suites.find( - event => event.content.resource === 'test_suite.ci-visibility/features/greetings.feature' - ) - - assert.propertyVal(skippedSuite.content.meta, TEST_STATUS, 'skip') - assert.notProperty(skippedSuite.content.meta, TEST_ITR_UNSKIPPABLE) - assert.notProperty(skippedSuite.content.meta, TEST_ITR_FORCED_RUN) + } + }) - assert.propertyVal(failedSuite.content.meta, TEST_STATUS, 'fail') - assert.propertyVal(failedSuite.content.meta, TEST_ITR_UNSKIPPABLE, 'true') - assert.notProperty(failedSuite.content.meta, TEST_ITR_FORCED_RUN) - }, 25000) + const eventsPromise = 
receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.notProperty(testSession.meta, TEST_EARLY_FLAKE_ENABLED) - childProcess = exec( - runTestsWithCoverageCommand, - { - cwd, - env: envVars, - stdio: 'inherit' - } - ) - - childProcess.on('exit', () => { - eventsPromise.then(() => { - done() - }).catch(done) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + const newTests = tests.filter(test => + test.meta[TEST_IS_NEW] === 'true' + ) + // new tests are not detected + assert.equal(newTests.length, 0) }) + // cucumber.ci-visibility/features/farewell.feature.Say whatever will be considered new + receiver.setKnownTests({ + cucumber: { + 'ci-visibility/features/farewell.feature': ['Say farewell'], + 'ci-visibility/features/greetings.feature': ['Say greetings', 'Say yeah', 'Say yo', 'Say skip'] + } }) - it('sets _dd.ci.itr.tests_skipped to false if the received suite is not skipped', (done) => { - receiver.setSuitesToSkip([{ - type: 'suite', - attributes: { - suite: `${featuresPath}not-existing.feature` - } - }]) - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { - const events = payloads.flatMap(({ payload }) => payload.events) - const testSession = events.find(event => event.type === 'test_session_end').content - assert.propertyVal(testSession.meta, TEST_ITR_TESTS_SKIPPED, 'false') - assert.propertyVal(testSession.meta, TEST_CODE_COVERAGE_ENABLED, 'true') - assert.propertyVal(testSession.meta, TEST_ITR_SKIPPING_ENABLED, 'true') - assert.propertyVal(testSession.metrics, TEST_ITR_SKIPPING_COUNT, 0) - const testModule = events.find(event => event.type === 'test_module_end').content - assert.propertyVal(testModule.meta, TEST_ITR_TESTS_SKIPPED, 'false') - assert.propertyVal(testModule.meta, TEST_CODE_COVERAGE_ENABLED, 'true') - assert.propertyVal(testModule.meta, TEST_ITR_SKIPPING_ENABLED, 'true') - assert.propertyVal(testModule.metrics, TEST_ITR_SKIPPING_COUNT, 0) - }, 25000) - childProcess = exec( - runTestsWithCoverageCommand, - { - cwd, - env: envVars, - stdio: 'inherit' - } - ) - childProcess.on('exit', () => { - eventsPromise.then(() => { - done() - }).catch(done) - }) + childProcess = exec( + runTestsCommand, + { + cwd, + env: { ...envVars, DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED: 'false' }, + stdio: 'pipe' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) }) - if (!isAgentless) { - context('if the agent is not event platform proxy compatible', () => { - it('does not do any intelligent test runner request', (done) => { - receiver.setInfoResponse({ endpoints: [] }) - - receiver.assertPayloadReceived(() => { - const error = new Error('should not request search_commits') - done(error) - }, ({ url }) => url === '/evp_proxy/v2/api/v2/git/repository/search_commits') - receiver.assertPayloadReceived(() => { - const error = new Error('should not request search_commits') - done(error) - }, ({ url }) => url === '/api/v2/git/repository/search_commits') - receiver.assertPayloadReceived(() => { - const error = new Error('should not request setting') - done(error) - }, ({ url }) => url === '/api/v2/libraries/tests/services/setting') - receiver.assertPayloadReceived(() => { - const error = new Error('should not request setting') - 
done(error) - }, ({ url }) => url === '/evp_proxy/v2/api/v2/libraries/tests/services/setting') - - receiver.assertPayloadReceived(({ payload }) => { - const testSpans = payload.flatMap(trace => trace) - const resourceNames = testSpans.map(span => span.resource) - - assert.includeMembers(resourceNames, - [ - `${featuresPath}farewell.feature.Say farewell`, - `${featuresPath}greetings.feature.Say greetings`, - `${featuresPath}greetings.feature.Say yeah`, - `${featuresPath}greetings.feature.Say yo`, - `${featuresPath}greetings.feature.Say skip` - ] - ) - }, ({ url }) => url === '/v0.4/traces').then(() => done()).catch(done) - - childProcess = exec( - runTestsWithCoverageCommand, - { - cwd, - env: getCiVisEvpProxyConfig(receiver.port), - stdio: 'inherit' - } - ) - }) - }) - } - it('reports itr_correlation_id in test suites', (done) => { - const itrCorrelationId = '4321' - receiver.setItrCorrelationId(itrCorrelationId) - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { - const events = payloads.flatMap(({ payload }) => payload.events) - const testSuites = events.filter(event => event.type === 'test_suite_end').map(event => event.content) - testSuites.forEach(testSuite => { - assert.equal(testSuite.itr_correlation_id, itrCorrelationId) - }) - }, 25000) + }) - childProcess = exec( - runTestsWithCoverageCommand, - { - cwd, - env: envVars, - stdio: 'inherit' + it('retries flaky tests and sets exit code to 0 as long as one attempt passes', (done) => { + const NUM_RETRIES_EFD = 3 + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD } - ) - childProcess.on('exit', () => { - eventsPromise.then(() => { - done() - }).catch(done) - }) + } }) - }) + // Tests in "cucumber.ci-visibility/features-flaky/flaky.feature" will be considered new + receiver.setKnownTests({}) - context('early flake detection', () => { - it('retries new tests', (done) => { - const NUM_RETRIES_EFD = 3 - receiver.setSettings({ - itr_enabled: false, - code_coverage: false, - tests_skipping: false, - early_flake_detection: { - enabled: true, - slow_test_retries: { - '5s': NUM_RETRIES_EFD - } - } - }) - // "cucumber.ci-visibility/features/farewell.feature.Say" whatever will be considered new - receiver.setKnownTests( - { - cucumber: { - 'ci-visibility/features/farewell.feature': ['Say farewell'], - 'ci-visibility/features/greetings.feature': ['Say greetings', 'Say yeah', 'Say yo', 'Say skip'] - } - } - ) - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { - const events = payloads.flatMap(({ payload }) => payload.events) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) - const testSession = events.find(event => event.type === 'test_session_end').content - assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') - const tests = events.filter(event => event.type === 'test').map(event => event.content) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + const tests = events.filter(event => event.type === 'test').map(event => event.content) + const testSuites = events.filter(event => event.type === 
'test_suite_end').map(event => event.content) - const newTests = tests.filter(test => - test.resource === 'ci-visibility/features/farewell.feature.Say whatever' - ) - newTests.forEach(test => { - assert.propertyVal(test.meta, TEST_IS_NEW, 'true') - }) - const retriedTests = newTests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - // all but one has been retried - assert.equal( - newTests.length - 1, - retriedTests.length - ) - assert.equal(retriedTests.length, NUM_RETRIES_EFD) - // Test name does not change - newTests.forEach(test => { - assert.equal(test.meta[TEST_NAME], 'Say whatever') - }) + tests.forEach(test => { + assert.propertyVal(test.meta, TEST_IS_NEW, 'true') + }) + // All test suites pass, even though there are failed tests + testSuites.forEach(testSuite => { + assert.propertyVal(testSuite.meta, TEST_STATUS, 'pass') }) - childProcess = exec( - runTestsCommand, - { - cwd, - env: envVars, - stdio: 'pipe' - } - ) - childProcess.on('exit', () => { - eventsPromise.then(() => { - done() - }).catch(done) - }) - }) - it('is disabled if DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED is false', (done) => { - const NUM_RETRIES_EFD = 3 - receiver.setSettings({ - itr_enabled: false, - code_coverage: false, - tests_skipping: false, - early_flake_detection: { - enabled: true, - slow_test_retries: { - '5s': NUM_RETRIES_EFD - } - } - }) - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { - const events = payloads.flatMap(({ payload }) => payload.events) - const testSession = events.find(event => event.type === 'test_session_end').content - assert.notProperty(testSession.meta, TEST_EARLY_FLAKE_ENABLED) + const failedAttempts = tests.filter(test => test.meta[TEST_STATUS] === 'fail') + const passedAttempts = tests.filter(test => test.meta[TEST_STATUS] === 'pass') - const tests = events.filter(event => event.type === 'test').map(event => event.content) - const newTests = tests.filter(test => - test.meta[TEST_IS_NEW] === 'true' - ) - // new tests are not detected - assert.equal(newTests.length, 0) - }) - // cucumber.ci-visibility/features/farewell.feature.Say whatever will be considered new - receiver.setKnownTests({ - cucumber: { - 'ci-visibility/features/farewell.feature': ['Say farewell'], - 'ci-visibility/features/greetings.feature': ['Say greetings', 'Say yeah', 'Say yo', 'Say skip'] - } + // (1 original run + 3 retries) / 2 + assert.equal(failedAttempts.length, 2) + assert.equal(passedAttempts.length, 2) }) - childProcess = exec( - runTestsCommand, - { - cwd, - env: { ...envVars, DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED: 'false' }, - stdio: 'pipe' - } - ) - childProcess.on('exit', () => { - eventsPromise.then(() => { - done() - }).catch(done) - }) + childProcess = exec( + './node_modules/.bin/cucumber-js ci-visibility/features-flaky/*.feature', + { + cwd, + env: envVars, + stdio: 'pipe' + } + ) + childProcess.on('exit', (exitCode) => { + assert.equal(exitCode, 0) + eventsPromise.then(() => { + done() + }).catch(done) }) - it('retries flaky tests and sets exit code to 0 as long as one attempt passes', (done) => { - const NUM_RETRIES_EFD = 3 - receiver.setSettings({ - itr_enabled: false, - code_coverage: false, - tests_skipping: false, - early_flake_detection: { - enabled: true, - slow_test_retries: { - '5s': NUM_RETRIES_EFD - } - } - }) - // Tests in "cucumber.ci-visibility/features-flaky/flaky.feature" will be considered new - receiver.setKnownTests({}) + }) - const eventsPromise = receiver - 
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { - const events = payloads.flatMap(({ payload }) => payload.events) + it('does not retry tests that are skipped', (done) => { + const NUM_RETRIES_EFD = 3 + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + } + } + }) + // "cucumber.ci-visibility/features/farewell.feature.Say whatever" will be considered new + // "cucumber.ci-visibility/features/greetings.feature.Say skip" will be considered new + receiver.setKnownTests({ + cucumber: { + 'ci-visibility/features/farewell.feature': ['Say farewell'], + 'ci-visibility/features/greetings.feature': ['Say greetings', 'Say yeah', 'Say yo'] + } + }) - const testSession = events.find(event => event.type === 'test_session_end').content - assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') - const tests = events.filter(event => event.type === 'test').map(event => event.content) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) - tests.forEach(test => { - assert.propertyVal(test.meta, TEST_IS_NEW, 'true') - }) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + const tests = events.filter(event => event.type === 'test').map(event => event.content) - const failedAttempts = tests.filter(test => test.meta[TEST_STATUS] === 'fail') - const passedAttempts = tests.filter(test => test.meta[TEST_STATUS] === 'pass') + const skippedNewTest = tests.filter(test => + test.resource === 'ci-visibility/features/greetings.feature.Say skip' + ) + // not retried + assert.equal(skippedNewTest.length, 1) + }) - // (1 original run + 3 retries) / 2 - assert.equal(failedAttempts.length, 2) - assert.equal(passedAttempts.length, 2) - }) + childProcess = exec( + runTestsCommand, + { + cwd, + env: envVars, + stdio: 'pipe' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) - childProcess = exec( - './node_modules/.bin/cucumber-js ci-visibility/features-flaky/*.feature', - { - cwd, - env: envVars, - stdio: 'pipe' + it('does not run EFD if the known tests request fails', (done) => { + const NUM_RETRIES_EFD = 3 + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD } - ) - childProcess.on('exit', (exitCode) => { - assert.equal(exitCode, 0) - eventsPromise.then(() => { - done() - }).catch(done) - }) + } }) - it('does not retry tests that are skipped', (done) => { - const NUM_RETRIES_EFD = 3 - receiver.setSettings({ - itr_enabled: false, - code_coverage: false, - tests_skipping: false, - early_flake_detection: { - enabled: true, - slow_test_retries: { - '5s': NUM_RETRIES_EFD - } - } + receiver.setKnownTestsResponseCode(500) + receiver.setKnownTests({}) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const testSession = events.find(event => event.type === 'test_session_end').content + assert.notProperty(testSession.meta, TEST_EARLY_FLAKE_ENABLED) + const tests = 
events.filter(event => event.type === 'test').map(event => event.content) + + assert.equal(tests.length, 6) + const newTests = tests.filter(test => + test.meta[TEST_IS_NEW] === 'true' + ) + assert.equal(newTests.length, 0) }) - // "cucumber.ci-visibility/features/farewell.feature.Say whatever" will be considered new - // "cucumber.ci-visibility/features/greetings.feature.Say skip" will be considered new - receiver.setKnownTests({ + + childProcess = exec( + runTestsCommand, + { cwd, env: envVars, stdio: 'pipe' } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + + it('bails out of EFD if the percentage of new tests is too high', (done) => { + const NUM_RETRIES_EFD = 3 + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + }, + faulty_session_threshold: 0 + } + }) + // tests in cucumber.ci-visibility/features/farewell.feature will be considered new + receiver.setKnownTests( + { cucumber: { - 'ci-visibility/features/farewell.feature': ['Say farewell'], - 'ci-visibility/features/greetings.feature': ['Say greetings', 'Say yeah', 'Say yo'] + 'ci-visibility/features/greetings.feature': ['Say greetings', 'Say yeah', 'Say yo', 'Say skip'] } - }) + } + ) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { - const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.notProperty(testSession.meta, TEST_EARLY_FLAKE_ENABLED) + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ABORT_REASON, 'faulty') - const testSession = events.find(event => event.type === 'test_session_end').content - assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') - const tests = events.filter(event => event.type === 'test').map(event => event.content) + const tests = events.filter(event => event.type === 'test').map(event => event.content) - const skippedNewTest = tests.filter(test => - test.resource === 'ci-visibility/features/greetings.feature.Say skip' - ) - // not retried - assert.equal(skippedNewTest.length, 1) - }) + const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') + assert.equal(newTests.length, 0) - childProcess = exec( - runTestsCommand, - { - cwd, - env: envVars, - stdio: 'pipe' - } - ) - childProcess.on('exit', () => { - eventsPromise.then(() => { - done() - }).catch(done) + const retriedTests = newTests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + assert.equal(retriedTests.length, 0) }) + + childProcess = exec( + runTestsCommand, + { + cwd, + env: envVars, + stdio: 'pipe' + } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) }) - it('does not run EFD if the known tests request fails', (done) => { - const NUM_RETRIES_EFD = 3 - receiver.setSettings({ - itr_enabled: false, - code_coverage: false, - tests_skipping: false, - early_flake_detection: { - enabled: true, - slow_test_retries: { - '5s': NUM_RETRIES_EFD + }) + + if (version !== '7.0.0') { // EFD in parallel mode only supported from cucumber>=11 + context('parallel mode', () => { + it('retries new tests', (done) => { + 
const NUM_RETRIES_EFD = 3 + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + } } - } - }) - receiver.setKnownTestsResponseCode(500) - receiver.setKnownTests({}) - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { - const events = payloads.flatMap(({ payload }) => payload.events) + }) + // cucumber.ci-visibility/features/farewell.feature.Say whatever will be considered new + receiver.setKnownTests( + { + cucumber: { + 'ci-visibility/features/farewell.feature': ['Say farewell'], + 'ci-visibility/features/greetings.feature': ['Say greetings', 'Say yeah', 'Say yo', 'Say skip'] + } + } + ) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) - const testSession = events.find(event => event.type === 'test_session_end').content - assert.notProperty(testSession.meta, TEST_EARLY_FLAKE_ENABLED) - const tests = events.filter(event => event.type === 'test').map(event => event.content) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + assert.propertyVal(testSession.meta, CUCUMBER_IS_PARALLEL, 'true') - assert.equal(tests.length, 6) - const newTests = tests.filter(test => - test.meta[TEST_IS_NEW] === 'true' - ) - assert.equal(newTests.length, 0) - }) + const tests = events.filter(event => event.type === 'test').map(event => event.content) - childProcess = exec( - runTestsCommand, - { cwd, env: envVars, stdio: 'pipe' } - ) + const newTests = tests.filter(test => + test.resource === 'ci-visibility/features/farewell.feature.Say whatever' + ) + newTests.forEach(test => { + assert.propertyVal(test.meta, TEST_IS_NEW, 'true') + // Test name does not change + assert.propertyVal(test.meta, TEST_NAME, 'Say whatever') + assert.propertyVal(test.meta, CUCUMBER_IS_PARALLEL, 'true') + }) + const retriedTests = newTests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + // all but one has been retried + assert.equal( + newTests.length - 1, + retriedTests.length + ) + assert.equal(retriedTests.length, NUM_RETRIES_EFD) + }) - childProcess.on('exit', () => { - eventsPromise.then(() => { - done() - }).catch(done) + childProcess = exec( + parallelModeCommand, + { + cwd, + env: envVars, + stdio: 'pipe' + } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) }) - }) - }) - if (version === 'latest') { // flaky test retries only supported from >=8.0.0 - context('flaky test retries', () => { - it('can retry failed tests', (done) => { + it('retries flaky tests and sets exit code to 0 as long as one attempt passes', (done) => { + const NUM_RETRIES_EFD = 3 receiver.setSettings({ itr_enabled: false, code_coverage: false, tests_skipping: false, - flaky_test_retries_enabled: true, early_flake_detection: { - enabled: false + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + } } }) + // Tests in "cucumber.ci-visibility/features-flaky/flaky.feature" will be considered new + receiver.setKnownTests({}) const eventsPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event 
=> event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + assert.propertyVal(testSession.meta, CUCUMBER_IS_PARALLEL, 'true') const tests = events.filter(event => event.type === 'test').map(event => event.content) + const testSuites = events + .filter(event => event.type === 'test_suite_end').map(event => event.content) - // 2 failures and 1 passed attempt - assert.equal(tests.length, 3) + tests.forEach(test => { + assert.propertyVal(test.meta, TEST_IS_NEW, 'true') + assert.propertyVal(test.meta, CUCUMBER_IS_PARALLEL, 'true') + }) - const failedTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') - assert.equal(failedTests.length, 2) - const passedTests = tests.filter(test => test.meta[TEST_STATUS] === 'pass') - assert.equal(passedTests.length, 1) + // All test suites pass, even though there are failed tests + testSuites.forEach(testSuite => { + assert.propertyVal(testSuite.meta, TEST_STATUS, 'pass') + }) - // All but the first one are retries - const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal(retriedTests.length, 2) + const failedAttempts = tests.filter(test => test.meta[TEST_STATUS] === 'fail') + const passedAttempts = tests.filter(test => test.meta[TEST_STATUS] === 'pass') + + // (1 original run + 3 retries) / 2 + assert.equal(failedAttempts.length, 2) + assert.equal(passedAttempts.length, 2) }) childProcess = exec( - './node_modules/.bin/cucumber-js ci-visibility/features-retry/*.feature', + './node_modules/.bin/cucumber-js ci-visibility/features-flaky/*.feature --parallel 2', { cwd, env: envVars, @@ -1079,44 +1227,60 @@ versions.forEach(version => { } ) - childProcess.on('exit', () => { + childProcess.on('exit', (exitCode) => { + assert.equal(exitCode, 0) eventsPromise.then(() => { done() }).catch(done) }) }) - it('is disabled if DD_CIVISIBILITY_FLAKY_RETRY_ENABLED is false', (done) => { + it('bails out of EFD if the percentage of new tests is too high', (done) => { + const NUM_RETRIES_EFD = 3 receiver.setSettings({ itr_enabled: false, code_coverage: false, tests_skipping: false, - flaky_test_retries_enabled: true, early_flake_detection: { - enabled: false + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + }, + faulty_session_threshold: 0 } }) + // tests in cucumber.ci-visibility/features/farewell.feature will be considered new + receiver.setKnownTests( + { + cucumber: { + 'ci-visibility/features/greetings.feature': ['Say greetings', 'Say yeah', 'Say yo', 'Say skip'] + } + } + ) const eventsPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.notProperty(testSession.meta, TEST_EARLY_FLAKE_ENABLED) + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ABORT_REASON, 'faulty') + assert.propertyVal(testSession.meta, CUCUMBER_IS_PARALLEL, 'true') + const tests = events.filter(event => event.type === 'test').map(event => event.content) - assert.equal(tests.length, 1) + const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') + assert.equal(newTests.length, 0) - const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + const retriedTests = newTests.filter(test => test.meta[TEST_IS_RETRY] === 'true') assert.equal(retriedTests.length, 0) }) childProcess = exec( - './node_modules/.bin/cucumber-js 
ci-visibility/features-retry/*.feature', + parallelModeCommand, { cwd, - env: { - ...envVars, - DD_CIVISIBILITY_FLAKY_RETRY_ENABLED: 'false' - }, + env: envVars, stdio: 'pipe' } ) @@ -1128,14 +1292,25 @@ versions.forEach(version => { }) }) - it('retries DD_CIVISIBILITY_FLAKY_RETRY_COUNT times', (done) => { + it('does not retry tests that are skipped', (done) => { + const NUM_RETRIES_EFD = 3 receiver.setSettings({ itr_enabled: false, code_coverage: false, tests_skipping: false, - flaky_test_retries_enabled: true, early_flake_detection: { - enabled: false + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + } + } + }) + // "cucumber.ci-visibility/features/farewell.feature.Say whatever" will be considered new + // "cucumber.ci-visibility/features/greetings.feature.Say skip" will be considered new + receiver.setKnownTests({ + cucumber: { + 'ci-visibility/features/farewell.feature': ['Say farewell'], + 'ci-visibility/features/greetings.feature': ['Say greetings', 'Say yeah', 'Say yo'] } }) @@ -1143,33 +1318,26 @@ versions.forEach(version => { .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + assert.propertyVal(testSession.meta, CUCUMBER_IS_PARALLEL, 'true') const tests = events.filter(event => event.type === 'test').map(event => event.content) - // 2 failures - assert.equal(tests.length, 2) - - const failedTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') - assert.equal(failedTests.length, 2) - const passedTests = tests.filter(test => test.meta[TEST_STATUS] === 'pass') - assert.equal(passedTests.length, 0) - - // All but the first one are retries - const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') - assert.equal(retriedTests.length, 1) + const skippedNewTest = tests.filter(test => + test.resource === 'ci-visibility/features/greetings.feature.Say skip' + ) + // not retried + assert.equal(skippedNewTest.length, 1) }) childProcess = exec( - './node_modules/.bin/cucumber-js ci-visibility/features-retry/*.feature', + parallelModeCommand, { cwd, - env: { - ...envVars, - DD_CIVISIBILITY_FLAKY_RETRY_COUNT: 1 - }, + env: envVars, stdio: 'pipe' } ) - childProcess.on('exit', () => { eventsPromise.then(() => { done() @@ -1179,53 +1347,235 @@ versions.forEach(version => { }) } }) - }) - it('correctly calculates test code owners when working directory is not repository root', (done) => { - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { - const events = payloads.flatMap(({ payload }) => payload.events) + if (version === 'latest') { // flaky test retries only supported from >=8.0.0 + context('flaky test retries', () => { + it('can retry failed tests', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + flaky_test_retries_enabled: true, + early_flake_detection: { + enabled: false + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + // 2 failures and 1 passed attempt + assert.equal(tests.length, 3) + + const failedTests = 
tests.filter(test => test.meta[TEST_STATUS] === 'fail') + assert.equal(failedTests.length, 2) + const passedTests = tests.filter(test => test.meta[TEST_STATUS] === 'pass') + assert.equal(passedTests.length, 1) + + // All but the first one are retries + const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + assert.equal(retriedTests.length, 2) + }) + + childProcess = exec( + './node_modules/.bin/cucumber-js ci-visibility/features-retry/*.feature', + { + cwd, + env: envVars, + stdio: 'pipe' + } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + + it('is disabled if DD_CIVISIBILITY_FLAKY_RETRY_ENABLED is false', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + flaky_test_retries_enabled: true, + early_flake_detection: { + enabled: false + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + assert.equal(tests.length, 1) + + const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + assert.equal(retriedTests.length, 0) + }) + + childProcess = exec( + './node_modules/.bin/cucumber-js ci-visibility/features-retry/*.feature', + { + cwd, + env: { + ...envVars, + DD_CIVISIBILITY_FLAKY_RETRY_ENABLED: 'false' + }, + stdio: 'pipe' + } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + + it('retries DD_CIVISIBILITY_FLAKY_RETRY_COUNT times', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + flaky_test_retries_enabled: true, + early_flake_detection: { + enabled: false + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + // 2 failures + assert.equal(tests.length, 2) + + const failedTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') + assert.equal(failedTests.length, 2) + const passedTests = tests.filter(test => test.meta[TEST_STATUS] === 'pass') + assert.equal(passedTests.length, 0) + + // All but the first one are retries + const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + assert.equal(retriedTests.length, 1) + }) + + childProcess = exec( + './node_modules/.bin/cucumber-js ci-visibility/features-retry/*.feature', + { + cwd, + env: { + ...envVars, + DD_CIVISIBILITY_FLAKY_RETRY_COUNT: 1 + }, + stdio: 'pipe' + } + ) - const test = events.find(event => event.type === 'test').content - // The test is in a subproject - assert.notEqual(test.meta[TEST_SOURCE_FILE], test.meta[TEST_SUITE]) - assert.equal(test.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) }) + } + }) + }) - childProcess = exec( - 'node ../../node_modules/.bin/cucumber-js features/*.feature', - { - cwd: `${cwd}/ci-visibility/subproject`, - env: { - ...getCiVisAgentlessConfig(receiver.port) - }, - stdio: 'inherit' - } - ) + it('correctly calculates test code owners when working directory is not repository root', (done) => { + const 
eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const test = events.find(event => event.type === 'test').content + const testSuite = events.find(event => event.type === 'test_suite_end').content + // The test is in a subproject + assert.notEqual(test.meta[TEST_SOURCE_FILE], test.meta[TEST_SUITE]) + assert.equal(test.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) + assert.equal(testSuite.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) + }) - childProcess.on('exit', () => { - eventsPromise.then(() => { - done() - }).catch(done) + childProcess = exec( + 'node ../../node_modules/.bin/cucumber-js features/*.feature', + { + cwd: `${cwd}/ci-visibility/subproject`, + env: { + ...getCiVisAgentlessConfig(receiver.port) + }, + stdio: 'inherit' + } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + + it('takes into account untested files if "all" is passed to nyc', (done) => { + const linesPctMatchRegex = /Lines\s*:\s*([\d.]+)%/ + let linesPctMatch + let linesPctFromNyc = 0 + let codeCoverageWithUntestedFiles = 0 + let codeCoverageWithoutUntestedFiles = 0 + + let eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content + codeCoverageWithUntestedFiles = testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT] }) + + childProcess = exec( + './node_modules/nyc/bin/nyc.js --all -r=text-summary --nycrc-path ./my-nyc.config.js ' + + 'node ./node_modules/.bin/cucumber-js ci-visibility/features/*.feature', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + NYC_INCLUDE: JSON.stringify( + [ + 'ci-visibility/features/**', + 'ci-visibility/features-esm/**' + ] + ) + }, + stdio: 'inherit' + } + ) + + childProcess.stdout.on('data', (chunk) => { + testOutput += chunk.toString() + }) + childProcess.stderr.on('data', (chunk) => { + testOutput += chunk.toString() }) - it('takes into account untested files if "all" is passed to nyc', (done) => { - const linesPctMatchRegex = /Lines\s*:\s*([\d.]+)%/ - let linesPctMatch - let linesPctFromNyc = 0 - let codeCoverageWithUntestedFiles = 0 - let codeCoverageWithoutUntestedFiles = 0 + childProcess.on('exit', () => { + linesPctMatch = testOutput.match(linesPctMatchRegex) + linesPctFromNyc = linesPctMatch ? 
Number(linesPctMatch[1]) : null - let eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { - const events = payloads.flatMap(({ payload }) => payload.events) - const testSession = events.find(event => event.type === 'test_session_end').content - codeCoverageWithUntestedFiles = testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT] - }) + assert.equal( + linesPctFromNyc, + codeCoverageWithUntestedFiles, + 'nyc --all output does not match the reported coverage' + ) + // reset test output for next test session + testOutput = '' + // we run the same tests without the all flag childProcess = exec( - './node_modules/nyc/bin/nyc.js --all -r=text-summary --nycrc-path ./my-nyc.config.js ' + + './node_modules/nyc/bin/nyc.js -r=text-summary --nycrc-path ./my-nyc.config.js ' + 'node ./node_modules/.bin/cucumber-js ci-visibility/features/*.feature', { cwd, @@ -1242,6 +1592,13 @@ versions.forEach(version => { } ) + eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content + codeCoverageWithoutUntestedFiles = testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT] + }) + childProcess.stdout.on('data', (chunk) => { testOutput += chunk.toString() }) @@ -1255,60 +1612,14 @@ versions.forEach(version => { assert.equal( linesPctFromNyc, - codeCoverageWithUntestedFiles, - 'nyc --all output does not match the reported coverage' - ) - - // reset test output for next test session - testOutput = '' - // we run the same tests without the all flag - childProcess = exec( - './node_modules/nyc/bin/nyc.js -r=text-summary --nycrc-path ./my-nyc.config.js ' + - 'node ./node_modules/.bin/cucumber-js ci-visibility/features/*.feature', - { - cwd, - env: { - ...getCiVisAgentlessConfig(receiver.port), - NYC_INCLUDE: JSON.stringify( - [ - 'ci-visibility/features/**', - 'ci-visibility/features-esm/**' - ] - ) - }, - stdio: 'inherit' - } + codeCoverageWithoutUntestedFiles, + 'nyc output does not match the reported coverage (no --all flag)' ) - eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { - const events = payloads.flatMap(({ payload }) => payload.events) - const testSession = events.find(event => event.type === 'test_session_end').content - codeCoverageWithoutUntestedFiles = testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT] - }) - - childProcess.stdout.on('data', (chunk) => { - testOutput += chunk.toString() - }) - childProcess.stderr.on('data', (chunk) => { - testOutput += chunk.toString() - }) - - childProcess.on('exit', () => { - linesPctMatch = testOutput.match(linesPctMatchRegex) - linesPctFromNyc = linesPctMatch ? 
Number(linesPctMatch[1]) : null - - assert.equal( - linesPctFromNyc, - codeCoverageWithoutUntestedFiles, - 'nyc output does not match the reported coverage (no --all flag)' - ) - - eventsPromise.then(() => { - assert.isAbove(codeCoverageWithoutUntestedFiles, codeCoverageWithUntestedFiles) - done() - }).catch(done) - }) + eventsPromise.then(() => { + assert.isAbove(codeCoverageWithoutUntestedFiles, codeCoverageWithUntestedFiles) + done() + }).catch(done) }) }) }) diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js index be4493c6b8e..afc79b2ebe5 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -28,12 +28,16 @@ const { TEST_ITR_UNSKIPPABLE, TEST_ITR_FORCED_RUN, TEST_SOURCE_FILE, + TEST_SOURCE_START, TEST_IS_NEW, TEST_IS_RETRY, TEST_EARLY_FLAKE_ENABLED, TEST_SUITE, - TEST_CODE_OWNERS + TEST_CODE_OWNERS, + TEST_SESSION_NAME, + TEST_LEVEL_EVENT_TYPES } = require('../../packages/dd-trace/src/plugins/util/test') +const { DD_HOST_CPU_COUNT } = require('../../packages/dd-trace/src/plugins/util/env') const { ERROR_MESSAGE } = require('../../packages/dd-trace/src/constants') const { NODE_MAJOR } = require('../../version') @@ -225,6 +229,13 @@ moduleTypes.forEach(({ it('can run and report tests', (done) => { const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { + const metadataDicts = payloads.flatMap(({ payload }) => payload.metadata) + + metadataDicts.forEach(metadata => { + for (const testLevel of TEST_LEVEL_EVENT_TYPES) { + assert.equal(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session') + } + }) const events = payloads.flatMap(({ payload }) => payload.events) const testSessionEvent = events.find(event => event.type === 'test_session_end') @@ -266,6 +277,7 @@ moduleTypes.forEach(({ testSuiteEvents.forEach(({ content: { meta, + metrics, test_suite_id: testSuiteId, test_module_id: testModuleId, test_session_id: testSessionId @@ -276,6 +288,9 @@ moduleTypes.forEach(({ assert.exists(testSuiteId) assert.equal(testModuleId.toString(10), testModuleEventContent.test_module_id.toString(10)) assert.equal(testSessionId.toString(10), testSessionEventContent.test_session_id.toString(10)) + assert.isTrue(meta[TEST_SOURCE_FILE].startsWith('cypress/e2e/')) + assert.equal(metrics[TEST_SOURCE_START], 1) + assert.exists(metrics[DD_HOST_CPU_COUNT]) }) assert.includeMembers(testEvents.map(test => test.content.resource), [ @@ -293,6 +308,7 @@ moduleTypes.forEach(({ testEvents.forEach(({ content: { meta, + metrics, test_suite_id: testSuiteId, test_module_id: testModuleId, test_session_id: testSessionId @@ -307,6 +323,7 @@ moduleTypes.forEach(({ // Can read DD_TAGS assert.propertyVal(meta, 'test.customtag', 'customvalue') assert.propertyVal(meta, 'test.customtag2', 'customvalue2') + assert.exists(metrics[DD_HOST_CPU_COUNT]) }) }, 25000) @@ -322,7 +339,8 @@ moduleTypes.forEach(({ env: { ...restEnvVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, - DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2' + DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2', + DD_TEST_SESSION_NAME: 'my-test-session' }, stdio: 'pipe' } @@ -1411,9 +1429,11 @@ moduleTypes.forEach(({ const events = payloads.flatMap(({ payload }) => payload.events) const test = events.find(event => event.type === 'test').content + const testSuite = events.find(event => event.type === 'test_suite_end').content // The test is in a subproject 
assert.notEqual(test.meta[TEST_SOURCE_FILE], test.meta[TEST_SUITE]) assert.equal(test.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) + assert.equal(testSuite.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) }, 25000) childProcess = exec( diff --git a/integration-tests/debugger/index.spec.js b/integration-tests/debugger/index.spec.js new file mode 100644 index 00000000000..241f57f722f --- /dev/null +++ b/integration-tests/debugger/index.spec.js @@ -0,0 +1,447 @@ +'use strict' + +const path = require('path') +const { randomUUID } = require('crypto') +const getPort = require('get-port') +const Axios = require('axios') +const { assert } = require('chai') +const { assertObjectContains, assertUUID, createSandbox, FakeAgent, spawnProc } = require('../helpers') +const { ACKNOWLEDGED, ERROR } = require('../../packages/dd-trace/src/appsec/remote_config/apply_states') +const { version } = require('../../package.json') + +const probeFile = 'debugger/target-app/index.js' +const probeLineNo = 9 +const pollInterval = 1 + +describe('Dynamic Instrumentation', function () { + let axios, sandbox, cwd, appPort, appFile, agent, proc, rcConfig + + before(async function () { + sandbox = await createSandbox(['fastify']) + cwd = sandbox.folder + appFile = path.join(cwd, ...probeFile.split('/')) + }) + + after(async function () { + await sandbox.remove() + }) + + beforeEach(async function () { + rcConfig = generateRemoteConfig() + appPort = await getPort() + agent = await new FakeAgent().start() + proc = await spawnProc(appFile, { + cwd, + env: { + APP_PORT: appPort, + DD_DYNAMIC_INSTRUMENTATION_ENABLED: true, + DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_DEBUG: process.env.DD_TRACE_DEBUG, // inherit to make debugging the sandbox easier + DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS: pollInterval + } + }) + axios = Axios.create({ + baseURL: `http://localhost:${appPort}` + }) + }) + + afterEach(async function () { + proc.kill() + await agent.stop() + }) + + it('base case: target app should work as expected if no test probe has been added', async function () { + const response = await axios.get('/foo') + assert.strictEqual(response.status, 200) + assert.deepStrictEqual(response.data, { hello: 'foo' }) + }) + + describe('diagnostics messages', function () { + it('should send expected diagnostics messages if probe is received and triggered', function (done) { + let receivedAckUpdate = false + const probeId = rcConfig.config.id + const expectedPayloads = [{ + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'RECEIVED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'INSTALLED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'EMITTING' } } + }] + + agent.on('remote-config-ack-update', (id, version, state, error) => { + assert.strictEqual(id, rcConfig.id) + assert.strictEqual(version, 1) + assert.strictEqual(state, ACKNOWLEDGED) + assert.notOk(error) // falsy check since error will be an empty string, but that's an implementation detail + + receivedAckUpdate = true + endIfDone() + }) + + agent.on('debugger-diagnostics', ({ payload }) => { + const expected = expectedPayloads.shift() + assertObjectContains(payload, expected) + assertUUID(payload.debugger.diagnostics.runtimeId) + + if (payload.debugger.diagnostics.status === 'INSTALLED') { + axios.get('/foo') + .then((response) => { + 
assert.strictEqual(response.status, 200) + assert.deepStrictEqual(response.data, { hello: 'foo' }) + }) + .catch(done) + } else { + endIfDone() + } + }) + + agent.addRemoteConfig(rcConfig) + + function endIfDone () { + if (receivedAckUpdate && expectedPayloads.length === 0) done() + } + }) + + it('should send expected diagnostics messages if probe is first received and then updated', function (done) { + let receivedAckUpdates = 0 + const probeId = rcConfig.config.id + const expectedPayloads = [{ + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'RECEIVED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'INSTALLED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 1, status: 'RECEIVED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 1, status: 'INSTALLED' } } + }] + const triggers = [ + () => { + rcConfig.config.version++ + agent.updateRemoteConfig(rcConfig.id, rcConfig.config) + }, + () => {} + ] + + agent.on('remote-config-ack-update', (id, version, state, error) => { + assert.strictEqual(id, rcConfig.id) + assert.strictEqual(version, ++receivedAckUpdates) + assert.strictEqual(state, ACKNOWLEDGED) + assert.notOk(error) // falsy check since error will be an empty string, but that's an implementation detail + + endIfDone() + }) + + agent.on('debugger-diagnostics', ({ payload }) => { + const expected = expectedPayloads.shift() + assertObjectContains(payload, expected) + assertUUID(payload.debugger.diagnostics.runtimeId) + if (payload.debugger.diagnostics.status === 'INSTALLED') triggers.shift()() + endIfDone() + }) + + agent.addRemoteConfig(rcConfig) + + function endIfDone () { + if (receivedAckUpdates === 2 && expectedPayloads.length === 0) done() + } + }) + + it('should send expected diagnostics messages if probe is first received and then deleted', function (done) { + let receivedAckUpdate = false + let payloadsProcessed = false + const probeId = rcConfig.config.id + const expectedPayloads = [{ + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'RECEIVED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { probeId, version: 0, status: 'INSTALLED' } } + }] + + agent.on('remote-config-ack-update', (id, version, state, error) => { + assert.strictEqual(id, rcConfig.id) + assert.strictEqual(version, 1) + assert.strictEqual(state, ACKNOWLEDGED) + assert.notOk(error) // falsy check since error will be an empty string, but that's an implementation detail + + receivedAckUpdate = true + endIfDone() + }) + + agent.on('debugger-diagnostics', ({ payload }) => { + const expected = expectedPayloads.shift() + assertObjectContains(payload, expected) + assertUUID(payload.debugger.diagnostics.runtimeId) + + if (payload.debugger.diagnostics.status === 'INSTALLED') { + agent.removeRemoteConfig(rcConfig.id) + // Wait a little to see if we get any follow-up `debugger-diagnostics` messages + setTimeout(() => { + payloadsProcessed = true + endIfDone() + }, pollInterval * 2 * 1000) // wait twice as long as the RC poll interval + } + }) + + agent.addRemoteConfig(rcConfig) + + function endIfDone () { + if (receivedAckUpdate && payloadsProcessed) done() + } + }) + + const unsupportedOrInvalidProbes = [[ + 'should send expected error diagnostics messages if probe doesn\'t conform to expected schema', + 'bad
config!!!', + { status: 'ERROR' } + ], [ + 'should send expected error diagnostics messages if probe type isn\'t supported', + generateProbeConfig({ type: 'INVALID_PROBE' }) + ], [ + 'should send expected error diagnostics messages if it isn\'t a line-probe', + generateProbeConfig({ where: { foo: 'bar' } }) // TODO: Use valid schema for method probe instead + ]] + + for (const [title, config, customErrorDiagnosticsObj] of unsupportedOrInvalidProbes) { + it(title, function (done) { + let receivedAckUpdate = false + + agent.on('remote-config-ack-update', (id, version, state, error) => { + assert.strictEqual(id, `logProbe_${config.id}`) + assert.strictEqual(version, 1) + assert.strictEqual(state, ERROR) + assert.strictEqual(error.slice(0, 6), 'Error:') + + receivedAckUpdate = true + endIfDone() + }) + + const probeId = config.id + const expectedPayloads = [{ + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: { status: 'RECEIVED' } } + }, { + ddsource: 'dd_debugger', + service: 'node', + debugger: { diagnostics: customErrorDiagnosticsObj ?? { probeId, version: 0, status: 'ERROR' } } + }] + + agent.on('debugger-diagnostics', ({ payload }) => { + const expected = expectedPayloads.shift() + assertObjectContains(payload, expected) + const { diagnostics } = payload.debugger + assertUUID(diagnostics.runtimeId) + + if (diagnostics.status === 'ERROR') { + assert.property(diagnostics, 'exception') + assert.hasAllKeys(diagnostics.exception, ['message', 'stacktrace']) + assert.typeOf(diagnostics.exception.message, 'string') + assert.typeOf(diagnostics.exception.stacktrace, 'string') + } + + endIfDone() + }) + + agent.addRemoteConfig({ + product: 'LIVE_DEBUGGING', + id: `logProbe_${config.id}`, + config + }) + + function endIfDone () { + if (receivedAckUpdate && expectedPayloads.length === 0) done() + } + }) + } + }) + + describe('input messages', function () { + it('should capture and send expected snapshot when a log line probe is triggered', function (done) { + agent.on('debugger-diagnostics', ({ payload }) => { + if (payload.debugger.diagnostics.status === 'INSTALLED') { + axios.get('/foo') + } + }) + + agent.on('debugger-input', ({ payload }) => { + const expected = { + ddsource: 'dd_debugger', + service: 'node', + message: 'Hello World!', + logger: { + name: 'debugger/target-app/index.js', + method: 'handler', + version, + thread_name: 'MainThread' + }, + 'debugger.snapshot': { + probe: { + id: rcConfig.config.id, + version: 0, + location: { file: probeFile, lines: [probeLineNo] } + }, + language: 'javascript' + } + } + + assertObjectContains(payload, expected) + assert.match(payload.logger.thread_id, /^pid:\d+$/) + assertUUID(payload['debugger.snapshot'].id) + assert.isNumber(payload['debugger.snapshot'].timestamp) + assert.isTrue(payload['debugger.snapshot'].timestamp > Date.now() - 1000 * 60) + assert.isTrue(payload['debugger.snapshot'].timestamp <= Date.now()) + + done() + }) + + agent.addRemoteConfig(rcConfig) + }) + + it('should respond with updated message if probe message is updated', function (done) { + const expectedMessages = ['Hello World!', 'Hello Updated World!'] + const triggers = [ + async () => { + await axios.get('/foo') + rcConfig.config.version++ + rcConfig.config.template = 'Hello Updated World!'
+ agent.updateRemoteConfig(rcConfig.id, rcConfig.config) + }, + async () => { + await axios.get('/foo') + } + ] + + agent.on('debugger-diagnostics', ({ payload }) => { + if (payload.debugger.diagnostics.status === 'INSTALLED') triggers.shift()().catch(done) + }) + + agent.on('debugger-input', ({ payload }) => { + assert.strictEqual(payload.message, expectedMessages.shift()) + if (expectedMessages.length === 0) done() + }) + + agent.addRemoteConfig(rcConfig) + }) + + it('should not trigger if probe is deleted', function (done) { + agent.on('debugger-diagnostics', async ({ payload }) => { + try { + if (payload.debugger.diagnostics.status === 'INSTALLED') { + agent.once('remote-config-responded', async () => { + try { + await axios.get('/foo') + // We want to wait enough time to see if the client triggers on the breakpoint so that the test can fail + // if it does, but not so long that the test times out. + // TODO: Is there some signal we can use instead of a timer? + setTimeout(done, pollInterval * 2 * 1000) // wait twice as long as the RC poll interval + } catch (err) { + // Necessary hack: Any errors thrown inside of an async function are invisible to Mocha unless the outer + // `it` callback is also `async` (which we can't do in this case since we rely on the `done` callback). + done(err) + } + }) + + agent.removeRemoteConfig(rcConfig.id) + } + } catch (err) { + // Necessary hack: Any errors thrown inside of an async function are invisible to Mocha unless the outer `it` + // callback is also `async` (which we can't do in this case since we rely on the `done` callback). + done(err) + } + }) + + agent.on('debugger-input', () => { + assert.fail('should not capture anything when the probe is deleted') + }) + + agent.addRemoteConfig(rcConfig) + }) + }) + + describe('race conditions', () => { + it('should remove the last breakpoint completely before trying to add a new one', (done) => { + const rcConfig2 = generateRemoteConfig() + + agent.on('debugger-diagnostics', ({ payload: { debugger: { diagnostics: { status, probeId } } } }) => { + if (status !== 'INSTALLED') return + + if (probeId === rcConfig.config.id) { + // First INSTALLED payload: Try to trigger the race condition. + agent.removeRemoteConfig(rcConfig.id) + agent.addRemoteConfig(rcConfig2) + } else { + // Second INSTALLED payload: Perform an HTTP request to see if we successfully handled the race condition. + let finished = false + + // If the race condition occurred, the debugger will have been detached from the main thread and the new + // probe will never trigger. If that's the case, the following timer will fire: + const timer = setTimeout(() => { + done(new Error('Race condition occurred!')) + }, 1000) + + // If we successfully handled the race condition, the probe will trigger, we'll get a probe result and the + // following event listener will be called: + agent.once('debugger-input', () => { + clearTimeout(timer) + finished = true + done() + }) + + // Perform HTTP request to try and trigger the probe + axios.get('/foo').catch((err) => { + // If the request hasn't fully completed by the time the test ends and the target app is destroyed, Axios + // will complain with a "socket hang up" error. Hence this sanity check before calling `done(err)`. If we + // later add more tests below this one, this shouldn't be an issue.
+ if (!finished) done(err) + }) + } + }) + + agent.addRemoteConfig(rcConfig) + }) + }) +}) + +function generateRemoteConfig (overrides = {}) { + overrides.id = overrides.id || randomUUID() + return { + product: 'LIVE_DEBUGGING', + id: `logProbe_${overrides.id}`, + config: generateProbeConfig(overrides) + } +} + +function generateProbeConfig (overrides) { + return { + id: randomUUID(), + version: 0, + type: 'LOG_PROBE', + language: 'javascript', + where: { sourceFile: probeFile, lines: [String(probeLineNo)] }, + tags: [], + template: 'Hello World!', + segments: [{ str: 'Hello World!' }], + captureSnapshot: false, + capture: { maxReferenceDepth: 3 }, + sampling: { snapshotsPerSecond: 5000 }, + evaluateAt: 'EXIT', + ...overrides + } +} diff --git a/integration-tests/debugger/target-app/index.js b/integration-tests/debugger/target-app/index.js new file mode 100644 index 00000000000..d0e1b7fb6dd --- /dev/null +++ b/integration-tests/debugger/target-app/index.js @@ -0,0 +1,18 @@ +'use strict' + +require('dd-trace/init') +const Fastify = require('fastify') + +const fastify = Fastify() + +fastify.get('/:name', function handler (request) { + return { hello: request.params.name } +}) + +fastify.listen({ port: process.env.APP_PORT }, (err) => { + if (err) { + fastify.log.error(err) + process.exit(1) + } + process.send({ port: process.env.APP_PORT }) +}) diff --git a/integration-tests/helpers/fake-agent.js b/integration-tests/helpers/fake-agent.js index 86c6890bf00..70aff2ecfa8 100644 --- a/integration-tests/helpers/fake-agent.js +++ b/integration-tests/helpers/fake-agent.js @@ -13,8 +13,7 @@ module.exports = class FakeAgent extends EventEmitter { constructor (port = 0) { super() this.port = port - this._rcFiles = {} - this._rcTargetsVersion = 0 + this.resetRemoteConfig() } async start () { @@ -95,11 +94,12 @@ module.exports = class FakeAgent extends EventEmitter { } /** - * Remove any existing config added by calls to FakeAgent#addRemoteConfig. + * Reset any existing Remote Config state. Useful in `before` and `beforeEach` blocks.
*/ resetRemoteConfig () { this._rcFiles = {} - this._rcTargetsVersion++ + this._rcTargetsVersion = 0 + this._rcSeenStates = new Set() } // **resolveAtFirstSuccess** - specific use case for Next.js (or any other future libraries) @@ -216,13 +216,23 @@ function buildExpressServer (agent) { console.error(state.error) // eslint-disable-line no-console } - for (const { apply_error: error } of state.config_states) { - if (error) { + for (const cs of state.config_states) { + const uniqueState = `${cs.id}-${cs.version}-${cs.apply_state}` + if (!agent._rcSeenStates.has(uniqueState)) { + agent._rcSeenStates.add(uniqueState) + agent.emit('remote-config-ack-update', cs.id, cs.version, cs.apply_state, cs.apply_error) + } + + if (cs.apply_error) { // Print the error sent by the client in case it's useful in debugging tests - console.error(error) // eslint-disable-line no-console + console.error(cs.apply_error) // eslint-disable-line no-console } } + res.on('close', () => { + agent.emit('remote-config-responded') + }) + if (agent._rcTargetsVersion === state.targets_version) { // If the state hasn't changed since the last time the client asked, just return an empty result res.json({}) @@ -272,6 +282,22 @@ function buildExpressServer (agent) { }) }) + app.post('/debugger/v1/input', (req, res) => { + res.status(200).send() + agent.emit('debugger-input', { + headers: req.headers, + payload: req.body + }) + }) + + app.post('/debugger/v1/diagnostics', upload.any(), (req, res) => { + res.status(200).send() + agent.emit('debugger-diagnostics', { + headers: req.headers, + payload: JSON.parse(req.files[0].buffer.toString()) + }) + }) + app.post('/profiling/v1/input', upload.any(), (req, res) => { res.status(200).send() agent.emit('message', { diff --git a/integration-tests/helpers/index.js b/integration-tests/helpers/index.js index 49a04544322..98074ba89b4 100644 --- a/integration-tests/helpers/index.js +++ b/integration-tests/helpers/index.js @@ -334,12 +334,32 @@ function useSandbox (...args) { return oldSandbox.remove() }) } + function sandboxCwd () { return sandbox.folder } +function assertObjectContains (actual, expected) { + for (const [key, val] of Object.entries(expected)) { + if (val !== null && typeof val === 'object') { + assert.ok(key in actual) + assert.notStrictEqual(actual[key], null) + assert.strictEqual(typeof actual[key], 'object') + assertObjectContains(actual[key], val) + } else { + assert.strictEqual(actual[key], expected[key]) + } + } +} + +function assertUUID (actual, msg = 'not a valid UUID') { + assert.match(actual, /^[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}$/, msg) +} + module.exports = { FakeAgent, + assertObjectContains, + assertUUID, spawnProc, runAndCheckWithTelemetry, createSandbox, diff --git a/integration-tests/jest/jest.spec.js b/integration-tests/jest/jest.spec.js index ebeed73796a..789019100da 100644 --- a/integration-tests/jest/jest.spec.js +++ b/integration-tests/jest/jest.spec.js @@ -31,8 +31,11 @@ const { JEST_DISPLAY_NAME, TEST_EARLY_FLAKE_ABORT_REASON, TEST_SOURCE_START, - TEST_CODE_OWNERS + TEST_CODE_OWNERS, + TEST_SESSION_NAME, + TEST_LEVEL_EVENT_TYPES } = require('../../packages/dd-trace/src/plugins/util/test') +const { DD_HOST_CPU_COUNT } = require('../../packages/dd-trace/src/plugins/util/env') const { ERROR_MESSAGE } = require('../../packages/dd-trace/src/constants') const testFile = 'ci-visibility/run-jest.js' @@ -133,6 +136,14 @@ describe('jest CommonJS', () => { receiver.setInfoResponse({ endpoints: ['/evp_proxy/v4'] }) }
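+ // Note: gatherPayloadsMaxTimeout runs these assertions against all payloads matching the URL predicate, collected until a max timeout (hence the name)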
receiver.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('citestcycle'), (payloads) => { + const metadataDicts = payloads.flatMap(({ payload }) => payload.metadata) + + metadataDicts.forEach(metadata => { + for (const testLevel of TEST_LEVEL_EVENT_TYPES) { + assert.equal(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session') + } + }) + const events = payloads.flatMap(({ payload }) => payload.events) const sessionEventContent = events.find(event => event.type === 'test_session_end').content const moduleEventContent = events.find(event => event.type === 'test_module_end').content @@ -153,15 +164,19 @@ describe('jest CommonJS', () => { assert.include(testOutput, expectedStdout) - // Can read DD_TAGS tests.forEach(testEvent => { + assert.equal(testEvent.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/test/ci-visibility-test'), true) + assert.exists(testEvent.metrics[TEST_SOURCE_START]) + // Can read DD_TAGS assert.propertyVal(testEvent.meta, 'test.customtag', 'customvalue') assert.propertyVal(testEvent.meta, 'test.customtag2', 'customvalue2') + assert.exists(testEvent.metrics[DD_HOST_CPU_COUNT]) }) - tests.forEach(testEvent => { - assert.equal(testEvent.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/test/ci-visibility-test'), true) - assert.exists(testEvent.metrics[TEST_SOURCE_START]) + suites.forEach(testSuite => { + assert.isTrue(testSuite.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/test/ci-visibility-test')) + assert.equal(testSuite.metrics[TEST_SOURCE_START], 1) + assert.exists(testSuite.metrics[DD_HOST_CPU_COUNT]) }) done() @@ -171,7 +186,8 @@ describe('jest CommonJS', () => { cwd, env: { ...envVars, - DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2' + DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2', + DD_TEST_SESSION_NAME: 'my-test-session' }, stdio: 'pipe' }) @@ -247,9 +263,11 @@ describe('jest CommonJS', () => { const events = payloads.flatMap(({ payload }) => payload.events) const test = events.find(event => event.type === 'test').content + const testSuite = events.find(event => event.type === 'test_suite_end').content // The test is in a subproject assert.notEqual(test.meta[TEST_SOURCE_FILE], test.meta[TEST_SUITE]) assert.equal(test.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) + assert.equal(testSuite.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) }) childProcess = exec( @@ -428,17 +446,27 @@ describe('jest CommonJS', () => { cwd, env: { ...getCiVisAgentlessConfig(receiver.port), - RUN_IN_PARALLEL: true + RUN_IN_PARALLEL: true, + DD_TEST_SESSION_NAME: 'my-test-session' }, stdio: 'pipe' }) receiver.gatherPayloads(({ url }) => url === '/api/v2/citestcycle', 5000).then(eventsRequests => { - const eventTypes = eventsRequests.map(({ payload }) => payload) - .flatMap(({ events }) => events) - .map(event => event.type) + const metadataDicts = eventsRequests.flatMap(({ payload }) => payload.metadata) + + // it propagates test session name to the test and test suite events in parallel mode + metadataDicts.forEach(metadata => { + for (const testLevel of TEST_LEVEL_EVENT_TYPES) { + assert.equal(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session') + } + }) + const events = eventsRequests.map(({ payload }) => payload) + .flatMap(({ events }) => events) + const eventTypes = events.map(event => event.type) assert.includeMembers(eventTypes, ['test', 'test_suite_end', 'test_module_end', 'test_session_end']) + done() }).catch(done) }) diff --git a/integration-tests/mocha/mocha.spec.js 
b/integration-tests/mocha/mocha.spec.js index d1a35302bbc..dac0a9e3bff 100644 --- a/integration-tests/mocha/mocha.spec.js +++ b/integration-tests/mocha/mocha.spec.js @@ -32,8 +32,12 @@ const { TEST_MODULE, MOCHA_IS_PARALLEL, TEST_SOURCE_START, - TEST_CODE_OWNERS + TEST_CODE_OWNERS, + TEST_SESSION_NAME, + TEST_LEVEL_EVENT_TYPES, + TEST_EARLY_FLAKE_ABORT_REASON } = require('../../packages/dd-trace/src/plugins/util/test') +const { DD_HOST_CPU_COUNT } = require('../../packages/dd-trace/src/plugins/util/env') const { ERROR_MESSAGE } = require('../../packages/dd-trace/src/constants') const runTestsWithCoverageCommand = './node_modules/nyc/bin/nyc.js -r=text-summary node ./ci-visibility/run-mocha.js' @@ -132,6 +136,14 @@ describe('mocha CommonJS', function () { receiver.setInfoResponse({ endpoints: ['/evp_proxy/v4'] }) } receiver.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('citestcycle'), (payloads) => { + const metadataDicts = payloads.flatMap(({ payload }) => payload.metadata) + + metadataDicts.forEach(metadata => { + for (const testLevel of TEST_LEVEL_EVENT_TYPES) { + assert.equal(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session') + } + }) + const events = payloads.flatMap(({ payload }) => payload.events) const sessionEventContent = events.find(event => event.type === 'test_session_end').content const moduleEventContent = events.find(event => event.type === 'test_module_end').content @@ -153,15 +165,19 @@ describe('mocha CommonJS', function () { assert.include(testOutput, expectedStdout) assert.include(testOutput, extraStdout) - // Can read DD_TAGS tests.forEach(testEvent => { + assert.equal(testEvent.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/test/ci-visibility-test'), true) + assert.exists(testEvent.metrics[TEST_SOURCE_START]) + // Can read DD_TAGS assert.propertyVal(testEvent.meta, 'test.customtag', 'customvalue') assert.propertyVal(testEvent.meta, 'test.customtag2', 'customvalue2') + assert.exists(testEvent.metrics[DD_HOST_CPU_COUNT]) }) - tests.forEach(testEvent => { - assert.equal(testEvent.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/test/ci-visibility-test'), true) - assert.exists(testEvent.metrics[TEST_SOURCE_START]) + suites.forEach(testSuite => { + assert.isTrue(testSuite.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/test/ci-visibility-test')) + assert.equal(testSuite.metrics[TEST_SOURCE_START], 1) + assert.exists(testSuite.metrics[DD_HOST_CPU_COUNT]) }) done() @@ -171,7 +187,8 @@ describe('mocha CommonJS', function () { cwd, env: { ...envVars, - DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2' + DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2', + DD_TEST_SESSION_NAME: 'my-test-session' }, stdio: 'pipe' }) @@ -247,9 +264,11 @@ describe('mocha CommonJS', function () { const events = payloads.flatMap(({ payload }) => payload.events) const test = events.find(event => event.type === 'test').content + const testSuite = events.find(event => event.type === 'test_suite_end').content // The test is in a subproject assert.notEqual(test.meta[TEST_SOURCE_FILE], test.meta[TEST_SUITE]) assert.equal(test.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) + assert.equal(testSuite.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) }) childProcess = exec( @@ -307,6 +326,14 @@ describe('mocha CommonJS', function () { it('works with parallel mode', (done) => { const eventsPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const metadataDicts = 
payloads.flatMap(({ payload }) => payload.metadata) + + metadataDicts.forEach(metadata => { + for (const testLevel of TEST_LEVEL_EVENT_TYPES) { + assert.equal(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session') + } + }) + const events = payloads.flatMap(({ payload }) => payload.events) const sessionEventContent = events.find(event => event.type === 'test_session_end').content const moduleEventContent = events.find(event => event.type === 'test_module_end').content @@ -354,7 +381,8 @@ describe('mocha CommonJS', function () { ...getCiVisAgentlessConfig(receiver.port), RUN_IN_PARALLEL: true, DD_TRACE_DEBUG: 1, - DD_TRACE_LOG_LEVEL: 'warn' + DD_TRACE_LOG_LEVEL: 'warn', + DD_TEST_SESSION_NAME: 'my-test-session' }, stdio: 'pipe' }) @@ -1132,12 +1160,14 @@ describe('mocha CommonJS', function () { stdio: 'inherit' } ) + childProcess.on('exit', () => { eventsPromise.then(() => { done() }).catch(done) }) }) + it('handles parameterized tests as a single unit', (done) => { // Tests from ci-visibility/test-early-flake-detection/test-parameterized.js will be considered new receiver.setKnownTests({ @@ -1215,6 +1245,7 @@ describe('mocha CommonJS', function () { }).catch(done) }) }) + it('is disabled if DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED is false', (done) => { // Tests from ci-visibility/test/ci-visibility-test-2.js will be considered new receiver.setKnownTests({ @@ -1270,6 +1301,7 @@ describe('mocha CommonJS', function () { }).catch(done) }) }) + it('retries flaky tests', (done) => { // Tests from ci-visibility/test/occasionally-failing-test will be considered new receiver.setKnownTests({}) @@ -1329,13 +1361,16 @@ describe('mocha CommonJS', function () { stdio: 'inherit' } ) - childProcess.on('exit', () => { + + childProcess.on('exit', (exitCode) => { // TODO: check exit code: if a new, retried test fails, the exit code should remain 0 eventsPromise.then(() => { + assert.equal(exitCode, 0) done() }).catch(done) }) }) + it('does not retry new tests that are skipped', (done) => { // Tests from ci-visibility/test/skipped-and-todo-test will be considered new receiver.setKnownTests({}) @@ -1390,6 +1425,7 @@ describe('mocha CommonJS', function () { }).catch(done) }) }) + it('handles spaces in test names', (done) => { receiver.setSettings({ itr_enabled: false, @@ -1455,6 +1491,7 @@ describe('mocha CommonJS', function () { }).catch(done) }) }) + it('does not run EFD if the known tests request fails', (done) => { receiver.setKnownTestsResponseCode(500) @@ -1507,6 +1544,7 @@ describe('mocha CommonJS', function () { eventsPromise.then(() => done()).catch(done) }) }) + it('retries flaky tests and sets exit code to 0 as long as one attempt passes', (done) => { // Tests from ci-visibility/test/occasionally-failing-test will be considered new receiver.setKnownTests({}) @@ -1581,6 +1619,260 @@ describe('mocha CommonJS', function () { }).catch(done) }) }) + + it('bails out of EFD if the percentage of new tests is too high', (done) => { + const NUM_RETRIES_EFD = 5 + + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + }, + faulty_session_threshold: 0 + } + }) + // Tests from ci-visibility/test/ci-visibility-test-2.js will be considered new + receiver.setKnownTests({ + mocha: { + 'ci-visibility/test/ci-visibility-test.js': ['ci visibility can report tests'] + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => 
url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const testSession = events.find(event => event.type === 'test_session_end').content + assert.notProperty(testSession.meta, TEST_EARLY_FLAKE_ENABLED) + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ABORT_REASON, 'faulty') + + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') + assert.equal(newTests.length, 0) + + const retriedTests = newTests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + assert.equal(retriedTests.length, 0) + }) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TESTS_TO_RUN: JSON.stringify([ + './test/ci-visibility-test.js', + './test/ci-visibility-test-2.js' + ]) + }, + stdio: 'inherit' + } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + + context('parallel mode', () => { + it('retries new tests', (done) => { + // Tests from ci-visibility/test/occasionally-failing-test will be considered new + receiver.setKnownTests({}) + + const NUM_RETRIES_EFD = 5 + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + }, + faulty_session_threshold: 100 + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + assert.propertyVal(testSession.meta, MOCHA_IS_PARALLEL, 'true') + + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + // all but one has been retried + assert.equal( + tests.length - 1, + retriedTests.length + ) + assert.equal(retriedTests.length, NUM_RETRIES_EFD) + // Out of NUM_RETRIES_EFD + 1 total runs, half will be passing and half will be failing, + // based on the global counter in the test file + const passingTests = tests.filter(test => test.meta[TEST_STATUS] === 'pass') + const failingTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') + assert.equal(passingTests.length, (NUM_RETRIES_EFD + 1) / 2) + assert.equal(failingTests.length, (NUM_RETRIES_EFD + 1) / 2) + // Test name does not change + retriedTests.forEach(test => { + assert.equal(test.meta[TEST_NAME], 'fail occasionally fails') + }) + }) + + childProcess = exec( + 'mocha --parallel ./ci-visibility/test-early-flake-detection/occasionally-failing-test.js', { + cwd, + env: getCiVisAgentlessConfig(receiver.port), + stdio: 'inherit' + }) + + childProcess.on('exit', (exitCode) => { + eventsPromise.then(() => { + assert.equal(exitCode, 0) + done() + }).catch(done) + }) + }) + it('retries new tests when using the programmatic API', (done) => { + // Tests from ci-visibility/test/occasionally-failing-test will be considered new + receiver.setKnownTests({}) + + const NUM_RETRIES_EFD = 5 + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + }, + 
faulty_session_threshold: 100 + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + assert.propertyVal(testSession.meta, MOCHA_IS_PARALLEL, 'true') + + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + // all but one has been retried + assert.equal( + tests.length - 1, + retriedTests.length + ) + assert.equal(retriedTests.length, NUM_RETRIES_EFD) + // Out of NUM_RETRIES_EFD + 1 total runs, half will be passing and half will be failing, + // based on the global counter in the test file + const passingTests = tests.filter(test => test.meta[TEST_STATUS] === 'pass') + const failingTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') + assert.equal(passingTests.length, (NUM_RETRIES_EFD + 1) / 2) + assert.equal(failingTests.length, (NUM_RETRIES_EFD + 1) / 2) + // Test name does not change + retriedTests.forEach(test => { + assert.equal(test.meta[TEST_NAME], 'fail occasionally fails') + }) + }) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + RUN_IN_PARALLEL: true, + TESTS_TO_RUN: JSON.stringify([ + './test-early-flake-detection/occasionally-failing-test.js' + ]) + }, + stdio: 'inherit' + } + ) + childProcess.on('exit', (exitCode) => { + eventsPromise.then(() => { + assert.equal(exitCode, 0) + done() + }).catch(done) + }) + }) + it('bails out of EFD if the percentage of new tests is too high', (done) => { + const NUM_RETRIES_EFD = 5 + + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + }, + faulty_session_threshold: 0 + } + }) + // Tests from ci-visibility/test/ci-visibility-test-2.js will be considered new + receiver.setKnownTests({ + mocha: { + 'ci-visibility/test/ci-visibility-test.js': ['ci visibility can report tests'] + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const testSession = events.find(event => event.type === 'test_session_end').content + assert.notProperty(testSession.meta, TEST_EARLY_FLAKE_ENABLED) + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ABORT_REASON, 'faulty') + + const tests = events.filter(event => event.type === 'test').map(event => event.content) + + const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') + assert.equal(newTests.length, 0) + + const retriedTests = newTests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + assert.equal(retriedTests.length, 0) + }) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + RUN_IN_PARALLEL: true, + TESTS_TO_RUN: JSON.stringify([ + './test/ci-visibility-test.js', + './test/ci-visibility-test-2.js' + ]) + }, + stdio: 'inherit' + } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + }) }) context('flaky test retries', () => { diff --git 
a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js index 8cc1d04a8b3..440cf13d637 100644 --- a/integration-tests/playwright/playwright.spec.js +++ b/integration-tests/playwright/playwright.spec.js @@ -22,8 +22,11 @@ const { TEST_IS_RETRY, TEST_EARLY_FLAKE_ENABLED, TEST_SUITE, - TEST_CODE_OWNERS + TEST_CODE_OWNERS, + TEST_SESSION_NAME, + TEST_LEVEL_EVENT_TYPES } = require('../../packages/dd-trace/src/plugins/util/test') +const { DD_HOST_CPU_COUNT } = require('../../packages/dd-trace/src/plugins/util/env') const { ERROR_MESSAGE } = require('../../packages/dd-trace/src/constants') const NUM_RETRIES_EFD = 3 @@ -71,6 +74,14 @@ versions.forEach((version) => { const reportUrl = reportMethod === 'agentless' ? '/api/v2/citestcycle' : '/evp_proxy/v2/api/v2/citestcycle' receiver.gatherPayloadsMaxTimeout(({ url }) => url === reportUrl, payloads => { + const metadataDicts = payloads.flatMap(({ payload }) => payload.metadata) + + metadataDicts.forEach(metadata => { + for (const testLevel of TEST_LEVEL_EVENT_TYPES) { + assert.equal(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session') + } + }) + const events = payloads.flatMap(({ payload }) => payload.events) const testSessionEvent = events.find(event => event.type === 'test_session_end') @@ -106,6 +117,9 @@ versions.forEach((version) => { if (testSuiteEvent.content.meta[TEST_STATUS] === 'fail') { assert.exists(testSuiteEvent.content.meta[ERROR_MESSAGE]) } + assert.isTrue(testSuiteEvent.content.meta[TEST_SOURCE_FILE].endsWith('-test.js')) + assert.equal(testSuiteEvent.content.metrics[TEST_SOURCE_START], 1) + assert.exists(testSuiteEvent.content.metrics[DD_HOST_CPU_COUNT]) }) assert.includeMembers(testEvents.map(test => test.content.resource), [ @@ -133,6 +147,7 @@ versions.forEach((version) => { assert.propertyVal(testEvent.content.meta, 'test.customtag2', 'customvalue2') // Adds the browser used assert.propertyVal(testEvent.content.meta, TEST_CONFIGURATION_BROWSER_NAME, 'chromium') + assert.exists(testEvent.content.metrics[DD_HOST_CPU_COUNT]) }) stepEvents.forEach(stepEvent => { @@ -155,7 +170,8 @@ versions.forEach((version) => { env: { ...envVars, PW_BASE_URL: `http://localhost:${webAppPort}`, - DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2' + DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2', + DD_TEST_SESSION_NAME: 'my-test-session' }, stdio: 'pipe' } @@ -668,9 +684,11 @@ versions.forEach((version) => { const events = payloads.flatMap(({ payload }) => payload.events) const test = events.find(event => event.type === 'test').content + const testSuite = events.find(event => event.type === 'test_suite_end').content // The test is in a subproject assert.notEqual(test.meta[TEST_SOURCE_FILE], test.meta[TEST_SUITE]) assert.equal(test.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) + assert.equal(testSuite.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) }) childProcess = exec( diff --git a/integration-tests/profiler/profiler.spec.js b/integration-tests/profiler/profiler.spec.js index e70d5ead3ba..7306d7051ad 100644 --- a/integration-tests/profiler/profiler.spec.js +++ b/integration-tests/profiler/profiler.spec.js @@ -125,10 +125,12 @@ async function gatherNetworkTimelineEvents (cwd, scriptFilePath, eventType, args const addressKey = strings.dedup('address') const portKey = strings.dedup('port') const nameKey = strings.dedup('operation') + const spanIdKey = strings.dedup('span id') + const localRootSpanIdKey = strings.dedup('local root 
span id') const eventValue = strings.dedup(eventType) const events = [] for (const sample of profile.sample) { - let ts, event, host, address, port, name + let ts, event, host, address, port, name, spanId, localRootSpanId for (const label of sample.label) { switch (label.key) { case tsKey: ts = label.num; break @@ -137,6 +139,8 @@ async function gatherNetworkTimelineEvents (cwd, scriptFilePath, eventType, args case hostKey: host = label.str; break case addressKey: address = label.str; break case portKey: port = label.num; break + case spanIdKey: spanId = label.str; break + case localRootSpanIdKey: localRootSpanId = label.str; break default: assert.fail(`Unexpected label key ${label.key} ${strings.strings[label.key]} ${encoded}`) } } @@ -144,6 +148,13 @@ async function gatherNetworkTimelineEvents (cwd, scriptFilePath, eventType, args assert.isDefined(ts, encoded) assert.isTrue(ts <= procEnd, encoded) assert.isTrue(ts >= procStart, encoded) + if (process.platform !== 'win32') { + assert.isDefined(spanId, encoded) + assert.isDefined(localRootSpanId, encoded) + } else { + assert.isUndefined(spanId, encoded) + assert.isUndefined(localRootSpanId, encoded) + } // Gather only DNS events; ignore sporadic GC events if (event === eventValue) { assert.isDefined(name, encoded) diff --git a/integration-tests/selenium/selenium.spec.js b/integration-tests/selenium/selenium.spec.js index 50fc9d19568..a95acb6aaa2 100644 --- a/integration-tests/selenium/selenium.spec.js +++ b/integration-tests/selenium/selenium.spec.js @@ -18,7 +18,7 @@ const { } = require('../../packages/dd-trace/src/plugins/util/test') const { NODE_MAJOR } = require('../../version') -const cucumberVersion = NODE_MAJOR <= 16 ? '9' : 'latest' +const cucumberVersion = NODE_MAJOR <= 16 ? '9' : '10' const webAppServer = require('../ci-visibility/web-app-server') diff --git a/integration-tests/test-api-manual.spec.js b/integration-tests/test-api-manual.spec.js index 8335745e2a6..419c7c736c5 100644 --- a/integration-tests/test-api-manual.spec.js +++ b/integration-tests/test-api-manual.spec.js @@ -73,7 +73,7 @@ describe('test-api-manual', () => { '--require ./ci-visibility/test-api-manual/test.fake.js ./ci-visibility/test-api-manual/run-fake-test-framework', { cwd, - env: { ...getCiVisAgentlessConfig(receiver.port), DD_CIVISIBILITY_MANUAL_API_ENABLED: '1' }, + env: getCiVisAgentlessConfig(receiver.port), stdio: 'pipe' } ) @@ -82,7 +82,7 @@ describe('test-api-manual', () => { }) }) - it('does not report test spans if DD_CIVISIBILITY_MANUAL_API_ENABLED is not set', (done) => { + it('does not report test spans if DD_CIVISIBILITY_MANUAL_API_ENABLED is set to false', (done) => { receiver.assertPayloadReceived(() => { const error = new Error('should not report spans') done(error) @@ -93,7 +93,10 @@ describe('test-api-manual', () => { '--require ./ci-visibility/test-api-manual/test.fake.js ./ci-visibility/test-api-manual/run-fake-test-framework', { cwd, - env: getCiVisAgentlessConfig(receiver.port), + env: { + ...getCiVisAgentlessConfig(receiver.port), + DD_CIVISIBILITY_MANUAL_API_ENABLED: 'false' + }, stdio: 'pipe' } ) diff --git a/integration-tests/vitest/vitest.spec.js b/integration-tests/vitest/vitest.spec.js index 53cf0c21de7..de38feee9da 100644 --- a/integration-tests/vitest/vitest.spec.js +++ b/integration-tests/vitest/vitest.spec.js @@ -14,8 +14,21 @@ const { TEST_TYPE, TEST_IS_RETRY, TEST_CODE_OWNERS, - TEST_CODE_COVERAGE_LINES_PCT + TEST_CODE_COVERAGE_LINES_PCT, + TEST_SESSION_NAME, + TEST_COMMAND, + TEST_LEVEL_EVENT_TYPES, + TEST_SOURCE_FILE, 
+ TEST_SOURCE_START, + TEST_IS_NEW, + TEST_NAME, + TEST_EARLY_FLAKE_ENABLED, + TEST_EARLY_FLAKE_ABORT_REASON, + TEST_SUITE } = require('../../packages/dd-trace/src/plugins/util/test') +const { DD_HOST_CPU_COUNT } = require('../../packages/dd-trace/src/plugins/util/env') + +const NUM_RETRIES_EFD = 3 const versions = ['1.6.0', 'latest'] @@ -50,6 +63,14 @@ versions.forEach((version) => { it('can run and report tests', (done) => { receiver.gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', payloads => { + const metadataDicts = payloads.flatMap(({ payload }) => payload.metadata) + + metadataDicts.forEach(metadata => { + for (const testLevel of TEST_LEVEL_EVENT_TYPES) { + assert.equal(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session') + } + }) + const events = payloads.flatMap(({ payload }) => payload.events) const testSessionEvent = events.find(event => event.type === 'test_session_end') @@ -129,6 +150,20 @@ versions.forEach((version) => { 'ci-visibility/vitest-tests/test-visibility-passed-suite.mjs.other context can programmatic skip' ] ) + + testEvents.forEach(test => { + assert.equal(test.content.meta[TEST_COMMAND], 'vitest run') + assert.exists(test.content.metrics[DD_HOST_CPU_COUNT]) + }) + + testSuiteEvents.forEach(testSuite => { + assert.equal(testSuite.content.meta[TEST_COMMAND], 'vitest run') + assert.isTrue( + testSuite.content.meta[TEST_SOURCE_FILE].startsWith('ci-visibility/vitest-tests/test-visibility') + ) + assert.equal(testSuite.content.metrics[TEST_SOURCE_START], 1) + assert.exists(testSuite.content.metrics[DD_HOST_CPU_COUNT]) + }) // TODO: check error messages }).then(() => done()).catch(done) @@ -138,7 +173,8 @@ versions.forEach((version) => { cwd, env: { ...getCiVisAgentlessConfig(receiver.port), - NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init' // ESM requires more flags + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', // ESM requires more flags + DD_TEST_SESSION_NAME: 'my-test-session' }, stdio: 'pipe' } @@ -198,7 +234,7 @@ versions.forEach((version) => { }).then(() => done()).catch(done) childProcess = exec( - './node_modules/.bin/vitest run', // TODO: change tests we run + './node_modules/.bin/vitest run', { cwd, env: { @@ -236,7 +272,7 @@ versions.forEach((version) => { }).then(() => done()).catch(done) childProcess = exec( - './node_modules/.bin/vitest run', // TODO: change tests we run + './node_modules/.bin/vitest run', { cwd, env: { @@ -277,7 +313,7 @@ versions.forEach((version) => { }).then(() => done()).catch(done) childProcess = exec( - './node_modules/.bin/vitest run', // TODO: change tests we run + './node_modules/.bin/vitest run', { cwd, env: { @@ -298,7 +334,9 @@ versions.forEach((version) => { const events = payloads.flatMap(({ payload }) => payload.events) const test = events.find(event => event.type === 'test').content + const testSuite = events.find(event => event.type === 'test_suite_end').content assert.equal(test.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) + assert.equal(testSuite.meta[TEST_CODE_OWNERS], JSON.stringify(['@datadog-dd-trace-js'])) }) childProcess = exec( @@ -321,7 +359,7 @@ versions.forEach((version) => { }) }) - // only works for >=2.0.0 + // total code coverage only works for >=2.0.0 if (version === 'latest') { const coverageProviders = ['v8', 'istanbul'] @@ -374,5 +412,489 @@ versions.forEach((version) => { }) }) } + // maybe only latest version? 
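+ // How EFD behaves in the tests below: tests missing from the known-tests response are treated as new and get NUM_RETRIES_EFD extra attempts; a new test passes if any attempt passes; and if the share of new tests exceeds faulty_session_threshold, EFD bails out and tags the session with TEST_EARLY_FLAKE_ABORT_REASON 'faulty'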
+ context('early flake detection', () => { + it('retries new tests', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + } + } + }) + + receiver.setKnownTests({ + vitest: { + 'ci-visibility/vitest-tests/early-flake-detection.mjs': [ + // 'early flake detection can retry tests that eventually pass', // will be considered new + // 'early flake detection can retry tests that always pass', // will be considered new + // 'early flake detection can retry tests that eventually fail', // will be considered new + // 'early flake detection does not retry if the test is skipped', // skipped so not retried + 'early flake detection does not retry if it is not new' + ] + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const tests = events.filter(event => event.type === 'test').map(test => test.content) + + assert.equal(tests.length, 14) + + assert.includeMembers(tests.map(test => test.meta[TEST_NAME]), [ + 'early flake detection can retry tests that eventually pass', + 'early flake detection can retry tests that eventually pass', + 'early flake detection can retry tests that eventually pass', + 'early flake detection can retry tests that eventually pass', + 'early flake detection can retry tests that eventually fail', + 'early flake detection can retry tests that eventually fail', + 'early flake detection can retry tests that eventually fail', + 'early flake detection can retry tests that eventually fail', + 'early flake detection can retry tests that always pass', + 'early flake detection can retry tests that always pass', + 'early flake detection can retry tests that always pass', + 'early flake detection can retry tests that always pass', + 'early flake detection does not retry if it is not new', + 'early flake detection does not retry if the test is skipped' + ]) + const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') + assert.equal(newTests.length, 12) // 4 executions of the three new tests + + const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + assert.equal(retriedTests.length, 9) // 3 retries of the three new tests + + // exit code should be 0 and test session should be reported as passed, + // even though there are some failing executions + const failedTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') + assert.equal(failedTests.length, 3) + const testSessionEvent = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSessionEvent.meta, TEST_STATUS, 'pass') + assert.propertyVal(testSessionEvent.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/early-flake-detection*', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', + SHOULD_ADD_EVENTUALLY_FAIL: '1' + }, + stdio: 'pipe' + } + ) + + childProcess.on('exit', (exitCode) => { + eventsPromise.then(() => { + assert.equal(exitCode, 0) + done() + }).catch(done) + }) + }) + + it('fails if all the attempts fail', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + 
'5s': NUM_RETRIES_EFD + } + } + }) + + receiver.setKnownTests({ + vitest: { + 'ci-visibility/vitest-tests/early-flake-detection.mjs': [ + // 'early flake detection can retry tests that eventually pass', // will be considered new + // 'early flake detection can retry tests that always pass', // will be considered new + // 'early flake detection does not retry if the test is skipped', // skipped so not retried + 'early flake detection does not retry if it is not new' + ] + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const tests = events.filter(event => event.type === 'test').map(test => test.content) + + assert.equal(tests.length, 10) + + assert.includeMembers(tests.map(test => test.meta[TEST_NAME]), [ + 'early flake detection can retry tests that eventually pass', + 'early flake detection can retry tests that eventually pass', + 'early flake detection can retry tests that eventually pass', + 'early flake detection can retry tests that eventually pass', + 'early flake detection can retry tests that always pass', + 'early flake detection can retry tests that always pass', + 'early flake detection can retry tests that always pass', + 'early flake detection can retry tests that always pass', + 'early flake detection does not retry if it is not new', + 'early flake detection does not retry if the test is skipped' + ]) + const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') + assert.equal(newTests.length, 8) // 4 executions of the two new tests + + const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + assert.equal(retriedTests.length, 6) // 3 retries of the two new tests + + // the multiple attempts did not result in a single pass, + // so the test session should be reported as failed + const failedTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') + assert.equal(failedTests.length, 6) + const testSessionEvent = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSessionEvent.meta, TEST_STATUS, 'fail') + assert.propertyVal(testSessionEvent.meta, TEST_EARLY_FLAKE_ENABLED, 'true') + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/early-flake-detection*', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', + ALWAYS_FAIL: 'true' + }, + stdio: 'pipe' + } + ) + + childProcess.on('exit', (exitCode) => { + eventsPromise.then(() => { + assert.equal(exitCode, 1) + done() + }).catch(done) + }) + }) + + it('bails out of EFD if the percentage of new tests is too high', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + }, + faulty_session_threshold: 0 + } + }) + + receiver.setKnownTests({ + vitest: {} + }) // tests from ci-visibility/vitest-tests/early-flake-detection.mjs will be new + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ABORT_REASON, 'faulty') + + const tests = events.filter(event => 
event.type === 'test').map(event => event.content) + assert.equal(tests.length, 4) + + const newTests = tests.filter( + test => test.meta[TEST_IS_NEW] === 'true' + ) + // no new tests + assert.equal(newTests.length, 0) + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/early-flake-detection*', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', + DD_TRACE_DEBUG: '1', + DD_TRACE_LOG_LEVEL: 'error' + }, + stdio: 'pipe' + } + ) + + childProcess.on('exit', (exitCode) => { + eventsPromise.then(() => { + assert.equal(exitCode, 1) + done() + }).catch(done) + }) + }) + + it('is disabled if DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED is false', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + } + } + }) + + receiver.setKnownTests({ + vitest: { + 'ci-visibility/vitest-tests/early-flake-detection.mjs': [ + // 'early flake detection can retry tests that eventually pass', // will be considered new + // 'early flake detection can retry tests that always pass', // will be considered new + // 'early flake detection does not retry if the test is skipped', // skipped so not retried + 'early flake detection does not retry if it is not new' + ] + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const tests = events.filter(event => event.type === 'test').map(test => test.content) + + assert.equal(tests.length, 4) + + assert.includeMembers(tests.map(test => test.meta[TEST_NAME]), [ + 'early flake detection can retry tests that eventually pass', + 'early flake detection can retry tests that always pass', + 'early flake detection does not retry if it is not new', + 'early flake detection does not retry if the test is skipped' + ]) + const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') + assert.equal(newTests.length, 0) + + const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + assert.equal(retriedTests.length, 0) + + const failedTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') + assert.equal(failedTests.length, 1) + const testSessionEvent = events.find(event => event.type === 'test_session_end').content + assert.equal(testSessionEvent.meta[TEST_STATUS], 'fail') + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/early-flake-detection*', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', + DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED: 'false' + }, + stdio: 'pipe' + } + ) + + childProcess.on('exit', (exitCode) => { + eventsPromise.then(() => { + assert.equal(exitCode, 1) + done() + }).catch(done) + }) + }) + + it('does not run EFD if the known tests request fails', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + } + } + }) + + receiver.setKnownTestsResponseCode(500) + receiver.setKnownTests({}) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', payloads => { + const events = 
payloads.flatMap(({ payload }) => payload.events) + + const tests = events.filter(event => event.type === 'test').map(test => test.content) + + assert.equal(tests.length, 4) + + assert.includeMembers(tests.map(test => test.meta[TEST_NAME]), [ + 'early flake detection can retry tests that eventually pass', + 'early flake detection can retry tests that always pass', + 'early flake detection does not retry if it is not new', + 'early flake detection does not retry if the test is skipped' + ]) + const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') + assert.equal(newTests.length, 0) + + const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + assert.equal(retriedTests.length, 0) + + const failedTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') + assert.equal(failedTests.length, 1) + const testSessionEvent = events.find(event => event.type === 'test_session_end').content + assert.equal(testSessionEvent.meta[TEST_STATUS], 'fail') + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/early-flake-detection*', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init' + }, + stdio: 'pipe' + } + ) + + childProcess.on('exit', (exitCode) => { + eventsPromise.then(() => { + assert.equal(exitCode, 1) + done() + }).catch(done) + }) + }) + + it('works when the cwd is not the repository root', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': NUM_RETRIES_EFD + } + } + }) + + receiver.setKnownTests({ + vitest: { + 'ci-visibility/subproject/vitest-test.mjs': [ + 'context can report passed test' // no test will be considered new + ] + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const tests = events.filter(event => event.type === 'test').map(test => test.content) + + // no retries + assert.equal(tests.length, 1) + + assert.propertyVal(tests[0].meta, TEST_SUITE, 'ci-visibility/subproject/vitest-test.mjs') + // it's not considered new + assert.notProperty(tests[0].meta, TEST_IS_NEW) + }) + + childProcess = exec( + '../../node_modules/.bin/vitest run', + { + cwd: `${cwd}/ci-visibility/subproject`, + env: { + ...getCiVisAgentlessConfig(receiver.port), + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', // ESM requires more flags + TEST_DIR: './vitest-test.mjs' + }, + stdio: 'inherit' + } + ) + + childProcess.on('exit', (exitCode) => { + eventsPromise.then(() => { + assert.equal(exitCode, 0) + done() + }).catch(done) + }) + }) + + it('works with repeats config when EFD is disabled', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false, + early_flake_detection: { + enabled: false + } + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', payloads => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const tests = events.filter(event => event.type === 'test').map(test => test.content) + + assert.equal(tests.length, 8) + + assert.includeMembers(tests.map(test => test.meta[TEST_NAME]), [ + 'early flake detection can retry tests that eventually pass', + 'early flake detection can retry tests that eventually 
pass', + 'early flake detection can retry tests that eventually pass', // repeated twice + 'early flake detection can retry tests that always pass', + 'early flake detection can retry tests that always pass', + 'early flake detection can retry tests that always pass', // repeated twice + 'early flake detection does not retry if it is not new', + 'early flake detection does not retry if the test is skipped' + ]) + const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') + assert.equal(newTests.length, 0) // no new test detected + + const retriedTests = tests.filter(test => test.meta[TEST_IS_RETRY] === 'true') + assert.equal(retriedTests.length, 4) // 2 repetitions on 2 tests + + // vitest reports the test as failed if any of the repetitions fail, so we'll follow that + // TODO: we might want to improve this + const failedTests = tests.filter(test => test.meta[TEST_STATUS] === 'fail') + assert.equal(failedTests.length, 3) + + const testSessionEvent = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSessionEvent.meta, TEST_STATUS, 'fail') + assert.notProperty(testSessionEvent.meta, TEST_EARLY_FLAKE_ENABLED) + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/early-flake-detection*', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', + SHOULD_REPEAT: '1' + }, + stdio: 'pipe' + } + ) + + childProcess.on('exit', (exitCode) => { + eventsPromise.then(() => { + assert.equal(exitCode, 1) + done() + }).catch(done) + }) + }) + }) }) }) diff --git a/package.json b/package.json index 4f1eb4d6348..3c4e743f567 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "dd-trace", - "version": "5.22.0", + "version": "5.23.0", "description": "Datadog APM tracing client for JavaScript", "main": "index.js", "typings": "index.d.ts", @@ -13,12 +13,15 @@ "type:doc": "cd docs && yarn && yarn build", "type:test": "cd docs && yarn && yarn test", "lint": "node scripts/check_licenses.js && eslint . && yarn audit --groups dependencies", + "lint-fix": "node scripts/check_licenses.js && eslint . 
--fix && yarn audit --groups dependencies", "services": "node ./scripts/install_plugin_modules && node packages/dd-trace/test/setup/services", "test": "SERVICES=* yarn services && mocha --expose-gc 'packages/dd-trace/test/setup/node.js' 'packages/*/test/**/*.spec.js'", "test:appsec": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" --exclude \"packages/dd-trace/test/appsec/**/*.plugin.spec.js\" \"packages/dd-trace/test/appsec/**/*.spec.js\"", "test:appsec:ci": "nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" --exclude \"packages/dd-trace/test/appsec/**/*.plugin.spec.js\" -- npm run test:appsec", "test:appsec:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/appsec/**/*.@($(echo $PLUGINS)).plugin.spec.js\"", "test:appsec:plugins:ci": "yarn services && nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" -- npm run test:appsec:plugins", + "test:debugger": "tap packages/dd-trace/test/debugger/**/*.spec.js", + "test:debugger:ci": "npm run test:debugger -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/debugger/**/*.js\"", "test:trace:core": "tap packages/dd-trace/test/*.spec.js \"packages/dd-trace/test/{ci-visibility,datastreams,encode,exporters,opentelemetry,opentracing,plugins,service-naming,telemetry}/**/*.spec.js\"", "test:trace:core:ci": "npm run test:trace:core -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/**/*.js\"", "test:instrumentations": "mocha -r 'packages/dd-trace/test/setup/mocha.js' 'packages/datadog-instrumentations/test/**/*.spec.js'", @@ -36,6 +39,7 @@ "test:integration:appsec": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/appsec/*.spec.js\"", "test:integration:cucumber": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cucumber/*.spec.js\"", "test:integration:cypress": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cypress/*.spec.js\"", + "test:integration:debugger": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/debugger/*.spec.js\"", "test:integration:jest": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/jest/*.spec.js\"", "test:integration:mocha": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/mocha/*.spec.js\"", "test:integration:playwright": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/playwright/*.spec.js\"", @@ -83,10 +87,11 @@ "crypto-randomuuid": "^1.0.0", "dc-polyfill": "^0.1.4", "ignore": "^5.2.4", - "import-in-the-middle": "^1.8.1", + "import-in-the-middle": "1.11.2", "int64-buffer": "^0.1.9", "istanbul-lib-coverage": "3.2.0", "jest-docblock": "^29.7.0", + "jsonpath-plus": "^9.0.0", "koalas": "^1.0.2", "limiter": "1.1.5", "lodash.sortby": "^4.7.0", @@ -94,10 +99,11 @@ "module-details-from-path": "^1.0.3", "msgpack-lite": "^0.1.26", "opentracing": ">=0.12.1", - "path-to-regexp": "^0.1.2", + "path-to-regexp": "^0.1.10", "pprof-format": "^2.1.0", "protobufjs": "^7.2.5", "retry": "^0.13.1", + "rfdc": "^1.3.1", "semver": "^7.5.4", "shell-quote": "^1.8.1", "tlhunter-sorted-set": "^0.1.0" diff --git a/packages/datadog-instrumentations/src/cucumber.js b/packages/datadog-instrumentations/src/cucumber.js index 645ba06218f..0f84d717381 100644 --- a/packages/datadog-instrumentations/src/cucumber.js +++ b/packages/datadog-instrumentations/src/cucumber.js @@ -35,7 +35,8 @@ const { mergeCoverage, 
fromCoverageMapToCoverage, getTestSuitePath, - CUCUMBER_WORKER_TRACE_PAYLOAD_CODE + CUCUMBER_WORKER_TRACE_PAYLOAD_CODE, + getIsFaultyEarlyFlakeDetection } = require('../../dd-trace/src/plugins/util/test') const isMarkedAsUnskippable = (pickle) => { @@ -51,7 +52,9 @@ const patched = new WeakSet() const lastStatusByPickleId = new Map() const numRetriesByPickleId = new Map() const numAttemptToAsyncResource = new Map() +const newTestsByTestFullname = new Map() +let eventDataCollector = null let pickleByFile = {} const pickleResultByFile = {} @@ -64,6 +67,8 @@ let isUnskippable = false let isSuitesSkippingEnabled = false let isEarlyFlakeDetectionEnabled = false let earlyFlakeDetectionNumRetries = 0 +let earlyFlakeDetectionFaultyThreshold = 0 +let isEarlyFlakeDetectionFaulty = false let isFlakyTestRetriesEnabled = false let numTestRetries = 0 let knownTests = [] @@ -129,15 +134,35 @@ function getChannelPromise (channelToPublishTo) { }) } +function getShouldBeSkippedSuite (pickle, suitesToSkip) { + const testSuitePath = getTestSuitePath(pickle.uri, process.cwd()) + const isUnskippable = isMarkedAsUnskippable(pickle) + const isSkipped = suitesToSkip.includes(testSuitePath) + + return [isSkipped && !isUnskippable, testSuitePath] +} + +// From cucumber@>=11 +function getFilteredPicklesNew (coordinator, suitesToSkip) { + return coordinator.sourcedPickles.reduce((acc, sourcedPickle) => { + const { pickle } = sourcedPickle + const [shouldBeSkipped, testSuitePath] = getShouldBeSkippedSuite(pickle, suitesToSkip) + + if (shouldBeSkipped) { + acc.skippedSuites.add(testSuitePath) + } else { + acc.picklesToRun.push(sourcedPickle) + } + return acc + }, { skippedSuites: new Set(), picklesToRun: [] }) +} + function getFilteredPickles (runtime, suitesToSkip) { return runtime.pickleIds.reduce((acc, pickleId) => { - const test = runtime.eventDataCollector.getPickle(pickleId) - const testSuitePath = getTestSuitePath(test.uri, process.cwd()) - - const isUnskippable = isMarkedAsUnskippable(test) - const isSkipped = suitesToSkip.includes(testSuitePath) + const pickle = runtime.eventDataCollector.getPickle(pickleId) + const [shouldBeSkipped, testSuitePath] = getShouldBeSkippedSuite(pickle, suitesToSkip) - if (isSkipped && !isUnskippable) { + if (shouldBeSkipped) { acc.skippedSuites.add(testSuitePath) } else { acc.picklesToRun.push(pickleId) @@ -146,9 +171,21 @@ function getFilteredPickles (runtime, suitesToSkip) { }, { skippedSuites: new Set(), picklesToRun: [] }) } -function getPickleByFile (runtime) { - return runtime.pickleIds.reduce((acc, pickleId) => { - const test = runtime.eventDataCollector.getPickle(pickleId) +// From cucumber@>=11 +function getPickleByFileNew (coordinator) { + return coordinator.sourcedPickles.reduce((acc, { pickle }) => { + if (acc[pickle.uri]) { + acc[pickle.uri].push(pickle) + } else { + acc[pickle.uri] = [pickle] + } + return acc + }, {}) +} + +function getPickleByFile (runtimeOrCoordinator) { + return runtimeOrCoordinator.pickleIds.reduce((acc, pickleId) => { + const test = runtimeOrCoordinator.eventDataCollector.getPickle(pickleId) if (acc[test.uri]) { acc[test.uri].push(test) } else { @@ -294,17 +331,31 @@ function testCaseHook (TestCaseRunner) { return TestCaseRunner } -function getWrappedStart (start, frameworkVersion, isParallel = false) { +// Valid for old and new cucumber versions +function getCucumberOptions (adapterOrCoordinator) { + if (adapterOrCoordinator.adapter) { + return adapterOrCoordinator.adapter.worker?.options || adapterOrCoordinator.adapter.options + } + return 
adapterOrCoordinator.options +} + +function getWrappedStart (start, frameworkVersion, isParallel = false, isCoordinator = false) { return async function () { if (!libraryConfigurationCh.hasSubscribers) { return start.apply(this, arguments) } + const options = getCucumberOptions(this) + + if (!isParallel && this.adapter?.options) { + isParallel = options.parallel > 0 + } let errorSkippableRequest const configurationResponse = await getChannelPromise(libraryConfigurationCh) isEarlyFlakeDetectionEnabled = configurationResponse.libraryConfig?.isEarlyFlakeDetectionEnabled earlyFlakeDetectionNumRetries = configurationResponse.libraryConfig?.earlyFlakeDetectionNumRetries + earlyFlakeDetectionFaultyThreshold = configurationResponse.libraryConfig?.earlyFlakeDetectionFaultyThreshold isSuitesSkippingEnabled = configurationResponse.libraryConfig?.isSuitesSkippingEnabled isFlakyTestRetriesEnabled = configurationResponse.libraryConfig?.isFlakyTestRetriesEnabled numTestRetries = configurationResponse.libraryConfig?.flakyTestRetriesCount @@ -325,28 +376,49 @@ function getWrappedStart (start, frameworkVersion, isParallel = false) { skippableSuites = skippableResponse.skippableSuites if (!errorSkippableRequest) { - const filteredPickles = getFilteredPickles(this, skippableSuites) + const filteredPickles = isCoordinator + ? getFilteredPicklesNew(this, skippableSuites) + : getFilteredPickles(this, skippableSuites) + const { picklesToRun } = filteredPickles - isSuitesSkipped = picklesToRun.length !== this.pickleIds.length + const oldPickles = isCoordinator ? this.sourcedPickles : this.pickleIds + + isSuitesSkipped = picklesToRun.length !== oldPickles.length log.debug( - () => `${picklesToRun.length} out of ${this.pickleIds.length} suites are going to run.` + () => `${picklesToRun.length} out of ${oldPickles.length} suites are going to run.` ) - this.pickleIds = picklesToRun + if (isCoordinator) { + this.sourcedPickles = picklesToRun + } else { + this.pickleIds = picklesToRun + } skippedSuites = Array.from(filteredPickles.skippedSuites) itrCorrelationId = skippableResponse.itrCorrelationId } } - pickleByFile = getPickleByFile(this) + pickleByFile = isCoordinator ? getPickleByFileNew(this) : getPickleByFile(this) + + if (isEarlyFlakeDetectionEnabled) { + const isFaulty = getIsFaultyEarlyFlakeDetection( + Object.keys(pickleByFile), + knownTests.cucumber || {}, + earlyFlakeDetectionFaultyThreshold + ) + if (isFaulty) { + isEarlyFlakeDetectionEnabled = false + isEarlyFlakeDetectionFaulty = true + } + } const processArgv = process.argv.slice(2).join(' ') const command = process.env.npm_lifecycle_script || `cucumber-js ${processArgv}` - if (isFlakyTestRetriesEnabled && !this.options.retry && numTestRetries > 0) { - this.options.retry = numTestRetries + if (isFlakyTestRetriesEnabled && !options.retry && numTestRetries > 0) { + options.retry = numTestRetries } sessionAsyncResource.runInAsyncScope(() => { @@ -388,48 +460,65 @@ function getWrappedStart (start, frameworkVersion, isParallel = false) { hasUnskippableSuites: isUnskippable, hasForcedToRunSuites: isForcedToRun, isEarlyFlakeDetectionEnabled, + isEarlyFlakeDetectionFaulty, isParallel }) }) + eventDataCollector = null return success } } -function getWrappedRunTest (runTestFunction) { - return async function (pickleId) { - const test = this.eventDataCollector.getPickle(pickleId) +// Generates suite start and finish events in the main process. +// Handles EFD in both the main process and the worker process. 
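A note on `getIsFaultyEarlyFlakeDetection`, newly imported above from `dd-trace/src/plugins/util/test`: its body is not part of this diff. As a rough sketch of the heuristic (assumed shape, hypothetical names), it disables EFD when too large a share of the test files is absent from the known-tests response, since that usually means the backend data is stale rather than that everything is genuinely new:

function getIsFaultyEarlyFlakeDetectionSketch (testSuites, knownTestsBySuite, faultyThresholdPercentage) {
  // Count test files for which the backend reported no known tests at all
  const newSuites = testSuites.filter(suite => !knownTestsBySuite[suite]).length
  return (newSuites / testSuites.length) * 100 > faultyThresholdPercentage
}

// With a 30% threshold, 2 unknown files out of 4 (50%) would mark EFD as faulty:
// getIsFaultyEarlyFlakeDetectionSketch(['a', 'b', 'c', 'd'], { a: ['t1'], b: ['t2'] }, 30) // true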
+function getWrappedRunTestCase (runTestCaseFunction, isNewerCucumberVersion = false, isWorker = false) { + return async function () { + let pickle + if (isNewerCucumberVersion) { + pickle = arguments[0].pickle + } else { + pickle = this.eventDataCollector.getPickle(arguments[0]) + } - const testFileAbsolutePath = test.uri + const testFileAbsolutePath = pickle.uri const testSuitePath = getTestSuitePath(testFileAbsolutePath, process.cwd()) - if (!pickleResultByFile[testFileAbsolutePath]) { // first test in suite - isUnskippable = isMarkedAsUnskippable(test) + // If it's a worker, suite events are handled in `getWrappedParseWorkerMessage` + if (!isWorker && !pickleResultByFile[testFileAbsolutePath]) { // first test in suite + isUnskippable = isMarkedAsUnskippable(pickle) isForcedToRun = isUnskippable && skippableSuites.includes(testSuitePath) - testSuiteStartCh.publish({ testSuitePath, isUnskippable, isForcedToRun, itrCorrelationId }) + testSuiteStartCh.publish({ + testFileAbsolutePath, + isUnskippable, + isForcedToRun, + itrCorrelationId + }) } let isNew = false if (isEarlyFlakeDetectionEnabled) { - isNew = isNewTest(testSuitePath, test.name) + isNew = isNewTest(testSuitePath, pickle.name) if (isNew) { - numRetriesByPickleId.set(pickleId, 0) + numRetriesByPickleId.set(pickle.id, 0) } } - const runTestCaseResult = await runTestFunction.apply(this, arguments) + // TODO: for >=11 we could use `runTestCaseResult` instead of accumulating results in `lastStatusByPickleId` + let runTestCaseResult = await runTestCaseFunction.apply(this, arguments) - const testStatuses = lastStatusByPickleId.get(pickleId) + const testStatuses = lastStatusByPickleId.get(pickle.id) const lastTestStatus = testStatuses[testStatuses.length - 1] // If it's a new test and it hasn't been skipped, we run it again if (isEarlyFlakeDetectionEnabled && lastTestStatus !== 'skip' && isNew) { for (let retryIndex = 0; retryIndex < earlyFlakeDetectionNumRetries; retryIndex++) { - numRetriesByPickleId.set(pickleId, retryIndex + 1) - await runTestFunction.apply(this, arguments) + numRetriesByPickleId.set(pickle.id, retryIndex + 1) + runTestCaseResult = await runTestCaseFunction.apply(this, arguments) } } let testStatus = lastTestStatus - if (isEarlyFlakeDetectionEnabled) { + let shouldBePassedByEFD = false + if (isNew && isEarlyFlakeDetectionEnabled) { /** * If Early Flake Detection (EFD) is enabled the logic is as follows: * - If all attempts for a test are failing, the test has failed and we will let the test process fail. 
@@ -439,6 +528,8 @@ function getWrappedRunTest (runTestFunction) { */ testStatus = getTestStatusFromRetries(testStatuses) if (testStatus === 'pass') { + // for cucumber@>=11, setting `this.success` does not work, so we have to change the returned value + shouldBePassedByEFD = true this.success = true } } @@ -449,8 +540,9 @@ function getWrappedRunTest (runTestFunction) { pickleResultByFile[testFileAbsolutePath].push(testStatus) } - // last test in suite - if (pickleResultByFile[testFileAbsolutePath].length === pickleByFile[testFileAbsolutePath].length) { + // If it's a worker, suite events are handled in `getWrappedParseWorkerMessage` + if (!isWorker && pickleResultByFile[testFileAbsolutePath].length === pickleByFile[testFileAbsolutePath].length) { + // last test in suite const testSuiteStatus = getSuiteStatusFromTestStatuses(pickleResultByFile[testFileAbsolutePath]) if (global.__coverage__) { const coverageFiles = getCoveredFilenamesFromCoverage(global.__coverage__) @@ -469,11 +561,15 @@ function getWrappedRunTest (runTestFunction) { testSuiteFinishCh.publish({ status: testSuiteStatus, testSuitePath }) } + if (isNewerCucumberVersion && isEarlyFlakeDetectionEnabled && isNew) { + return shouldBePassedByEFD + } + return runTestCaseResult } } -function getWrappedParseWorkerMessage (parseWorkerMessageFunction) { +function getWrappedParseWorkerMessage (parseWorkerMessageFunction, isNewVersion) { return function (worker, message) { // If the message is an array, it's a dd-trace message, so we need to stop cucumber processing, // or cucumber will throw an error @@ -488,29 +584,43 @@ function getWrappedParseWorkerMessage (parseWorkerMessageFunction) { } } - const { jsonEnvelope } = message - if (!jsonEnvelope) { + let envelope + + if (isNewVersion) { + envelope = message.envelope + } else { + envelope = message.jsonEnvelope + } + + if (!envelope) { return parseWorkerMessageFunction.apply(this, arguments) } - let parsed = jsonEnvelope + let parsed = envelope if (typeof parsed === 'string') { try { - parsed = JSON.parse(jsonEnvelope) + parsed = JSON.parse(envelope) } catch (e) { // ignore errors and continue return parseWorkerMessageFunction.apply(this, arguments) } } + let pickle + if (parsed.testCaseStarted) { - const { pickleId } = this.eventDataCollector.testCaseMap[parsed.testCaseStarted.testCaseId] - const pickle = this.eventDataCollector.getPickle(pickleId) + if (isNewVersion) { + pickle = this.inProgress[worker.id].pickle + } else { + const { pickleId } = this.eventDataCollector.testCaseMap[parsed.testCaseStarted.testCaseId] + pickle = this.eventDataCollector.getPickle(pickleId) + } + // THIS FAILS IN PARALLEL MODE const testFileAbsolutePath = pickle.uri // First test in suite if (!pickleResultByFile[testFileAbsolutePath]) { pickleResultByFile[testFileAbsolutePath] = [] testSuiteStartCh.publish({ - testSuitePath: getTestSuitePath(testFileAbsolutePath, process.cwd()) + testFileAbsolutePath }) } } @@ -519,14 +629,47 @@ function getWrappedParseWorkerMessage (parseWorkerMessageFunction) { // after calling `parseWorkerMessageFunction`, the test status can already be read if (parsed.testCaseFinished) { - const { pickle, worstTestStepResult } = - this.eventDataCollector.getTestCaseAttempt(parsed.testCaseFinished.testCaseStartedId) + let worstTestStepResult + if (isNewVersion && eventDataCollector) { + pickle = this.inProgress[worker.id].pickle + worstTestStepResult = + eventDataCollector.getTestCaseAttempt(parsed.testCaseFinished.testCaseStartedId).worstTestStepResult + } else { + const testCase = 
this.eventDataCollector.getTestCaseAttempt(parsed.testCaseFinished.testCaseStartedId) + worstTestStepResult = testCase.worstTestStepResult + pickle = testCase.pickle + } const { status } = getStatusFromResultLatest(worstTestStepResult) + let isNew = false + + if (isEarlyFlakeDetectionEnabled) { + isNew = isNewTest(pickle.uri, pickle.name) + } const testFileAbsolutePath = pickle.uri const finished = pickleResultByFile[testFileAbsolutePath] - finished.push(status) + + if (isNew) { + const testFullname = `${pickle.uri}:${pickle.name}` + let testStatuses = newTestsByTestFullname.get(testFullname) + if (!testStatuses) { + testStatuses = [status] + newTestsByTestFullname.set(testFullname, testStatuses) + } else { + testStatuses.push(status) + } + // We have finished all retries + if (testStatuses.length === earlyFlakeDetectionNumRetries + 1) { + const newTestFinalStatus = getTestStatusFromRetries(testStatuses) + // we only push to `finished` once all of the retries have finished + finished.push(newTestFinalStatus) + } + } else { + // TODO: can we get the error message? + finished.push(status) + } if (finished.length === pickleByFile[testFileAbsolutePath].length) { testSuiteFinishCh.publish({ @@ -556,13 +699,16 @@ addHook({ // From 7.3.0 onwards, runPickle becomes runTestCase. Not executed in parallel mode. // `getWrappedStart` generates session start and finish events -// `getWrappedRunTest` generates suite start and finish events +// `getWrappedRunTestCase` generates suite start and finish events and handles EFD. +// TODO (fix): there is a lib/runtime/index in >=11.0.0, but we don't instrument it because it's not useful for us. +// This causes an info log saying "Found incompatible integration version". addHook({ name: '@cucumber/cucumber', - versions: ['>=7.3.0'], + versions: ['>=7.3.0 <11.0.0'], file: 'lib/runtime/index.js' }, (runtimePackage, frameworkVersion) => { - shimmer.wrap(runtimePackage.default.prototype, 'runTestCase', runTestCase => getWrappedRunTest(runTestCase)) + shimmer.wrap(runtimePackage.default.prototype, 'runTestCase', runTestCase => getWrappedRunTestCase(runTestCase)) + shimmer.wrap(runtimePackage.default.prototype, 'start', start => getWrappedStart(start, frameworkVersion)) return runtimePackage @@ -570,13 +716,13 @@ addHook({ // Not executed in parallel mode. // `getWrappedStart` generates session start and finish events -// `getWrappedRunTest` generates suite start and finish events +// `getWrappedRunTestCase` generates suite start and finish events and handles EFD. addHook({ name: '@cucumber/cucumber', versions: ['>=7.0.0 <7.3.0'], file: 'lib/runtime/index.js' }, (runtimePackage, frameworkVersion) => { - shimmer.wrap(runtimePackage.default.prototype, 'runPickle', runPickle => getWrappedRunTest(runPickle)) + shimmer.wrap(runtimePackage.default.prototype, 'runPickle', runPickle => getWrappedRunTestCase(runPickle)) shimmer.wrap(runtimePackage.default.prototype, 'start', start => getWrappedStart(start, frameworkVersion)) return runtimePackage @@ -584,11 +730,10 @@ addHook({ // Only executed in parallel mode.
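The EFD bookkeeping above leans on `getTestStatusFromRetries`, whose body also lives outside this diff. A plausible minimal model (assumed semantics, sketch only): a new test is run 1 + numRetries times, and it passes overall if any attempt passed, which is what lets a flaky-but-sometimes-green test avoid failing the session:

function getTestStatusFromRetriesSketch (testStatuses) {
  // 'fail' only when every attempt failed; a single pass makes the test flaky but passing
  return testStatuses.some(status => status === 'pass') ? 'pass' : 'fail'
}

const earlyFlakeDetectionNumRetries = 2
const attempts = ['fail', 'pass', 'fail'] // one status per attempt, 1 + numRetries entries in total
if (attempts.length === earlyFlakeDetectionNumRetries + 1) {
  console.log(getTestStatusFromRetriesSketch(attempts)) // 'pass'
}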
// `getWrappedStart` generates session start and finish events -// `getWrappedGiveWork` generates suite start events and sets pickleResultByFile (used by suite finish events) -// `getWrappedParseWorkerMessage` generates suite finish events +// `getWrappedParseWorkerMessage` generates suite start and finish events addHook({ name: '@cucumber/cucumber', - versions: ['>=8.0.0'], + versions: ['>=8.0.0 <11.0.0'], file: 'lib/runtime/parallel/coordinator.js' }, (coordinatorPackage, frameworkVersion) => { shimmer.wrap(coordinatorPackage.default.prototype, 'start', start => getWrappedStart(start, frameworkVersion, true)) @@ -599,3 +744,95 @@ addHook({ ) return coordinatorPackage }) + +// >=11.0.0 hooks +// `getWrappedRunTestCase` does two things: +// - generates suite start and finish events in the main process, +// - handles EFD in both the main process and the worker process. +addHook({ + name: '@cucumber/cucumber', + versions: ['>=11.0.0'], + file: 'lib/runtime/worker.js' +}, (workerPackage) => { + shimmer.wrap( + workerPackage.Worker.prototype, + 'runTestCase', + runTestCase => getWrappedRunTestCase(runTestCase, true, !!process.env.CUCUMBER_WORKER_ID) + ) + return workerPackage +}) + +// `getWrappedStart` generates session start and finish events +addHook({ + name: '@cucumber/cucumber', + versions: ['>=11.0.0'], + file: 'lib/runtime/coordinator.js' +}, (coordinatorPackage, frameworkVersion) => { + shimmer.wrap( + coordinatorPackage.Coordinator.prototype, + 'run', + run => getWrappedStart(run, frameworkVersion, false, true) + ) + return coordinatorPackage +}) + +// Necessary because `eventDataCollector` is no longer available in the runtime instance +addHook({ + name: '@cucumber/cucumber', + versions: ['>=11.0.0'], + file: 'lib/formatter/helpers/event_data_collector.js' +}, (eventDataCollectorPackage) => { + shimmer.wrap(eventDataCollectorPackage.default.prototype, 'parseEnvelope', parseEnvelope => function () { + eventDataCollector = this + return parseEnvelope.apply(this, arguments) + }) + return eventDataCollectorPackage +}) + +// Only executed in parallel mode for >=11, in the main process. +// `getWrappedParseWorkerMessage` generates suite start and finish events +// In `startWorker` we pass early flake detection info to the worker. +addHook({ + name: '@cucumber/cucumber', + versions: ['>=11.0.0'], + file: 'lib/runtime/parallel/adapter.js' +}, (adapterPackage) => { + shimmer.wrap( + adapterPackage.ChildProcessAdapter.prototype, + 'parseWorkerMessage', + parseWorkerMessage => getWrappedParseWorkerMessage(parseWorkerMessage, true) + ) + // EFD in parallel mode only supported in >=11.0.0 + shimmer.wrap(adapterPackage.ChildProcessAdapter.prototype, 'startWorker', startWorker => function () { + if (isEarlyFlakeDetectionEnabled) { + this.options.worldParameters._ddKnownTests = knownTests + this.options.worldParameters._ddEarlyFlakeDetectionNumRetries = earlyFlakeDetectionNumRetries + } + + return startWorker.apply(this, arguments) + }) + return adapterPackage +}) + +// Hook executed in the worker process when in parallel mode. +// In this hook we read the information passed in `worldParameters` and make it available for +// `getWrappedRunTestCase`. 
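To make that flow concrete, here is a stripped-down model of the coordinator/worker handshake (standalone sketch with made-up data, not cucumber internals): the `startWorker` wrapper above stores the EFD settings on `worldParameters`, and the `initialize` wrapper in the hook below derives the worker's EFD state from what it finds there:

const workerOptions = { worldParameters: {} }

// coordinator side (see the `startWorker` wrapper above)
workerOptions.worldParameters._ddKnownTests = { cucumber: { 'features/login.feature': ['logs in'] } }
workerOptions.worldParameters._ddEarlyFlakeDetectionNumRetries = 3

// worker side: EFD is considered enabled iff a known-tests list was passed
const { _ddKnownTests, _ddEarlyFlakeDetectionNumRetries } = workerOptions.worldParameters
const isEarlyFlakeDetectionEnabled = !!_ddKnownTests
console.log(isEarlyFlakeDetectionEnabled, _ddEarlyFlakeDetectionNumRetries) // true 3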
+addHook({ + name: '@cucumber/cucumber', + versions: ['>=11.0.0'], + file: 'lib/runtime/parallel/worker.js' +}, (workerPackage) => { + shimmer.wrap( + workerPackage.ChildProcessWorker.prototype, + 'initialize', + initialize => async function () { + await initialize.apply(this, arguments) + isEarlyFlakeDetectionEnabled = !!this.options.worldParameters._ddKnownTests + if (isEarlyFlakeDetectionEnabled) { + knownTests = this.options.worldParameters._ddKnownTests + earlyFlakeDetectionNumRetries = this.options.worldParameters._ddEarlyFlakeDetectionNumRetries + } + } + ) + return workerPackage +}) diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index cf6648c4399..e2baf3f9d42 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -404,7 +404,7 @@ addHook({ addHook({ name: '@jest/test-sequencer', - versions: ['>=24.8.0'] + versions: ['>=28'] }, (sequencerPackage, frameworkVersion) => { shimmer.wrap(sequencerPackage.default.prototype, 'shard', shard => function () { const shardedTests = shard.apply(this, arguments) @@ -648,6 +648,7 @@ function jestAdapterWrapper (jestAdapter, jestVersion) { testSuiteStartCh.publish({ testSuite: environment.testSuite, testEnvironmentOptions: environment.testEnvironmentOptions, + testSourceFile: environment.testSourceFile, displayName: environment.displayName, frameworkVersion: jestVersion }) @@ -765,6 +766,7 @@ addHook({ _ddTestModuleId, _ddTestSessionId, _ddTestCommand, + _ddTestSessionName, _ddForcedToRun, _ddUnskippable, _ddItrCorrelationId, diff --git a/packages/datadog-instrumentations/src/kafkajs.js b/packages/datadog-instrumentations/src/kafkajs.js index 1fb5316becd..395c69de057 100644 --- a/packages/datadog-instrumentations/src/kafkajs.js +++ b/packages/datadog-instrumentations/src/kafkajs.js @@ -17,6 +17,10 @@ const consumerCommitCh = channel('apm:kafkajs:consume:commit') const consumerFinishCh = channel('apm:kafkajs:consume:finish') const consumerErrorCh = channel('apm:kafkajs:consume:error') +const batchConsumerStartCh = channel('apm:kafkajs:consume-batch:start') +const batchConsumerFinishCh = channel('apm:kafkajs:consume-batch:finish') +const batchConsumerErrorCh = channel('apm:kafkajs:consume-batch:error') + function commitsFromEvent (event) { const { payload: { groupId, topics } } = event const commitList = [] @@ -96,6 +100,17 @@ addHook({ name: 'kafkajs', file: 'src/index.js', versions: ['>=1.4'] }, (BaseKaf return createConsumer.apply(this, arguments) } + const eachMessageExtractor = (args) => { + const { topic, partition, message } = args[0] + return { topic, partition, message, groupId } + } + + const eachBatchExtractor = (args) => { + const { batch } = args[0] + const { topic, partition, messages } = batch + return { topic, partition, messages, groupId } + } + const consumer = createConsumer.apply(this, arguments) consumer.on(consumer.events.COMMIT_OFFSETS, commitsFromEvent) @@ -103,43 +118,64 @@ addHook({ name: 'kafkajs', file: 'src/index.js', versions: ['>=1.4'] }, (BaseKaf const run = consumer.run const groupId = arguments[0].groupId - consumer.run = function ({ eachMessage, ...runArgs }) { - if (typeof eachMessage !== 'function') return run({ eachMessage, ...runArgs }) + consumer.run = function ({ eachMessage, eachBatch, ...runArgs }) { + eachMessage = wrapFunction( + eachMessage, + consumerStartCh, + consumerFinishCh, + consumerErrorCh, + eachMessageExtractor + ) + + eachBatch = wrapFunction( + eachBatch, + 
batchConsumerStartCh, + batchConsumerFinishCh, + batchConsumerErrorCh, + eachBatchExtractor + ) return run({ - eachMessage: function (...eachMessageArgs) { - const innerAsyncResource = new AsyncResource('bound-anonymous-fn') - return innerAsyncResource.runInAsyncScope(() => { - const { topic, partition, message } = eachMessageArgs[0] - consumerStartCh.publish({ topic, partition, message, groupId }) - try { - const result = eachMessage.apply(this, eachMessageArgs) - if (result && typeof result.then === 'function') { - result.then( - innerAsyncResource.bind(() => consumerFinishCh.publish(undefined)), - innerAsyncResource.bind(err => { - if (err) { - consumerErrorCh.publish(err) - } - consumerFinishCh.publish(undefined) - }) - ) - } else { - consumerFinishCh.publish(undefined) - } - - return result - } catch (e) { - consumerErrorCh.publish(e) - consumerFinishCh.publish(undefined) - throw e - } - }) - }, + eachMessage, + eachBatch, ...runArgs }) } + return consumer }) return Kafka }) + +const wrapFunction = (fn, startCh, finishCh, errorCh, extractArgs) => { + return typeof fn === 'function' + ? function (...args) { + const innerAsyncResource = new AsyncResource('bound-anonymous-fn') + return innerAsyncResource.runInAsyncScope(() => { + const extractedArgs = extractArgs(args) + startCh.publish(extractedArgs) + try { + const result = fn.apply(this, args) + if (result && typeof result.then === 'function') { + result.then( + innerAsyncResource.bind(() => finishCh.publish(undefined)), + innerAsyncResource.bind(err => { + if (err) { + errorCh.publish(err) + } + finishCh.publish(undefined) + }) + ) + } else { + finishCh.publish(undefined) + } + return result + } catch (e) { + errorCh.publish(e) + finishCh.publish(undefined) + throw e + } + }) + } + : fn +} diff --git a/packages/datadog-instrumentations/src/microgateway-core.js b/packages/datadog-instrumentations/src/microgateway-core.js index 553fc402606..f96a769ae85 100644 --- a/packages/datadog-instrumentations/src/microgateway-core.js +++ b/packages/datadog-instrumentations/src/microgateway-core.js @@ -8,7 +8,9 @@ const routeChannel = channel('apm:microgateway-core:request:route') const errorChannel = channel('apm:microgateway-core:request:error') const name = 'microgateway-core' -const versions = ['>=2.1'] + +// TODO Remove " <=3.0.0" when "volos-util-apigee" module is fixed +const versions = ['>=2.1 <=3.0.0'] const requestResources = new WeakMap() function wrapConfigProxyFactory (configProxyFactory) { diff --git a/packages/datadog-instrumentations/src/mocha/main.js b/packages/datadog-instrumentations/src/mocha/main.js index b2e425b62f2..2e796a71371 100644 --- a/packages/datadog-instrumentations/src/mocha/main.js +++ b/packages/datadog-instrumentations/src/mocha/main.js @@ -11,12 +11,12 @@ const { fromCoverageMapToCoverage, getCoveredFilenamesFromCoverage, mergeCoverage, - resetCoverage + resetCoverage, + getIsFaultyEarlyFlakeDetection } = require('../../../dd-trace/src/plugins/util/test') const { isNewTest, - retryTest, getSuitesByTestFile, runnableWrapper, getOnTestHandler, @@ -25,22 +25,21 @@ const { getOnHookEndHandler, getOnFailHandler, getOnPendingHandler, - testFileToSuiteAr + testFileToSuiteAr, + newTests, + getTestFullName, + getRunTestsWrapper } = require('./utils') + require('./common') const testSessionAsyncResource = new AsyncResource('bound-anonymous-fn') const patched = new WeakSet() -const newTests = {} -let suitesToSkip = [] + const unskippableSuites = [] +let suitesToSkip = [] let isSuitesSkipped = false let skippedSuites = [] 
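Returning to the kafkajs change above: the new `wrapFunction` helper is what lets `eachMessage` and `eachBatch` share one instrumentation path. A self-contained usage model follows (simplified: the AsyncResource bookkeeping and the error channel are omitted, and the channel names are made up):

const dc = require('dc-polyfill')

const startCh = dc.channel('example:consume:start')
const finishCh = dc.channel('example:consume:finish')
startCh.subscribe(({ topic }) => console.log('start', topic))
finishCh.subscribe(() => console.log('finish'))

function wrapHandler (fn, extractArgs) {
  // Mirrors the shape of `wrapFunction` above: non-functions pass through untouched,
  // since kafkajs allows `eachMessage`/`eachBatch` to be undefined
  return typeof fn === 'function'
    ? async function (...args) {
      startCh.publish(extractArgs(args))
      try {
        return await fn.apply(this, args)
      } finally {
        finishCh.publish(undefined)
      }
    }
    : fn
}

const eachBatch = wrapHandler(async ({ batch }) => {}, args => ({ topic: args[0].batch.topic }))
eachBatch({ batch: { topic: 'my-topic', messages: [] } }) // logs: start my-topic, then finish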
-let isEarlyFlakeDetectionEnabled = false -let isSuitesSkippingEnabled = false -let isFlakyTestRetriesEnabled = false -let earlyFlakeDetectionNumRetries = 0 -let knownTests = [] let itrCorrelationId = '' let isForcedToRun = false const config = {} @@ -69,6 +68,17 @@ const itrSkippedSuitesCh = channel('ci:mocha:itr:skipped-suites') const getCodeCoverageCh = channel('ci:nyc:get-coverage') +// Tests from workers do not come with the `isFailed` method +function isTestFailed (test) { + if (test.isFailed) { + return test.isFailed() + } + if (test.isPending) { + return !test.isPending() && test.state === 'failed' + } + return false +} + function getFilteredSuites (originalSuites) { return originalSuites.reduce((acc, suite) => { const testPath = getTestSuitePath(suite.file, process.cwd()) @@ -107,7 +117,7 @@ function getOnEndHandler (isParallel) { status = 'fail' } - if (!isParallel && isEarlyFlakeDetectionEnabled) { + if (config.isEarlyFlakeDetectionEnabled) { /** * If Early Flake Detection (EFD) is enabled the logic is as follows: * - If all attempts for a test are failing, the test has failed and we will let the test process fail. @@ -116,7 +126,7 @@ * on flakiness (the test will be considered flaky), but you may choose to unblock the pipeline too. */ for (const tests of Object.values(newTests)) { - const failingNewTests = tests.filter(test => test.isFailed()) + const failingNewTests = tests.filter(test => isTestFailed(test)) const areAllNewTestsFailing = failingNewTests.length === tests.length if (failingNewTests.length && !areAllNewTestsFailing) { this.stats.failures -= failingNewTests.length @@ -153,13 +163,14 @@ hasForcedToRunSuites: isForcedToRun, hasUnskippableSuites: !!unskippableSuites.length, error, - isEarlyFlakeDetectionEnabled, + isEarlyFlakeDetectionEnabled: config.isEarlyFlakeDetectionEnabled, + isEarlyFlakeDetectionFaulty: config.isEarlyFlakeDetectionFaulty, isParallel }) }) } -function getExecutionConfiguration (runner, onFinishRequest) { +function getExecutionConfiguration (runner, isParallel, onFinishRequest) { const mochaRunAsyncResource = new AsyncResource('bound-anonymous-fn') const onReceivedSkippableSuites = ({ err, skippableSuites, itrCorrelationId: responseItrCorrelationId }) => { @@ -186,15 +197,15 @@ onFinishRequest() } - const onReceivedKnownTests = ({ err, knownTests: receivedKnownTests }) => { + const onReceivedKnownTests = ({ err, knownTests }) => { if (err) { - knownTests = [] - isEarlyFlakeDetectionEnabled = false + config.knownTests = [] + config.isEarlyFlakeDetectionEnabled = false } else { - knownTests = receivedKnownTests + config.knownTests = knownTests } - if (isSuitesSkippingEnabled) { + if (config.isSuitesSkippingEnabled) { skippableSuitesCh.publish({ onDone: mochaRunAsyncResource.bind(onReceivedSkippableSuites) }) @@ -208,22 +219,19 @@ return onFinishRequest() } - isEarlyFlakeDetectionEnabled = libraryConfig.isEarlyFlakeDetectionEnabled - isSuitesSkippingEnabled = libraryConfig.isSuitesSkippingEnabled - earlyFlakeDetectionNumRetries = libraryConfig.earlyFlakeDetectionNumRetries - isFlakyTestRetriesEnabled = libraryConfig.isFlakyTestRetriesEnabled - - config.isEarlyFlakeDetectionEnabled = isEarlyFlakeDetectionEnabled - config.isSuitesSkippingEnabled = isSuitesSkippingEnabled - config.earlyFlakeDetectionNumRetries = earlyFlakeDetectionNumRetries -
config.isFlakyTestRetriesEnabled = isFlakyTestRetriesEnabled - config.flakyTestRetriesCount = libraryConfig.flakyTestRetriesCount + config.isEarlyFlakeDetectionEnabled = libraryConfig.isEarlyFlakeDetectionEnabled + config.earlyFlakeDetectionNumRetries = libraryConfig.earlyFlakeDetectionNumRetries + config.earlyFlakeDetectionFaultyThreshold = libraryConfig.earlyFlakeDetectionFaultyThreshold + // ITR and auto test retries are not supported in parallel mode yet + config.isSuitesSkippingEnabled = !isParallel && libraryConfig.isSuitesSkippingEnabled + config.isFlakyTestRetriesEnabled = !isParallel && libraryConfig.isFlakyTestRetriesEnabled + config.flakyTestRetriesCount = !isParallel && libraryConfig.flakyTestRetriesCount - if (isEarlyFlakeDetectionEnabled) { + if (config.isEarlyFlakeDetectionEnabled) { knownTestsCh.publish({ onDone: mochaRunAsyncResource.bind(onReceivedKnownTests) }) - } else if (isSuitesSkippingEnabled) { + } else if (config.isSuitesSkippingEnabled) { skippableSuitesCh.publish({ onDone: mochaRunAsyncResource.bind(onReceivedSkippableSuites) }) @@ -251,8 +259,8 @@ addHook({ return run.apply(this, arguments) } - // `options.delay` does not work in parallel mode, so ITR and EFD can't work. - // TODO: use `lib/cli/run-helpers.js#runMocha` to get the data in parallel mode. + // `options.delay` does not work in parallel mode, so we can't delay the execution this way + // This needs to be both here and in `runMocha` hook. Read the comment in `runMocha` hook for more info. this.options.delay = true const runner = run.apply(this, arguments) @@ -264,7 +272,19 @@ addHook({ } }) - getExecutionConfiguration(runner, () => { + getExecutionConfiguration(runner, false, () => { + if (config.isEarlyFlakeDetectionEnabled) { + const testSuites = this.files.map(file => getTestSuitePath(file, process.cwd())) + const isFaulty = getIsFaultyEarlyFlakeDetection( + testSuites, + config.knownTests?.mocha || {}, + config.earlyFlakeDetectionFaultyThreshold + ) + if (isFaulty) { + config.isEarlyFlakeDetectionEnabled = false + config.isEarlyFlakeDetectionFaulty = true + } + } if (getCodeCoverageCh.hasSubscribers) { getCodeCoverageCh.publish({ onDone: (receivedCodeCoverage) => { @@ -282,9 +302,6 @@ addHook({ return Mocha }) -// Only used to set `mocha.options.delay` to true in serial mode. When the mocha CLI is used, -// setting options.delay in Mocha#run is not enough to delay the execution. -// TODO: modify this hook to grab the data in parallel mode, so that ITR and EFD can work. addHook({ name: 'mocha', versions: ['>=5.2.0'], @@ -294,15 +311,20 @@ addHook({ if (!testStartCh.hasSubscribers) { return runMocha.apply(this, arguments) } - const mocha = arguments[0] + /** * This attaches `run` to the global context, which we'll call after - * our configuration and skippable suites requests + * our configuration and skippable suites requests. + * You need this both here and in Mocha#run hook: the programmatic API + * does not call `runMocha`, so it needs to be in Mocha#run. When using + * the CLI, modifying `options.delay` in Mocha#run is not enough (it's too late), + * so it also needs to be here. */ if (!mocha.options.parallel) { mocha.options.delay = true } + return runMocha.apply(this, arguments) }) return run @@ -319,18 +341,7 @@ addHook({ patched.add(Runner) - shimmer.wrap(Runner.prototype, 'runTests', runTests => function (suite, fn) { - if (isEarlyFlakeDetectionEnabled) { - // by the time we reach `this.on('test')`, it is too late. 
We need to add retries here - suite.tests.forEach(test => { - if (!test.isPending() && isNewTest(test, knownTests)) { - test._ddIsNew = true - retryTest(test, earlyFlakeDetectionNumRetries) - } - }) - } - return runTests.apply(this, arguments) - }) + shimmer.wrap(Runner.prototype, 'runTests', runTests => getRunTestsWrapper(runTests, config)) shimmer.wrap(Runner.prototype, 'run', run => function () { if (!testStartCh.hasSubscribers) { @@ -514,10 +525,10 @@ addHook({ // Used to start and finish test session and test module addHook({ name: 'mocha', - versions: ['>=5.2.0'], + versions: ['>=8.0.0'], file: 'lib/nodejs/parallel-buffered-runner.js' }, (ParallelBufferedRunner, frameworkVersion) => { - shimmer.wrap(ParallelBufferedRunner.prototype, 'run', run => function () { + shimmer.wrap(ParallelBufferedRunner.prototype, 'run', run => function (cb, { files }) { if (!testStartCh.hasSubscribers) { return run.apply(this, arguments) } @@ -525,8 +536,82 @@ addHook({ this.once('start', getOnStartHandler(true, frameworkVersion)) this.once('end', getOnEndHandler(true)) - return run.apply(this, arguments) + getExecutionConfiguration(this, true, () => { + if (config.isEarlyFlakeDetectionEnabled) { + const testSuites = files.map(file => getTestSuitePath(file, process.cwd())) + const isFaulty = getIsFaultyEarlyFlakeDetection( + testSuites, + config.knownTests?.mocha || {}, + config.earlyFlakeDetectionFaultyThreshold + ) + if (isFaulty) { + config.isEarlyFlakeDetectionEnabled = false + config.isEarlyFlakeDetectionFaulty = true + } + } + run.apply(this, arguments) + }) + + return this }) return ParallelBufferedRunner }) + +// Only in parallel mode: BufferedWorkerPool#run is used to run a test file in a worker +// If Early Flake Detection is enabled, +// In this hook we pass the known tests to the worker and collect the new tests that run +addHook({ + name: 'mocha', + versions: ['>=8.0.0'], + file: 'lib/nodejs/buffered-worker-pool.js' +}, (BufferedWorkerPoolPackage) => { + const { BufferedWorkerPool } = BufferedWorkerPoolPackage + + shimmer.wrap(BufferedWorkerPool.prototype, 'run', run => async function (testSuiteAbsolutePath, workerArgs) { + if (!testStartCh.hasSubscribers || !config.isEarlyFlakeDetectionEnabled) { + return run.apply(this, arguments) + } + + const testPath = getTestSuitePath(testSuiteAbsolutePath, process.cwd()) + const testSuiteKnownTests = config.knownTests.mocha?.[testPath] || [] + + // We pass the known tests for the test file to the worker + const testFileResult = await run.apply( + this, + [ + testSuiteAbsolutePath, + { + ...workerArgs, + _ddEfdNumRetries: config.earlyFlakeDetectionNumRetries, + _ddKnownTests: { + mocha: { + [testPath]: testSuiteKnownTests + } + } + } + ] + ) + const tests = testFileResult + .events + .filter(event => event.eventName === 'test end') + .map(event => event.data) + + // `newTests` is filled in the worker process, so we need to use the test results to fill it here too. 
+ for (const test of tests) { + if (isNewTest(test, config.knownTests)) { + const testFullName = getTestFullName(test) + const existingTests = newTests[testFullName] + + if (!existingTests) { + newTests[testFullName] = [test] + } else { + existingTests.push(test) + } + } + } + return testFileResult + }) + + return BufferedWorkerPoolPackage +}) diff --git a/packages/datadog-instrumentations/src/mocha/utils.js index 612d46e40fc..a4da0762039 100644 --- a/packages/datadog-instrumentations/src/mocha/utils.js +++ b/packages/datadog-instrumentations/src/mocha/utils.js @@ -24,6 +24,7 @@ const originalFns = new WeakMap() const testToStartLine = new WeakMap() const testFileToSuiteAr = new Map() const wrappedFunctions = new WeakSet() +const newTests = {} function isNewTest (test, knownTests) { const testSuite = getTestSuitePath(test.file, process.cwd()) @@ -151,7 +152,7 @@ function runnableWrapper (RunnablePackage, libraryConfig) { return RunnablePackage } -function getOnTestHandler (isMain, newTests) { +function getOnTestHandler (isMain) { return function (test) { const testStartLine = testToStartLine.get(test) const asyncResource = new AsyncResource('bound-anonymous-fn') @@ -179,22 +180,22 @@ testStartLine } - if (isMain) { - testInfo.isNew = isNew - testInfo.isEfdRetry = isEfdRetry - // We want to store the result of the new tests - if (isNew) { - const testFullName = getTestFullName(test) - if (newTests[testFullName]) { - newTests[testFullName].push(test) - } else { - newTests[testFullName] = [test] - } - } - } else { + if (!isMain) { testInfo.isParallel = true } + testInfo.isNew = isNew + testInfo.isEfdRetry = isEfdRetry + // We want to store the result of the new tests + if (isNew) { + const testFullName = getTestFullName(test) + if (newTests[testFullName]) { + newTests[testFullName].push(test) + } else { + newTests[testFullName] = [test] + } + } + asyncResource.runInAsyncScope(() => { testStartCh.publish(testInfo) }) @@ -327,6 +328,23 @@ function getOnPendingHandler () { } } } + +// Hook to add retries to tests if EFD is enabled +function getRunTestsWrapper (runTests, config) { + return function (suite, fn) { + if (config.isEarlyFlakeDetectionEnabled) { + // by the time we reach `this.on('test')`, it is too late.
We need to add retries here + suite.tests.forEach(test => { + if (!test.isPending() && isNewTest(test, config.knownTests)) { + test._ddIsNew = true + retryTest(test, config.earlyFlakeDetectionNumRetries) + } + }) + } + return runTests.apply(this, arguments) + } +} + module.exports = { isNewTest, retryTest, @@ -345,5 +363,7 @@ module.exports = { getOnHookEndHandler, getOnFailHandler, getOnPendingHandler, - testFileToSuiteAr + testFileToSuiteAr, + getRunTestsWrapper, + newTests } diff --git a/packages/datadog-instrumentations/src/mocha/worker.js b/packages/datadog-instrumentations/src/mocha/worker.js index fadd8f80a6e..63670ba5db2 100644 --- a/packages/datadog-instrumentations/src/mocha/worker.js +++ b/packages/datadog-instrumentations/src/mocha/worker.js @@ -9,19 +9,47 @@ const { getOnTestEndHandler, getOnHookEndHandler, getOnFailHandler, - getOnPendingHandler + getOnPendingHandler, + getRunTestsWrapper } = require('./utils') require('./common') const workerFinishCh = channel('ci:mocha:worker:finish') +const config = {} + +addHook({ + name: 'mocha', + versions: ['>=8.0.0'], + file: 'lib/mocha.js' +}, (Mocha) => { + shimmer.wrap(Mocha.prototype, 'run', run => function () { + if (this.options._ddKnownTests) { + // EFD is enabled if there's a list of known tests + config.isEarlyFlakeDetectionEnabled = true + config.knownTests = this.options._ddKnownTests + config.earlyFlakeDetectionNumRetries = this.options._ddEfdNumRetries + delete this.options._ddKnownTests + delete this.options._ddEfdNumRetries + } + return run.apply(this, arguments) + }) + + return Mocha +}) + // Runner is also hooked in mocha/main.js, but in here we only generate test events. addHook({ name: 'mocha', versions: ['>=5.2.0'], file: 'lib/runner.js' }, function (Runner) { + shimmer.wrap(Runner.prototype, 'runTests', runTests => getRunTestsWrapper(runTests, config)) + shimmer.wrap(Runner.prototype, 'run', run => function () { + if (!workerFinishCh.hasSubscribers) { + return run.apply(this, arguments) + } // We flush when the worker ends with its test file (a mocha instance in a worker runs a single test file) this.on('end', () => { workerFinishCh.publish() diff --git a/packages/datadog-instrumentations/src/openai.js b/packages/datadog-instrumentations/src/openai.js index 326e0d3092f..940b5919d24 100644 --- a/packages/datadog-instrumentations/src/openai.js +++ b/packages/datadog-instrumentations/src/openai.js @@ -165,10 +165,12 @@ function addStreamedChunk (content, chunk) { if (tools) { oldChoice.delta.tool_calls = tools.map((newTool, toolIdx) => { - const oldTool = oldChoice.delta.tool_calls[toolIdx] + const oldTool = oldChoice.delta.tool_calls?.[toolIdx] if (oldTool) { oldTool.function.arguments += newTool.function.arguments + } else { + return newTool } return oldTool @@ -247,7 +249,7 @@ function wrapStreamIterator (response, options, n, ctx) { return res }) .catch(err => { - finish(undefined, err) + finish(ctx, undefined, err) throw err }) diff --git a/packages/datadog-instrumentations/src/pg.js b/packages/datadog-instrumentations/src/pg.js index be09ac9e928..55642d82e96 100644 --- a/packages/datadog-instrumentations/src/pg.js +++ b/packages/datadog-instrumentations/src/pg.js @@ -53,14 +53,15 @@ function wrapQuery (query) { } return asyncResource.runInAsyncScope(() => { + const abortController = new AbortController() + startCh.publish({ params: this.connectionParameters, query: pgQuery, - processId + processId, + abortController }) - arguments[0] = pgQuery - const finish = asyncResource.bind(function (error) { if (error) { 
errorCh.publish(error) @@ -68,6 +69,43 @@ function wrapQuery (query) { finishCh.publish() }) + if (abortController.signal.aborted) { + const error = abortController.signal.reason || new Error('Aborted') + + // eslint-disable-next-line max-len + // Based on: https://github.com/brianc/node-postgres/blob/54eb0fa216aaccd727765641e7d1cf5da2bc483d/packages/pg/lib/client.js#L510 + const reusingQuery = typeof pgQuery.submit === 'function' + const callback = arguments[arguments.length - 1] + + finish(error) + + if (reusingQuery) { + if (!pgQuery.callback && typeof callback === 'function') { + pgQuery.callback = callback + } + + if (pgQuery.callback) { + pgQuery.callback(error) + } else { + process.nextTick(() => { + pgQuery.emit('error', error) + }) + } + + return pgQuery + } + + if (typeof callback === 'function') { + callback(error) + + return + } + + return Promise.reject(error) + } + + arguments[0] = pgQuery + const retval = query.apply(this, arguments) const queryQueue = this.queryQueue || this._queryQueue const activeQuery = this.activeQuery || this._activeQuery @@ -112,8 +150,11 @@ function wrapPoolQuery (query) { const pgQuery = arguments[0] !== null && typeof arguments[0] === 'object' ? arguments[0] : { text: arguments[0] } return asyncResource.runInAsyncScope(() => { + const abortController = new AbortController() + startPoolQueryCh.publish({ - query: pgQuery + query: pgQuery, + abortController }) const finish = asyncResource.bind(function () { @@ -121,6 +162,20 @@ function wrapPoolQuery (query) { }) const cb = arguments[arguments.length - 1] + + if (abortController.signal.aborted) { + const error = abortController.signal.reason || new Error('Aborted') + finish() + + if (typeof cb === 'function') { + cb(error) + + return + } else { + return Promise.reject(error) + } + } + if (typeof cb === 'function') { arguments[arguments.length - 1] = shimmer.wrapFunction(cb, cb => function () { finish() diff --git a/packages/datadog-instrumentations/src/vitest.js b/packages/datadog-instrumentations/src/vitest.js index 3d92b91cc05..f0117e0e8c0 100644 --- a/packages/datadog-instrumentations/src/vitest.js +++ b/packages/datadog-instrumentations/src/vitest.js @@ -1,5 +1,6 @@ const { addHook, channel, AsyncResource } = require('./helpers/instrument') const shimmer = require('../../datadog-shimmer') +const log = require('../../dd-trace/src/log') // test hooks const testStartCh = channel('ci:vitest:test:start') @@ -7,6 +8,7 @@ const testFinishTimeCh = channel('ci:vitest:test:finish-time') const testPassCh = channel('ci:vitest:test:pass') const testErrorCh = channel('ci:vitest:test:error') const testSkipCh = channel('ci:vitest:test:skip') +const isNewTestCh = channel('ci:vitest:test:is-new') // test suite hooks const testSuiteStartCh = channel('ci:vitest:test-suite:start') @@ -17,9 +19,13 @@ const testSuiteErrorCh = channel('ci:vitest:test-suite:error') const testSessionStartCh = channel('ci:vitest:session:start') const testSessionFinishCh = channel('ci:vitest:session:finish') const libraryConfigurationCh = channel('ci:vitest:library-configuration') +const knownTestsCh = channel('ci:vitest:known-tests') +const isEarlyFlakeDetectionFaultyCh = channel('ci:vitest:is-early-flake-detection-faulty') const taskToAsync = new WeakMap() - +const taskToStatuses = new WeakMap() +const newTasks = new WeakSet() +const switchedStatuses = new WeakSet() const sessionAsyncResource = new AsyncResource('bound-anonymous-fn') function isReporterPackage (vitestPackage) { @@ -108,20 +114,61 @@ function getSortWrapper (sort) { // will 
not work. This will be a known limitation. let isFlakyTestRetriesEnabled = false let flakyTestRetriesCount = 0 + let isEarlyFlakeDetectionEnabled = false + let earlyFlakeDetectionNumRetries = 0 + let isEarlyFlakeDetectionFaulty = false + let knownTests = {} try { const { err, libraryConfig } = await getChannelPromise(libraryConfigurationCh) if (!err) { isFlakyTestRetriesEnabled = libraryConfig.isFlakyTestRetriesEnabled flakyTestRetriesCount = libraryConfig.flakyTestRetriesCount + isEarlyFlakeDetectionEnabled = libraryConfig.isEarlyFlakeDetectionEnabled + earlyFlakeDetectionNumRetries = libraryConfig.earlyFlakeDetectionNumRetries } } catch (e) { isFlakyTestRetriesEnabled = false + isEarlyFlakeDetectionEnabled = false } + if (isFlakyTestRetriesEnabled && !this.ctx.config.retry && flakyTestRetriesCount > 0) { this.ctx.config.retry = flakyTestRetriesCount } + if (isEarlyFlakeDetectionEnabled) { + const knownTestsResponse = await getChannelPromise(knownTestsCh) + if (!knownTestsResponse.err) { + knownTests = knownTestsResponse.knownTests + const testFilepaths = await this.ctx.getTestFilepaths() + + isEarlyFlakeDetectionFaultyCh.publish({ + knownTests: knownTests.vitest || {}, + testFilepaths, + onDone: (isFaulty) => { + isEarlyFlakeDetectionFaulty = isFaulty + } + }) + if (isEarlyFlakeDetectionFaulty) { + isEarlyFlakeDetectionEnabled = false + log.warn('Early flake detection is disabled because the number of new tests is too high.') + } else { + // TODO: use this to pass session and module IDs to the worker, instead of polluting process.env + // Note: setting this.ctx.config.provide directly does not work because it's cached + try { + const workspaceProject = this.ctx.getCoreWorkspaceProject() + workspaceProject._provided._ddKnownTests = knownTests.vitest + workspaceProject._provided._ddIsEarlyFlakeDetectionEnabled = isEarlyFlakeDetectionEnabled + workspaceProject._provided._ddEarlyFlakeDetectionNumRetries = earlyFlakeDetectionNumRetries + } catch (e) { + log.warn('Could not send known tests to workers so Early Flake Detection will not work.') + } + } + } else { + isEarlyFlakeDetectionEnabled = false + } + } + let testCodeCoverageLinesTotal if (this.ctx.coverageProvider?.generateCoverage) { @@ -154,6 +201,8 @@ function getSortWrapper (sort) { status: getSessionStatus(this.state), testCodeCoverageLinesTotal, error, + isEarlyFlakeDetectionEnabled, + isEarlyFlakeDetectionFaulty, onFinish }) }) @@ -188,12 +237,83 @@ addHook({ file: 'dist/runners.js' }, (vitestPackage) => { const { VitestTestRunner } = vitestPackage + + // `onBeforeRunTask` is run before any repetition or attempt is run + shimmer.wrap(VitestTestRunner.prototype, 'onBeforeRunTask', onBeforeRunTask => async function (task) { + const testName = getTestName(task) + try { + const { + _ddKnownTests: knownTests, + _ddIsEarlyFlakeDetectionEnabled: isEarlyFlakeDetectionEnabled, + _ddEarlyFlakeDetectionNumRetries: numRepeats + } = globalThis.__vitest_worker__.providedContext + + if (isEarlyFlakeDetectionEnabled) { + isNewTestCh.publish({ + knownTests, + testSuiteAbsolutePath: task.file.filepath, + testName, + onDone: (isNew) => { + if (isNew) { + task.repeats = numRepeats + newTasks.add(task) + taskToStatuses.set(task, []) + } + } + }) + } + } catch (e) { + log.error('Vitest workers could not parse known tests, so Early Flake Detection will not work.') + } + + return onBeforeRunTask.apply(this, arguments) + }) + + // `onAfterRunTask` is run after all repetitions or attempts are run + shimmer.wrap(VitestTestRunner.prototype, 'onAfterRunTask', 
onAfterRunTask => async function (task) { + const { + _ddIsEarlyFlakeDetectionEnabled: isEarlyFlakeDetectionEnabled + } = globalThis.__vitest_worker__.providedContext + + if (isEarlyFlakeDetectionEnabled && taskToStatuses.has(task)) { + const statuses = taskToStatuses.get(task) + // If the test has passed at least once, we consider it passed + if (statuses.includes('pass')) { + if (task.result.state === 'fail') { + switchedStatuses.add(task) + } + task.result.state = 'pass' + } + } + + return onAfterRunTask.apply(this, arguments) + }) + // test start (only tests that are not marked as skip or todo) + // `onBeforeTryTask` is run for every repetition and attempt of the test shimmer.wrap(VitestTestRunner.prototype, 'onBeforeTryTask', onBeforeTryTask => async function (task, retryInfo) { if (!testStartCh.hasSubscribers) { return onBeforeTryTask.apply(this, arguments) } - const { retry: numAttempt } = retryInfo + const testName = getTestName(task) + let isNew = false + let isEarlyFlakeDetectionEnabled = false + + try { + const { + _ddIsEarlyFlakeDetectionEnabled + } = globalThis.__vitest_worker__.providedContext + + isEarlyFlakeDetectionEnabled = _ddIsEarlyFlakeDetectionEnabled + + if (isEarlyFlakeDetectionEnabled) { + isNew = newTasks.has(task) + } + } catch (e) { + log.error('Vitest workers could not parse known tests, so Early Flake Detection will not work.') + } + const { retry: numAttempt, repeats: numRepetition } = retryInfo + // We finish the previous test here because we know it has failed already if (numAttempt > 0) { const asyncResource = taskToAsync.get(task) @@ -205,14 +325,58 @@ addHook({ } } + const lastExecutionStatus = task.result.state + + // These clauses handle task.repeats, whether EFD is enabled or not + // The only thing that EFD does is to forcefully pass the test if it has passed at least once + if (numRepetition > 0 && numRepetition < task.repeats) { // it may or may not have failed + // Here we finish the earlier iteration, + // as long as it's not the _last_ iteration (which will be finished normally) + + // TODO: check test duration (not to repeat if it's too slow) + const asyncResource = taskToAsync.get(task) + if (asyncResource) { + if (lastExecutionStatus === 'fail') { + const testError = task.result?.errors?.[0] + asyncResource.runInAsyncScope(() => { + testErrorCh.publish({ error: testError }) + }) + } else { + asyncResource.runInAsyncScope(() => { + testPassCh.publish({ task }) + }) + } + if (isEarlyFlakeDetectionEnabled) { + const statuses = taskToStatuses.get(task) + statuses.push(lastExecutionStatus) + // If we don't "reset" the result.state to "pass", once a repetition fails, + // vitest will always consider the test as failed, so we can't read the actual status + task.result.state = 'pass' + } + } + } else if (numRepetition === task.repeats) { + const asyncResource = taskToAsync.get(task) + if (lastExecutionStatus === 'fail') { + const testError = task.result?.errors?.[0] + asyncResource.runInAsyncScope(() => { + testErrorCh.publish({ error: testError }) + }) + } else { + asyncResource.runInAsyncScope(() => { + testPassCh.publish({ task }) + }) + } + } + const asyncResource = new AsyncResource('bound-anonymous-fn') taskToAsync.set(task, asyncResource) asyncResource.runInAsyncScope(() => { testStartCh.publish({ - testName: getTestName(task), + testName, testSuiteAbsolutePath: task.file.filepath, - isRetry: numAttempt > 0 + isRetry: numAttempt > 0 || numRepetition > 0, + isNew }) }) return onBeforeTryTask.apply(this, arguments) @@ -230,7 +394,7 @@
const asyncResource = taskToAsync.get(task) if (asyncResource) { - // We don't finish here because the test might fail in a later hook + // We don't finish here because the test might fail in a later hook (afterEach) asyncResource.runInAsyncScope(() => { testFinishTimeCh.publish({ status, task }) }) @@ -270,7 +434,16 @@ addHook({ addHook({ name: 'vitest', - versions: ['>=2.0.5'], + versions: ['>=2.1.0'], + filePattern: 'dist/chunks/RandomSequencer.*' +}, (randomSequencerPackage) => { + shimmer.wrap(randomSequencerPackage.B.prototype, 'sort', getSortWrapper) + return randomSequencerPackage +}) + +addHook({ + name: 'vitest', + versions: ['>=2.0.5 <2.1.0'], filePattern: 'dist/chunks/index.*' }, (vitestPackage) => { if (isReporterPackageNewest(vitestPackage)) { @@ -323,19 +496,21 @@ addHook({ testTasks.forEach(task => { const testAsyncResource = taskToAsync.get(task) const { result } = task + // We have to trick vitest into thinking that the test has passed + // but we want to report it as failed if it did fail + const isSwitchedStatus = switchedStatuses.has(task) if (result) { const { state, duration, errors } = result if (state === 'skip') { // programmatic skip testSkipCh.publish({ testName: getTestName(task), testSuiteAbsolutePath: task.file.filepath }) - } else if (state === 'pass') { + } else if (state === 'pass' && !isSwitchedStatus) { if (testAsyncResource) { testAsyncResource.runInAsyncScope(() => { testPassCh.publish({ task }) }) } - } else if (state === 'fail') { - // If it's failing, we have no accurate finish time, so we have to use `duration` + } else if (state === 'fail' || isSwitchedStatus) { let testError if (errors?.length) { diff --git a/packages/datadog-instrumentations/test/pg.spec.js b/packages/datadog-instrumentations/test/pg.spec.js new file mode 100644 index 00000000000..21d1bfc0951 --- /dev/null +++ b/packages/datadog-instrumentations/test/pg.spec.js @@ -0,0 +1,244 @@ +'use strict' + +const agent = require('../../dd-trace/test/plugins/agent') +const dc = require('dc-polyfill') +const { assert } = require('chai') + +const clients = { + pg: pg => pg.Client +} + +if (process.env.PG_TEST_NATIVE === 'true') { + clients['pg.native'] = pg => pg.native.Client +} + +describe('pg instrumentation', () => { + withVersions('pg', 'pg', version => { + const queryClientStartChannel = dc.channel('apm:pg:query:start') + const queryPoolStartChannel = dc.channel('datadog:pg:pool:query:start') + + let pg + let Query + + function abortQuery ({ abortController }) { + const error = new Error('Test') + abortController.abort(error) + + if (!abortController.signal.reason) { + abortController.signal.reason = error + } + } + + before(() => { + return agent.load(['pg']) + }) + + describe('pg.Client', () => { + Object.keys(clients).forEach(implementation => { + describe(implementation, () => { + let client + + beforeEach(done => { + pg = require(`../../../versions/pg@${version}`).get() + const Client = clients[implementation](pg) + Query = Client.Query + + client = new Client({ + host: '127.0.0.1', + user: 'postgres', + password: 'postgres', + database: 'postgres', + application_name: 'test' + }) + + client.connect(err => done(err)) + }) + + afterEach(() => { + client.end() + }) + + describe('abortController', () => { + afterEach(() => { + if (queryClientStartChannel.hasSubscribers) { + queryClientStartChannel.unsubscribe(abortQuery) + } + }) + + describe('using callback', () => { + it('Should not fail if it is not aborted', (done) => { + client.query('SELECT 1', (err) => { + done(err) + }) + }) + 
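The abort tests in this spec exercise the contract added to `wrapQuery` and `wrapPoolQuery` above: the instrumentation publishes a fresh AbortController on the start channel, and if any subscriber aborts it (here `abortQuery`; in production, e.g. AppSec blocking), the query fails fast with the abort reason instead of reaching the server. A condensed standalone model of that handshake (sketch only, not the full instrumentation):

const dc = require('dc-polyfill')
const startCh = dc.channel('apm:pg:query:start')

// subscriber side, like `abortQuery` above
startCh.subscribe(({ abortController }) => abortController.abort(new Error('Test')))

// publisher side, mirroring the new code path in wrapQuery
const abortController = new AbortController()
startCh.publish({ query: { text: 'SELECT 1' }, abortController })
if (abortController.signal.aborted) {
  // On Node versions supporting abort(reason), the reason carries the error;
  // the spec's abortQuery helper backfills signal.reason for older versions
  console.log(abortController.signal.reason.message) // 'Test'
}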
+ it('Should abort query', (done) => { + queryClientStartChannel.subscribe(abortQuery) + + client.query('SELECT 1', (err) => { + assert.propertyVal(err, 'message', 'Test') + done() + }) + }) + }) + + describe('using promise', () => { + it('Should not fail if it is not aborted', async () => { + await client.query('SELECT 1') + }) + + it('Should abort query', async () => { + queryClientStartChannel.subscribe(abortQuery) + + try { + await client.query('SELECT 1') + } catch (err) { + assert.propertyVal(err, 'message', 'Test') + + return + } + + throw new Error('Query was not aborted') + }) + }) + + describe('using query object', () => { + describe('without callback', () => { + it('Should not fail if it is not aborted', (done) => { + const query = new Query('SELECT 1') + + client.query(query) + + query.on('end', () => { + done() + }) + }) + + it('Should abort query', (done) => { + queryClientStartChannel.subscribe(abortQuery) + + const query = new Query('SELECT 1') + + client.query(query) + + query.on('error', err => { + assert.propertyVal(err, 'message', 'Test') + done() + }) + + query.on('end', () => { + done(new Error('Query was not aborted')) + }) + }) + }) + + describe('with callback in query object', () => { + it('Should not fail if it is not aborted', (done) => { + const query = new Query('SELECT 1') + query.callback = (err) => { + done(err) + } + + client.query(query) + }) + + it('Should abort query', (done) => { + queryClientStartChannel.subscribe(abortQuery) + + const query = new Query('SELECT 1') + query.callback = err => { + assert.propertyVal(err, 'message', 'Test') + done() + } + + client.query(query) + }) + }) + + describe('with callback in query parameter', () => { + it('Should not fail if it is not aborted', (done) => { + const query = new Query('SELECT 1') + + client.query(query, (err) => { + done(err) + }) + }) + + it('Should abort query', (done) => { + queryClientStartChannel.subscribe(abortQuery) + + const query = new Query('SELECT 1') + + client.query(query, err => { + assert.propertyVal(err, 'message', 'Test') + done() + }) + }) + }) + }) + }) + }) + }) + }) + + describe('pg.Pool', () => { + let pool + + beforeEach(() => { + const { Pool } = require(`../../../versions/pg@${version}`).get() + + pool = new Pool({ + host: '127.0.0.1', + user: 'postgres', + password: 'postgres', + database: 'postgres', + application_name: 'test' + }) + }) + + describe('abortController', () => { + afterEach(() => { + if (queryPoolStartChannel.hasSubscribers) { + queryPoolStartChannel.unsubscribe(abortQuery) + } + }) + + describe('using callback', () => { + it('Should not fail if it is not aborted', (done) => { + pool.query('SELECT 1', (err) => { + done(err) + }) + }) + + it('Should abort query', (done) => { + queryPoolStartChannel.subscribe(abortQuery) + + pool.query('SELECT 1', (err) => { + assert.propertyVal(err, 'message', 'Test') + done() + }) + }) + }) + + describe('using promise', () => { + it('Should not fail if it is not aborted', async () => { + await pool.query('SELECT 1') + }) + + it('Should abort query', async () => { + queryPoolStartChannel.subscribe(abortQuery) + + try { + await pool.query('SELECT 1') + } catch (err) { + assert.propertyVal(err, 'message', 'Test') + return + } + + throw new Error('Query was not aborted') + }) + }) + }) + }) + }) +}) diff --git a/packages/datadog-plugin-amqplib/src/consumer.js b/packages/datadog-plugin-amqplib/src/consumer.js index da4efb33fd0..92684e3f9dc 100644 --- a/packages/datadog-plugin-amqplib/src/consumer.js +++ 
b/packages/datadog-plugin-amqplib/src/consumer.js @@ -3,7 +3,6 @@ const { TEXT_MAP } = require('../../../ext/formats') const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer') const { getAmqpMessageSize } = require('../../dd-trace/src/datastreams/processor') -const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway') const { getResourceName } = require('./util') class AmqplibConsumerPlugin extends ConsumerPlugin { @@ -30,8 +29,7 @@ class AmqplibConsumerPlugin extends ConsumerPlugin { }) if ( - this.config.dsmEnabled && message?.properties?.headers && - DsmPathwayCodec.contextExists(message.properties.headers) + this.config.dsmEnabled && message?.properties?.headers ) { const payloadSize = getAmqpMessageSize({ headers: message.properties.headers, content: message.content }) const queue = fields.queue ? fields.queue : fields.routingKey diff --git a/packages/datadog-plugin-aws-sdk/src/base.js b/packages/datadog-plugin-aws-sdk/src/base.js index 7dae7307d13..e815c1e00aa 100644 --- a/packages/datadog-plugin-aws-sdk/src/base.js +++ b/packages/datadog-plugin-aws-sdk/src/base.js @@ -5,9 +5,11 @@ const ClientPlugin = require('../../dd-trace/src/plugins/client') const { storage } = require('../../datadog-core') const { isTrue } = require('../../dd-trace/src/util') const coalesce = require('koalas') +const { tagsFromRequest, tagsFromResponse } = require('../../dd-trace/src/payload-tagging') class BaseAwsSdkPlugin extends ClientPlugin { static get id () { return 'aws' } + static get isPayloadReporter () { return false } get serviceIdentifier () { const id = this.constructor.id.toLowerCase() @@ -20,6 +22,14 @@ class BaseAwsSdkPlugin extends ClientPlugin { return id } + get cloudTaggingConfig () { + return this._tracerConfig.cloudPayloadTagging + } + + get payloadTaggingRules () { + return this.cloudTaggingConfig.rules.aws?.[this.constructor.id] + } + constructor (...args) { super(...args) @@ -51,6 +61,12 @@ class BaseAwsSdkPlugin extends ClientPlugin { this.requestInject(span, request) + if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.requestsEnabled) { + const maxDepth = this.cloudTaggingConfig.maxDepth + const requestTags = tagsFromRequest(this.payloadTaggingRules, request.params, { maxDepth }) + span.addTags(requestTags) + } + const store = storage.getStore() this.enter(span, store) @@ -116,6 +132,7 @@ class BaseAwsSdkPlugin extends ClientPlugin { const params = response.request.params const operation = response.request.operation const extraTags = this.generateTags(params, operation, response) || {} + const tags = Object.assign({ 'aws.response.request_id': response.requestId, 'resource.name': operation, @@ -123,6 +140,22 @@ class BaseAwsSdkPlugin extends ClientPlugin { }, extraTags) span.addTags(tags) + + if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.responsesEnabled) { + const maxDepth = this.cloudTaggingConfig.maxDepth + const responseBody = this.extractResponseBody(response) + const responseTags = tagsFromResponse(this.payloadTaggingRules, responseBody, { maxDepth }) + span.addTags(responseTags) + } + } + + extractResponseBody (response) { + if (response.hasOwnProperty('data')) { + return response.data + } + return Object.fromEntries( + Object.entries(response).filter(([key]) => !['request', 'requestId', 'error', '$metadata'].includes(key)) + ) } generateTags () { diff --git a/packages/datadog-plugin-aws-sdk/src/services/kinesis.js b/packages/datadog-plugin-aws-sdk/src/services/kinesis.js index 98547c564f8..60802bfc448 100644 
--- a/packages/datadog-plugin-aws-sdk/src/services/kinesis.js +++ b/packages/datadog-plugin-aws-sdk/src/services/kinesis.js @@ -113,7 +113,7 @@ class Kinesis extends BaseAwsSdkPlugin { const parsedAttributes = JSON.parse(Buffer.from(record.Data).toString()) if ( - parsedAttributes?._datadog && streamName && DsmPathwayCodec.contextExists(parsedAttributes._datadog) + parsedAttributes?._datadog && streamName ) { const payloadSize = getSizeOrZero(record.Data) this.tracer.decodeDataStreamsContext(parsedAttributes._datadog) diff --git a/packages/datadog-plugin-aws-sdk/src/services/sns.js b/packages/datadog-plugin-aws-sdk/src/services/sns.js index e1cbd38251e..4e2b16f1d18 100644 --- a/packages/datadog-plugin-aws-sdk/src/services/sns.js +++ b/packages/datadog-plugin-aws-sdk/src/services/sns.js @@ -7,6 +7,7 @@ const BaseAwsSdkPlugin = require('../base') class Sns extends BaseAwsSdkPlugin { static get id () { return 'sns' } static get peerServicePrecursors () { return ['topicname'] } + static get isPayloadReporter () { return true } generateTags (params, operation, response) { if (!params) return {} @@ -20,6 +21,7 @@ class Sns extends BaseAwsSdkPlugin { // Get the topic name from the last part of the ARN const topicName = arnParts[arnParts.length - 1] + return { 'resource.name': `${operation} ${params.TopicArn || response.data.TopicArn}`, 'aws.sns.topic_arn': TopicArn, diff --git a/packages/datadog-plugin-aws-sdk/src/services/sqs.js b/packages/datadog-plugin-aws-sdk/src/services/sqs.js index 35854ed3c1d..54a3e7e756c 100644 --- a/packages/datadog-plugin-aws-sdk/src/services/sqs.js +++ b/packages/datadog-plugin-aws-sdk/src/services/sqs.js @@ -194,7 +194,7 @@ class Sqs extends BaseAwsSdkPlugin { parsedAttributes = this.parseDatadogAttributes(message.MessageAttributes._datadog) } } - if (parsedAttributes && DsmPathwayCodec.contextExists(parsedAttributes)) { + if (parsedAttributes) { const payloadSize = getHeadersSize({ Body: message.Body, MessageAttributes: message.MessageAttributes diff --git a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js index 848b00855d4..4f68f5fbf94 100644 --- a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js @@ -114,6 +114,28 @@ describe('Plugin', () => { s3.listBuckets({}, e => e && done(e)) }) + // different versions of aws-sdk use different casings and different AWS headers + it('should include tracing headers and not cause a 403 error', (done) => { + const HttpClientPlugin = require('../../datadog-plugin-http/src/client.js') + const spy = sinon.spy(HttpClientPlugin.prototype, 'bindStart') + agent.use(traces => { + const headers = new Set( + Object.keys(spy.firstCall.firstArg.args.options.headers) + .map(x => x.toLowerCase()) + ) + spy.restore() + + expect(headers).to.include('authorization') + expect(headers).to.include('x-amz-date') + expect(headers).to.include('x-datadog-trace-id') + expect(headers).to.include('x-datadog-parent-id') + expect(headers).to.include('x-datadog-sampling-priority') + expect(headers).to.include('x-datadog-tags') + }).then(done, done) + + s3.listBuckets({}, e => e && done(e)) + }) + it('should mark error responses', (done) => { let error diff --git a/packages/datadog-plugin-aws-sdk/test/sns.spec.js b/packages/datadog-plugin-aws-sdk/test/sns.spec.js index 293833a6009..7b62156f06c 100644 --- a/packages/datadog-plugin-aws-sdk/test/sns.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/sns.spec.js @@ -84,6 +84,242 @@ 
describe('Sns', function () { }) } + describe('with payload tagging', () => { + before(async () => { + await agent.load('aws-sdk') + await agent.close({ ritmReset: false, wipe: true }) + await agent.load('aws-sdk', {}, { + cloudPayloadTagging: { + request: '$.MessageAttributes.foo,$.MessageAttributes.redacted.StringValue.foo', + response: '$.MessageId,$.Attributes.DisplayName', + maxDepth: 5 + } + }) + }) + + after(() => agent.close({ ritmReset: false, wipe: true })) + + before(done => { + createResources('TestQueue', 'TestTopic', done) + }) + + after(done => { + sns.deleteTopic({ TopicArn }, done) + }) + + after(done => { + sqs.deleteQueue({ QueueUrl }, done) + }) + + it('adds request and response payloads as flattened tags', done => { + agent.use(traces => { + const span = traces[0][0] + + expect(span.resource).to.equal(`publish ${TopicArn}`) + expect(span.meta).to.include({ + 'aws.sns.topic_arn': TopicArn, + topicname: 'TestTopic', + aws_service: 'SNS', + region: 'us-east-1', + 'aws.request.body.TopicArn': TopicArn, + 'aws.request.body.Message': 'message 1', + 'aws.request.body.MessageAttributes.baz.DataType': 'String', + 'aws.request.body.MessageAttributes.baz.StringValue': 'bar', + 'aws.request.body.MessageAttributes.keyOne.DataType': 'String', + 'aws.request.body.MessageAttributes.keyOne.StringValue': 'keyOne', + 'aws.request.body.MessageAttributes.keyTwo.DataType': 'String', + 'aws.request.body.MessageAttributes.keyTwo.StringValue': 'keyTwo', + 'aws.response.body.MessageId': 'redacted' + }) + }).then(done, done) + + sns.publish({ + TopicArn, + Message: 'message 1', + MessageAttributes: { + baz: { DataType: 'String', StringValue: 'bar' }, + keyOne: { DataType: 'String', StringValue: 'keyOne' }, + keyTwo: { DataType: 'String', StringValue: 'keyTwo' } + } + }, e => e && done(e)) + }) + + it('expands and redacts keys identified as expandable', done => { + agent.use(traces => { + const span = traces[0][0] + + expect(span.resource).to.equal(`publish ${TopicArn}`) + expect(span.meta).to.include({ + 'aws.sns.topic_arn': TopicArn, + topicname: 'TestTopic', + aws_service: 'SNS', + region: 'us-east-1', + 'aws.request.body.TopicArn': TopicArn, + 'aws.request.body.Message': 'message 1', + 'aws.request.body.MessageAttributes.redacted.StringValue.foo': 'redacted', + 'aws.request.body.MessageAttributes.unredacted.StringValue.foo': 'bar', + 'aws.request.body.MessageAttributes.unredacted.StringValue.baz': 'yup', + 'aws.response.body.MessageId': 'redacted' + }) + }).then(done, done) + + sns.publish({ + TopicArn, + Message: 'message 1', + MessageAttributes: { + unredacted: { DataType: 'String', StringValue: '{"foo": "bar", "baz": "yup"}' }, + redacted: { DataType: 'String', StringValue: '{"foo": "bar"}' } + } + }, e => e && done(e)) + }) + + describe('user-defined redaction', () => { + it('redacts user-defined keys to suppress in request', done => { + agent.use(traces => { + const span = traces[0][0] + + expect(span.resource).to.equal(`publish ${TopicArn}`) + expect(span.meta).to.include({ + 'aws.sns.topic_arn': TopicArn, + topicname: 'TestTopic', + aws_service: 'SNS', + region: 'us-east-1', + 'aws.request.body.TopicArn': TopicArn, + 'aws.request.body.Message': 'message 1', + 'aws.request.body.MessageAttributes.foo': 'redacted', + 'aws.request.body.MessageAttributes.keyOne.DataType': 'String', + 'aws.request.body.MessageAttributes.keyOne.StringValue': 'keyOne', + 'aws.request.body.MessageAttributes.keyTwo.DataType': 'String', + 'aws.request.body.MessageAttributes.keyTwo.StringValue': 'keyTwo' + }) + 
expect(span.meta).to.have.property('aws.response.body.MessageId') + }).then(done, done) + + sns.publish({ + TopicArn, + Message: 'message 1', + MessageAttributes: { + foo: { DataType: 'String', StringValue: 'bar' }, + keyOne: { DataType: 'String', StringValue: 'keyOne' }, + keyTwo: { DataType: 'String', StringValue: 'keyTwo' } + } + }, e => e && done(e)) + }) + + // TODO add response tests + it('redacts user-defined keys to suppress in response', done => { + agent.use(traces => { + const span = traces[0][0] + expect(span.resource).to.equal(`getTopicAttributes ${TopicArn}`) + expect(span.meta).to.include({ + 'aws.sns.topic_arn': TopicArn, + topicname: 'TestTopic', + aws_service: 'SNS', + region: 'us-east-1', + 'aws.request.body.TopicArn': TopicArn, + 'aws.response.body.Attributes.DisplayName': 'redacted' + }) + }).then(done, done) + + sns.getTopicAttributes({ TopicArn }, e => e && done(e)) + }) + }) + + describe('redaction of internally suppressed keys', () => { + const supportsSMSNotification = (moduleName, version) => { + switch (moduleName) { + case 'aws-sdk': + // aws-sdk-js phone notifications introduced in c6d1bb1a + return semver.intersects(version, '>=2.10.0') + case '@aws-sdk/smithy-client': + return true + default: + return false + } + } + + if (supportsSMSNotification(moduleName, version)) { + // TODO + describe.skip('phone number', () => { + before(done => { + sns.createSMSSandboxPhoneNumber({ PhoneNumber: '+33628606135' }, err => err && done(err)) + sns.createSMSSandboxPhoneNumber({ PhoneNumber: '+33628606136' }, err => err && done(err)) + }) + + after(done => { + sns.deleteSMSSandboxPhoneNumber({ PhoneNumber: '+33628606135' }, err => err && done(err)) + sns.deleteSMSSandboxPhoneNumber({ PhoneNumber: '+33628606136' }, err => err && done(err)) + }) + + it('redacts phone numbers in request', done => { + agent.use(traces => { + const span = traces[0][0] + + expect(span.resource).to.equal('publish') + expect(span.meta).to.include({ + aws_service: 'SNS', + region: 'us-east-1', + 'aws.request.body.PhoneNumber': 'redacted', + 'aws.request.body.Message': 'message 1' + }) + }).then(done, done) + + sns.publish({ + PhoneNumber: '+33628606135', + Message: 'message 1' + }, e => e && done(e)) + }) + + it('redacts phone numbers in response', done => { + agent.use(traces => { + const span = traces[0][0] + + expect(span.resource).to.equal('publish') + expect(span.meta).to.include({ + aws_service: 'SNS', + region: 'us-east-1', + 'aws.response.body.PhoneNumber': 'redacted' + }) + }).then(done, done) + + sns.listSMSSandboxPhoneNumbers({ + PhoneNumber: '+33628606135', + Message: 'message 1' + }, e => e && done(e)) + }) + }) + } + + describe('subscription confirmation tokens', () => { + it('redacts tokens in request', done => { + agent.use(traces => { + const span = traces[0][0] + + expect(span.resource).to.equal(`confirmSubscription ${TopicArn}`) + expect(span.meta).to.include({ + aws_service: 'SNS', + 'aws.sns.topic_arn': TopicArn, + topicname: 'TestTopic', + region: 'us-east-1', + 'aws.request.body.Token': 'redacted', + 'aws.request.body.TopicArn': TopicArn + }) + }).then(done, done) + + sns.confirmSubscription({ + TopicArn, + Token: '1234' + }, () => {}) + }) + + // TODO + it.skip('redacts tokens in response', () => { + + }) + }) + }) + }) + describe('no configuration', () => { before(() => { parentId = '0' @@ -284,7 +520,7 @@ describe('Sns', function () { }) after(() => { - return agent.close({ ritmReset: false }) + return agent.close({ ritmReset: false, wipe: true }) }) afterEach(() => { diff 
--git a/packages/datadog-plugin-cucumber/src/index.js b/packages/datadog-plugin-cucumber/src/index.js index 98ed65cfbd4..d24f97c33e6 100644 --- a/packages/datadog-plugin-cucumber/src/index.js +++ b/packages/datadog-plugin-cucumber/src/index.js @@ -17,6 +17,7 @@ const { ITR_CORRELATION_ID, TEST_SOURCE_FILE, TEST_EARLY_FLAKE_ENABLED, + TEST_EARLY_FLAKE_ABORT_REASON, TEST_IS_NEW, TEST_IS_RETRY, TEST_SUITE_ID, @@ -79,6 +80,7 @@ class CucumberPlugin extends CiPlugin { hasUnskippableSuites, hasForcedToRunSuites, isEarlyFlakeDetectionEnabled, + isEarlyFlakeDetectionFaulty, isParallel }) => { const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.libraryConfig || {} @@ -99,6 +101,9 @@ class CucumberPlugin extends CiPlugin { if (isEarlyFlakeDetectionEnabled) { this.testSessionSpan.setTag(TEST_EARLY_FLAKE_ENABLED, 'true') } + if (isEarlyFlakeDetectionFaulty) { + this.testSessionSpan.setTag(TEST_EARLY_FLAKE_ABORT_REASON, 'faulty') + } if (isParallel) { this.testSessionSpan.setTag(CUCUMBER_IS_PARALLEL, 'true') } @@ -116,7 +121,15 @@ class CucumberPlugin extends CiPlugin { this.tracer._exporter.flush() }) - this.addSub('ci:cucumber:test-suite:start', ({ testSuitePath, isUnskippable, isForcedToRun, itrCorrelationId }) => { + this.addSub('ci:cucumber:test-suite:start', ({ + testFileAbsolutePath, + isUnskippable, + isForcedToRun, + itrCorrelationId + }) => { + const testSuitePath = getTestSuitePath(testFileAbsolutePath, process.cwd()) + const testSourceFile = getTestSuitePath(testFileAbsolutePath, this.repositoryRoot) + const testSuiteMetadata = getTestSuiteCommonTags( this.command, this.frameworkVersion, @@ -134,6 +147,16 @@ class CucumberPlugin extends CiPlugin { if (itrCorrelationId) { testSuiteMetadata[ITR_CORRELATION_ID] = itrCorrelationId } + if (testSourceFile) { + testSuiteMetadata[TEST_SOURCE_FILE] = testSourceFile + testSuiteMetadata[TEST_SOURCE_START] = 1 + } + + const codeOwners = this.getCodeOwners(testSuiteMetadata) + if (codeOwners) { + testSuiteMetadata[TEST_CODE_OWNERS] = codeOwners + } + const testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', { childOf: this.testModuleSpan, tags: { diff --git a/packages/datadog-plugin-cucumber/test/index.spec.js b/packages/datadog-plugin-cucumber/test/index.spec.js index e9ef915dab8..a43a2a53509 100644 --- a/packages/datadog-plugin-cucumber/test/index.spec.js +++ b/packages/datadog-plugin-cucumber/test/index.spec.js @@ -56,7 +56,7 @@ describe('Plugin', function () { let Cucumber this.timeout(10000) withVersions('cucumber', '@cucumber/cucumber', (version, _, specificVersion) => { - if ((NODE_MAJOR <= 16) && semver.satisfies(specificVersion, '>=10')) return + if (NODE_MAJOR <= 16 && semver.satisfies(specificVersion, '>=10')) return afterEach(() => { // > If you want to run tests multiple times, you may need to clear Node's require cache diff --git a/packages/datadog-plugin-cypress/src/cypress-plugin.js b/packages/datadog-plugin-cypress/src/cypress-plugin.js index e7c85ae483c..630d613f772 100644 --- a/packages/datadog-plugin-cypress/src/cypress-plugin.js +++ b/packages/datadog-plugin-cypress/src/cypress-plugin.js @@ -28,7 +28,10 @@ const { TEST_SOURCE_FILE, TEST_IS_NEW, TEST_IS_RETRY, - TEST_EARLY_FLAKE_ENABLED + TEST_EARLY_FLAKE_ENABLED, + getTestSessionName, + TEST_SESSION_NAME, + TEST_LEVEL_EVENT_TYPES } = require('../../dd-trace/src/plugins/util/test') const { isMarkedAsUnskippable } = require('../../datadog-plugin-jest/src/util') const { ORIGIN_KEY, COMPONENT } = require('../../dd-trace/src/constants') @@ -245,10 +248,22 @@ class 
CypressPlugin { return this.libraryConfigurationPromise } - getTestSuiteSpan (suite) { + getTestSuiteSpan ({ testSuite, testSuiteAbsolutePath }) { const testSuiteSpanMetadata = - getTestSuiteCommonTags(this.command, this.frameworkVersion, suite, TEST_FRAMEWORK_NAME) + getTestSuiteCommonTags(this.command, this.frameworkVersion, testSuite, TEST_FRAMEWORK_NAME) + this.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite') + + if (testSuiteAbsolutePath) { + const testSourceFile = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot) + testSuiteSpanMetadata[TEST_SOURCE_FILE] = testSourceFile + testSuiteSpanMetadata[TEST_SOURCE_START] = 1 + const codeOwners = this.getTestCodeOwners({ testSuite, testSourceFile }) + if (codeOwners) { + testSuiteSpanMetadata[TEST_CODE_OWNERS] = codeOwners + } + } + return this.tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_suite`, { childOf: this.testModuleSpan, tags: { @@ -387,6 +402,18 @@ class CypressPlugin { testSessionSpanMetadata[TEST_EARLY_FLAKE_ENABLED] = 'true' } + const testSessionName = getTestSessionName(this.tracer._tracer._config, this.command, this.testEnvironmentMetadata) + + if (this.tracer._tracer._exporter?.setMetadataTags) { + const metadataTags = {} + for (const testLevel of TEST_LEVEL_EVENT_TYPES) { + metadataTags[testLevel] = { + [TEST_SESSION_NAME]: testSessionName + } + } + this.tracer._tracer._exporter.setMetadataTags(metadataTags) + } + this.testSessionSpan = this.tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_session`, { childOf, tags: { @@ -473,7 +500,10 @@ class CypressPlugin { // dd:testSuiteStart hasn't been triggered for whatever reason // We will create the test suite span on the spot if that's the case log.warn('There was an error creating the test suite event.') - this.testSuiteSpan = this.getTestSuiteSpan(spec.relative) + this.testSuiteSpan = this.getTestSuiteSpan({ + testSuite: spec.relative, + testSuiteAbsolutePath: spec.absolute + }) } // Get tests that didn't go through `dd:afterEach` @@ -584,7 +614,7 @@ class CypressPlugin { getTasks () { return { - 'dd:testSuiteStart': (testSuite) => { + 'dd:testSuiteStart': ({ testSuite, testSuiteAbsolutePath }) => { const suitePayload = { isEarlyFlakeDetectionEnabled: this.isEarlyFlakeDetectionEnabled, knownTestsForSuite: this.knownTestsByTestSuite?.[testSuite] || [], @@ -594,7 +624,7 @@ class CypressPlugin { if (this.testSuiteSpan) { return suitePayload } - this.testSuiteSpan = this.getTestSuiteSpan(testSuite) + this.testSuiteSpan = this.getTestSuiteSpan({ testSuite, testSuiteAbsolutePath }) return suitePayload }, 'dd:beforeEach': (test) => { diff --git a/packages/datadog-plugin-cypress/src/support.js b/packages/datadog-plugin-cypress/src/support.js index 9d34176260d..b9a739c94e4 100644 --- a/packages/datadog-plugin-cypress/src/support.js +++ b/packages/datadog-plugin-cypress/src/support.js @@ -61,7 +61,10 @@ beforeEach(function () { }) before(function () { - cy.task('dd:testSuiteStart', Cypress.mocha.getRootSuite().file).then((suiteConfig) => { + cy.task('dd:testSuiteStart', { + testSuite: Cypress.mocha.getRootSuite().file, + testSuiteAbsolutePath: Cypress.spec && Cypress.spec.absolute + }).then((suiteConfig) => { if (suiteConfig) { isEarlyFlakeDetectionEnabled = suiteConfig.isEarlyFlakeDetectionEnabled knownTestsForSuite = suiteConfig.knownTestsForSuite diff --git a/packages/datadog-plugin-fastify/test/index.spec.js b/packages/datadog-plugin-fastify/test/index.spec.js index 33b1430f98c..6b20e58a728 100644 --- a/packages/datadog-plugin-fastify/test/index.spec.js +++ 
b/packages/datadog-plugin-fastify/test/index.spec.js @@ -5,6 +5,7 @@ const axios = require('axios') const semver = require('semver') const { ERROR_MESSAGE, ERROR_STACK, ERROR_TYPE } = require('../../dd-trace/src/constants') const agent = require('../../dd-trace/test/plugins/agent') +const { NODE_MAJOR } = require('../../../version') const host = 'localhost' @@ -14,7 +15,7 @@ describe('Plugin', () => { let app describe('fastify', () => { - withVersions('fastify', 'fastify', version => { + withVersions('fastify', 'fastify', (version, _, specificVersion) => { beforeEach(() => { tracer = require('../../dd-trace') }) @@ -25,6 +26,8 @@ describe('Plugin', () => { withExports('fastify', version, ['default', 'fastify'], '>=3', getExport => { describe('without configuration', () => { + if (NODE_MAJOR <= 18 && semver.satisfies(specificVersion, '>=5')) return + before(() => { return agent.load(['fastify', 'find-my-way', 'http'], [{}, {}, { client: false }]) }) diff --git a/packages/datadog-plugin-fastify/test/integration-test/client.spec.js b/packages/datadog-plugin-fastify/test/integration-test/client.spec.js index 1ccb9791dc6..6a04cf6912b 100644 --- a/packages/datadog-plugin-fastify/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-fastify/test/integration-test/client.spec.js @@ -1,5 +1,5 @@ 'use strict' - +const semver = require('semver') const { FakeAgent, createSandbox, @@ -8,6 +8,7 @@ const { spawnPluginIntegrationTestProc } = require('../../../../integration-tests/helpers') const { assert } = require('chai') +const { NODE_MAJOR } = require('../../../../version') describe('esm', () => { let agent @@ -15,7 +16,9 @@ describe('esm', () => { let sandbox // skip older versions of fastify due to syntax differences - withVersions('fastify', 'fastify', '>=3', version => { + withVersions('fastify', 'fastify', '>=3', (version, _, specificVersion) => { + if (NODE_MAJOR <= 18 && semver.satisfies(specificVersion, '>=5')) return + before(async function () { this.timeout(20000) sandbox = await createSandbox([`'fastify@${version}'`], false, diff --git a/packages/datadog-plugin-fastify/test/integration-test/helper.mjs b/packages/datadog-plugin-fastify/test/integration-test/helper.mjs index 2c9c2ee8da0..e818008c977 100644 --- a/packages/datadog-plugin-fastify/test/integration-test/helper.mjs +++ b/packages/datadog-plugin-fastify/test/integration-test/helper.mjs @@ -4,7 +4,7 @@ export async function createAndStartServer (app) { }) try { - await app.listen(0) + await app.listen({ port: 0 }) const address = app.server.address() const port = address.port process.send({ port }) diff --git a/packages/datadog-plugin-fetch/test/index.spec.js b/packages/datadog-plugin-fetch/test/index.spec.js index 1d322de04a4..b469f4a9722 100644 --- a/packages/datadog-plugin-fetch/test/index.spec.js +++ b/packages/datadog-plugin-fetch/test/index.spec.js @@ -215,102 +215,6 @@ describe('Plugin', () => { }) }) - it('should skip injecting if the Authorization header contains an AWS signature', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - appListener = server(app, port => { - fetch(`http://localhost:${port}/`, { - headers: { - Authorization: 'AWS4-HMAC-SHA256 ...' 
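-                //
-                // Annotation (not part of the diff): the four fetch tests removed here
-                // covered the old behavior of skipping trace-header injection whenever a
-                // request looked AWS-signed. After this change, injection is gated only by
-                // the propagation filter, mirroring the http client change further below:
-                //
-                //   if (this.config.propagationFilter(uri)) {
-                //     this.tracer.inject(span, HTTP_HEADERS, options.headers)
-                //   }
-                //
-                // Coverage for signed requests moves to aws-sdk.spec.js above, which now
-                // asserts that a signed call carries both the AWS signature headers and
-                // the x-datadog-* headers without triggering a 403.
-                //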
- } - }) - }) - }) - - it('should skip injecting if one of the Authorization headers contains an AWS signature', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - appListener = server(app, port => { - fetch(`http://localhost:${port}/`, { - headers: { - Authorization: ['AWS4-HMAC-SHA256 ...'] - } - }) - }) - }) - - it('should skip injecting if the X-Amz-Signature header is set', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - appListener = server(app, port => { - fetch(`http://localhost:${port}/`, { - headers: { - 'X-Amz-Signature': 'abc123' - } - }) - }) - }) - - it('should skip injecting if the X-Amz-Signature query param is set', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - appListener = server(app, port => { - fetch(`http://localhost:${port}/?X-Amz-Signature=abc123`) - }) - }) - it('should handle connection errors', done => { let error diff --git a/packages/datadog-plugin-http/src/client.js b/packages/datadog-plugin-http/src/client.js index d4c105d2508..55a025f4970 100644 --- a/packages/datadog-plugin-http/src/client.js +++ b/packages/datadog-plugin-http/src/client.js @@ -58,7 +58,7 @@ class HttpClientPlugin extends ClientPlugin { span._spanContext._trace.record = false } - if (this.shouldInjectTraceHeaders(options, uri)) { + if (this.config.propagationFilter(uri)) { this.tracer.inject(span, HTTP_HEADERS, options.headers) } @@ -71,18 +71,6 @@ class HttpClientPlugin extends ClientPlugin { return message.currentStore } - shouldInjectTraceHeaders (options, uri) { - if (hasAmazonSignature(options) && !this.config.enablePropagationWithAmazonHeaders) { - return false - } - - if (!this.config.propagationFilter(uri)) { - return false - } - - return true - } - bindAsyncStart ({ parentStore }) { return parentStore } @@ -212,31 +200,6 @@ function getHooks (config) { return { request } } -function hasAmazonSignature (options) { - if (!options) { - return false - } - - if (options.headers) { - const headers = Object.keys(options.headers) - .reduce((prev, next) => Object.assign(prev, { - [next.toLowerCase()]: options.headers[next] - }), {}) - - if (headers['x-amz-signature']) { - return true - } - - if ([].concat(headers.authorization).some(startsWith('AWS4-HMAC-SHA256'))) { - return true - } - } - - const search = options.search || options.path - - return search && search.toLowerCase().indexOf('x-amz-signature=') !== -1 -} - function extractSessionDetails (options) { if (typeof options === 'string') { return new URL(options).host @@ -248,8 +211,4 @@ function extractSessionDetails (options) { return { host, port } } -function startsWith (searchString) { - return value => String(value).startsWith(searchString) -} - module.exports = HttpClientPlugin diff --git a/packages/datadog-plugin-http/test/client.spec.js b/packages/datadog-plugin-http/test/client.spec.js index 42f4c8436f8..268aff9b238 100644 --- a/packages/datadog-plugin-http/test/client.spec.js +++ 
b/packages/datadog-plugin-http/test/client.spec.js @@ -446,116 +446,6 @@ describe('Plugin', () => { }) }) - it('should skip injecting if the Authorization header contains an AWS signature', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - appListener = server(app, port => { - const req = http.request({ - port, - headers: { - Authorization: 'AWS4-HMAC-SHA256 ...' - } - }) - - req.end() - }) - }) - - it('should skip injecting if one of the Authorization headers contains an AWS signature', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - appListener = server(app, port => { - const req = http.request({ - port, - headers: { - Authorization: ['AWS4-HMAC-SHA256 ...'] - } - }) - - req.end() - }) - }) - - it('should skip injecting if the X-Amz-Signature header is set', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - appListener = server(app, port => { - const req = http.request({ - port, - headers: { - 'X-Amz-Signature': 'abc123' - } - }) - - req.end() - }) - }) - - it('should skip injecting if the X-Amz-Signature query param is set', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - appListener = server(app, port => { - const req = http.request({ - port, - path: '/?X-Amz-Signature=abc123' - }) - - req.end() - }) - }) - it('should run the callback in the parent context', done => { const app = express() @@ -1093,50 +983,6 @@ describe('Plugin', () => { }) }) - describe('with config enablePropagationWithAmazonHeaders enabled', () => { - let config - - beforeEach(() => { - config = { - enablePropagationWithAmazonHeaders: true - } - - return agent.load('http', config) - .then(() => { - http = require(pluginToBeLoaded) - express = require('express') - }) - }) - - it('should inject tracing header into AWS signed request', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.a('string') - expect(req.get('x-datadog-parent-id')).to.be.a('string') - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - appListener = server(app, port => { - const req = http.request({ - port, - headers: { - Authorization: 'AWS4-HMAC-SHA256 ...' 
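-            //
-            // Annotation (not part of the diff): with both the automatic AWS-signature
-            // skip and the `enablePropagationWithAmazonHeaders` option removed, injection
-            // is controlled solely by the propagation filter. A minimal sketch of how an
-            // application could still keep trace headers off particular requests; it
-            // assumes the http plugin's existing `propagationBlocklist` option (the input
-            // that `config.propagationFilter` is built from) — treat the exact option
-            // shape as an assumption, not something this PR adds:
-            //
-            //   const tracer = require('dd-trace').init()
-            //
-            //   tracer.use('http', {
-            //     client: {
-            //       // URLs matching an entry here fail the propagation filter,
-            //       // so no x-datadog-* headers are injected into those requests
-            //       propagationBlocklist: [/\.amazonaws\.com/]
-            //     }
-            //   })
-            //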
- } - }) - - req.end() - }) - }) - }) - describe('with validateStatus configuration', () => { let config diff --git a/packages/datadog-plugin-http2/src/client.js b/packages/datadog-plugin-http2/src/client.js index 296f1161e59..3f8d996fcd3 100644 --- a/packages/datadog-plugin-http2/src/client.js +++ b/packages/datadog-plugin-http2/src/client.js @@ -62,9 +62,7 @@ class Http2ClientPlugin extends ClientPlugin { addHeaderTags(span, headers, HTTP_REQUEST_HEADERS, this.config) - if (!hasAmazonSignature(headers, path)) { - this.tracer.inject(span, HTTP_HEADERS, headers) - } + this.tracer.inject(span, HTTP_HEADERS, headers) message.parentStore = store message.currentStore = { ...store, span } @@ -134,29 +132,6 @@ function extractSessionDetails (authority, options) { return { protocol, port, host } } -function hasAmazonSignature (headers, path) { - if (headers) { - headers = Object.keys(headers) - .reduce((prev, next) => Object.assign(prev, { - [next.toLowerCase()]: headers[next] - }), {}) - - if (headers['x-amz-signature']) { - return true - } - - if ([].concat(headers.authorization).some(startsWith('AWS4-HMAC-SHA256'))) { - return true - } - } - - return path && path.toLowerCase().indexOf('x-amz-signature=') !== -1 -} - -function startsWith (searchString) { - return value => String(value).startsWith(searchString) -} - function getStatusValidator (config) { if (typeof config.validateStatus === 'function') { return config.validateStatus diff --git a/packages/datadog-plugin-http2/test/client.spec.js b/packages/datadog-plugin-http2/test/client.spec.js index f8d44f3ac0b..cfdedcde489 100644 --- a/packages/datadog-plugin-http2/test/client.spec.js +++ b/packages/datadog-plugin-http2/test/client.spec.js @@ -365,131 +365,6 @@ describe('Plugin', () => { }) }) - it('should skip injecting if the Authorization header contains an AWS signature', done => { - const app = (stream, headers) => { - try { - expect(headers['x-datadog-trace-id']).to.be.undefined - expect(headers['x-datadog-parent-id']).to.be.undefined - - stream.respond({ - ':status': 200 - }) - stream.end() - - done() - } catch (e) { - done(e) - } - } - - appListener = server(app, port => { - const headers = { - Authorization: 'AWS4-HMAC-SHA256 ...' 
- } - const client = http2 - .connect(`${protocol}://localhost:${port}`) - .on('error', done) - - const req = client.request(headers) - req.on('error', done) - - req.end() - }) - }) - - it('should skip injecting if one of the Authorization headers contains an AWS signature', done => { - const app = (stream, headers) => { - try { - expect(headers['x-datadog-trace-id']).to.be.undefined - expect(headers['x-datadog-parent-id']).to.be.undefined - - stream.respond({ - ':status': 200 - }) - stream.end() - - done() - } catch (e) { - done(e) - } - } - - appListener = server(app, port => { - const headers = { - Authorization: ['AWS4-HMAC-SHA256 ...'] - } - const client = http2 - .connect(`${protocol}://localhost:${port}`) - .on('error', done) - - const req = client.request(headers) - req.on('error', done) - - req.end() - }) - }) - - it('should skip injecting if the X-Amz-Signature header is set', done => { - const app = (stream, headers) => { - try { - expect(headers['x-datadog-trace-id']).to.be.undefined - expect(headers['x-datadog-parent-id']).to.be.undefined - - stream.respond({ - ':status': 200 - }) - stream.end() - - done() - } catch (e) { - done(e) - } - } - - appListener = server(app, port => { - const headers = { - 'X-Amz-Signature': 'abc123' - } - const client = http2 - .connect(`${protocol}://localhost:${port}`) - .on('error', done) - - const req = client.request(headers) - req.on('error', done) - - req.end() - }) - }) - - it('should skip injecting if the X-Amz-Signature query param is set', done => { - const app = (stream, headers) => { - try { - expect(headers['x-datadog-trace-id']).to.be.undefined - expect(headers['x-datadog-parent-id']).to.be.undefined - - stream.respond({ - ':status': 200 - }) - stream.end() - - done() - } catch (e) { - done(e) - } - } - - appListener = server(app, port => { - const client = http2 - .connect(`${protocol}://localhost:${port}`) - .on('error', done) - - const req = client.request({ ':path': '/?X-Amz-Signature=abc123' }) - req.on('error', done) - - req.end() - }) - }) - it('should run the callback in the parent context', done => { const app = (stream, headers) => { stream.respond({ diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index 606fdcec538..4362094b0be 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -158,7 +158,13 @@ class JestPlugin extends CiPlugin { }) }) - this.addSub('ci:jest:test-suite:start', ({ testSuite, testEnvironmentOptions, frameworkVersion, displayName }) => { + this.addSub('ci:jest:test-suite:start', ({ + testSuite, + testSourceFile, + testEnvironmentOptions, + frameworkVersion, + displayName + }) => { const { _ddTestSessionId: testSessionId, _ddTestCommand: testCommand, @@ -196,6 +202,16 @@ class JestPlugin extends CiPlugin { if (displayName) { testSuiteMetadata[JEST_DISPLAY_NAME] = displayName } + if (testSourceFile) { + testSuiteMetadata[TEST_SOURCE_FILE] = testSourceFile + // Test suite is the whole test file, so we can use the first line as the start + testSuiteMetadata[TEST_SOURCE_START] = 1 + } + + const codeOwners = this.getCodeOwners(testSuiteMetadata) + if (codeOwners) { + testSuiteMetadata[TEST_CODE_OWNERS] = codeOwners + } this.testSuiteSpan = this.tracer.startSpan('jest.test_suite', { childOf: testSessionSpanContext, diff --git a/packages/datadog-plugin-kafkajs/src/batch-consumer.js b/packages/datadog-plugin-kafkajs/src/batch-consumer.js new file mode 100644 index 00000000000..8415b037644 --- /dev/null +++ 
b/packages/datadog-plugin-kafkajs/src/batch-consumer.js @@ -0,0 +1,20 @@ +const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer') +const { getMessageSize } = require('../../dd-trace/src/datastreams/processor') + +class KafkajsBatchConsumerPlugin extends ConsumerPlugin { + static get id () { return 'kafkajs' } + static get operation () { return 'consume-batch' } + + start ({ topic, partition, messages, groupId }) { + if (!this.config.dsmEnabled) return + for (const message of messages) { + if (!message || !message.headers) continue + const payloadSize = getMessageSize(message) + this.tracer.decodeDataStreamsContext(message.headers) + this.tracer + .setCheckpoint(['direction:in', `group:${groupId}`, `topic:${topic}`, 'type:kafka'], null, payloadSize) + } + } +} + +module.exports = KafkajsBatchConsumerPlugin diff --git a/packages/datadog-plugin-kafkajs/src/consumer.js b/packages/datadog-plugin-kafkajs/src/consumer.js index 420fea10902..84b6a02fdda 100644 --- a/packages/datadog-plugin-kafkajs/src/consumer.js +++ b/packages/datadog-plugin-kafkajs/src/consumer.js @@ -2,7 +2,6 @@ const dc = require('dc-polyfill') const { getMessageSize } = require('../../dd-trace/src/datastreams/processor') -const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway') const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer') const afterStartCh = dc.channel('dd-trace:kafkajs:consumer:afterStart') @@ -78,7 +77,7 @@ class KafkajsConsumerPlugin extends ConsumerPlugin { 'kafka.partition': partition } }) - if (this.config.dsmEnabled && message?.headers && DsmPathwayCodec.contextExists(message.headers)) { + if (this.config.dsmEnabled && message?.headers) { const payloadSize = getMessageSize(message) this.tracer.decodeDataStreamsContext(message.headers) this.tracer diff --git a/packages/datadog-plugin-kafkajs/src/index.js b/packages/datadog-plugin-kafkajs/src/index.js index 9e5aec80606..3d20e8af67e 100644 --- a/packages/datadog-plugin-kafkajs/src/index.js +++ b/packages/datadog-plugin-kafkajs/src/index.js @@ -2,6 +2,7 @@ const ProducerPlugin = require('./producer') const ConsumerPlugin = require('./consumer') +const BatchConsumerPlugin = require('./batch-consumer') const CompositePlugin = require('../../dd-trace/src/plugins/composite') class KafkajsPlugin extends CompositePlugin { @@ -9,7 +10,8 @@ class KafkajsPlugin extends CompositePlugin { static get plugins () { return { producer: ProducerPlugin, - consumer: ConsumerPlugin + consumer: ConsumerPlugin, + batchConsumer: BatchConsumerPlugin } } } diff --git a/packages/datadog-plugin-kafkajs/test/index.spec.js b/packages/datadog-plugin-kafkajs/test/index.spec.js index 1e63c8b01c6..3df303a95cf 100644 --- a/packages/datadog-plugin-kafkajs/test/index.spec.js +++ b/packages/datadog-plugin-kafkajs/test/index.spec.js @@ -145,7 +145,7 @@ describe('Plugin', () => { ) }) - describe('consumer', () => { + describe('consumer (eachMessage)', () => { let consumer beforeEach(async () => { @@ -387,7 +387,7 @@ describe('Plugin', () => { expect(setDataStreamsContextSpy.args[0][0].hash).to.equal(expectedProducerHash) }) - it('Should set a checkpoint on consume', async () => { + it('Should set a checkpoint on consume (eachMessage)', async () => { const runArgs = [] await consumer.run({ eachMessage: async () => { @@ -401,6 +401,20 @@ describe('Plugin', () => { } }) + it('Should set a checkpoint on consume (eachBatch)', async () => { + const runArgs = [] + await consumer.run({ + eachBatch: async () => { + 
runArgs.push(setDataStreamsContextSpy.lastCall.args[0]) + } + }) + await sendMessages(kafka, testTopic, messages) + await consumer.disconnect() + for (const runArg of runArgs) { + expect(runArg.hash).to.equal(expectedConsumerHash) + } + }) + it('Should set a message payload size when producing a message', async () => { const messages = [{ key: 'key1', value: 'test2' }] if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { diff --git a/packages/datadog-plugin-mocha/src/index.js b/packages/datadog-plugin-mocha/src/index.js index 79b0d14c62f..30f6e88a9fc 100644 --- a/packages/datadog-plugin-mocha/src/index.js +++ b/packages/datadog-plugin-mocha/src/index.js @@ -21,6 +21,7 @@ const { TEST_IS_NEW, TEST_IS_RETRY, TEST_EARLY_FLAKE_ENABLED, + TEST_EARLY_FLAKE_ABORT_REASON, TEST_SESSION_ID, TEST_MODULE_ID, TEST_MODULE, @@ -124,6 +125,19 @@ class MochaPlugin extends CiPlugin { testSuiteMetadata[TEST_ITR_FORCED_RUN] = 'true' this.telemetry.count(TELEMETRY_ITR_FORCED_TO_RUN, { testLevel: 'suite' }) } + if (this.repositoryRoot !== this.sourceRoot && !!this.repositoryRoot) { + testSuiteMetadata[TEST_SOURCE_FILE] = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot) + } else { + testSuiteMetadata[TEST_SOURCE_FILE] = testSuite + } + if (testSuiteMetadata[TEST_SOURCE_FILE]) { + testSuiteMetadata[TEST_SOURCE_START] = 1 + } + + const codeOwners = this.getCodeOwners(testSuiteMetadata) + if (codeOwners) { + testSuiteMetadata[TEST_CODE_OWNERS] = codeOwners + } const testSuiteSpan = this.tracer.startSpan('mocha.test_suite', { childOf: this.testModuleSpan, @@ -267,6 +281,7 @@ class MochaPlugin extends CiPlugin { hasUnskippableSuites, error, isEarlyFlakeDetectionEnabled, + isEarlyFlakeDetectionFaulty, isParallel }) => { if (this.testSessionSpan) { @@ -301,6 +316,9 @@ class MochaPlugin extends CiPlugin { if (isEarlyFlakeDetectionEnabled) { this.testSessionSpan.setTag(TEST_EARLY_FLAKE_ENABLED, 'true') } + if (isEarlyFlakeDetectionFaulty) { + this.testSessionSpan.setTag(TEST_EARLY_FLAKE_ABORT_REASON, 'faulty') + } this.testModuleSpan.finish() this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module') diff --git a/packages/datadog-plugin-openai/src/index.js b/packages/datadog-plugin-openai/src/index.js index 5eef54a8de5..f96b44543d2 100644 --- a/packages/datadog-plugin-openai/src/index.js +++ b/packages/datadog-plugin-openai/src/index.js @@ -276,25 +276,34 @@ class OpenApiPlugin extends TracingPlugin { const completionTokens = spanTags['openai.response.usage.completion_tokens'] const completionTokensEstimated = spanTags['openai.response.usage.completion_tokens_estimated'] + const totalTokens = spanTags['openai.response.usage.total_tokens'] + if (!error) { - if (promptTokensEstimated) { - this.metrics.distribution( - 'openai.tokens.prompt', promptTokens, [...tags, 'openai.estimated:true']) - } else { - this.metrics.distribution('openai.tokens.prompt', promptTokens, tags) + if (promptTokens != null) { + if (promptTokensEstimated) { + this.metrics.distribution( + 'openai.tokens.prompt', promptTokens, [...tags, 'openai.estimated:true']) + } else { + this.metrics.distribution('openai.tokens.prompt', promptTokens, tags) + } } - if (completionTokensEstimated) { - this.metrics.distribution( - 'openai.tokens.completion', completionTokens, [...tags, 'openai.estimated:true']) - } else { - this.metrics.distribution('openai.tokens.completion', completionTokens, tags) + + if (completionTokens != null) { + if (completionTokensEstimated) { + this.metrics.distribution( + 'openai.tokens.completion', 
completionTokens, [...tags, 'openai.estimated:true']) + } else { + this.metrics.distribution('openai.tokens.completion', completionTokens, tags) + } } - if (promptTokensEstimated || completionTokensEstimated) { - this.metrics.distribution( - 'openai.tokens.total', promptTokens + completionTokens, [...tags, 'openai.estimated:true']) - } else { - this.metrics.distribution('openai.tokens.total', promptTokens + completionTokens, tags) + if (totalTokens != null) { + if (promptTokensEstimated || completionTokensEstimated) { + this.metrics.distribution( + 'openai.tokens.total', totalTokens, [...tags, 'openai.estimated:true']) + } else { + this.metrics.distribution('openai.tokens.total', totalTokens, tags) + } } } @@ -777,9 +786,9 @@ function usageExtraction (tags, body, methodName, openaiStore) { if (completionEstimated) tags['openai.response.usage.completion_tokens_estimated'] = true } - if (promptTokens) tags['openai.response.usage.prompt_tokens'] = promptTokens - if (completionTokens) tags['openai.response.usage.completion_tokens'] = completionTokens - if (totalTokens) tags['openai.response.usage.total_tokens'] = totalTokens + if (promptTokens != null) tags['openai.response.usage.prompt_tokens'] = promptTokens + if (completionTokens != null) tags['openai.response.usage.completion_tokens'] = completionTokens + if (totalTokens != null) tags['openai.response.usage.total_tokens'] = totalTokens } function truncateApiKey (apiKey) { diff --git a/packages/datadog-plugin-openai/test/index.spec.js b/packages/datadog-plugin-openai/test/index.spec.js index 228cafaa3b8..8df38a11650 100644 --- a/packages/datadog-plugin-openai/test/index.spec.js +++ b/packages/datadog-plugin-openai/test/index.spec.js @@ -560,6 +560,57 @@ describe('Plugin', () => { }) }) + describe('embedding with missing usages', () => { + afterEach(() => { + nock.cleanAll() + }) + + it('makes a successful call', async () => { + nock('https://api.openai.com:443') + .post('/v1/embeddings') + .reply(200, { + object: 'list', + data: [{ + object: 'embedding', + index: 0, + embedding: [-0.0034387498, -0.026400521] + }], + model: 'text-embedding-ada-002-v2', + usage: { + prompt_tokens: 0 + } + }, []) + + const checkTraces = agent + .use(traces => { + expect(traces[0][0].metrics).to.have.property('openai.response.usage.prompt_tokens', 0) + expect(traces[0][0].metrics).to.not.have.property('openai.response.usage.completion_tokens') + expect(traces[0][0].metrics).to.not.have.property('openai.response.usage.total_tokens') + }) + + const params = { + model: 'text-embedding-ada-002', + input: '', + user: 'hunter2' + } + + if (semver.satisfies(realVersion, '>=4.0.0')) { + const result = await openai.embeddings.create(params) + expect(result.model).to.eql('text-embedding-ada-002-v2') + } else { + const result = await openai.createEmbedding(params) + expect(result.data.model).to.eql('text-embedding-ada-002-v2') + } + + await checkTraces + + expect(metricStub).to.have.been.calledWith('openai.request.duration') // timing value not guaranteed + expect(metricStub).to.have.been.calledWith('openai.tokens.prompt') + expect(metricStub).to.not.have.been.calledWith('openai.tokens.completion') + expect(metricStub).to.not.have.been.calledWith('openai.tokens.total') + }) + }) + describe('list models', () => { let scope @@ -3615,6 +3666,61 @@ describe('Plugin', () => { await checkTraces }) + + it('makes a successful chat completion call with tools and content', async () => { + nock('https://api.openai.com:443') + .post('/v1/chat/completions') + .reply(200, function () { 
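+              // Annotation (not part of the PR): nock invokes this reply function and
+              // pipes the returned stream back as the response body, so the fixture
+              // below replays a recorded OpenAI SSE response chunk by chunk. The plugin
+              // is expected to stitch the streamed deltas back together, which is what
+              // the span assertions on message content and tool_calls further down verify.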
+ return fs.createReadStream( + Path.join(__dirname, 'streamed-responses/chat.completions.tool.and.content.txt') + ) + }, { + 'Content-Type': 'text/plain', + 'openai-organization': 'kill-9' + }) + + const checkTraces = agent + .use(traces => { + const span = traces[0][0] + + expect(span).to.have.property('name', 'openai.request') + expect(span).to.have.property('type', 'openai') + expect(span).to.have.property('error', 0) + expect(span.meta).to.have.property('openai.organization.name', 'kill-9') + expect(span.meta).to.have.property('openai.request.method', 'POST') + expect(span.meta).to.have.property('openai.request.endpoint', '/v1/chat/completions') + expect(span.meta).to.have.property('openai.request.model', 'gpt-4') + expect(span.meta).to.have.property('openai.request.messages.0.content', 'Hello, OpenAI!') + expect(span.meta).to.have.property('openai.request.messages.0.role', 'user') + expect(span.meta).to.have.property('openai.request.messages.0.name', 'hunter2') + expect(span.meta).to.have.property('openai.response.choices.0.message.role', 'assistant') + expect(span.meta).to.have.property('openai.response.choices.0.message.content', + 'THOUGHT: Hi') + expect(span.meta).to.have.property('openai.response.choices.0.finish_reason', 'tool_calls') + expect(span.meta).to.have.property('openai.response.choices.0.logprobs', 'returned') + expect(span.meta).to.have.property('openai.response.choices.0.message.tool_calls.0.function.name', + 'finish') + expect(span.meta).to.have.property( + 'openai.response.choices.0.message.tool_calls.0.function.arguments', + '{\n"answer": "5"\n}' + ) + }) + + const stream = await openai.chat.completions.create({ + model: 'gpt-4', + messages: [{ role: 'user', content: 'Hello, OpenAI!', name: 'hunter2' }], + temperature: 0.5, + tools: [], // dummy tools, the response is hardcoded + stream: true + }) + + for await (const part of stream) { + expect(part).to.have.property('choices') + expect(part.choices[0]).to.have.property('delta') + } + + await checkTraces + }) } }) } diff --git a/packages/datadog-plugin-openai/test/streamed-responses/chat.completions.tool.and.content.txt b/packages/datadog-plugin-openai/test/streamed-responses/chat.completions.tool.and.content.txt new file mode 100644 index 00000000000..3947339157d --- /dev/null +++ b/packages/datadog-plugin-openai/test/streamed-responses/chat.completions.tool.and.content.txt @@ -0,0 +1,33 @@ +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"TH"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"O"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"UGHT"},"logprobs":null,"finish_reason":null}]} + +data: 
{"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":" Hi"},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"call_Tg0o5wgoNSKF2iggAPmfWwem","type":"function","function":{"name":"finish","arguments":""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\n"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"answer"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" \""}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"5"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"\n"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"}"}}]},"logprobs":null,"finish_reason":null}]} + +data: {"id":"chatcmpl-A3juVlDlz6tV3bfCY2WZfYxRlKiAH","object":"chat.completion.chunk","created":1725454827,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}]} + +data: [DONE] \ No newline at end of file diff --git a/packages/datadog-plugin-playwright/src/index.js b/packages/datadog-plugin-playwright/src/index.js index 482bd6f10b9..941f779ff54 
100644 --- a/packages/datadog-plugin-playwright/src/index.js +++ b/packages/datadog-plugin-playwright/src/index.js @@ -69,6 +69,7 @@ class PlaywrightPlugin extends CiPlugin { this.addSub('ci:playwright:test-suite:start', (testSuiteAbsolutePath) => { const store = storage.getStore() const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.rootDir) + const testSourceFile = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot) const testSuiteMetadata = getTestSuiteCommonTags( this.command, @@ -76,6 +77,14 @@ class PlaywrightPlugin extends CiPlugin { testSuite, 'playwright' ) + if (testSourceFile) { + testSuiteMetadata[TEST_SOURCE_FILE] = testSourceFile + testSuiteMetadata[TEST_SOURCE_START] = 1 + } + const codeOwners = this.getCodeOwners(testSuiteMetadata) + if (codeOwners) { + testSuiteMetadata[TEST_CODE_OWNERS] = codeOwners + } const testSuiteSpan = this.tracer.startSpan('playwright.test_suite', { childOf: this.testModuleSpan, diff --git a/packages/datadog-plugin-rhea/src/consumer.js b/packages/datadog-plugin-rhea/src/consumer.js index 226834885be..56aad8f7b9d 100644 --- a/packages/datadog-plugin-rhea/src/consumer.js +++ b/packages/datadog-plugin-rhea/src/consumer.js @@ -3,7 +3,6 @@ const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer') const { storage } = require('../../datadog-core') const { getAmqpMessageSize } = require('../../dd-trace/src/datastreams/processor') -const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway') class RheaConsumerPlugin extends ConsumerPlugin { static get id () { return 'rhea' } @@ -34,8 +33,7 @@ class RheaConsumerPlugin extends ConsumerPlugin { if ( this.config.dsmEnabled && - msgObj?.message?.delivery_annotations && - DsmPathwayCodec.contextExists(msgObj.message.delivery_annotations) + msgObj?.message?.delivery_annotations ) { const payloadSize = getAmqpMessageSize( { headers: msgObj.message.delivery_annotations, content: msgObj.message.body } diff --git a/packages/datadog-plugin-undici/test/index.spec.js b/packages/datadog-plugin-undici/test/index.spec.js index 1224ead7f7f..03541e0eb7c 100644 --- a/packages/datadog-plugin-undici/test/index.spec.js +++ b/packages/datadog-plugin-undici/test/index.spec.js @@ -23,7 +23,7 @@ describe('Plugin', () => { describe('undici-fetch', () => { withVersions('undici', 'undici', version => { const specificVersion = require(`../../../versions/undici@${version}`).version() - if ((NODE_MAJOR <= 16) && semver.satisfies(specificVersion, '>=6')) return + if (NODE_MAJOR <= 16 && semver.satisfies(specificVersion, '>=6')) return function server (app, listener) { const server = require('http').createServer(app) diff --git a/packages/datadog-plugin-vitest/src/index.js b/packages/datadog-plugin-vitest/src/index.js index a93eeb1ea4d..34617bdb1ac 100644 --- a/packages/datadog-plugin-vitest/src/index.js +++ b/packages/datadog-plugin-vitest/src/index.js @@ -6,10 +6,18 @@ const { finishAllTraceSpans, getTestSuitePath, getTestSuiteCommonTags, + getTestSessionName, + getIsFaultyEarlyFlakeDetection, TEST_SOURCE_FILE, TEST_IS_RETRY, TEST_CODE_COVERAGE_LINES_PCT, - TEST_CODE_OWNERS + TEST_CODE_OWNERS, + TEST_LEVEL_EVENT_TYPES, + TEST_SESSION_NAME, + TEST_SOURCE_START, + TEST_IS_NEW, + TEST_EARLY_FLAKE_ENABLED, + TEST_EARLY_FLAKE_ABORT_REASON } = require('../../dd-trace/src/plugins/util/test') const { COMPONENT } = require('../../dd-trace/src/constants') const { @@ -33,7 +41,26 @@ class VitestPlugin extends CiPlugin { this.taskToFinishTime = new WeakMap() - this.addSub('ci:vitest:test:start', ({ 
testName, testSuiteAbsolutePath, isRetry }) => { + this.addSub('ci:vitest:test:is-new', ({ knownTests, testSuiteAbsolutePath, testName, onDone }) => { + const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot) + const testsForThisTestSuite = knownTests[testSuite] || [] + onDone(!testsForThisTestSuite.includes(testName)) + }) + + this.addSub('ci:vitest:is-early-flake-detection-faulty', ({ + knownTests, + testFilepaths, + onDone + }) => { + const isFaulty = getIsFaultyEarlyFlakeDetection( + testFilepaths.map(testFilepath => getTestSuitePath(testFilepath, this.repositoryRoot)), + knownTests, + this.libraryConfig.earlyFlakeDetectionFaultyThreshold + ) + onDone(isFaulty) + }) + + this.addSub('ci:vitest:test:start', ({ testName, testSuiteAbsolutePath, isRetry, isNew }) => { const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot) const store = storage.getStore() @@ -43,6 +70,9 @@ class VitestPlugin extends CiPlugin { if (isRetry) { extraTags[TEST_IS_RETRY] = 'true' } + if (isNew) { + extraTags[TEST_IS_NEW] = 'true' + } const span = this.startTestSpan( testName, @@ -110,6 +140,7 @@ class VitestPlugin extends CiPlugin { this.testSuiteSpan, { [TEST_SOURCE_FILE]: testSuite, + [TEST_SOURCE_START]: 1, // we can't get the proper start line in vitest [TEST_STATUS]: 'skip' } ) @@ -120,12 +151,25 @@ class VitestPlugin extends CiPlugin { }) this.addSub('ci:vitest:test-suite:start', ({ testSuiteAbsolutePath, frameworkVersion }) => { + this.command = process.env.DD_CIVISIBILITY_TEST_COMMAND this.frameworkVersion = frameworkVersion const testSessionSpanContext = this.tracer.extract('text_map', { 'x-datadog-trace-id': process.env.DD_CIVISIBILITY_TEST_SESSION_ID, 'x-datadog-parent-id': process.env.DD_CIVISIBILITY_TEST_MODULE_ID }) + // test suites run in a different process, so they also need to init the metadata dictionary + const testSessionName = getTestSessionName(this.config, this.command, this.testEnvironmentMetadata) + const metadataTags = {} + for (const testLevel of TEST_LEVEL_EVENT_TYPES) { + metadataTags[testLevel] = { + [TEST_SESSION_NAME]: testSessionName + } + } + if (this.tracer._exporter.setMetadataTags) { + this.tracer._exporter.setMetadataTags(metadataTags) + } + const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot) const testSuiteMetadata = getTestSuiteCommonTags( this.command, @@ -133,6 +177,14 @@ class VitestPlugin extends CiPlugin { testSuite, 'vitest' ) + testSuiteMetadata[TEST_SOURCE_FILE] = testSuite + testSuiteMetadata[TEST_SOURCE_START] = 1 + + const codeOwners = this.getCodeOwners(testSuiteMetadata) + if (codeOwners) { + testSuiteMetadata[TEST_CODE_OWNERS] = codeOwners + } + const testSuiteSpan = this.tracer.startSpan('vitest.test_suite', { childOf: testSessionSpanContext, tags: { @@ -169,7 +221,14 @@ class VitestPlugin extends CiPlugin { } }) - this.addSub('ci:vitest:session:finish', ({ status, onFinish, error, testCodeCoverageLinesTotal }) => { + this.addSub('ci:vitest:session:finish', ({ + status, + error, + testCodeCoverageLinesTotal, + isEarlyFlakeDetectionEnabled, + isEarlyFlakeDetectionFaulty, + onFinish + }) => { this.testSessionSpan.setTag(TEST_STATUS, status) this.testModuleSpan.setTag(TEST_STATUS, status) if (error) { @@ -180,6 +239,12 @@ class VitestPlugin extends CiPlugin { this.testModuleSpan.setTag(TEST_CODE_COVERAGE_LINES_PCT, testCodeCoverageLinesTotal) this.testSessionSpan.setTag(TEST_CODE_COVERAGE_LINES_PCT, testCodeCoverageLinesTotal) } + if (isEarlyFlakeDetectionEnabled) { + 
this.testSessionSpan.setTag(TEST_EARLY_FLAKE_ENABLED, 'true') + } + if (isEarlyFlakeDetectionFaulty) { + this.testSessionSpan.setTag(TEST_EARLY_FLAKE_ABORT_REASON, 'faulty') + } this.testModuleSpan.finish() this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module') this.testSessionSpan.finish() diff --git a/packages/dd-trace/src/appsec/addresses.js b/packages/dd-trace/src/appsec/addresses.js index 086052218fd..e2cf6c6940a 100644 --- a/packages/dd-trace/src/appsec/addresses.js +++ b/packages/dd-trace/src/appsec/addresses.js @@ -22,5 +22,7 @@ module.exports = { USER_ID: 'usr.id', WAF_CONTEXT_PROCESSOR: 'waf.context.processor', - HTTP_OUTGOING_URL: 'server.io.net.url' + HTTP_OUTGOING_URL: 'server.io.net.url', + DB_STATEMENT: 'server.db.statement', + DB_SYSTEM: 'server.db.system' } diff --git a/packages/dd-trace/src/appsec/channels.js b/packages/dd-trace/src/appsec/channels.js index 66781d88821..c098efd5538 100644 --- a/packages/dd-trace/src/appsec/channels.js +++ b/packages/dd-trace/src/appsec/channels.js @@ -21,6 +21,8 @@ module.exports = { responseWriteHead: dc.channel('apm:http:server:response:writeHead:start'), httpClientRequestStart: dc.channel('apm:http:client:request:start'), responseSetHeader: dc.channel('datadog:http:server:response:set-header:start'), - setUncaughtExceptionCaptureCallbackStart: dc.channel('datadog:process:setUncaughtExceptionCaptureCallback:start') - + setUncaughtExceptionCaptureCallbackStart: dc.channel('datadog:process:setUncaughtExceptionCaptureCallback:start'), + pgQueryStart: dc.channel('apm:pg:query:start'), + pgPoolQueryStart: dc.channel('datadog:pg:pool:query:start'), + wafRunFinished: dc.channel('datadog:waf:run:finish') } diff --git a/packages/dd-trace/src/appsec/rasp.js b/packages/dd-trace/src/appsec/rasp.js deleted file mode 100644 index de13c33e4e9..00000000000 --- a/packages/dd-trace/src/appsec/rasp.js +++ /dev/null @@ -1,176 +0,0 @@ -'use strict' - -const { storage } = require('../../../datadog-core') -const web = require('./../plugins/util/web') -const addresses = require('./addresses') -const { httpClientRequestStart, setUncaughtExceptionCaptureCallbackStart } = require('./channels') -const { reportStackTrace } = require('./stack_trace') -const waf = require('./waf') -const { getBlockingAction, block } = require('./blocking') -const log = require('../log') - -const RULE_TYPES = { - SSRF: 'ssrf' -} - -class DatadogRaspAbortError extends Error { - constructor (req, res, blockingAction) { - super('DatadogRaspAbortError') - this.name = 'DatadogRaspAbortError' - this.req = req - this.res = res - this.blockingAction = blockingAction - } -} - -let config, abortOnUncaughtException - -function removeAllListeners (emitter, event) { - const listeners = emitter.listeners(event) - emitter.removeAllListeners(event) - - let cleaned = false - return function () { - if (cleaned === true) { - return - } - cleaned = true - - for (let i = 0; i < listeners.length; ++i) { - emitter.on(event, listeners[i]) - } - } -} - -function findDatadogRaspAbortError (err, deep = 10) { - if (err instanceof DatadogRaspAbortError) { - return err - } - - if (err.cause && deep > 0) { - return findDatadogRaspAbortError(err.cause, deep - 1) - } -} - -function handleUncaughtExceptionMonitor (err) { - const abortError = findDatadogRaspAbortError(err) - if (!abortError) return - - const { req, res, blockingAction } = abortError - block(req, res, web.root(req), null, blockingAction) - - if (!process.hasUncaughtExceptionCaptureCallback()) { - const cleanUp = removeAllListeners(process, 
'uncaughtException') - const handler = () => { - process.removeListener('uncaughtException', handler) - } - - setTimeout(() => { - process.removeListener('uncaughtException', handler) - cleanUp() - }) - - process.on('uncaughtException', handler) - } else { - // uncaughtException event is not executed when hasUncaughtExceptionCaptureCallback is true - let previousCb - const cb = ({ currentCallback, abortController }) => { - setUncaughtExceptionCaptureCallbackStart.unsubscribe(cb) - if (!currentCallback) { - abortController.abort() - return - } - - previousCb = currentCallback - } - - setUncaughtExceptionCaptureCallbackStart.subscribe(cb) - - process.setUncaughtExceptionCaptureCallback(null) - - // For some reason, previous callback was defined before the instrumentation - // We can not restore it, so we let the app decide - if (previousCb) { - process.setUncaughtExceptionCaptureCallback(() => { - process.setUncaughtExceptionCaptureCallback(null) - process.setUncaughtExceptionCaptureCallback(previousCb) - }) - } - } -} - -function enable (_config) { - config = _config - httpClientRequestStart.subscribe(analyzeSsrf) - - process.on('uncaughtExceptionMonitor', handleUncaughtExceptionMonitor) - abortOnUncaughtException = process.execArgv?.includes('--abort-on-uncaught-exception') - - if (abortOnUncaughtException) { - log.warn('The --abort-on-uncaught-exception flag is enabled. The RASP module will not block operations.') - } -} - -function disable () { - if (httpClientRequestStart.hasSubscribers) httpClientRequestStart.unsubscribe(analyzeSsrf) - - process.off('uncaughtExceptionMonitor', handleUncaughtExceptionMonitor) -} - -function analyzeSsrf (ctx) { - const store = storage.getStore() - const req = store?.req - const url = ctx.args.uri - - if (!req || !url) return - - const persistent = { - [addresses.HTTP_OUTGOING_URL]: url - } - - const result = waf.run({ persistent }, req, RULE_TYPES.SSRF) - - const res = store?.res - handleResult(result, req, res, ctx.abortController) -} - -function getGenerateStackTraceAction (actions) { - return actions?.generate_stack -} - -function handleResult (actions, req, res, abortController) { - const generateStackTraceAction = getGenerateStackTraceAction(actions) - if (generateStackTraceAction && config.appsec.stackTrace.enabled) { - const rootSpan = web.root(req) - reportStackTrace( - rootSpan, - generateStackTraceAction.stack_id, - config.appsec.stackTrace.maxDepth, - config.appsec.stackTrace.maxStackTraces - ) - } - - if (!abortController || abortOnUncaughtException) return - - const blockingAction = getBlockingAction(actions) - if (blockingAction) { - const rootSpan = web.root(req) - // Should block only in express - if (rootSpan?.context()._name === 'express.request') { - const abortError = new DatadogRaspAbortError(req, res, blockingAction) - abortController.abort(abortError) - - // TODO Delete this when support for node 16 is removed - if (!abortController.signal.reason) { - abortController.signal.reason = abortError - } - } - } -} - -module.exports = { - enable, - disable, - handleResult, - handleUncaughtExceptionMonitor // exported only for testing purpose -} diff --git a/packages/dd-trace/src/appsec/rasp/index.js b/packages/dd-trace/src/appsec/rasp/index.js new file mode 100644 index 00000000000..801608e54d8 --- /dev/null +++ b/packages/dd-trace/src/appsec/rasp/index.js @@ -0,0 +1,103 @@ +'use strict' + +const web = require('../../plugins/util/web') +const { setUncaughtExceptionCaptureCallbackStart } = require('../channels') +const { block } = 
require('../blocking') +const ssrf = require('./ssrf') +const sqli = require('./sql_injection') + +const { DatadogRaspAbortError } = require('./utils') + +function removeAllListeners (emitter, event) { + const listeners = emitter.listeners(event) + emitter.removeAllListeners(event) + + let cleaned = false + return function () { + if (cleaned === true) { + return + } + cleaned = true + + for (let i = 0; i < listeners.length; ++i) { + emitter.on(event, listeners[i]) + } + } +} + +function findDatadogRaspAbortError (err, deep = 10) { + if (err instanceof DatadogRaspAbortError) { + return err + } + + if (err.cause && deep > 0) { + return findDatadogRaspAbortError(err.cause, deep - 1) + } +} + +function handleUncaughtExceptionMonitor (err) { + const abortError = findDatadogRaspAbortError(err) + if (!abortError) return + + const { req, res, blockingAction } = abortError + block(req, res, web.root(req), null, blockingAction) + + if (!process.hasUncaughtExceptionCaptureCallback()) { + const cleanUp = removeAllListeners(process, 'uncaughtException') + const handler = () => { + process.removeListener('uncaughtException', handler) + } + + setTimeout(() => { + process.removeListener('uncaughtException', handler) + cleanUp() + }) + + process.on('uncaughtException', handler) + } else { + // uncaughtException event is not executed when hasUncaughtExceptionCaptureCallback is true + let previousCb + const cb = ({ currentCallback, abortController }) => { + setUncaughtExceptionCaptureCallbackStart.unsubscribe(cb) + if (!currentCallback) { + abortController.abort() + return + } + + previousCb = currentCallback + } + + setUncaughtExceptionCaptureCallbackStart.subscribe(cb) + + process.setUncaughtExceptionCaptureCallback(null) + + // For some reason, the previous callback was defined before the instrumentation. + // We cannot restore it, so we let the app decide + if (previousCb) { + process.setUncaughtExceptionCaptureCallback(() => { + process.setUncaughtExceptionCaptureCallback(null) + process.setUncaughtExceptionCaptureCallback(previousCb) + }) + } + } +} + +function enable (config) { + ssrf.enable(config) + sqli.enable(config) + + process.on('uncaughtExceptionMonitor', handleUncaughtExceptionMonitor) +} + +function disable () { + ssrf.disable() + sqli.disable() + + process.off('uncaughtExceptionMonitor', handleUncaughtExceptionMonitor) +} + +module.exports = { + enable, + disable, + handleUncaughtExceptionMonitor // exported only for testing purposes +} diff --git a/packages/dd-trace/src/appsec/rasp/sql_injection.js b/packages/dd-trace/src/appsec/rasp/sql_injection.js new file mode 100644 index 00000000000..b942dd82be5 --- /dev/null +++ b/packages/dd-trace/src/appsec/rasp/sql_injection.js @@ -0,0 +1,86 @@ +'use strict' + +const { pgQueryStart, pgPoolQueryStart, wafRunFinished } = require('../channels') +const { storage } = require('../../../../datadog-core') +const addresses = require('../addresses') +const waf = require('../waf') +const { RULE_TYPES, handleResult } = require('./utils') + +const DB_SYSTEM_POSTGRES = 'postgresql' +const reqQueryMap = new WeakMap() // WeakMap<req, Set<query strings>> + +let config + +function enable (_config) { + config = _config + + pgQueryStart.subscribe(analyzePgSqlInjection) + pgPoolQueryStart.subscribe(analyzePgSqlInjection) + wafRunFinished.subscribe(clearQuerySet) +} + +function disable () { + if (pgQueryStart.hasSubscribers) pgQueryStart.unsubscribe(analyzePgSqlInjection) + if (pgPoolQueryStart.hasSubscribers) pgPoolQueryStart.unsubscribe(analyzePgSqlInjection) + if 
(wafRunFinished.hasSubscribers) wafRunFinished.unsubscribe(clearQuerySet) +} + +function analyzePgSqlInjection (ctx) { + const query = ctx.query?.text + if (!query) return + + const store = storage.getStore() + if (!store) return + + const { req, res } = store + + if (!req) return + + let executedQueries = reqQueryMap.get(req) + if (executedQueries?.has(query)) return + + // Do not waste time executing same query twice + // This also will prevent double calls in pg.Pool internal queries + if (!executedQueries) { + executedQueries = new Set() + reqQueryMap.set(req, executedQueries) + } + executedQueries.add(query) + + const persistent = { + [addresses.DB_STATEMENT]: query, + [addresses.DB_SYSTEM]: DB_SYSTEM_POSTGRES + } + + const result = waf.run({ persistent }, req, RULE_TYPES.SQL_INJECTION) + + handleResult(result, req, res, ctx.abortController, config) +} + +function hasInputAddress (payload) { + return hasAddressesObjectInputAddress(payload.ephemeral) || hasAddressesObjectInputAddress(payload.persistent) +} + +function hasAddressesObjectInputAddress (addressesObject) { + return addressesObject && Object.keys(addressesObject) + .some(address => address.startsWith('server.request') || address.startsWith('graphql.server')) +} + +function clearQuerySet ({ payload }) { + if (!payload) return + + const store = storage.getStore() + if (!store) return + + const { req } = store + if (!req) return + + const executedQueries = reqQueryMap.get(req) + if (!executedQueries) return + + if (hasInputAddress(payload)) { + executedQueries.clear() + } +} + +module.exports = { enable, disable } diff --git a/packages/dd-trace/src/appsec/rasp/ssrf.js b/packages/dd-trace/src/appsec/rasp/ssrf.js new file mode 100644 index 00000000000..ae45ed7daf2 --- /dev/null +++ b/packages/dd-trace/src/appsec/rasp/ssrf.js @@ -0,0 +1,37 @@ +'use strict' + +const { httpClientRequestStart } = require('../channels') +const { storage } = require('../../../../datadog-core') +const addresses = require('../addresses') +const waf = require('../waf') +const { RULE_TYPES, handleResult } = require('./utils') + +let config + +function enable (_config) { + config = _config + httpClientRequestStart.subscribe(analyzeSsrf) +} + +function disable () { + if (httpClientRequestStart.hasSubscribers) httpClientRequestStart.unsubscribe(analyzeSsrf) +} + +function analyzeSsrf (ctx) { + const store = storage.getStore() + const req = store?.req + const url = ctx.args.uri + + if (!req || !url) return + + const persistent = { + [addresses.HTTP_OUTGOING_URL]: url + } + + const result = waf.run({ persistent }, req, RULE_TYPES.SSRF) + + const res = store?.res + handleResult(result, req, res, ctx.abortController, config) +} + +module.exports = { enable, disable } diff --git a/packages/dd-trace/src/appsec/rasp/utils.js b/packages/dd-trace/src/appsec/rasp/utils.js new file mode 100644 index 00000000000..2a46b76d6e4 --- /dev/null +++ b/packages/dd-trace/src/appsec/rasp/utils.js @@ -0,0 +1,63 @@ +'use strict' + +const web = require('../../plugins/util/web') +const { reportStackTrace } = require('../stack_trace') +const { getBlockingAction } = require('../blocking') +const log = require('../../log') + +const abortOnUncaughtException = process.execArgv?.includes('--abort-on-uncaught-exception') + +if (abortOnUncaughtException) { + log.warn('The --abort-on-uncaught-exception flag is enabled. 
The RASP module will not block operations.') +} + +const RULE_TYPES = { + SSRF: 'ssrf', + SQL_INJECTION: 'sql_injection' +} + +class DatadogRaspAbortError extends Error { + constructor (req, res, blockingAction) { + super('DatadogRaspAbortError') + this.name = 'DatadogRaspAbortError' + this.req = req + this.res = res + this.blockingAction = blockingAction + } +} + +function handleResult (actions, req, res, abortController, config) { + const generateStackTraceAction = actions?.generate_stack + if (generateStackTraceAction && config.appsec.stackTrace.enabled) { + const rootSpan = web.root(req) + reportStackTrace( + rootSpan, + generateStackTraceAction.stack_id, + config.appsec.stackTrace.maxDepth, + config.appsec.stackTrace.maxStackTraces + ) + } + + if (!abortController || abortOnUncaughtException) return + + const blockingAction = getBlockingAction(actions) + if (blockingAction) { + const rootSpan = web.root(req) + // Should block only in express + if (rootSpan?.context()._name === 'express.request') { + const abortError = new DatadogRaspAbortError(req, res, blockingAction) + abortController.abort(abortError) + + // TODO Delete this when support for node 16 is removed + if (!abortController.signal.reason) { + abortController.signal.reason = abortError + } + } + } +} + +module.exports = { + handleResult, + RULE_TYPES, + DatadogRaspAbortError +} diff --git a/packages/dd-trace/src/appsec/remote_config/capabilities.js b/packages/dd-trace/src/appsec/remote_config/capabilities.js index 6e320493336..f42d7358203 100644 --- a/packages/dd-trace/src/appsec/remote_config/capabilities.js +++ b/packages/dd-trace/src/appsec/remote_config/capabilities.js @@ -17,5 +17,7 @@ module.exports = { APM_TRACING_HTTP_HEADER_TAGS: 1n << 14n, APM_TRACING_CUSTOM_TAGS: 1n << 15n, APM_TRACING_ENABLED: 1n << 19n, + ASM_RASP_SQLI: 1n << 21n, + ASM_RASP_SSRF: 1n << 23n, APM_TRACING_SAMPLE_RULES: 1n << 29n } diff --git a/packages/dd-trace/src/appsec/remote_config/index.js b/packages/dd-trace/src/appsec/remote_config/index.js index 169e5c2dff7..b63b3690102 100644 --- a/packages/dd-trace/src/appsec/remote_config/index.js +++ b/packages/dd-trace/src/appsec/remote_config/index.js @@ -28,7 +28,7 @@ function enable (config, appsec) { rc.updateCapabilities(RemoteConfigCapabilities.ASM_API_SECURITY_SAMPLE_RATE, true) } - rc.on('ASM_FEATURES', (action, rcConfig) => { + rc.setProductHandler('ASM_FEATURES', (action, rcConfig) => { if (!rcConfig) return if (activation === Activation.ONECLICK) { @@ -76,9 +76,15 @@ function enableWafUpdate (appsecConfig) { rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) rc.updateCapabilities(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) - rc.on('ASM_DATA', noop) - rc.on('ASM_DD', noop) - rc.on('ASM', noop) + if (appsecConfig.rasp?.enabled) { + rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SQLI, true) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SSRF, true) + } + + // TODO: delete noop handlers and kPreUpdate and replace with batched handlers + rc.setProductHandler('ASM_DATA', noop) + rc.setProductHandler('ASM_DD', noop) + rc.setProductHandler('ASM', noop) rc.on(RemoteConfigManager.kPreUpdate, RuleManager.updateWafFromRC) } @@ -98,9 +104,12 @@ function disableWafUpdate () { rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, false) rc.updateCapabilities(RemoteConfigCapabilities.ASM_TRUSTED_IPS, false) - rc.off('ASM_DATA', noop) - rc.off('ASM_DD', noop) - rc.off('ASM', noop) + 
rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SQLI, false) + rc.updateCapabilities(RemoteConfigCapabilities.ASM_RASP_SSRF, false) + + rc.removeProductHandler('ASM_DATA') + rc.removeProductHandler('ASM_DD') + rc.removeProductHandler('ASM') rc.off(RemoteConfigManager.kPreUpdate, RuleManager.updateWafFromRC) } diff --git a/packages/dd-trace/src/appsec/remote_config/manager.js b/packages/dd-trace/src/appsec/remote_config/manager.js index 5b0044e2c71..8f2aa44cea2 100644 --- a/packages/dd-trace/src/appsec/remote_config/manager.js +++ b/packages/dd-trace/src/appsec/remote_config/manager.js @@ -15,6 +15,7 @@ const clientId = uuid() const DEFAULT_CAPABILITY = Buffer.alloc(1).toString('base64') // 0x00 const kPreUpdate = Symbol('kPreUpdate') +const kSupportsAckCallback = Symbol('kSupportsAckCallback') // There MUST NOT exist separate instances of RC clients in a tracer making separate ClientGetConfigsRequest // with their own separated Client.ClientState. @@ -32,14 +33,26 @@ class RemoteConfigManager extends EventEmitter { port: config.port })) + this._handlers = new Map() + const appliedConfigs = this.appliedConfigs = new Map() + this.scheduler = new Scheduler((cb) => this.poll(cb), pollInterval) this.state = { client: { - state: { // updated by `parseConfig()` + state: { // updated by `parseConfig()` and `poll()` root_version: 1, targets_version: 0, - config_states: [], + // Use getter so `apply_*` can be updated async and still affect the content of `config_states` + get config_states () { + return Array.from(appliedConfigs.values()).map((conf) => ({ + id: conf.id, + version: conf.version, + product: conf.product, + apply_state: conf.apply_state, + apply_error: conf.apply_error + })) + }, has_error: false, error: '', backend_client_state: '' @@ -60,8 +73,6 @@ class RemoteConfigManager extends EventEmitter { }, cached_target_files: [] // updated by `parseConfig()` } - - this.appliedConfigs = new Map() } updateCapabilities (mask, value) { @@ -82,32 +93,24 @@ class RemoteConfigManager extends EventEmitter { this.state.client.capabilities = Buffer.from(str, 'hex').toString('base64') } - on (event, listener) { - super.on(event, listener) - + setProductHandler (product, handler) { + this._handlers.set(product, handler) this.updateProducts() - - if (this.state.client.products.length) { + if (this.state.client.products.length === 1) { this.scheduler.start() } - - return this } - off (event, listener) { - super.off(event, listener) - + removeProductHandler (product) { + this._handlers.delete(product) this.updateProducts() - - if (!this.state.client.products.length) { + if (this.state.client.products.length === 0) { this.scheduler.stop() } - - return this } updateProducts () { - this.state.client.products = this.eventNames().filter(e => typeof e === 'string') + this.state.client.products = Array.from(this._handlers.keys()) } getPayload () { @@ -228,24 +231,11 @@ class RemoteConfigManager extends EventEmitter { this.dispatch(toApply, 'apply') this.dispatch(toModify, 'modify') - this.state.client.state.config_states = [] - this.state.cached_target_files = [] - - for (const conf of this.appliedConfigs.values()) { - this.state.client.state.config_states.push({ - id: conf.id, - version: conf.version, - product: conf.product, - apply_state: conf.apply_state, - apply_error: conf.apply_error - }) - - this.state.cached_target_files.push({ - path: conf.path, - length: conf.length, - hashes: Object.entries(conf.hashes).map((entry) => ({ algorithm: entry[0], hash: entry[1] })) - }) - } + 
this.state.cached_target_files = Array.from(this.appliedConfigs.values()).map((conf) => ({ + path: conf.path, + length: conf.length, + hashes: Object.entries(conf.hashes).map((entry) => ({ algorithm: entry[0], hash: entry[1] })) + })) } } @@ -254,20 +244,7 @@ // TODO: we need a way to tell if unapply configs were handled by kPreUpdate or not, because they're always // emitted unlike the apply and modify configs - // in case the item was already handled by kPreUpdate - if (item.apply_state === UNACKNOWLEDGED || action === 'unapply') { - try { - // TODO: do we want to pass old and new config ? - const hadListeners = this.emit(item.product, action, item.file, item.id) - - if (hadListeners) { - item.apply_state = ACKNOWLEDGED - } - } catch (err) { - item.apply_state = ERROR - item.apply_error = err.toString() - } - } + this._callHandlerFor(action, item) if (action === 'unapply') { this.appliedConfigs.delete(item.path) @@ -276,6 +253,49 @@ } } } + + _callHandlerFor (action, item) { + // in case the item was already handled by kPreUpdate + if (item.apply_state !== UNACKNOWLEDGED && action !== 'unapply') return + + const handler = this._handlers.get(item.product) + + if (!handler) return + + try { + if (supportsAckCallback(handler)) { + // If the handler accepts an `ack` callback, expect that to be called and set `apply_state` accordingly + // TODO: do we want to pass old and new config ? + handler(action, item.file, item.id, (err) => { + if (err) { + item.apply_state = ERROR + item.apply_error = err.toString() + } else if (item.apply_state !== ERROR) { + item.apply_state = ACKNOWLEDGED + } + }) + } else { + // If the handler doesn't accept an `ack` callback, assume `apply_state` is `ACKNOWLEDGED`, + // unless it returns a promise, in which case we wait for the promise to be resolved or rejected. + // TODO: do we want to pass old and new config ? 
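+ // For illustration only (the handler bodies here are invented, not part of this module): a
+ // promise-style handler declares at most three parameters, e.g. `async (action, file, id) => { await apply(file) }`,
+ // while an ack-style handler declares the callback as a fourth parameter, e.g.
+ // `(action, file, id, ack) => { apply(file); ack() }`. `supportsAckCallback()` below tells them
+ // apart by inspecting `handler.length` (or rest parameters in the source).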
+ const result = handler(action, item.file, item.id) + if (result instanceof Promise) { + result.then( + () => { item.apply_state = ACKNOWLEDGED }, + (err) => { + item.apply_state = ERROR + item.apply_error = err.toString() + } + ) + } else { + item.apply_state = ACKNOWLEDGED + } + } + } catch (err) { + item.apply_state = ERROR + item.apply_error = err.toString() + } + } } function fromBase64JSON (str) { @@ -299,4 +319,22 @@ function parseConfigPath (configPath) { } } +function supportsAckCallback (handler) { + if (kSupportsAckCallback in handler) return handler[kSupportsAckCallback] + + const numOfArgs = handler.length + let result = false + + if (numOfArgs >= 4) { + result = true + } else if (numOfArgs !== 0) { + const source = handler.toString() + result = source.slice(0, source.indexOf(')')).includes('...') + } + + handler[kSupportsAckCallback] = result + + return result +} + module.exports = RemoteConfigManager diff --git a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js index 9c6c131ac26..ed946633174 100644 --- a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js +++ b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js @@ -4,6 +4,7 @@ const log = require('../../log') const Reporter = require('../reporter') const addresses = require('../addresses') const { getBlockingAction } = require('../blocking') +const { wafRunFinished } = require('../channels') // TODO: remove once ephemeral addresses are implemented const preventDuplicateAddresses = new Set([ @@ -11,42 +12,56 @@ const preventDuplicateAddresses = new Set([ ]) class WAFContextWrapper { - constructor (ddwafContext, wafTimeout, wafVersion, rulesVersion) { + constructor (ddwafContext, wafTimeout, wafVersion, rulesVersion, knownAddresses) { this.ddwafContext = ddwafContext this.wafTimeout = wafTimeout this.wafVersion = wafVersion this.rulesVersion = rulesVersion this.addressesToSkip = new Set() + this.knownAddresses = knownAddresses } run ({ persistent, ephemeral }, raspRuleType) { + if (this.ddwafContext.disposed) { + log.warn('Calling run on a disposed context') + return + } + const payload = {} let payloadHasData = false - const inputs = {} const newAddressesToSkip = new Set(this.addressesToSkip) if (persistent !== null && typeof persistent === 'object') { - // TODO: possible optimization: only send params that haven't already been sent with same value to this wafContext + const persistentInputs = {} + for (const key of Object.keys(persistent)) { - // TODO: requiredAddresses is no longer used due to processor addresses are not included in the list. Check on - // future versions when the actual addresses are included in the 'loaded' section inside diagnostics. 
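For illustration, a minimal sketch of the filtering this rewrite introduces (the constructor arguments and the known-address set are invented for the example):

const ctx = new WAFContextWrapper(ddwafContext, wafTimeout, wafVersion, rulesVersion,
  new Set(['server.db.statement']))
ctx.run({
  persistent: {
    'server.db.statement': 'SELECT 1', // listed in knownAddresses: forwarded to the WAF
    'server.db.system': 'postgresql'   // unknown to this ruleset build: dropped
  }
}, 'sql_injection')

Addresses already recorded in `addressesToSkip` are filtered out the same way, so duplicate-prone persistent addresses are sent at most once per context.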
- if (!this.addressesToSkip.has(key)) { - inputs[key] = persistent[key] + if (!this.addressesToSkip.has(key) && this.knownAddresses.has(key)) { + persistentInputs[key] = persistent[key] if (preventDuplicateAddresses.has(key)) { newAddressesToSkip.add(key) } } } - } - if (Object.keys(inputs).length) { - payload.persistent = inputs - payloadHasData = true + if (Object.keys(persistentInputs).length) { + payload.persistent = persistentInputs + payloadHasData = true + } } - if (ephemeral && Object.keys(ephemeral).length) { - payload.ephemeral = ephemeral - payloadHasData = true + if (ephemeral !== null && typeof ephemeral === 'object') { + const ephemeralInputs = {} + + for (const key of Object.keys(ephemeral)) { + if (this.knownAddresses.has(key)) { + ephemeralInputs[key] = ephemeral[key] + } + } + + if (Object.keys(ephemeralInputs).length) { + payload.ephemeral = ephemeralInputs + payloadHasData = true + } } if (!payloadHasData) return @@ -80,6 +95,10 @@ class WAFContextWrapper { Reporter.reportSchemas(result.derivatives) + if (wafRunFinished.hasSubscribers) { + wafRunFinished.publish({ payload }) + } + return result.actions } catch (err) { log.error('Error while running the AppSec WAF') diff --git a/packages/dd-trace/src/appsec/waf/waf_manager.js b/packages/dd-trace/src/appsec/waf/waf_manager.js index deac04f80ed..8d044764705 100644 --- a/packages/dd-trace/src/appsec/waf/waf_manager.js +++ b/packages/dd-trace/src/appsec/waf/waf_manager.js @@ -39,7 +39,8 @@ class WAFManager { this.ddwaf.createContext(), this.wafTimeout, this.ddwafVersion, - this.rulesVersion + this.rulesVersion, + this.ddwaf.knownAddresses ) contexts.set(req, wafContext) } diff --git a/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js b/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js index 3934ec0d5b2..466c5230b22 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js +++ b/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js @@ -72,6 +72,10 @@ class Writer extends BaseWriter { done() }) } + + setMetadataTags (tags) { + this._encoder.setMetadataTags(tags) + } } module.exports = Writer diff --git a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js index 4ec092d4905..9dabd34f7f3 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js +++ b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js @@ -291,6 +291,19 @@ class CiVisibilityExporter extends AgentInfoExporter { _getApiUrl () { return this._url } + + // By the time setMetadataTags is called, the agent info request might not have finished + setMetadataTags (tags) { + if (this._writer?.setMetadataTags) { + this._writer.setMetadataTags(tags) + } else { + this._canUseCiVisProtocolPromise.then(() => { + if (this._writer?.setMetadataTags) { + this._writer.setMetadataTags(tags) + } + }) + } + } } module.exports = CiVisibilityExporter diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index c4cae9a9268..dc5bb524d1a 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -3,7 +3,7 @@ const fs = require('fs') const os = require('os') const uuid = require('crypto-randomuuid') // we need to keep the old uuid dep because of cypress -const URL = require('url').URL +const { URL } = require('url') const log = require('./log') const pkg = require('./pkg') const coalesce = require('koalas') @@ -18,6 +18,7 @@ const { 
updateConfig } = require('./telemetry') const telemetryMetrics = require('./telemetry/metrics') const { getIsGCPFunction, getIsAzureFunction } = require('./serverless') const { ORIGIN_KEY } = require('./constants') +const { appendRules } = require('./payload-tagging/config') const tracerMetrics = telemetryMetrics.manager.namespace('tracers') @@ -173,6 +174,21 @@ function validateNamingVersion (versionString) { return versionString } +/** + * Given a string of comma-separated paths, return the array of paths. + * If a blank path is provided, null is returned to signal that the feature is disabled. + * An empty array means the feature is enabled but no rules need to be applied. + * + * @param {string} input + * @returns {[string]|null} + */ +function splitJSONPathRules (input) { + if (!input) return null + if (Array.isArray(input)) return input + if (input === 'all') return [] + return input.split(',') +} + // Shallow clone with property name remapping function remapify (input, mappings) { if (!input) return @@ -281,6 +297,26 @@ class Config { null ) + const DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = splitJSONPathRules( + coalesce( + process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING, + options.cloudPayloadTagging?.request, + '' + )) + + const DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = splitJSONPathRules( + coalesce( + process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING, + options.cloudPayloadTagging?.response, + '' + )) + + const DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH = coalesce( + process.env.DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH, + options.cloudPayloadTagging?.maxDepth, + 10 + ) + + // TODO: refactor this.apiKey = DD_API_KEY @@ -291,6 +327,15 @@ type: DD_INSTRUMENTATION_INSTALL_TYPE } + this.cloudPayloadTagging = { + requestsEnabled: !!DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING, + responsesEnabled: !!DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING, + maxDepth: DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH, + rules: appendRules( + DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING, DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING + ) + } + this._applyDefaults() this._applyEnvironment() this._applyOptions(options) @@ -423,6 +468,7 @@ this._setValue(defaults, 'dogstatsd.hostname', '127.0.0.1') this._setValue(defaults, 'dogstatsd.port', '8125') this._setValue(defaults, 'dsmEnabled', false) + this._setValue(defaults, 'dynamicInstrumentationEnabled', false) this._setValue(defaults, 'env', undefined) this._setValue(defaults, 'experimental.enableGetRumData', false) this._setValue(defaults, 'experimental.exporter', undefined) @@ -451,6 +497,7 @@ this._setValue(defaults, 'isGitUploadEnabled', false) this._setValue(defaults, 'isIntelligentTestRunnerEnabled', false) this._setValue(defaults, 'isManualApiEnabled', false) + this._setValue(defaults, 'ciVisibilityTestSessionName', '') this._setValue(defaults, 'logInjection', false) this._setValue(defaults, 'lookup', undefined) this._setValue(defaults, 'memcachedCommandEnabled', false) @@ -528,6 +575,7 @@ DD_DBM_PROPAGATION_MODE, DD_DOGSTATSD_HOSTNAME, DD_DOGSTATSD_PORT, + DD_DYNAMIC_INSTRUMENTATION_ENABLED, DD_ENV, DD_EXPERIMENTAL_API_SECURITY_ENABLED, DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED, @@ -657,6 +705,7 @@ this._setString(env, 'dogstatsd.hostname', DD_DOGSTATSD_HOSTNAME) this._setString(env, 'dogstatsd.port', DD_DOGSTATSD_PORT) this._setBoolean(env, 'dsmEnabled', DD_DATA_STREAMS_ENABLED) + this._setBoolean(env, 'dynamicInstrumentationEnabled', DD_DYNAMIC_INSTRUMENTATION_ENABLED) this._setString(env, 
'env', DD_ENV || tags.env) this._setBoolean(env, 'experimental.enableGetRumData', DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED) this._setString(env, 'experimental.exporter', DD_TRACE_EXPERIMENTAL_EXPORTER) @@ -824,11 +873,11 @@ class Config { this._setString(opts, 'dogstatsd.port', options.dogstatsd.port) } this._setBoolean(opts, 'dsmEnabled', options.dsmEnabled) + this._setBoolean(opts, 'dynamicInstrumentationEnabled', options.experimental?.dynamicInstrumentationEnabled) this._setString(opts, 'env', options.env || tags.env) - this._setBoolean(opts, 'experimental.enableGetRumData', - options.experimental && options.experimental.enableGetRumData) - this._setString(opts, 'experimental.exporter', options.experimental && options.experimental.exporter) - this._setBoolean(opts, 'experimental.runtimeId', options.experimental && options.experimental.runtimeId) + this._setBoolean(opts, 'experimental.enableGetRumData', options.experimental?.enableGetRumData) + this._setString(opts, 'experimental.exporter', options.experimental?.exporter) + this._setBoolean(opts, 'experimental.runtimeId', options.experimental?.runtimeId) this._setValue(opts, 'flushInterval', maybeInt(options.flushInterval)) this._optsUnprocessed.flushInterval = options.flushInterval this._setValue(opts, 'flushMinSpans', maybeInt(options.flushMinSpans)) @@ -954,10 +1003,10 @@ class Config { } _isCiVisibilityManualApiEnabled () { - return isTrue(coalesce( + return coalesce( process.env.DD_CIVISIBILITY_MANUAL_API_ENABLED, - false - )) + true + ) } _isTraceStatsComputationEnabled () { @@ -985,7 +1034,8 @@ class Config { DD_CIVISIBILITY_AGENTLESS_URL, DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED, DD_CIVISIBILITY_FLAKY_RETRY_ENABLED, - DD_CIVISIBILITY_FLAKY_RETRY_COUNT + DD_CIVISIBILITY_FLAKY_RETRY_COUNT, + DD_TEST_SESSION_NAME } = process.env if (DD_CIVISIBILITY_AGENTLESS_URL) { @@ -1000,7 +1050,8 @@ class Config { coalesce(DD_CIVISIBILITY_FLAKY_RETRY_ENABLED, true)) this._setValue(calc, 'flakyTestRetriesCount', coalesce(maybeInt(DD_CIVISIBILITY_FLAKY_RETRY_COUNT), 5)) this._setBoolean(calc, 'isIntelligentTestRunnerEnabled', isTrue(this._isCiVisibilityItrEnabled())) - this._setBoolean(calc, 'isManualApiEnabled', this._isCiVisibilityManualApiEnabled()) + this._setBoolean(calc, 'isManualApiEnabled', !isFalse(this._isCiVisibilityManualApiEnabled())) + this._setString(calc, 'ciVisibilityTestSessionName', DD_TEST_SESSION_NAME) } this._setString(calc, 'dogstatsd.hostname', this._getHostname()) this._setBoolean(calc, 'isGitUploadEnabled', diff --git a/packages/dd-trace/src/constants.js b/packages/dd-trace/src/constants.js index 0d9bcc495dd..61f5b705ddb 100644 --- a/packages/dd-trace/src/constants.js +++ b/packages/dd-trace/src/constants.js @@ -34,5 +34,15 @@ module.exports = { SCI_REPOSITORY_URL: '_dd.git.repository_url', SCI_COMMIT_SHA: '_dd.git.commit.sha', APM_TRACING_ENABLED_KEY: '_dd.apm.enabled', - APPSEC_PROPAGATION_KEY: '_dd.p.appsec' + APPSEC_PROPAGATION_KEY: '_dd.p.appsec', + PAYLOAD_TAG_REQUEST_PREFIX: 'aws.request.body', + PAYLOAD_TAG_RESPONSE_PREFIX: 'aws.response.body', + PAYLOAD_TAGGING_MAX_TAGS: 758, + SCHEMA_DEFINITION: 'schema.definition', + SCHEMA_WEIGHT: 'schema.weight', + SCHEMA_TYPE: 'schema.type', + SCHEMA_ID: 'schema.id', + SCHEMA_TOPIC: 'schema.topic', + SCHEMA_OPERATION: 'schema.operation', + SCHEMA_NAME: 'schema.name' } diff --git a/packages/dd-trace/src/data_streams_context.js b/packages/dd-trace/src/data_streams_context.js index 33354920443..e3c62d35e25 100644 --- a/packages/dd-trace/src/data_streams_context.js +++ 
b/packages/dd-trace/src/data_streams_context.js @@ -1,4 +1,5 @@ const { storage } = require('../../datadog-core') +const log = require('./log') function getDataStreamsContext () { const store = storage.getStore() @@ -6,6 +7,8 @@ function getDataStreamsContext () { } function setDataStreamsContext (dataStreamsContext) { + log.debug(() => `Setting new DSM Context: ${JSON.stringify(dataStreamsContext)}.`) + if (dataStreamsContext) storage.enterWith({ ...(storage.getStore()), dataStreamsContext }) } diff --git a/packages/dd-trace/src/datastreams/fnv.js b/packages/dd-trace/src/datastreams/fnv.js new file mode 100644 index 00000000000..c226ec40cd4 --- /dev/null +++ b/packages/dd-trace/src/datastreams/fnv.js @@ -0,0 +1,23 @@ +const FNV_64_PRIME = BigInt('0x100000001B3') +const FNV1_64_INIT = BigInt('0xCBF29CE484222325') + +function fnv (data, hvalInit, fnvPrime, fnvSize) { + let hval = hvalInit + for (const byte of data) { + hval = (hval * fnvPrime) % fnvSize + hval = hval ^ BigInt(byte) + } + return hval +} + +function fnv64 (data) { + if (!Buffer.isBuffer(data)) { + data = Buffer.from(data, 'utf-8') + } + const byteArray = new Uint8Array(data) + return fnv(byteArray, FNV1_64_INIT, FNV_64_PRIME, BigInt(2) ** BigInt(64)) +} + +module.exports = { + fnv64 +} diff --git a/packages/dd-trace/src/datastreams/pathway.js b/packages/dd-trace/src/datastreams/pathway.js index 5d587a4768f..066af789e64 100644 --- a/packages/dd-trace/src/datastreams/pathway.js +++ b/packages/dd-trace/src/datastreams/pathway.js @@ -4,6 +4,8 @@ const crypto = require('crypto') const { encodeVarint, decodeVarint } = require('./encoding') const LRUCache = require('lru-cache') +const log = require('../log') +const pick = require('../../../datadog-core/src/utils/src/pick') const options = { max: 500 } const cache = new LRUCache(options) @@ -11,6 +13,8 @@ const cache = new LRUCache(options) const CONTEXT_PROPAGATION_KEY = 'dd-pathway-ctx' const CONTEXT_PROPAGATION_KEY_BASE64 = 'dd-pathway-ctx-base64' +const logKeys = [CONTEXT_PROPAGATION_KEY, CONTEXT_PROPAGATION_KEY_BASE64] + function shaHash (checkpointString) { const hash = crypto.createHash('md5').update(checkpointString).digest('hex').slice(0, 16) return Buffer.from(hash, 'hex') @@ -80,9 +84,13 @@ class DsmPathwayCodec { return } carrier[CONTEXT_PROPAGATION_KEY_BASE64] = encodePathwayContextBase64(dataStreamsContext) + + log.debug(() => `Injected into DSM carrier: ${JSON.stringify(pick(carrier, logKeys))}.`) } static decode (carrier) { + log.debug(() => `Attempting extract from DSM carrier: ${JSON.stringify(pick(carrier, logKeys))}.`) + if (carrier == null) return let ctx @@ -97,13 +105,12 @@ class DsmPathwayCodec { // pass } // cover case where base64 context was received under wrong key - if (!ctx) ctx = decodePathwayContextBase64(carrier[CONTEXT_PROPAGATION_KEY]) + if (!ctx && CONTEXT_PROPAGATION_KEY in carrier) { + ctx = decodePathwayContextBase64(carrier[CONTEXT_PROPAGATION_KEY]) + } } - return ctx - } - static contextExists (carrier) { - return CONTEXT_PROPAGATION_KEY_BASE64 in carrier || CONTEXT_PROPAGATION_KEY in carrier + return ctx } } diff --git a/packages/dd-trace/src/datastreams/processor.js b/packages/dd-trace/src/datastreams/processor.js index cd8220a267e..d036af805a7 100644 --- a/packages/dd-trace/src/datastreams/processor.js +++ b/packages/dd-trace/src/datastreams/processor.js @@ -9,6 +9,9 @@ const { DataStreamsWriter } = require('./writer') const { computePathwayHash } = require('./pathway') const { types } = require('util') const { PATHWAY_HASH } = 
require('../../../../ext/tags') +const { SchemaBuilder } = require('./schemas/schema_builder') +const { SchemaSampler } = require('./schemas/schema_sampler') +const log = require('../log') const ENTRY_PARENT_HASH = Buffer.from('0000000000000000', 'hex') @@ -194,6 +197,7 @@ class DataStreamsProcessor { this.version = version || '' this.sequence = 0 this.flushInterval = flushInterval + this._schemaSamplers = {} if (this.enabled) { this.timer = setInterval(this.onInterval.bind(this), flushInterval) @@ -269,6 +273,11 @@ closestOppositeDirectionHash = parentHash closestOppositeDirectionEdgeStart = edgeStartNs } + log.debug( + () => `Setting DSM Checkpoint from extracted parent context with hash: ${parentHash} and edge tags: ${edgeTags}` + ) + } else { + log.debug(() => 'Setting DSM Checkpoint with empty parent context.') } const hash = computePathwayHash(this.service, this.env, edgeTags, parentHash) const edgeLatencyNs = nowNs - edgeStartNs @@ -352,6 +361,32 @@ setUrl (url) { this.writer.setUrl(url) } + + trySampleSchema (topic) { + const nowMs = Date.now() + + if (!this._schemaSamplers[topic]) { + this._schemaSamplers[topic] = new SchemaSampler() + } + + const sampler = this._schemaSamplers[topic] + return sampler.trySample(nowMs) + } + + canSampleSchema (topic) { + const nowMs = Date.now() + + if (!this._schemaSamplers[topic]) { + this._schemaSamplers[topic] = new SchemaSampler() + } + + const sampler = this._schemaSamplers[topic] + return sampler.canSample(nowMs) + } + + getSchema (schemaName, iterator) { + return SchemaBuilder.getSchema(schemaName, iterator) + } } module.exports = { diff --git a/packages/dd-trace/src/datastreams/schemas/schema.js b/packages/dd-trace/src/datastreams/schemas/schema.js new file mode 100644 index 00000000000..4378e37d080 --- /dev/null +++ b/packages/dd-trace/src/datastreams/schemas/schema.js @@ -0,0 +1,8 @@ +class Schema { + constructor (definition, id) { + this.definition = definition + this.id = id + } +} + +module.exports = { Schema } diff --git a/packages/dd-trace/src/datastreams/schemas/schema_builder.js b/packages/dd-trace/src/datastreams/schemas/schema_builder.js new file mode 100644 index 00000000000..a65863d4d87 --- /dev/null +++ b/packages/dd-trace/src/datastreams/schemas/schema_builder.js @@ -0,0 +1,125 @@ +const LRUCache = require('lru-cache') +const { fnv64 } = require('../fnv') +const { Schema } = require('./schema') + +const maxDepth = 10 +const maxProperties = 1000 +const CACHE = new LRUCache({ max: 32 }) + +class SchemaBuilder { + constructor (iterator) { + this.schema = new OpenApiSchema() + this.iterator = iterator + this.properties = 0 + } + + addProperty (schemaName, fieldName, isArray, type, description, ref, format, enumValues) { + if (this.properties >= maxProperties) { + return false + } + this.properties += 1 + let property = new OpenApiSchema.PROPERTY(type, description, ref, format, enumValues, null) + if (isArray) { + property = new OpenApiSchema.PROPERTY('array', null, null, null, null, property) + } + this.schema.components.schemas[schemaName].properties[fieldName] = property + return true + } + + build () { + this.iterator.iterateOverSchema(this) + const noNones = convertToJsonCompatible(this.schema) + const definition = jsonStringify(noNones) + const id = fnv64(Buffer.from(definition, 'utf-8')).toString() + return new Schema(definition, id) + } + + shouldExtractSchema (schemaName, depth) { + if (depth > maxDepth) { + return false + } + if (schemaName in 
this.schema.components.schemas) { + return false + } + this.schema.components.schemas[schemaName] = new OpenApiSchema.SCHEMA() + return true + } + + static getSchema (schemaName, iterator) { + if (!CACHE.has(schemaName)) { + CACHE.set(schemaName, new SchemaBuilder(iterator).build()) + } + return CACHE.get(schemaName) + } +} + +class OpenApiSchema { + constructor () { + this.openapi = '3.0.0' + this.components = new OpenApiComponents() + } +} + +OpenApiSchema.SCHEMA = class { + constructor () { + this.type = 'object' + this.properties = {} + } +} + +OpenApiSchema.PROPERTY = class { + constructor (type, description = null, ref = null, format = null, enumValues = null, items = null) { + this.type = type + this.description = description + this.$ref = ref + this.format = format + this.enum = enumValues + this.items = items + } +} + +class OpenApiComponents { + constructor () { + this.schemas = {} + } +} + +function convertToJsonCompatible (obj) { + if (Array.isArray(obj)) { + return obj.filter(item => item !== null).map(item => convertToJsonCompatible(item)) + } else if (obj && typeof obj === 'object') { + const jsonObj = {} + for (const [key, value] of Object.entries(obj)) { + if (value !== null) { + jsonObj[key] = convertToJsonCompatible(value) + } + } + return jsonObj + } + return obj +} + +function convertKey (key) { + if (key === 'enumValues') { + return 'enum' + } + return key +} + +function jsonStringify (obj, indent = 2) { + // made to stringify json exactly like python / java in order for hashing to be the same + const jsonString = JSON.stringify(obj, (_, value) => value, indent) + return jsonString.replace(/^ +/gm, ' ') // Replace leading spaces with single space + .replace(/\n/g, '') // Remove newlines + .replace(/{ /g, '{') // Remove space after '{' + .replace(/ }/g, '}') // Remove space before '}' + .replace(/\[ /g, '[') // Remove space after '[' + .replace(/ \]/g, ']') // Remove space before ']' +} + +module.exports = { + SchemaBuilder, + OpenApiSchema, + convertToJsonCompatible, + convertKey +} diff --git a/packages/dd-trace/src/datastreams/schemas/schema_sampler.js b/packages/dd-trace/src/datastreams/schemas/schema_sampler.js new file mode 100644 index 00000000000..903a4ea1dec --- /dev/null +++ b/packages/dd-trace/src/datastreams/schemas/schema_sampler.js @@ -0,0 +1,27 @@ +const SAMPLE_INTERVAL_MILLIS = 30 * 1000 + +class SchemaSampler { + constructor () { + this.weight = 0 + this.lastSampleMs = 0 + } + + trySample (currentTimeMs) { + if (currentTimeMs >= this.lastSampleMs + SAMPLE_INTERVAL_MILLIS) { + this.lastSampleMs = currentTimeMs + const weight = this.weight + this.weight = 0 + return weight + } + return 0 + } + + canSample (currentTimeMs) { + this.weight += 1 + return currentTimeMs >= this.lastSampleMs + SAMPLE_INTERVAL_MILLIS + } +} + +module.exports = { + SchemaSampler +} diff --git a/packages/dd-trace/src/debugger/devtools_client/config.js b/packages/dd-trace/src/debugger/devtools_client/config.js new file mode 100644 index 00000000000..3e7c19715e1 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/config.js @@ -0,0 +1,24 @@ +'use strict' + +const { workerData: { config: parentConfig, parentThreadId, configPort } } = require('node:worker_threads') +const { format } = require('node:url') +const log = require('../../log') + +const config = module.exports = { + runtimeId: parentConfig.tags['runtime-id'], + service: parentConfig.service, + parentThreadId +} + 
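+ // Illustration (values invented): `updateUrl({ port: 8126 })` sets config.url to
+ // 'http://localhost:8126', `updateUrl({ hostname: 'agent', port: 8126 })` sets it to
+ // 'http://agent:8126', and a message carrying an explicit `url` is used verbatim.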
+updateUrl(parentConfig) + +configPort.on('message', updateUrl) +configPort.on('messageerror', (err) => log.error(err)) + +function updateUrl (updates) { + config.url = updates.url || format({ + protocol: 'http:', + hostname: updates.hostname || 'localhost', + port: updates.port + }) +} diff --git a/packages/dd-trace/src/debugger/devtools_client/index.js b/packages/dd-trace/src/debugger/devtools_client/index.js new file mode 100644 index 00000000000..f4789ea65a8 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/index.js @@ -0,0 +1,57 @@ +'use strict' + +const { randomUUID } = require('crypto') +const { breakpoints } = require('./state') +const session = require('./session') +const send = require('./send') +const { ackEmitting } = require('./status') +const { parentThreadId } = require('./config') +const log = require('../../log') +const { version } = require('../../../../../package.json') + +require('./remote_config') + +// There doesn't seem to be an official standard for the content of these fields, so we're just populating them with +// something that should be useful to a Node.js developer. +const threadId = parentThreadId === 0 ? `pid:${process.pid}` : `pid:${process.pid};tid:${parentThreadId}` +const threadName = parentThreadId === 0 ? 'MainThread' : `WorkerThread:${parentThreadId}` + +session.on('Debugger.paused', async ({ params }) => { + const start = process.hrtime.bigint() + const timestamp = Date.now() + const probes = params.hitBreakpoints.map((id) => breakpoints.get(id)) + await session.post('Debugger.resume') + const diff = process.hrtime.bigint() - start // TODO: Should this be recorded as telemetry? + + log.debug(`Finished processing breakpoints - main thread paused for: ${Number(diff) / 1000000} ms`) + + const logger = { + // We can safely use `location.file` from the first probe in the array, since all probes hit by `hitBreakpoints` + // must exist in the same file since the debugger can only pause the main thread in one location. 
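+ // Illustration (values invented): for a probe in `src/app.js` hit inside `handlerA` on the main
+ // thread of pid 1234, this yields logger = { name: 'src/app.js', method: 'handlerA', version,
+ // thread_id: 'pid:1234', thread_name: 'MainThread' }.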
+ name: probes[0].location.file, // name of the class/type/file emitting the snapshot + method: params.callFrames[0].functionName, // name of the method/function emitting the snapshot + version, + thread_id: threadId, + thread_name: threadName + } + + // TODO: Send multiple probes in one HTTP request as an array + for (const probe of probes) { + const snapshot = { + id: randomUUID(), + timestamp, + probe: { + id: probe.id, + version: probe.version, + location: probe.location + }, + language: 'javascript' + } + + // TODO: Process template + send(probe.template, logger, snapshot, (err) => { + if (err) log.error(err) + else ackEmitting(probe) + }) + } +}) diff --git a/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js b/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js new file mode 100644 index 00000000000..bb4b0340be6 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/inspector_promises_polyfill.js @@ -0,0 +1,23 @@ +'use strict' + +const { builtinModules } = require('node:module') + +if (builtinModules.includes('inspector/promises')) { + module.exports = require('node:inspector/promises') +} else { + const inspector = require('node:inspector') + const { promisify } = require('node:util') + + // The rest of the code in this file is lifted from: + // https://github.com/nodejs/node/blob/1d4d76ff3fb08f9a0c55a1d5530b46c4d5d550c7/lib/inspector/promises.js + class Session extends inspector.Session { + constructor () { super() } // eslint-disable-line no-useless-constructor + } + + Session.prototype.post = promisify(inspector.Session.prototype.post) + + module.exports = { + ...inspector, + Session + } +} diff --git a/packages/dd-trace/src/debugger/devtools_client/remote_config.js b/packages/dd-trace/src/debugger/devtools_client/remote_config.js new file mode 100644 index 00000000000..25ac070cc9f --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/remote_config.js @@ -0,0 +1,164 @@ +'use strict' + +const { workerData: { rcPort } } = require('node:worker_threads') +const { getScript, probes, breakpoints } = require('./state') +const session = require('./session') +const { ackReceived, ackInstalled, ackError } = require('./status') +const log = require('../../log') + +let sessionStarted = false + +// Example log line probe (simplified): +// { +// id: '100c9a5c-45ad-49dc-818b-c570d31e11d1', +// version: 0, +// type: 'LOG_PROBE', +// where: { sourceFile: 'index.js', lines: ['25'] }, // only use first array element +// template: 'Hello World 2', +// segments: [...], +// captureSnapshot: true, +// capture: { maxReferenceDepth: 1 }, +// sampling: { snapshotsPerSecond: 1 }, +// evaluateAt: 'EXIT' // only used for method probes +// } +// +// Example log method probe (simplified): +// { +// id: 'd692ee6d-5734-4df7-9d86-e3bc6449cc8c', +// version: 0, +// type: 'LOG_PROBE', +// where: { typeName: 'index.js', methodName: 'handlerA' }, +// template: 'Executed index.js.handlerA, it took {@duration}ms', +// segments: [...], +// captureSnapshot: false, +// capture: { maxReferenceDepth: 3 }, +// sampling: { snapshotsPerSecond: 5000 }, +// evaluateAt: 'EXIT' // only used for method probes +// } +rcPort.on('message', async ({ action, conf: probe, ackId }) => { + try { + await processMsg(action, probe) + rcPort.postMessage({ ackId }) + } catch (err) { + rcPort.postMessage({ ackId, error: err }) + ackError(err, probe) + } +}) +rcPort.on('messageerror', (err) => log.error(err)) + +async function start () { + sessionStarted = true + 
return session.post('Debugger.enable') // return instead of await to reduce number of promises created +} + +async function stop () { + sessionStarted = false + return session.post('Debugger.disable') // return instead of await to reduce number of promises created +} + +async function processMsg (action, probe) { + log.debug(`Received request to ${action} ${probe.type} probe (id: ${probe.id}, version: ${probe.version})`) + + if (action !== 'unapply') ackReceived(probe) + + if (probe.type !== 'LOG_PROBE') { + throw new Error(`Unsupported probe type: ${probe.type} (id: ${probe.id}, version: ${probe.version})`) + } + if (!probe.where.sourceFile && !probe.where.lines) { + throw new Error( + // eslint-disable-next-line max-len + `Unsupported probe insertion point! Only line-based probes are supported (id: ${probe.id}, version: ${probe.version})` + ) + } + + // This lock is to ensure that we don't get the following race condition: + // + // When a breakpoint is being removed and there are no other breakpoints, we disable the debugger by calling + // `Debugger.disable` to free resources. However, if a new breakpoint is being added around the same time, we might + // have a race condition where the new breakpoint thinks that the debugger is already enabled because the removal of + // the other breakpoint hasn't had a chance to call `Debugger.disable` yet. Then once the code that's adding the new + // breakpoints tries to call `Debugger.setBreakpoint` it fails because in the meantime `Debugger.disable` was called. + // + // If the code is ever refactored to not tear down the debugger if there are no active breakpoints, we can safely + // remove this lock. + const release = await lock() + + try { + switch (action) { + case 'unapply': + await removeBreakpoint(probe) + break + case 'apply': + await addBreakpoint(probe) + break + case 'modify': + // TODO: Can we modify in place? + await removeBreakpoint(probe) + await addBreakpoint(probe) + break + default: + throw new Error( + // eslint-disable-next-line max-len + `Cannot process probe ${probe.id} (version: ${probe.version}) - unknown remote configuration action: ${action}` + ) + } + } finally { + release() + } +} + +async function addBreakpoint (probe) { + if (!sessionStarted) await start() + + const file = probe.where.sourceFile + const line = Number(probe.where.lines[0]) // Tracer doesn't support multiple-line breakpoints + + // Optimize for sending data to /debugger/v1/input endpoint + probe.location = { file, lines: [line] } + delete probe.where + + // TODO: In between `await session.post('Debugger.enable')` and here, the scripts are parsed and cached. + // Maybe there's a race condition here or maybe we're guaranteed that `await session.post('Debugger.enable')` will + // not continue until all scripts have been parsed? + const script = getScript(file) + if (!script) throw new Error(`No loaded script found for ${file} (probe: ${probe.id}, version: ${probe.version})`) + const [path, scriptId] = script + + log.debug(`Adding breakpoint at ${path}:${line} (probe: ${probe.id}, version: ${probe.version})`) + + const { breakpointId } = await session.post('Debugger.setBreakpoint', { + location: { + scriptId, + lineNumber: line - 1 // Beware! 
lineNumber is zero-indexed + } + }) + + probes.set(probe.id, breakpointId) + breakpoints.set(breakpointId, probe) + + ackInstalled(probe) +} + +async function removeBreakpoint ({ id }) { + if (!sessionStarted) { + // We should not get in this state, but abort if we do, so the code doesn't fail unexpectedly + throw Error(`Cannot remove probe ${id}: Debugger not started`) + } + if (!probes.has(id)) { + throw Error(`Unknown probe id: ${id}`) + } + + const breakpointId = probes.get(id) + await session.post('Debugger.removeBreakpoint', { breakpointId }) + probes.delete(id) + breakpoints.delete(breakpointId) + + if (breakpoints.size === 0) await stop() +} + +async function lock () { + if (lock.p) await lock.p + let resolve + lock.p = new Promise((_resolve) => { resolve = _resolve }).then(() => { lock.p = null }) + return resolve +} diff --git a/packages/dd-trace/src/debugger/devtools_client/send.js b/packages/dd-trace/src/debugger/devtools_client/send.js new file mode 100644 index 00000000000..709e14d52b7 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/send.js @@ -0,0 +1,28 @@ +'use strict' + +const config = require('./config') +const request = require('../../exporters/common/request') + +module.exports = send + +const ddsource = 'dd_debugger' +const service = config.service + +function send (message, logger, snapshot, cb) { + const opts = { + method: 'POST', + url: config.url, + path: '/debugger/v1/input', + headers: { 'Content-Type': 'application/json; charset=utf-8' } + } + + const payload = { + ddsource, + service, + message, + logger, + 'debugger.snapshot': snapshot + } + + request(JSON.stringify(payload), opts, cb) +} diff --git a/packages/dd-trace/src/debugger/devtools_client/session.js b/packages/dd-trace/src/debugger/devtools_client/session.js new file mode 100644 index 00000000000..3cda2322b36 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/session.js @@ -0,0 +1,7 @@ +'use strict' + +const inspector = require('./inspector_promises_polyfill') + +const session = module.exports = new inspector.Session() + +session.connectToMainThread() diff --git a/packages/dd-trace/src/debugger/devtools_client/state.js b/packages/dd-trace/src/debugger/devtools_client/state.js new file mode 100644 index 00000000000..316841667fb --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/state.js @@ -0,0 +1,47 @@ +'use strict' + +const session = require('./session') + +const scripts = [] + +module.exports = { + probes: new Map(), + breakpoints: new Map(), + + /** + * Find the matching script that can be inspected based on a partial path. + * + * Algorithm: Find the shortest url that ends in the requested path. + * + * Will identify the correct script as long as Node.js doesn't load a module from a `node_modules` folder outside the + * project root. If so, there's a risk that this path is shorter than the expected path inside the project root. 
+ +// Known params.url protocols: +// - `node:` - Ignored, as we don't want to instrument Node.js internals +// - `wasm:` - Ignored, as we don't support instrumenting WebAssembly +// - `file:` - Regular on-disk file +// Unknown params.url values: +// - `structured-stack` - Not sure what this is, but should just be ignored +// - `` - Not sure what this is, but should just be ignored +// TODO: Event fired for all files, every time debugger is enabled. So when we disable it, we need to reset the state +session.on('Debugger.scriptParsed', ({ params }) => { + if (params.url.startsWith('file:')) { + scripts.push([params.url, params.scriptId]) + } +}) diff --git a/packages/dd-trace/src/debugger/devtools_client/status.js b/packages/dd-trace/src/debugger/devtools_client/status.js new file mode 100644 index 00000000000..e4ba10d8c55 --- /dev/null +++ b/packages/dd-trace/src/debugger/devtools_client/status.js @@ -0,0 +1,109 @@ +'use strict' + +const LRUCache = require('lru-cache') +const config = require('./config') +const request = require('../../exporters/common/request') +const FormData = require('../../exporters/common/form-data') +const log = require('../../log') + +module.exports = { + ackReceived, + ackInstalled, + ackEmitting, + ackError +} + +const ddsource = 'dd_debugger' +const service = config.service +const runtimeId = config.runtimeId + +const cache = new LRUCache({ + ttl: 1000 * 60 * 60, // 1 hour + // Unfortunate requirement when using LRUCache: + // It will emit a warning unless `ttlAutopurge`, `max`, or `maxSize` is set when using `ttl`. + // TODO: Consider alternative as this is NOT performant :( + ttlAutopurge: true +})
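For context, a sketch of how this TTL cache deduplicates probe status updates (see `onlyUniqueUpdates` further down; note the sketch stores an explicit `true` value, whereas the patch calls `cache.set(key)` without one):

```js
const LRUCache = require('lru-cache')
const seen = new LRUCache({ ttl: 1000 * 60 * 60, ttlAutopurge: true })

function onceAnHour (key, fn) {
  if (seen.has(key)) return // duplicate within the TTL window: suppress
  fn()
  seen.set(key, true)
}

onceAnHour('RECEIVED-probe1-v1', () => console.log('sent')) // logs 'sent'
onceAnHour('RECEIVED-probe1-v1', () => console.log('sent')) // no-op
```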
+ +const STATUSES = { + RECEIVED: 'RECEIVED', + INSTALLED: 'INSTALLED', + EMITTING: 'EMITTING', + ERROR: 'ERROR', + BLOCKED: 'BLOCKED' // TODO: Implement once support for allow list, deny list or max probe limit has been added +} + +function ackReceived ({ id: probeId, version }) { + onlyUniqueUpdates( + STATUSES.RECEIVED, probeId, version, + () => send(statusPayload(probeId, version, STATUSES.RECEIVED)) + ) +} + +function ackInstalled ({ id: probeId, version }) { + onlyUniqueUpdates( + STATUSES.INSTALLED, probeId, version, + () => send(statusPayload(probeId, version, STATUSES.INSTALLED)) + ) +} + +function ackEmitting ({ id: probeId, version }) { + onlyUniqueUpdates( + STATUSES.EMITTING, probeId, version, + () => send(statusPayload(probeId, version, STATUSES.EMITTING)) + ) +} + +function ackError (err, { id: probeId, version }) { + log.error(err) + + onlyUniqueUpdates(STATUSES.ERROR, probeId, version, () => { + const payload = statusPayload(probeId, version, STATUSES.ERROR) + + payload.debugger.diagnostics.exception = { + type: err.code, + message: err.message, + stacktrace: err.stack + } + + send(payload) + }) +} + +function send (payload) { + const form = new FormData() + + form.append( + 'event', + JSON.stringify(payload), + { filename: 'event.json', contentType: 'application/json; charset=utf-8' } + ) + + const options = { + method: 'POST', + url: config.url, + path: '/debugger/v1/diagnostics', + headers: form.getHeaders() + } + + request(form, options, (err) => { + if (err) log.error(err) + }) +} + +function statusPayload (probeId, version, status) { + return { + ddsource, + service, + debugger: { + diagnostics: { probeId, runtimeId, version, status } + } + } +} + +function onlyUniqueUpdates (type, id, version, fn) { + const key = `${type}-${id}-${version}` + if (cache.has(key)) return + fn() + cache.set(key) +} diff --git a/packages/dd-trace/src/debugger/index.js b/packages/dd-trace/src/debugger/index.js new file mode 100644 index 00000000000..5db1a440cf2 --- /dev/null +++ b/packages/dd-trace/src/debugger/index.js @@ -0,0 +1,92 @@ +'use strict' + +const { join } = require('path') +const { Worker, MessageChannel, threadId: parentThreadId } = require('worker_threads') +const log = require('../log') + +let worker = null +let configChannel = null + +const { NODE_OPTIONS, ...env } = process.env + +module.exports = { + start, + configure +} + +function start (config, rc) { + if (worker !== null) return + + log.debug('Starting Dynamic Instrumentation client...') + + const rcAckCallbacks = new Map() + const rcChannel = new MessageChannel() + configChannel = new MessageChannel() + + rc.setProductHandler('LIVE_DEBUGGING', (action, conf, id, ack) => { + const ackId = `${id}-${conf.version}` + rcAckCallbacks.set(ackId, ack) + rcChannel.port2.postMessage({ action, conf, ackId }) + }) + + rcChannel.port2.on('message', ({ ackId, error }) => { + rcAckCallbacks.get(ackId)(error) + rcAckCallbacks.delete(ackId) + }) + rcChannel.port2.on('messageerror', (err) => log.error(err)) + + worker = new Worker( + join(__dirname, 'devtools_client', 'index.js'), + { + execArgv: [], // Avoid worker thread inheriting the `-r` command line argument + env, // Avoid worker thread inheriting the `NODE_OPTIONS` environment variable (in case it contains `-r`) + workerData: { + config: serializableConfig(config), + parentThreadId, + rcPort: rcChannel.port1, + configPort: configChannel.port1 + }, + transferList: [rcChannel.port1,
configChannel.port1] + } + ) + + worker.unref() + + worker.on('online', () => { + log.debug(`Dynamic Instrumentation worker thread started successfully (thread id: ${worker.threadId})`) + }) + + worker.on('error', (err) => log.error(err)) + worker.on('messageerror', (err) => log.error(err)) + + worker.on('exit', (code) => { + const error = new Error(`Dynamic Instrumentation worker thread exited unexpectedly with code ${code}`) + + log.error(error) + + // Be nice, clean up now that the worker thread encountered an issue and we can't continue + rc.removeProductHandler('LIVE_DEBUGGING') + worker.removeAllListeners() + configChannel = null + for (const ackId of rcAckCallbacks.keys()) { + rcAckCallbacks.get(ackId)(error) + rcAckCallbacks.delete(ackId) + } + }) +} + +function configure (config) { + if (configChannel === null) return + configChannel.port2.postMessage(serializableConfig(config)) +} + +// TODO: Refactor the Config class so it never produces any config objects that are incompatible with MessageChannel +function serializableConfig (config) { + // URL objects cannot be serialized over the MessageChannel, so we need to convert them to strings first + if (config.url instanceof URL) { + config = { ...config } + config.url = config.url.toString() + } + + return config +} diff --git a/packages/dd-trace/src/encode/agentless-ci-visibility.js b/packages/dd-trace/src/encode/agentless-ci-visibility.js index 7b78c0ea3ce..dea15182323 --- a/packages/dd-trace/src/encode/agentless-ci-visibility.js +++ b/packages/dd-trace/src/encode/agentless-ci-visibility.js @@ -43,9 +43,15 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { // length of `payload.events` when calling `makePayload` this._eventCount = 0 + this.metadataTags = {} + this.reset() } + setMetadataTags (tags) { + this.metadataTags = tags + } + _encodeTestSuite (bytes, content) { let keysLength = TEST_SUITE_KEYS_LENGTH const itrCorrelationId = content.meta[ITR_CORRELATION_ID] @@ -277,6 +283,10 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { } _encode (bytes, trace) { + if (this._isReset) { + this._encodePayloadStart(bytes) + this._isReset = false + } const startTime = Date.now() const rawEvents = trace.map(formatSpan) @@ -330,7 +340,8 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { '*': { language: 'javascript', library_version: ddTraceVersion - } + }, + ...this.metadataTags }, events: [] } @@ -349,6 +360,22 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { this._encodeMapPrefix(bytes, Object.keys(payload.metadata).length) this._encodeString(bytes, '*') this._encodeMap(bytes, payload.metadata['*']) + if (payload.metadata.test) { + this._encodeString(bytes, 'test') + this._encodeMap(bytes, payload.metadata.test) + } + if (payload.metadata.test_suite_end) { + this._encodeString(bytes, 'test_suite_end') + this._encodeMap(bytes, payload.metadata.test_suite_end) + } + if (payload.metadata.test_module_end) { + this._encodeString(bytes, 'test_module_end') + this._encodeMap(bytes, payload.metadata.test_module_end) + } + if (payload.metadata.test_session_end) { + this._encodeString(bytes, 'test_session_end') + this._encodeMap(bytes, payload.metadata.test_session_end) + } this._encodeString(bytes, 'events') // Get offset of the events list to update the length of the array when calling `makePayload` this._eventsOffset = bytes.length @@ -359,7 +386,7 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { reset () { this._reset() this._eventCount = 0 - this._encodePayloadStart(this._traceBytes) + this._isReset = true + } }
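A standalone illustration of the `MessageChannel` limitation that `serializableConfig` above works around (not part of the patch): `URL` instances don't survive structured cloning, so posting one over a `MessagePort` fails, while a plain string passes through fine.

```js
const { MessageChannel } = require('worker_threads')
const { port1, port2 } = new MessageChannel()
port2.on('message', (msg) => {
  console.log('received:', msg) // received: { url: 'http://127.0.0.1:8126/' }
  port1.close()
})

const config = { url: new URL('http://127.0.0.1:8126') }
// Posting `config` as-is is unsafe; stringify the URL first, as the patch does:
port1.postMessage({ ...config, url: config.url.toString() })
```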
diff --git a/packages/dd-trace/src/exporters/common/request.js b/packages/dd-trace/src/exporters/common/request.js index 62f3bafb03e..6823119c0d8 --- a/packages/dd-trace/src/exporters/common/request.js +++ b/packages/dd-trace/src/exporters/common/request.js @@ -183,7 +183,7 @@ function request (data, options, callback) { } function byteLength (data) { - return data.length > 0 ? data.reduce((prev, next) => prev + next.length, 0) : 0 + return data.length > 0 ? data.reduce((prev, next) => prev + Buffer.byteLength(next, 'utf8'), 0) : 0 } Object.defineProperty(request, 'writable', { diff --git a/packages/dd-trace/src/payload-tagging/config/aws.json b/packages/dd-trace/src/payload-tagging/config/aws.json new file mode 100644 index 00000000000..400b25bf670 --- /dev/null +++ b/packages/dd-trace/src/payload-tagging/config/aws.json @@ -0,0 +1,30 @@ +{ + "sns": { + "request": [ + "$.Attributes.KmsMasterKeyId", + "$.Attributes.PlatformCredential", + "$.Attributes.PlatformPrincipal", + "$.Attributes.Token", + "$.AWSAccountId", + "$.Endpoint", + "$.OneTimePassword", + "$.phoneNumber", + "$.PhoneNumber", + "$.Token" + ], + "response": [ + "$.Attributes.KmsMasterKeyId", + "$.Attributes.Token", + "$.Endpoints.*.Token", + "$.PhoneNumber", + "$.PhoneNumbers", + "$.phoneNumbers", + "$.PlatformApplication.*.PlatformCredential", + "$.PlatformApplication.*.PlatformPrincipal", + "$.Subscriptions.*.Endpoint" + ], + "expand": [ + "$.MessageAttributes.*.StringValue" + ] + } +} diff --git a/packages/dd-trace/src/payload-tagging/config/index.js b/packages/dd-trace/src/payload-tagging/config/index.js new file mode 100644 index 00000000000..16ab4dfd814 --- /dev/null +++ b/packages/dd-trace/src/payload-tagging/config/index.js @@ -0,0 +1,30 @@ +const aws = require('./aws.json') +const sdks = { aws } + +function getSDKRules (sdk, requestInput, responseInput) { + return Object.fromEntries( + Object.entries(sdk).map(([service, serviceRules]) => { + return [ + service, + { + request: serviceRules.request.concat(requestInput || []), + response: serviceRules.response.concat(responseInput || []), + expand: serviceRules.expand || [] + } + ] + }) + ) +} + +function appendRules (requestInput, responseInput) { + return Object.fromEntries( + Object.entries(sdks).map(([name, sdk]) => { + return [ + name, + getSDKRules(sdk, requestInput, responseInput) + ] + }) + ) +} + +module.exports = { appendRules } diff --git a/packages/dd-trace/src/payload-tagging/index.js b/packages/dd-trace/src/payload-tagging/index.js new file mode 100644 index 00000000000..c7f5dd19d30 --- /dev/null +++ b/packages/dd-trace/src/payload-tagging/index.js @@ -0,0 +1,93 @@ +const rfdc = require('rfdc')({ proto: false, circles: false }) + +const { + PAYLOAD_TAG_REQUEST_PREFIX, + PAYLOAD_TAG_RESPONSE_PREFIX +} = require('../constants') + +const jsonpath = require('jsonpath-plus').JSONPath + +const { tagsFromObject } = require('./tagging') + +/** + * Given an identified value, attempt to parse it as JSON if relevant + * + * @param {any} value + * @returns {any} the parsed object if parsing was successful, the input if not + */ +function maybeJSONParseValue (value) { + if (typeof value !== 'string' || value[0] !== '{') { + return value + } + + try { + return JSON.parse(value) + } catch (e) { + return value + } +} + +/** + * Apply expansion to all expansion JSONPath queries + * + * @param {Object} object + * @param {[String]} expansionRules list of JSONPath queries + */ +function expand (object, expansionRules) { + for (const rule of expansionRules) { + jsonpath(rule, object, (value, _type, desc) => { + desc.parent[desc.parentProperty] = maybeJSONParseValue(value) + }) + } +}
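A worked usage example of the expansion step above (the rule mirrors the SNS `expand` entry in `aws.json`; the payload is invented):

```js
// Embedded JSON strings are parsed in place so that redaction and flattening
// can address their fields individually.
const msg = {
  MessageAttributes: { foo: { StringValue: '{"user":"a","token":"t"}' } }
}
expand(msg, ['$.MessageAttributes.*.StringValue'])
console.log(msg.MessageAttributes.foo.StringValue) // { user: 'a', token: 't' }
```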
+ +/** + * Apply redaction to all redaction JSONPath queries + * + * @param {Object} object + * @param {[String]} redactionRules + */ +function redact (object, redactionRules) { + for (const rule of redactionRules) { + jsonpath(rule, object, (_value, _type, desc) => { + desc.parent[desc.parentProperty] = 'redacted' + }) + } +} + +/** + * Generate a map of tag names to tag values by performing: + * 1. Attempting to parse identified fields as JSON + * 2. Redacting fields identified by redaction rules + * 3. Flattening the resulting object, producing as many tag name/tag value pairs + * as there are leaf values in the object + * This function performs side-effects on a _copy_ of the input object. + * + * @param {Object} config sdk configuration for the service + * @param {[String]} config.expand expansion rules for the service + * @param {[String]} config.request redaction rules for the request + * @param {[String]} config.response redaction rules for the response + * @param {Object} object the input object to generate tags from + * @param {Object} opts tag generation options + * @param {String} opts.prefix prefix for all generated tags + * @param {number} opts.maxDepth maximum depth to traverse the object + * @returns {Object} map of tag names to tag values + */ +function computeTags (config, object, opts) { + const payload = rfdc(object) + const redactionRules = opts.prefix === PAYLOAD_TAG_REQUEST_PREFIX ? config.request : config.response + const expansionRules = config.expand + expand(payload, expansionRules) + redact(payload, redactionRules) + return tagsFromObject(payload, opts) +} + +function tagsFromRequest (config, object, opts) { + return computeTags(config, object, { ...opts, prefix: PAYLOAD_TAG_REQUEST_PREFIX }) +} + +function tagsFromResponse (config, object, opts) { + return computeTags(config, object, { ...opts, prefix: PAYLOAD_TAG_RESPONSE_PREFIX }) +} + +module.exports = { computeTags, tagsFromRequest, tagsFromResponse } diff --git a/packages/dd-trace/src/payload-tagging/tagging.js b/packages/dd-trace/src/payload-tagging/tagging.js new file mode 100644 index 00000000000..4643b5d7a40 --- /dev/null +++ b/packages/dd-trace/src/payload-tagging/tagging.js @@ -0,0 +1,83 @@ +const { PAYLOAD_TAGGING_MAX_TAGS } = require('../constants') + +const redactedKeys = [ + 'authorization', 'x-authorization', 'password', 'token' +] +const truncated = 'truncated' +const redacted = 'redacted' + +function escapeKey (key) { + return key.replaceAll('.', '\\.') +} + +/** + * Compute normalized payload tags from any given object.
+ * + * @param {object} object + * @param {Object} opts tag generation options + * @param {number} opts.maxDepth maximum depth to traverse the object + * @param {string} opts.prefix prefix for all generated tags + * @returns {Object} map of tag names to tag values + */ +function tagsFromObject (object, opts) { + const { maxDepth, prefix } = opts + + let tagCount = 0 + let abort = false + const result = {} + + function tagRec (prefix, object, depth = 0) { + // Off by one: _dd.payload_tags_incomplete counts as 1 tag + if (abort) { return } + + if (tagCount >= PAYLOAD_TAGGING_MAX_TAGS - 1) { + abort = true + result['_dd.payload_tags_incomplete'] = true + return + } + + if (depth >= maxDepth && typeof object === 'object') { + tagCount += 1 + result[prefix] = truncated + return + } + + if (object === undefined) { + tagCount += 1 + result[prefix] = 'undefined' + return + } + + if (object === null) { + tagCount += 1 + result[prefix] = 'null' + return + } + + if (['number', 'boolean'].includes(typeof object) || Buffer.isBuffer(object)) { + tagCount += 1 + result[prefix] = object.toString().substring(0, 5000) + return + } + + if (typeof object === 'string') { + tagCount += 1 + result[prefix] = object.substring(0, 5000) + } + + if (typeof object === 'object') { + for (const [key, value] of Object.entries(object)) { + if (redactedKeys.includes(key.toLowerCase())) { + tagCount += 1 + result[`${prefix}.${escapeKey(key)}`] = redacted + } else { + tagRec(`${prefix}.${escapeKey(key)}`, value, depth + 1) + } + } + } + } + tagRec(prefix, object) + return result +} + +module.exports = { tagsFromObject } diff --git a/packages/dd-trace/src/plugin_manager.js b/packages/dd-trace/src/plugin_manager.js index 8b23c965a47..80e87ce545e --- a/packages/dd-trace/src/plugin_manager.js +++ b/packages/dd-trace/src/plugin_manager.js @@ -136,10 +136,19 @@ module.exports = class PluginManager { dbmPropagationMode, dsmEnabled, clientIpEnabled, - memcachedCommandEnabled + memcachedCommandEnabled, + ciVisibilityTestSessionName } = this._tracerConfig - const sharedConfig = {} + const sharedConfig = { + dbmPropagationMode, + dsmEnabled, + memcachedCommandEnabled, + site, + url, + headers: headerTags || [], + ciVisibilityTestSessionName + } if (logInjection !== undefined) { sharedConfig.logInjection = logInjection @@ -149,10 +158,6 @@ module.exports = class PluginManager { sharedConfig.queryStringObfuscation = queryStringObfuscation } - sharedConfig.dbmPropagationMode = dbmPropagationMode - sharedConfig.dsmEnabled = dsmEnabled - sharedConfig.memcachedCommandEnabled = memcachedCommandEnabled - if (serviceMapping && serviceMapping[name]) { sharedConfig.service = serviceMapping[name] } @@ -161,10 +166,6 @@ module.exports = class PluginManager { sharedConfig.clientIpEnabled = clientIpEnabled } - sharedConfig.site = site - sharedConfig.url = url - sharedConfig.headers = headerTags || [] - return sharedConfig } } diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index 8c8c15c8e55..b86d20d5760 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -1,5 +1,6 @@ const { getTestEnvironmentMetadata, + getTestSessionName, getCodeOwnersFileEntries, getTestParentSpan, getTestCommonTags, @@ -13,11 +14,14 @@ const { TEST_SESSION_ID, TEST_COMMAND, TEST_MODULE, + TEST_SESSION_NAME, getTestSuiteCommonTags, TEST_STATUS, TEST_SKIPPED_BY_ITR, ITR_CORRELATION_ID, - TEST_SOURCE_FILE + TEST_SOURCE_FILE, + TEST_LEVEL_EVENT_TYPES, + TEST_SUITE } = require('./util/test') const Plugin = require('./plugin') const { COMPONENT } = require('../constants') @@ -75,6 +79,19 @@
module.exports = class CiPlugin extends Plugin { // only for playwright this.rootDir = rootDir + const testSessionName = getTestSessionName(this.config, this.command, this.testEnvironmentMetadata) + + const metadataTags = {} + for (const testLevel of TEST_LEVEL_EVENT_TYPES) { + metadataTags[testLevel] = { + [TEST_SESSION_NAME]: testSessionName + } + } + // tracer might not be initialized correctly + if (this.tracer._exporter.setMetadataTags) { + this.tracer._exporter.setMetadataTags(metadataTags) + } + this.testSessionSpan = this.tracer.startSpan(`${this.constructor.id}.test_session`, { childOf, tags: { @@ -97,6 +114,7 @@ module.exports = class CiPlugin extends Plugin { if (this.constructor.id === 'vitest') { process.env.DD_CIVISIBILITY_TEST_SESSION_ID = this.testSessionSpan.context().toTraceId() process.env.DD_CIVISIBILITY_TEST_MODULE_ID = this.testModuleSpan.context().toSpanId() + process.env.DD_CIVISIBILITY_TEST_COMMAND = this.command } this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'module') @@ -194,6 +212,19 @@ module.exports = class CiPlugin extends Plugin { } } + getCodeOwners (tags) { + const { + [TEST_SOURCE_FILE]: testSourceFile, + [TEST_SUITE]: testSuite + } = tags + // We'll try with the test source file if available (it could be different from the test suite) + let codeOwners = getCodeOwnersForFilename(testSourceFile, this.codeOwnersEntries) + if (!codeOwners) { + codeOwners = getCodeOwnersForFilename(testSuite, this.codeOwnersEntries) + } + return codeOwners + } + startTestSpan (testName, testSuite, testSuiteSpan, extraTags = {}) { const childOf = getTestParentSpan(this.tracer) @@ -208,13 +239,7 @@ module.exports = class CiPlugin extends Plugin { ...extraTags } - const { [TEST_SOURCE_FILE]: testSourceFile } = extraTags - // We'll try with the test source file if available (it could be different from the test suite) - let codeOwners = getCodeOwnersForFilename(testSourceFile, this.codeOwnersEntries) - if (!codeOwners) { - codeOwners = getCodeOwnersForFilename(testSuite, this.codeOwnersEntries) - } - + const codeOwners = this.getCodeOwners(testTags) if (codeOwners) { testTags[TEST_CODE_OWNERS] = codeOwners } diff --git a/packages/dd-trace/src/plugins/util/env.js b/packages/dd-trace/src/plugins/util/env.js index c53c0956e53..c1721c4bb11 100644 --- a/packages/dd-trace/src/plugins/util/env.js +++ b/packages/dd-trace/src/plugins/util/env.js @@ -5,6 +5,7 @@ const OS_VERSION = 'os.version' const OS_ARCHITECTURE = 'os.architecture' const RUNTIME_NAME = 'runtime.name' const RUNTIME_VERSION = 'runtime.version' +const DD_HOST_CPU_COUNT = '_dd.host.vcpu_count' function getRuntimeAndOSMetadata () { return { @@ -12,7 +13,8 @@ function getRuntimeAndOSMetadata () { [OS_ARCHITECTURE]: process.arch, [OS_PLATFORM]: process.platform, [RUNTIME_NAME]: 'node', - [OS_VERSION]: os.release() + [OS_VERSION]: os.release(), + [DD_HOST_CPU_COUNT]: os.cpus().length } } @@ -22,5 +24,6 @@ module.exports = { OS_VERSION, OS_ARCHITECTURE, RUNTIME_NAME, - RUNTIME_VERSION + RUNTIME_VERSION, + DD_HOST_CPU_COUNT } diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index d1d1861ea5d..3cf1421ad15 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -19,7 +19,8 @@ const { GIT_COMMIT_AUTHOR_NAME, GIT_COMMIT_MESSAGE, CI_WORKSPACE_PATH, - CI_PIPELINE_URL + CI_PIPELINE_URL, + CI_JOB_NAME } = require('./tags') const id = require('../../id') @@ -28,6 +29,9 @@ const { SAMPLING_RULE_DECISION } = 
require('../../constants') const { AUTO_KEEP } = require('../../../../../ext/priority') const { version: ddTraceVersion } = require('../../../../../package.json') +// session tags +const TEST_SESSION_NAME = 'test_session.name' + const TEST_FRAMEWORK = 'test.framework' const TEST_FRAMEWORK_VERSION = 'test.framework_version' const TEST_TYPE = 'test.type' @@ -95,8 +99,16 @@ const MOCHA_WORKER_TRACE_PAYLOAD_CODE = 80 const EFD_STRING = "Retried by Datadog's Early Flake Detection" const EFD_TEST_NAME_REGEX = new RegExp(EFD_STRING + ' \\(#\\d+\\): ', 'g') +const TEST_LEVEL_EVENT_TYPES = [ + 'test', + 'test_suite_end', + 'test_module_end', + 'test_session_end' +] + module.exports = { TEST_CODE_OWNERS, + TEST_SESSION_NAME, TEST_FRAMEWORK, TEST_FRAMEWORK_VERSION, JEST_TEST_RUNNER, @@ -167,7 +179,9 @@ module.exports = { TEST_BROWSER_DRIVER, TEST_BROWSER_DRIVER_VERSION, TEST_BROWSER_NAME, - TEST_BROWSER_VERSION + TEST_BROWSER_VERSION, + getTestSessionName, + TEST_LEVEL_EVENT_TYPES } // Returns pkg manager and its version, separated by '-', e.g. npm-8.15.0 or yarn-1.22.19 @@ -615,3 +629,13 @@ function getIsFaultyEarlyFlakeDetection (projectSuites, testsBySuiteName, faulty newSuitesPercentage > faultyThresholdPercentage ) } + +function getTestSessionName (config, testCommand, envTags) { + if (config.ciVisibilityTestSessionName) { + return config.ciVisibilityTestSessionName + } + if (envTags[CI_JOB_NAME]) { + return `${envTags[CI_JOB_NAME]}-${testCommand}` + } + return testCommand +} diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index 8d399c68bf5..538400aaa7a --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -97,6 +97,11 @@ class Config { const samplingContextsAvailable = process.platform !== 'win32' function checkOptionAllowed (option, description, condition) { if (option && !condition) { + // injection hardening: all of these can only happen if the user explicitly + // sets an environment variable to its non-default value on the platform. + // In practical terms, it'd require someone explicitly turning on OOM + // monitoring, code hotspots, endpoint profiling, or CPU profiling on + // Windows, where it is not supported. throw new Error(`${description} not supported on ${process.platform}.`) + } }
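A worked example of the `getTestSessionName` precedence above (assuming the `CI_JOB_NAME` tag constant resolves to `'ci.job.name'`; all values are illustrative):

```js
getTestSessionName({ ciVisibilityTestSessionName: 'nightly' }, 'yarn test', {})
// → 'nightly' (explicit configuration wins)
getTestSessionName({}, 'yarn test', { 'ci.job.name': 'unit-tests' })
// → 'unit-tests-yarn test' (CI job name + test command)
getTestSessionName({}, 'yarn test', {})
// → 'yarn test' (fallback: the test command alone)
```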
diff --git a/packages/dd-trace/src/profiling/exporters/agent.js b/packages/dd-trace/src/profiling/exporters/agent.js index 13310d82770..b34ab3c9d94 --- a/packages/dd-trace/src/profiling/exporters/agent.js +++ b/packages/dd-trace/src/profiling/exporters/agent.js @@ -199,7 +199,7 @@ class AgentExporter { this._logger.error(`Error from the agent: ${err.message}`) return } else if (err) { - reject(new Error('Profiler agent export back-off period expired')) + reject(err) return } diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/dns.js b/packages/dd-trace/src/profiling/profilers/event_plugins/dns.js new file mode 100644 index 00000000000..29b1e62775f --- /dev/null +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/dns.js @@ -0,0 +1,13 @@ +const EventPlugin = require('./event') + +class DNSPlugin extends EventPlugin { + static get id () { + return 'dns' + } + + static get entryType () { + return 'dns' + } +} + +module.exports = DNSPlugin diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookup.js b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookup.js new file mode 100644 index 00000000000..b72b0eb6205 --- /dev/null +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookup.js @@ -0,0 +1,16 @@ +const DNSPlugin = require('./dns') + +class DNSLookupPlugin extends DNSPlugin { + static get operation () { + return 'lookup' + } + + extendEvent (event, startEvent) { + event.name = 'lookup' + event.detail = { hostname: startEvent[0] } + + return event + } +} + +module.exports = DNSLookupPlugin diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookupservice.js b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookupservice.js new file mode 100644 index 00000000000..45860eea7aa --- /dev/null +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_lookupservice.js @@ -0,0 +1,16 @@ +const DNSPlugin = require('./dns') + +class DNSLookupServicePlugin extends DNSPlugin { + static get operation () { + return 'lookup_service' + } + + extendEvent (event, startEvent) { + event.name = 'lookupService' + event.detail = { host: startEvent[0], port: startEvent[1] } + + return event + } +} + +module.exports = DNSLookupServicePlugin diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_resolve.js b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_resolve.js new file mode 100644 index 00000000000..f390e60c375 --- /dev/null +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_resolve.js @@ -0,0 +1,24 @@ +const DNSPlugin = require('./dns') + +const queryNames = new Map() + +class DNSResolvePlugin extends DNSPlugin { + static get operation () { + return 'resolve' + } + + extendEvent (event, startEvent) { + const rrtype = startEvent[1] + let name = queryNames.get(rrtype) + if (!name) { + name = `query${rrtype}` + queryNames.set(rrtype, name) + } + event.name = name + event.detail = { host: startEvent[0] } + + return event + } +} + +module.exports = DNSResolvePlugin diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/dns_reverse.js b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_reverse.js new file mode 100644 index 00000000000..67ad56c9057 --- /dev/null +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/dns_reverse.js @@ -0,0 +1,16 @@ +const DNSPlugin = require('./dns') + +class DNSReversePlugin extends DNSPlugin { + static get
operation () { + return 'reverse' + } + + extendEvent (event, startEvent) { + event.name = 'getHostByAddr' + event.detail = { host: startEvent[0] } + + return event + } +} + +module.exports = DNSReversePlugin diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/event.js b/packages/dd-trace/src/profiling/profilers/event_plugins/event.js new file mode 100644 index 00000000000..f47a3468f78 --- /dev/null +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/event.js @@ -0,0 +1,48 @@ +const { AsyncLocalStorage } = require('async_hooks') +const TracingPlugin = require('../../../plugins/tracing') +const { performance } = require('perf_hooks') + +// We are leveraging the TracingPlugin class for its functionality to bind +// start/error/finish methods to the appropriate diagnostic channels. +class EventPlugin extends TracingPlugin { + constructor (eventHandler) { + super() + this.eventHandler = eventHandler + this.store = new AsyncLocalStorage() + this.entryType = this.constructor.entryType + } + + start (startEvent) { + this.store.enterWith({ + startEvent, + startTime: performance.now() + }) + } + + error () { + this.store.getStore().error = true + } + + finish () { + const { startEvent, startTime, error } = this.store.getStore() + if (error) { + return // don't emit perf events for failed operations + } + const duration = performance.now() - startTime + + const context = this.activeSpan?.context() + const _ddSpanId = context?.toSpanId() + const _ddRootSpanId = context?._trace.started[0]?.context().toSpanId() || _ddSpanId + + const event = { + entryType: this.entryType, + startTime, + duration, + _ddSpanId, + _ddRootSpanId + } + this.eventHandler(this.extendEvent(event, startEvent)) + } +} + +module.exports = EventPlugin diff --git a/packages/dd-trace/src/profiling/profilers/event_plugins/net.js b/packages/dd-trace/src/profiling/profilers/event_plugins/net.js new file mode 100644 index 00000000000..ffd99bbda70 --- /dev/null +++ b/packages/dd-trace/src/profiling/profilers/event_plugins/net.js @@ -0,0 +1,24 @@ +const EventPlugin = require('./event') + +class NetPlugin extends EventPlugin { + static get id () { + return 'net' + } + + static get operation () { + return 'tcp' + } + + static get entryType () { + return 'net' + } + + extendEvent (event, { options }) { + event.name = 'connect' + event.detail = options + + return event + } +} + +module.exports = NetPlugin diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index 535437b1185..e1d42484f13 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -1,12 +1,8 @@ const { performance, constants, PerformanceObserver } = require('perf_hooks') -const { END_TIMESTAMP_LABEL } = require('./shared') -const semver = require('semver') +const { END_TIMESTAMP_LABEL, SPAN_ID_LABEL, LOCAL_ROOT_SPAN_ID_LABEL } = require('./shared') const { Function, Label, Line, Location, Profile, Sample, StringTable, ValueType } = require('pprof-format') const pprof = require('@datadog/pprof/') -// Format of perf_hooks events changed with Node 16, we need to be mindful of it. -const node16 = semver.gte(process.version, '16.0.0') - // perf_hooks uses millis, with fractional part representing nanos. We emit nanos into the pprof file. const MS_TO_NS = 1000000 @@ -48,7 +44,7 @@ class GCDecorator { } decorateSample (sampleInput, item) { - const { kind, flags } = node16 ? 
item.detail : item + const { kind, flags } = item.detail sampleInput.label.push(this.kindLabels[kind]) const reasonLabel = this.getReasonLabel(flags) if (reasonLabel) { @@ -140,12 +136,9 @@ class NetDecorator { // Keys correspond to PerformanceEntry.entryType, values are constructor // functions for type-specific decorators. const decoratorTypes = { - gc: GCDecorator -} -// Needs at least node 16 for DNS and Net -if (node16) { - decoratorTypes.dns = DNSDecorator - decoratorTypes.net = NetDecorator + gc: GCDecorator, + dns: DNSDecorator, + net: NetDecorator } // Translates performance entries into pprof samples. @@ -168,10 +161,12 @@ class EventSerializer { this.locationId = [location.id] this.timestampLabelKey = this.stringTable.dedup(END_TIMESTAMP_LABEL) + this.spanIdKey = this.stringTable.dedup(SPAN_ID_LABEL) + this.rootSpanIdKey = this.stringTable.dedup(LOCAL_ROOT_SPAN_ID_LABEL) } addEvent (item) { - const { entryType, startTime, duration } = item + const { entryType, startTime, duration, _ddSpanId, _ddRootSpanId } = item let decorator = this.decorators[entryType] if (!decorator) { const DecoratorCtor = decoratorTypes[entryType] @@ -186,13 +181,21 @@ class EventSerializer { } } const endTime = startTime + duration + const label = [ + decorator.eventTypeLabel, + new Label({ key: this.timestampLabelKey, num: dateOffset + BigInt(Math.round(endTime * MS_TO_NS)) }) + ] + if (_ddSpanId) { + label.push(labelFromStr(this.stringTable, this.spanIdKey, _ddSpanId)) + } + if (_ddRootSpanId) { + label.push(labelFromStr(this.stringTable, this.rootSpanIdKey, _ddRootSpanId)) + } + const sampleInput = { value: [Math.round(duration * MS_TO_NS)], locationId: this.locationId, - label: [ - decorator.eventTypeLabel, - new Label({ key: this.timestampLabelKey, num: dateOffset + BigInt(Math.round(endTime * MS_TO_NS)) }) - ] + label } decorator.decorateSample(sampleInput, item) this.samples.push(new Sample(sampleInput)) @@ -219,36 +222,109 @@ class EventSerializer { } /** - * This class generates pprof files with timeline events sourced from Node.js - * performance measurement APIs. + * Class that sources timeline events through Node.js performance measurement APIs. 
*/ -class EventsProfiler { - constructor (options = {}) { - this.type = 'events' - this._flushIntervalNanos = (options.flushInterval || 60000) * 1e6 // 60 sec - this._observer = undefined - this.eventSerializer = new EventSerializer() +class NodeApiEventSource { + constructor (eventHandler, entryTypes) { + this.eventHandler = eventHandler + this.observer = undefined + this.entryTypes = entryTypes || Object.keys(decoratorTypes) } start () { // if already started, do nothing - if (this._observer) return + if (this.observer) return function add (items) { for (const item of items.getEntries()) { - this.eventSerializer.addEvent(item) + this.eventHandler(item) } } - this._observer = new PerformanceObserver(add.bind(this)) - this._observer.observe({ entryTypes: Object.keys(decoratorTypes) }) + + this.observer = new PerformanceObserver(add.bind(this)) + this.observer.observe({ entryTypes: this.entryTypes }) + } + + stop () { + if (this.observer) { + this.observer.disconnect() + this.observer = undefined + } + } +} + +class DatadogInstrumentationEventSource { + constructor (eventHandler) { + this.plugins = ['dns_lookup', 'dns_lookupservice', 'dns_resolve', 'dns_reverse', 'net'].map(m => { + const Plugin = require(`./event_plugins/${m}`) + return new Plugin(eventHandler) + }) + + this.started = false + } + + start () { + if (!this.started) { + this.plugins.forEach(p => p.configure({ enabled: true })) + this.started = true + } } stop () { - if (this._observer) { - this._observer.disconnect() - this._observer = undefined + if (this.started) { + this.plugins.forEach(p => p.configure({ enabled: false })) + this.started = false } } +} + +class CompositeEventSource { + constructor (sources) { + this.sources = sources + } + + start () { + this.sources.forEach(s => s.start()) + } + + stop () { + this.sources.forEach(s => s.stop()) + } +} + +/** + * This class generates pprof files with timeline events. It combines an event + * source with an event serializer. + */ +class EventsProfiler { + constructor (options = {}) { + this.type = 'events' + this.eventSerializer = new EventSerializer() + + const eventHandler = event => { + this.eventSerializer.addEvent(event) + } + + if (options.codeHotspotsEnabled) { + // Use Datadog instrumentation to collect events with span IDs. Still use + // Node API for GC events. + this.eventSource = new CompositeEventSource([ + new DatadogInstrumentationEventSource(eventHandler), + new NodeApiEventSource(eventHandler, ['gc']) + ]) + } else { + // Use Node API instrumentation to collect events without span IDs + this.eventSource = new NodeApiEventSource(eventHandler) + } + } + + start () { + this.eventSource.start() + } + + stop () { + this.eventSource.stop() + } profile (restart, startDate, endDate) { if (!restart) { diff --git a/packages/dd-trace/src/profiling/profilers/shared.js b/packages/dd-trace/src/profiling/profilers/shared.js index 626c1993060..8f1e15c75c2 100644 --- a/packages/dd-trace/src/profiling/profilers/shared.js +++ b/packages/dd-trace/src/profiling/profilers/shared.js @@ -6,6 +6,9 @@ const END_TIMESTAMP_LABEL = 'end_timestamp_ns' const THREAD_NAME_LABEL = 'thread name' const OS_THREAD_ID_LABEL = 'os thread id' const THREAD_ID_LABEL = 'thread id' +const SPAN_ID_LABEL = 'span id' +const LOCAL_ROOT_SPAN_ID_LABEL = 'local root span id' + const threadNamePrefix = isMainThread ? 
'Main' : `Worker #${threadId}` const eventLoopThreadName = `${threadNamePrefix} Event Loop` @@ -38,6 +41,8 @@ module.exports = { THREAD_NAME_LABEL, THREAD_ID_LABEL, OS_THREAD_ID_LABEL, + SPAN_ID_LABEL, + LOCAL_ROOT_SPAN_ID_LABEL, threadNamePrefix, eventLoopThreadName, getNonJSThreadsLabels, diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index a0295222782..ee23b1145b0 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -7,7 +7,13 @@ const { HTTP_METHOD, HTTP_ROUTE, RESOURCE_NAME, SPAN_TYPE } = require('../../../ const { WEB } = require('../../../../../ext/types') const runtimeMetrics = require('../../runtime_metrics') const telemetryMetrics = require('../../telemetry/metrics') -const { END_TIMESTAMP_LABEL, getNonJSThreadsLabels, getThreadLabels } = require('./shared') +const { + END_TIMESTAMP_LABEL, + SPAN_ID_LABEL, + LOCAL_ROOT_SPAN_ID_LABEL, + getNonJSThreadsLabels, + getThreadLabels +} = require('./shared') const beforeCh = dc.channel('dd-trace:storage:before') const enterCh = dc.channel('dd-trace:storage:enter') @@ -275,10 +281,10 @@ class NativeWallProfiler { } if (spanId) { - labels['span id'] = spanId + labels[SPAN_ID_LABEL] = spanId } if (rootSpanId) { - labels['local root span id'] = rootSpanId + labels[LOCAL_ROOT_SPAN_ID_LABEL] = rootSpanId } if (webTags && Object.keys(webTags).length !== 0) { labels['trace endpoint'] = endpointNameFromTags(webTags) diff --git a/packages/dd-trace/src/profiling/ssi-heuristics.js b/packages/dd-trace/src/profiling/ssi-heuristics.js index 9910b9273bc..4790ae2b9b5 --- a/packages/dd-trace/src/profiling/ssi-heuristics.js +++ b/packages/dd-trace/src/profiling/ssi-heuristics.js @@ -3,6 +3,7 @@ const telemetryMetrics = require('../telemetry/metrics') const profilersNamespace = telemetryMetrics.manager.namespace('profilers') const dc = require('dc-polyfill') +const log = require('../log') // If the process lives for at least 30 seconds, it's considered long-lived const DEFAULT_LONG_LIVED_THRESHOLD = 30000 @@ -40,9 +41,14 @@ class SSIHeuristics { const longLivedThreshold = config.profiling.longLivedThreshold || DEFAULT_LONG_LIVED_THRESHOLD if (typeof longLivedThreshold !== 'number' || longLivedThreshold <= 0) { - throw new Error('Long-lived threshold must be a positive number') + this.longLivedThreshold = DEFAULT_LONG_LIVED_THRESHOLD + log.warn( + `Invalid SSIHeuristics.longLivedThreshold value: ${config.profiling.longLivedThreshold}. ` + + `Using default value: ${DEFAULT_LONG_LIVED_THRESHOLD}` + ) + } else { + this.longLivedThreshold = longLivedThreshold } - this.longLivedThreshold = longLivedThreshold this.hasSentProfiles = false this.noSpan = true @@ -94,6 +100,8 @@ class SSIHeuristics { }) break default: + // injection hardening: the only usage is internal, one call site with + // a function and another with undefined, so we can throw here. throw new TypeError('callback must be a function or undefined') + } }
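A minimal sketch of the hardened threshold handling introduced in `ssi-heuristics.js` above: invalid values now warn and fall back to the default instead of throwing during startup (the function name and logger are illustrative stand-ins):

```js
const DEFAULT_LONG_LIVED_THRESHOLD = 30000

function resolveLongLivedThreshold (value) {
  if (typeof value !== 'number' || value <= 0) {
    console.warn(`Invalid longLivedThreshold value: ${value}. Using default: ${DEFAULT_LONG_LIVED_THRESHOLD}`)
    return DEFAULT_LONG_LIVED_THRESHOLD
  }
  return value
}

resolveLongLivedThreshold('30s') // warns, returns 30000
resolveLongLivedThreshold(60000) // returns 60000
```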
diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js index c3b865226ed..d7ce0538f39 --- a/packages/dd-trace/src/proxy.js +++ b/packages/dd-trace/src/proxy.js @@ -5,6 +5,7 @@ const Config = require('./config') const runtimeMetrics = require('./runtime_metrics') const log = require('./log') const { setStartupLogPluginManager } = require('./startup-log') +const DynamicInstrumentation = require('./debugger') const telemetry = require('./telemetry') const nomenclature = require('./service-naming') const PluginManager = require('./plugin_manager') @@ -83,7 +84,7 @@ class Tracer extends NoopProxy { if (config.remoteConfig.enabled && !config.isCiVisibility) { const rc = remoteConfig.enable(config, this._modules.appsec) - rc.on('APM_TRACING', (action, conf) => { + rc.setProductHandler('APM_TRACING', (action, conf) => { if (action === 'unapply') { config.configure({}, true) } else { @@ -92,7 +93,7 @@ this._enableOrDisableTracing(config) }) - rc.on('AGENT_CONFIG', (action, conf) => { + rc.setProductHandler('AGENT_CONFIG', (action, conf) => { if (!conf?.name?.startsWith('flare-log-level.')) return if (action === 'unapply') { @@ -103,13 +104,17 @@ } }) - rc.on('AGENT_TASK', (action, conf) => { + rc.setProductHandler('AGENT_TASK', (action, conf) => { if (action === 'unapply' || !conf) return if (conf.task_type !== 'tracer_flare' || !conf.args) return this._flare.enable(config) this._flare.module.send(conf.args) }) + + if (config.dynamicInstrumentationEnabled) { + DynamicInstrumentation.start(config, rc) + } } if (config.isGCPFunction || config.isAzureFunction) { @@ -196,12 +201,14 @@ if (this._tracingInitialized) { this._tracer.configure(config) this._pluginManager.configure(config) + DynamicInstrumentation.configure(config) setStartupLogPluginManager(this._pluginManager) } } profilerStarted () { if (!this._profilerStarted) { + // injection hardening: this is only ever invoked from tests. throw new Error('profilerStarted() must be called after init()') + } return this._profilerStarted
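For readers unfamiliar with the remote-config change in `proxy.js` above, a stubbed sketch of the API shape it migrates to: the EventEmitter-style `rc.on(product, cb)` becomes an explicit handler registry, which pairs with `removeProductHandler` for cleanup (as used by the debugger worker's exit handler). The stub below is illustrative only; the real implementation lives in the remote config client.

```js
const handlers = new Map()
const rc = {
  setProductHandler: (product, handler) => handlers.set(product, handler),
  removeProductHandler: (product) => handlers.delete(product)
}

rc.setProductHandler('APM_TRACING', (action, conf) => {
  console.log(action, conf) // e.g. 'apply' { tracing_enabled: true }
})
handlers.get('APM_TRACING')('apply', { tracing_enabled: true })
```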
diff --git a/packages/dd-trace/src/span_stats.js b/packages/dd-trace/src/span_stats.js index 3f7b5e34ea7..790166d058a --- a/packages/dd-trace/src/span_stats.js +++ b/packages/dd-trace/src/span_stats.js @@ -127,7 +127,8 @@ class SpanStatsProcessor { url, env, tags, - appsec + appsec, + version } = {}) { this.exporter = new SpanStatsExporter({ hostname, @@ -143,6 +144,7 @@ this.env = env this.tags = tags || {} this.sequence = 0 + this.version = version if (this.enabled) { this.timer = setInterval(this.onInterval.bind(this), interval * 1e3) @@ -157,7 +159,7 @@ this.exporter.export({ Hostname: this.hostname, Env: this.env, - Version: version, + Version: this.version || version, Stats: serialized, Lang: 'javascript', TracerVersion: pkg.version, diff --git a/packages/dd-trace/test/appsec/index.spec.js b/packages/dd-trace/test/appsec/index.spec.js index f3d68c4a1bf..b8a41d840b5 --- a/packages/dd-trace/test/appsec/index.spec.js +++ b/packages/dd-trace/test/appsec/index.spec.js @@ -1182,6 +1182,7 @@ describe('IP blocking', function () { }).then(() => { throw new Error('Not expected') }).catch((err) => { + expect(err.message).to.not.equal('Not expected') expect(err.response.status).to.be.equal(500) expect(err.response.data).to.deep.equal(jsonDefaultContent) }) @@ -1196,6 +1197,7 @@ }).then(() => { throw new Error('Not expected') }).catch((err) => { + expect(err.message).to.not.equal('Not expected') expect(err.response.status).to.be.equal(500) expect(err.response.data).to.deep.equal(htmlDefaultContent) }) @@ -1241,6 +1243,7 @@ }).then(() => { throw new Error('Not resolve expected') }).catch((err) => { + expect(err.message).to.not.equal('Not resolve expected') expect(err.response.status).to.be.equal(301) expect(err.response.headers.location).to.be.equal('/error') }) diff --git a/packages/dd-trace/test/appsec/rasp.spec.js b/packages/dd-trace/test/appsec/rasp.spec.js deleted file mode 100644 index c594a2a98c2..00000000000 --- a/packages/dd-trace/test/appsec/rasp.spec.js +++ /dev/null @@ -1,180 +0,0 @@ -'use strict' - -const proxyquire = require('proxyquire') -const { httpClientRequestStart } = require('../../src/appsec/channels') -const addresses = require('../../src/appsec/addresses') -const { handleUncaughtExceptionMonitor } = require('../../src/appsec/rasp') - -describe('RASP', () => { - let waf, rasp, datadogCore, stackTrace, web - - beforeEach(() => { - datadogCore = { - storage: { - getStore: sinon.stub() - } - } - waf = { - run: sinon.stub() - } - - stackTrace = { - reportStackTrace: sinon.stub() - } - - web = { - root: sinon.stub() - } - - rasp = proxyquire('../../src/appsec/rasp', { - '../../../datadog-core': datadogCore, - './waf': waf, - './stack_trace': stackTrace, - './../plugins/util/web': web - }) - - const config = { - appsec: { - stackTrace: { - enabled: true, - maxStackTraces: 2, - maxDepth: 42 - } - } - } - - rasp.enable(config) - }) - - afterEach(() => { - sinon.restore() - rasp.disable() - }) - - describe('handleResult', () => { - it('should report stack trace when generate_stack action is present in waf result', () => { - const req = {} - const rootSpan = {} - const stackId = 'test_stack_id' - const result = { - generate_stack: { - stack_id: stackId - } - } - - web.root.returns(rootSpan) - - rasp.handleResult(result, req) -
sinon.assert.calledOnceWithExactly(stackTrace.reportStackTrace, rootSpan, stackId, 42, 2) - }) - - it('should not report stack trace when no action is present in waf result', () => { - const req = {} - const result = {} - - rasp.handleResult(result, req) - sinon.assert.notCalled(stackTrace.reportStackTrace) - }) - - it('should not report stack trace when stack trace reporting is disabled', () => { - const req = {} - const result = { - generate_stack: { - stack_id: 'stackId' - } - } - const config = { - appsec: { - stackTrace: { - enabled: false, - maxStackTraces: 2, - maxDepth: 42 - } - } - } - - rasp.enable(config) - - rasp.handleResult(result, req) - sinon.assert.notCalled(stackTrace.reportStackTrace) - }) - }) - - describe('analyzeSsrf', () => { - it('should analyze ssrf', () => { - const ctx = { - args: { - uri: 'http://example.com' - } - } - const req = {} - datadogCore.storage.getStore.returns({ req }) - - httpClientRequestStart.publish(ctx) - - const persistent = { [addresses.HTTP_OUTGOING_URL]: 'http://example.com' } - sinon.assert.calledOnceWithExactly(waf.run, { persistent }, req, 'ssrf') - }) - - it('should not analyze ssrf if rasp is disabled', () => { - rasp.disable() - const ctx = { - args: { - uri: 'http://example.com' - } - } - const req = {} - datadogCore.storage.getStore.returns({ req }) - - httpClientRequestStart.publish(ctx) - - sinon.assert.notCalled(waf.run) - }) - - it('should not analyze ssrf if no store', () => { - const ctx = { - args: { - uri: 'http://example.com' - } - } - datadogCore.storage.getStore.returns(undefined) - - httpClientRequestStart.publish(ctx) - - sinon.assert.notCalled(waf.run) - }) - - it('should not analyze ssrf if no req', () => { - const ctx = { - args: { - uri: 'http://example.com' - } - } - datadogCore.storage.getStore.returns({}) - - httpClientRequestStart.publish(ctx) - - sinon.assert.notCalled(waf.run) - }) - - it('should not analyze ssrf if no url', () => { - const ctx = { - args: {} - } - datadogCore.storage.getStore.returns({}) - - httpClientRequestStart.publish(ctx) - - sinon.assert.notCalled(waf.run) - }) - }) - - describe('handleUncaughtExceptionMonitor', () => { - it('should not break with infinite loop of cause', () => { - const err = new Error() - err.cause = err - - handleUncaughtExceptionMonitor(err) - }) - }) -}) diff --git a/packages/dd-trace/test/appsec/rasp/index.spec.js b/packages/dd-trace/test/appsec/rasp/index.spec.js new file mode 100644 index 00000000000..0dae9c527e5 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/index.spec.js @@ -0,0 +1,34 @@ +'use strict' + +const rasp = require('../../../src/appsec/rasp') +const { handleUncaughtExceptionMonitor } = require('../../../src/appsec/rasp') + +describe('RASP', () => { + beforeEach(() => { + const config = { + appsec: { + stackTrace: { + enabled: true, + maxStackTraces: 2, + maxDepth: 42 + } + } + } + + rasp.enable(config) + }) + + afterEach(() => { + sinon.restore() + rasp.disable() + }) + + describe('handleUncaughtExceptionMonitor', () => { + it('should not break with infinite loop of cause', () => { + const err = new Error() + err.cause = err + + handleUncaughtExceptionMonitor(err) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/rasp/resources/postgress-app/index.js b/packages/dd-trace/test/appsec/rasp/resources/postgress-app/index.js new file mode 100644 index 00000000000..e60041bfe7c --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/resources/postgress-app/index.js @@ -0,0 +1,55 @@ +'use strict' + +const tracer = require('dd-trace') 
+tracer.init({ + flushInterval: 0 +}) + +const express = require('express') +const pg = require('pg') + +const connectionData = { + host: '127.0.0.1', + user: 'postgres', + password: 'postgres', + database: 'postgres', + application_name: 'test' +} + +const pool = new pg.Pool(connectionData) + +const app = express() +const port = process.env.APP_PORT || 3000 + +app.get('/sqli/client/uncaught-promise', async (req, res) => { + const client = new pg.Client(connectionData) + await client.connect() + + try { + await client.query(`SELECT * FROM users WHERE id = '${req.query.param}'`) + } finally { + client.end() + } + + res.end('OK') +}) + +app.get('/sqli/client/uncaught-query-error', async (req, res) => { + const client = new pg.Client(connectionData) + await client.connect() + const query = new pg.Query(`SELECT * FROM users WHERE id = '${req.query.param}'`) + client.query(query) + + query.on('end', () => { + res.end('OK') + }) +}) + +app.get('/sqli/pool/uncaught-promise', async (req, res) => { + await pool.query(`SELECT * FROM users WHERE id = '${req.query.param}'`) + res.end('OK') +}) + +app.listen(port, () => { + process.send({ port }) +}) diff --git a/packages/dd-trace/test/appsec/rasp_rules.json b/packages/dd-trace/test/appsec/rasp/resources/rasp_rules.json similarity index 53% rename from packages/dd-trace/test/appsec/rasp_rules.json rename to packages/dd-trace/test/appsec/rasp/resources/rasp_rules.json index 28930412b9a..778e4821e73 --- a/packages/dd-trace/test/appsec/rasp_rules.json +++ b/packages/dd-trace/test/appsec/rasp/resources/rasp_rules.json @@ -56,6 +56,57 @@ "block", "stack_trace" ] + }, + { + "id": "rasp-sqli-rule-id-2", + "name": "SQL injection exploit", + "tags": { + "type": "sql_injection", + "category": "vulnerability_trigger", + "cwe": "89", + "capec": "1000/152/248/66", + "confidence": "0", + "module": "rasp" + }, + "conditions": [ + { + "parameters": { + "resource": [ + { + "address": "server.db.statement" + } + ], + "params": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + } + ], + "db_type": [ + { + "address": "server.db.system" + } + ] + }, + "operator": "sqli_detector" + } + ], + "transformers": [], + "on_match": [ + "block", + "stack_trace" + ] } ] } diff --git a/packages/dd-trace/test/appsec/rasp/sql_injection.integration.pg.plugin.spec.js b/packages/dd-trace/test/appsec/rasp/sql_injection.integration.pg.plugin.spec.js new file mode 100644 index 00000000000..c4b92b3a2f3 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/sql_injection.integration.pg.plugin.spec.js @@ -0,0 +1,107 @@ +'use strict' + +const { createSandbox, FakeAgent, spawnProc } = require('../../../../../integration-tests/helpers') +const getPort = require('get-port') +const path = require('path') +const Axios = require('axios') +const { assert } = require('chai') + +// These tests are here and not in the integration tests +// because they require a postgres instance +describe('RASP - sql_injection - integration', () => { + let axios, sandbox, cwd, appPort, appFile, agent, proc + + before(async function () { + this.timeout(60000) + sandbox = await createSandbox( + ['express', 'pg'], + false, + [path.join(__dirname, 'resources')]) + + appPort = await getPort() + cwd = sandbox.folder + appFile = path.join(cwd, 'resources', 'postgress-app', 'index.js') + + axios = Axios.create({ + baseURL:
`http://localhost:${appPort}` + }) + }) + + after(async function () { + this.timeout(60000) + await sandbox.remove() + }) + + beforeEach(async () => { + agent = await new FakeAgent().start() + proc = await spawnProc(appFile, { + cwd, + env: { + DD_TRACE_AGENT_PORT: agent.port, + APP_PORT: appPort, + DD_APPSEC_ENABLED: true, + DD_APPSEC_RASP_ENABLED: true, + DD_APPSEC_RULES: path.join(cwd, 'resources', 'rasp_rules.json') + } + }) + }) + + afterEach(async () => { + proc.kill() + await agent.stop() + }) + + it('should block using pg.Client and unhandled promise', async () => { + try { + await axios.get('/sqli/client/uncaught-promise?param=\' OR 1 = 1 --') + } catch (e) { + if (!e.response) { + throw e + } + + assert.strictEqual(e.response.status, 403) + return await agent.assertMessageReceived(({ headers, payload }) => { + assert.property(payload[0][0].meta, '_dd.appsec.json') + assert.include(payload[0][0].meta['_dd.appsec.json'], '"rasp-sqli-rule-id-2"') + }) + } + + throw new Error('Request should be blocked') + }) + + it('should block using pg.Client and unhandled query object', async () => { + try { + await axios.get('/sqli/client/uncaught-query-error?param=\' OR 1 = 1 --') + } catch (e) { + if (!e.response) { + throw e + } + + assert.strictEqual(e.response.status, 403) + return await agent.assertMessageReceived(({ headers, payload }) => { + assert.property(payload[0][0].meta, '_dd.appsec.json') + assert.include(payload[0][0].meta['_dd.appsec.json'], '"rasp-sqli-rule-id-2"') + }) + } + + throw new Error('Request should be blocked') + }) + + it('should block using pg.Pool and unhandled promise', async () => { + try { + await axios.get('/sqli/pool/uncaught-promise?param=\' OR 1 = 1 --') + } catch (e) { + if (!e.response) { + throw e + } + + assert.strictEqual(e.response.status, 403) + return await agent.assertMessageReceived(({ headers, payload }) => { + assert.property(payload[0][0].meta, '_dd.appsec.json') + assert.include(payload[0][0].meta['_dd.appsec.json'], '"rasp-sqli-rule-id-2"') + }) + } + + throw new Error('Request should be blocked') + }) +}) diff --git a/packages/dd-trace/test/appsec/rasp/sql_injection.pg.plugin.spec.js b/packages/dd-trace/test/appsec/rasp/sql_injection.pg.plugin.spec.js new file mode 100644 index 00000000000..8f05158c22d --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/sql_injection.pg.plugin.spec.js @@ -0,0 +1,286 @@ +'use strict' + +const agent = require('../../plugins/agent') +const appsec = require('../../../src/appsec') +const { wafRunFinished } = require('../../../src/appsec/channels') +const addresses = require('../../../src/appsec/addresses') +const Config = require('../../../src/config') +const path = require('path') +const Axios = require('axios') +const { assert } = require('chai') +const { checkRaspExecutedAndNotThreat, checkRaspExecutedAndHasThreat } = require('./utils') + +describe('RASP - sql_injection', () => { + withVersions('pg', 'express', expressVersion => { + withVersions('pg', 'pg', pgVersion => { + describe('sql injection with pg', () => { + const connectionData = { + host: '127.0.0.1', + user: 'postgres', + password: 'postgres', + database: 'postgres', + application_name: 'test' + } + let server, axios, app, pg + + before(() => { + return agent.load(['express', 'http', 'pg'], { client: false }) + }) + + before(done => { + const express = require(`../../../../../versions/express@${expressVersion}`).get() + pg = require(`../../../../../versions/pg@${pgVersion}`).get() + const expressApp = express() + + expressApp.get('/', (req, res) 
=> { + app(req, res) + }) + + appsec.enable(new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'resources', 'rasp_rules.json'), + rasp: { enabled: true } + } + })) + + server = expressApp.listen(0, () => { + const port = server.address().port + axios = Axios.create({ + baseURL: `http://localhost:${port}` + }) + done() + }) + }) + + after(() => { + appsec.disable() + server.close() + return agent.close({ ritmReset: false }) + }) + + describe('Test using pg.Client', () => { + let client + + beforeEach((done) => { + client = new pg.Client(connectionData) + client.connect(err => done(err)) + }) + + afterEach(() => { + client.end() + }) + + it('Should not detect threat', async () => { + app = (req, res) => { + client.query('SELECT ' + req.query.param, (err) => { + if (err) { + res.statusCode = 500 + } + + res.end() + }) + } + + axios.get('/?param=1') + + await checkRaspExecutedAndNotThreat(agent) + }) + + it('Should block query with callback', async () => { + app = (req, res) => { + client.query(`SELECT * FROM users WHERE id='${req.query.param}'`, (err) => { + if (err?.name === 'DatadogRaspAbortError') { + res.statusCode = 500 + } + res.end() + }) + } + + try { + await axios.get('/?param=\' OR 1 = 1 --') + } catch (e) { + return await checkRaspExecutedAndHasThreat(agent, 'rasp-sqli-rule-id-2') + } + + assert.fail('Request should be blocked') + }) + + it('Should block query with promise', async () => { + app = async (req, res) => { + try { + await client.query(`SELECT * FROM users WHERE id = '${req.query.param}'`) + } catch (err) { + if (err?.name === 'DatadogRaspAbortError') { + res.statusCode = 500 + } + res.end() + } + } + + try { + await axios.get('/?param=\' OR 1 = 1 --') + } catch (e) { + return checkRaspExecutedAndHasThreat(agent, 'rasp-sqli-rule-id-2') + } + + assert.fail('Request should be blocked') + }) + }) + + describe('Test using pg.Pool', () => { + let pool + + beforeEach(() => { + pool = new pg.Pool(connectionData) + }) + + it('Should not detect threat', async () => { + app = (req, res) => { + pool.query('SELECT ' + req.query.param, (err) => { + if (err) { + res.statusCode = 500 + } + + res.end() + }) + } + + axios.get('/?param=1') + + await checkRaspExecutedAndNotThreat(agent) + }) + + it('Should block query with callback', async () => { + app = (req, res) => { + pool.query(`SELECT * FROM users WHERE id='${req.query.param}'`, (err) => { + if (err?.name === 'DatadogRaspAbortError') { + res.statusCode = 500 + } + res.end() + }) + } + + try { + await axios.get('/?param=\' OR 1 = 1 --') + } catch (e) { + return checkRaspExecutedAndHasThreat(agent, 'rasp-sqli-rule-id-2') + } + + assert.fail('Request should be blocked') + }) + + it('Should block query with promise', async () => { + app = async (req, res) => { + try { + await pool.query(`SELECT * FROM users WHERE id = '${req.query.param}'`) + } catch (err) { + if (err?.name === 'DatadogRaspAbortError') { + res.statusCode = 500 + } + res.end() + } + } + + try { + await axios.get('/?param=\' OR 1 = 1 --') + } catch (e) { + return checkRaspExecutedAndHasThreat(agent, 'rasp-sqli-rule-id-2') + } + + assert.fail('Request should be blocked') + }) + + describe('double calls', () => { + const WAFContextWrapper = require('../../../src/appsec/waf/waf_context_wrapper') + let run + + beforeEach(() => { + run = sinon.spy(WAFContextWrapper.prototype, 'run') + }) + + afterEach(() => { + sinon.restore() + }) + + async function runQueryAndIgnoreError (query) { + try { + await pool.query(query) + } catch (err) { + // do nothing + } + 
} + + it('should call the WAF only once for sql injection using pg Pool', async () => { + app = async (req, res) => { + await runQueryAndIgnoreError('SELECT 1') + res.end() + } + + await axios.get('/') + + assert.equal(run.args.filter(arg => arg[1] === 'sql_injection').length, 1) + }) + + it('should call the WAF twice for sql injection with two different queries in pg Pool', async () => { + app = async (req, res) => { + await runQueryAndIgnoreError('SELECT 1') + await runQueryAndIgnoreError('SELECT 2') + + res.end() + } + + await axios.get('/') + + assert.equal(run.args.filter(arg => arg[1] === 'sql_injection').length, 2) + }) + + it('should call the WAF twice for the same query when an input address is updated', async () => { + app = async (req, res) => { + await runQueryAndIgnoreError('SELECT 1') + + wafRunFinished.publish({ + payload: { + persistent: { + [addresses.HTTP_INCOMING_URL]: 'test' + } + } + }) + + await runQueryAndIgnoreError('SELECT 1') + + res.end() + } + + await axios.get('/') + + assert.equal(run.args.filter(arg => arg[1] === 'sql_injection').length, 2) + }) + + it('should call the WAF only once for the same query when a non-input address is updated', async () => { + app = async (req, res) => { + await runQueryAndIgnoreError('SELECT 1') + + wafRunFinished.publish({ + payload: { + persistent: { + 'not-an-input': 'test' + } + } + }) + + await runQueryAndIgnoreError('SELECT 1') + + res.end() + } + + await axios.get('/') + + assert.equal(run.args.filter(arg => arg[1] === 'sql_injection').length, 1) + }) + }) + }) + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/rasp/sql_injection.spec.js b/packages/dd-trace/test/appsec/rasp/sql_injection.spec.js new file mode 100644 index 00000000000..5467f7ef150 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/sql_injection.spec.js @@ -0,0 +1,116 @@ +'use strict' + +const { pgQueryStart } = require('../../../src/appsec/channels') +const addresses = require('../../../src/appsec/addresses') +const proxyquire = require('proxyquire') + +describe('RASP - sql_injection', () => { + let waf, datadogCore, sqli + + beforeEach(() => { + datadogCore = { + storage: { + getStore: sinon.stub() + } + } + + waf = { + run: sinon.stub() + } + + sqli = proxyquire('../../../src/appsec/rasp/sql_injection', { + '../../../../datadog-core': datadogCore, + '../waf': waf + }) + + const config = { + appsec: { + stackTrace: { + enabled: true, + maxStackTraces: 2, + maxDepth: 42 + } + } + } + + sqli.enable(config) + }) + + afterEach(() => { + sinon.restore() + sqli.disable() + }) + + describe('analyzePgSqlInjection', () => { + it('should analyze sql injection', () => { + const ctx = { + query: { + text: 'SELECT 1' + } + } + const req = {} + datadogCore.storage.getStore.returns({ req }) + + pgQueryStart.publish(ctx) + + const persistent = { + [addresses.DB_STATEMENT]: 'SELECT 1', + [addresses.DB_SYSTEM]: 'postgresql' + } + sinon.assert.calledOnceWithExactly(waf.run, { persistent }, req, 'sql_injection') + }) + + it('should not analyze sql injection if rasp is disabled', () => { + sqli.disable() + + const ctx = { + query: { + text: 'SELECT 1' + } + } + const req = {} + datadogCore.storage.getStore.returns({ req }) + + pgQueryStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + + it('should not analyze sql injection if no store', () => { + const ctx = { + query: { + text: 'SELECT 1' + } + } + datadogCore.storage.getStore.returns(undefined) + + pgQueryStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + + it('should not analyze 
sql injection if no req', () => { + const ctx = { + query: { + text: 'SELECT 1' + } + } + datadogCore.storage.getStore.returns({}) + + pgQueryStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + + it('should not analyze sql injection if no query', () => { + const ctx = { + query: {} + } + datadogCore.storage.getStore.returns({}) + + pgQueryStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/rasp.express.plugin.spec.js b/packages/dd-trace/test/appsec/rasp/ssrf.express.plugin.spec.js similarity index 74% rename from packages/dd-trace/test/appsec/rasp.express.plugin.spec.js rename to packages/dd-trace/test/appsec/rasp/ssrf.express.plugin.spec.js index 75924c88283..26dc25219f4 100644 --- a/packages/dd-trace/test/appsec/rasp.express.plugin.spec.js +++ b/packages/dd-trace/test/appsec/rasp/ssrf.express.plugin.spec.js @@ -1,26 +1,16 @@ 'use strict' const Axios = require('axios') -const agent = require('../plugins/agent') -const appsec = require('../../src/appsec') -const Config = require('../../src/config') +const agent = require('../../plugins/agent') +const appsec = require('../../../src/appsec') +const Config = require('../../../src/config') const path = require('path') const { assert } = require('chai') +const { checkRaspExecutedAndNotThreat, checkRaspExecutedAndHasThreat } = require('./utils') function noop () {} -describe('RASP', () => { - function getWebSpan (traces) { - for (const trace of traces) { - for (const span of trace) { - if (span.type === 'web') { - return span - } - } - } - throw new Error('web span not found') - } - +describe('RASP - ssrf', () => { withVersions('express', 'express', expressVersion => { let app, server, axios @@ -29,7 +19,7 @@ describe('RASP', () => { }) before((done) => { - const express = require(`../../../../versions/express@${expressVersion}`).get() + const express = require(`../../../../../versions/express@${expressVersion}`).get() const expressApp = express() expressApp.get('/', (req, res) => { @@ -39,7 +29,7 @@ describe('RASP', () => { appsec.enable(new Config({ appsec: { enabled: true, - rules: path.join(__dirname, 'rasp_rules.json'), + rules: path.join(__dirname, 'resources', 'rasp_rules.json'), rasp: { enabled: true } } })) @@ -67,15 +57,8 @@ describe('RASP', () => { if (!e.response) { throw e } - return await agent.use((traces) => { - const span = getWebSpan(traces) - assert.property(span.meta, '_dd.appsec.json') - assert(span.meta['_dd.appsec.json'].includes('rasp-ssrf-rule-id-1')) - assert.equal(span.metrics['_dd.appsec.rasp.rule.eval'], 1) - assert(span.metrics['_dd.appsec.rasp.duration'] > 0) - assert(span.metrics['_dd.appsec.rasp.duration_ext'] > 0) - assert.property(span.meta_struct, '_dd.stack') - }) + + return checkRaspExecutedAndHasThreat(agent, 'rasp-ssrf-rule-id-1') } assert.fail('Request should be blocked') @@ -92,11 +75,7 @@ describe('RASP', () => { axios.get('/?host=www.datadoghq.com') - await agent.use((traces) => { - const span = getWebSpan(traces) - assert.notProperty(span.meta, '_dd.appsec.json') - assert.notProperty(span.meta_struct || {}, '_dd.stack') - }) + return checkRaspExecutedAndNotThreat(agent) }) it('Should detect threat doing a GET request', async () => { @@ -137,7 +116,7 @@ describe('RASP', () => { let axiosToTest beforeEach(() => { - axiosToTest = require(`../../../../versions/axios@${axiosVersion}`).get() + axiosToTest = require(`../../../../../versions/axios@${axiosVersion}`).get() }) it('Should not detect threat', async () => { @@ -148,10 +127,7 @@ 
describe('RASP', () => { axios.get('/?host=www.datadoghq.com') - await agent.use((traces) => { - const span = getWebSpan(traces) - assert.notProperty(span.meta, '_dd.appsec.json') - }) + return checkRaspExecutedAndNotThreat(agent) }) it('Should detect threat doing a GET request', async () => { @@ -209,7 +185,7 @@ describe('RASP', () => { appsec.enable(new Config({ appsec: { enabled: true, - rules: path.join(__dirname, 'rasp_rules.json'), + rules: path.join(__dirname, 'resources', 'rasp_rules.json'), rasp: { enabled: true } } })) @@ -260,15 +236,7 @@ describe('RASP', () => { assert.equal(response.status, 200) - await agent.use((traces) => { - const span = getWebSpan(traces) - assert.property(span.meta, '_dd.appsec.json') - assert(span.meta['_dd.appsec.json'].includes('rasp-ssrf-rule-id-1')) - assert.equal(span.metrics['_dd.appsec.rasp.rule.eval'], 1) - assert(span.metrics['_dd.appsec.rasp.duration'] > 0) - assert(span.metrics['_dd.appsec.rasp.duration_ext'] > 0) - assert.property(span.meta_struct, '_dd.stack') - }) + return checkRaspExecutedAndHasThreat(agent, 'rasp-ssrf-rule-id-1') }) }) }) diff --git a/packages/dd-trace/test/appsec/rasp/ssrf.spec.js b/packages/dd-trace/test/appsec/rasp/ssrf.spec.js new file mode 100644 index 00000000000..c40867ea254 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/ssrf.spec.js @@ -0,0 +1,112 @@ +'use strict' + +const proxyquire = require('proxyquire') +const { httpClientRequestStart } = require('../../../src/appsec/channels') +const addresses = require('../../../src/appsec/addresses') + +describe('RASP - ssrf.js', () => { + let waf, datadogCore, ssrf + + beforeEach(() => { + datadogCore = { + storage: { + getStore: sinon.stub() + } + } + + waf = { + run: sinon.stub() + } + + ssrf = proxyquire('../../../src/appsec/rasp/ssrf', { + '../../../../datadog-core': datadogCore, + '../waf': waf + }) + + const config = { + appsec: { + stackTrace: { + enabled: true, + maxStackTraces: 2, + maxDepth: 42 + } + } + } + + ssrf.enable(config) + }) + + afterEach(() => { + sinon.restore() + ssrf.disable() + }) + + describe('analyzeSsrf', () => { + it('should analyze ssrf', () => { + const ctx = { + args: { + uri: 'http://example.com' + } + } + const req = {} + datadogCore.storage.getStore.returns({ req }) + + httpClientRequestStart.publish(ctx) + + const persistent = { [addresses.HTTP_OUTGOING_URL]: 'http://example.com' } + sinon.assert.calledOnceWithExactly(waf.run, { persistent }, req, 'ssrf') + }) + + it('should not analyze ssrf if rasp is disabled', () => { + ssrf.disable() + const ctx = { + args: { + uri: 'http://example.com' + } + } + const req = {} + datadogCore.storage.getStore.returns({ req }) + + httpClientRequestStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + + it('should not analyze ssrf if no store', () => { + const ctx = { + args: { + uri: 'http://example.com' + } + } + datadogCore.storage.getStore.returns(undefined) + + httpClientRequestStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + + it('should not analyze ssrf if no req', () => { + const ctx = { + args: { + uri: 'http://example.com' + } + } + datadogCore.storage.getStore.returns({}) + + httpClientRequestStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + + it('should not analyze ssrf if no url', () => { + const ctx = { + args: {} + } + datadogCore.storage.getStore.returns({}) + + httpClientRequestStart.publish(ctx) + + sinon.assert.notCalled(waf.run) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/rasp/utils.js 
b/packages/dd-trace/test/appsec/rasp/utils.js new file mode 100644 index 00000000000..e9353d5d815 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/utils.js @@ -0,0 +1,41 @@ +'use strict' + +const { assert } = require('chai') + +function getWebSpan (traces) { + for (const trace of traces) { + for (const span of trace) { + if (span.type === 'web') { + return span + } + } + } + throw new Error('web span not found') +} + +function checkRaspExecutedAndNotThreat (agent) { + return agent.use((traces) => { + const span = getWebSpan(traces) + assert.notProperty(span.meta, '_dd.appsec.json') + assert.notProperty(span.meta_struct || {}, '_dd.stack') + assert.equal(span.metrics['_dd.appsec.rasp.rule.eval'], 1) + }) +} + +function checkRaspExecutedAndHasThreat (agent, ruleId) { + return agent.use((traces) => { + const span = getWebSpan(traces) + assert.property(span.meta, '_dd.appsec.json') + assert(span.meta['_dd.appsec.json'].includes(ruleId)) + assert.equal(span.metrics['_dd.appsec.rasp.rule.eval'], 1) + assert(span.metrics['_dd.appsec.rasp.duration'] > 0) + assert(span.metrics['_dd.appsec.rasp.duration_ext'] > 0) + assert.property(span.meta_struct, '_dd.stack') + }) +} + +module.exports = { + getWebSpan, + checkRaspExecutedAndNotThreat, + checkRaspExecutedAndHasThreat +} diff --git a/packages/dd-trace/test/appsec/rasp/utils.spec.js b/packages/dd-trace/test/appsec/rasp/utils.spec.js new file mode 100644 index 00000000000..255f498a117 --- /dev/null +++ b/packages/dd-trace/test/appsec/rasp/utils.spec.js @@ -0,0 +1,79 @@ +'use strict' + +const proxyquire = require('proxyquire') + +describe('RASP - utils.js', () => { + let web, utils, stackTrace, config + + beforeEach(() => { + web = { + root: sinon.stub() + } + + stackTrace = { + reportStackTrace: sinon.stub() + } + + utils = proxyquire('../../../src/appsec/rasp/utils', { + '../../plugins/util/web': web, + '../stack_trace': stackTrace + }) + + config = { + appsec: { + stackTrace: { + enabled: true, + maxStackTraces: 2, + maxDepth: 42 + } + } + } + }) + + describe('handleResult', () => { + it('should report stack trace when generate_stack action is present in waf result', () => { + const req = {} + const rootSpan = {} + const stackId = 'test_stack_id' + const result = { + generate_stack: { + stack_id: stackId + } + } + + web.root.returns(rootSpan) + + utils.handleResult(result, req, undefined, undefined, config) + sinon.assert.calledOnceWithExactly(stackTrace.reportStackTrace, rootSpan, stackId, 42, 2) + }) + + it('should not report stack trace when no action is present in waf result', () => { + const req = {} + const result = {} + + utils.handleResult(result, req, undefined, undefined, config) + sinon.assert.notCalled(stackTrace.reportStackTrace) + }) + + it('should not report stack trace when stack trace reporting is disabled', () => { + const req = {} + const result = { + generate_stack: { + stack_id: 'stackId' + } + } + const config = { + appsec: { + stackTrace: { + enabled: false, + maxStackTraces: 2, + maxDepth: 42 + } + } + } + + utils.handleResult(result, req, undefined, undefined, config) + sinon.assert.notCalled(stackTrace.reportStackTrace) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/remote_config/index.spec.js b/packages/dd-trace/test/appsec/remote_config/index.spec.js index d954e41e15b..fd923c9a92b 100644 --- a/packages/dd-trace/test/appsec/remote_config/index.spec.js +++ b/packages/dd-trace/test/appsec/remote_config/index.spec.js @@ -22,7 +22,9 @@ describe('Remote Config index', () => { rc = { updateCapabilities: 
sinon.spy(), on: sinon.spy(), - off: sinon.spy() + off: sinon.spy(), + setProductHandler: sinon.spy(), + removeProductHandler: sinon.spy() } RemoteConfigManager = sinon.stub().returns(rc) @@ -57,8 +59,8 @@ describe('Remote Config index', () => { expect(RemoteConfigManager).to.have.been.calledOnceWithExactly(config) expect(rc.updateCapabilities).to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ACTIVATION, true) - expect(rc.on).to.have.been.calledWith('ASM_FEATURES') - expect(rc.on.firstCall.args[1]).to.be.a('function') + expect(rc.setProductHandler).to.have.been.calledWith('ASM_FEATURES') + expect(rc.setProductHandler.firstCall.args[1]).to.be.a('function') }) it('should listen to remote config when appsec is explicitly configured as enabled=true', () => { @@ -68,8 +70,8 @@ describe('Remote Config index', () => { expect(RemoteConfigManager).to.have.been.calledOnceWithExactly(config) expect(rc.updateCapabilities).to.not.have.been.calledWith('ASM_ACTIVATION') - expect(rc.on).to.have.been.calledOnceWith('ASM_FEATURES') - expect(rc.on.firstCall.args[1]).to.be.a('function') + expect(rc.setProductHandler).to.have.been.calledOnceWith('ASM_FEATURES') + expect(rc.setProductHandler.firstCall.args[1]).to.be.a('function') }) it('should not listen to remote config when appsec is explicitly configured as enabled=false', () => { @@ -79,7 +81,7 @@ describe('Remote Config index', () => { expect(RemoteConfigManager).to.have.been.calledOnceWithExactly(config) expect(rc.updateCapabilities).to.not.have.been.calledWith(RemoteConfigCapabilities.ASM_ACTIVATION, true) - expect(rc.on).to.not.have.been.called + expect(rc.setProductHandler).to.not.have.been.called }) it('should listen ASM_API_SECURITY_SAMPLE_RATE when appsec.enabled=undefined and appSecurity.enabled=true', () => { @@ -112,7 +114,7 @@ describe('Remote Config index', () => { remoteConfig.enable(config, appsec) - listener = rc.on.firstCall.args[1] + listener = rc.setProductHandler.firstCall.args[1] }) it('should enable appsec when listener is called with apply and enabled', () => { @@ -157,7 +159,7 @@ describe('Remote Config index', () => { remoteConfig.enable(config) - listener = rc.on.firstCall.args[1] + listener = rc.setProductHandler.firstCall.args[1] }) it('should update apiSecuritySampler config', () => { @@ -226,7 +228,7 @@ describe('Remote Config index', () => { remoteConfig.enable(config) - listener = rc.on.firstCall.args[1] + listener = rc.setProductHandler.firstCall.args[1] }) it('should update config apiSecurity.requestSampling property value', () => { @@ -249,7 +251,7 @@ describe('Remote Config index', () => { remoteConfig.enableWafUpdate(config.appsec) expect(rc.updateCapabilities).to.not.have.been.called - expect(rc.on).to.not.have.been.called + expect(rc.setProductHandler).to.not.have.been.called }) it('should not enable when custom appsec rules are provided', () => { @@ -258,11 +260,11 @@ describe('Remote Config index', () => { remoteConfig.enableWafUpdate(config.appsec) expect(rc.updateCapabilities).to.not.have.been.calledWith('ASM_ACTIVATION') - expect(rc.on).to.have.been.called + expect(rc.setProductHandler).to.have.been.called }) it('should enable when using default rules', () => { - config.appsec = { enabled: true, rules: null } + config.appsec = { enabled: true, rules: null, rasp: { enabled: true } } remoteConfig.enable(config) remoteConfig.enableWafUpdate(config.appsec) @@ -284,15 +286,19 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, 
true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, true) - expect(rc.on).to.have.been.calledWith('ASM_DATA') - expect(rc.on).to.have.been.calledWith('ASM_DD') - expect(rc.on).to.have.been.calledWith('ASM') + expect(rc.setProductHandler).to.have.been.calledWith('ASM_DATA') + expect(rc.setProductHandler).to.have.been.calledWith('ASM_DD') + expect(rc.setProductHandler).to.have.been.calledWith('ASM') expect(rc.on).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) }) it('should activate if appsec is manually enabled', () => { - config.appsec = { enabled: true } + config.appsec = { enabled: true, rasp: { enabled: true } } remoteConfig.enable(config) remoteConfig.enableWafUpdate(config.appsec) @@ -314,15 +320,50 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, true) - expect(rc.on).to.have.been.calledWith('ASM_DATA') - expect(rc.on).to.have.been.calledWith('ASM_DD') - expect(rc.on).to.have.been.calledWith('ASM') + expect(rc.setProductHandler).to.have.been.calledWith('ASM_DATA') + expect(rc.setProductHandler).to.have.been.calledWith('ASM_DD') + expect(rc.setProductHandler).to.have.been.calledWith('ASM') expect(rc.on).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) }) it('should activate if appsec enabled is not defined', () => { - config.appsec = {} + config.appsec = { rasp: { enabled: true } } + remoteConfig.enable(config) + remoteConfig.enableWafUpdate(config.appsec) + + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ACTIVATION, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_IP_BLOCKING, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_USER_BLOCKING, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_RULES, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_EXCLUSIONS, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RESPONSE_BLOCKING, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, true) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, true) + }) + + it('should not activate rasp 
capabilities if rasp is disabled', () => { + config.appsec = { rasp: { enabled: false } } remoteConfig.enable(config) remoteConfig.enableWafUpdate(config.appsec) @@ -346,6 +387,10 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) + expect(rc.updateCapabilities) + .to.not.have.been.calledWith(RemoteConfigCapabilities.ASM_RASP_SSRF) + expect(rc.updateCapabilities) + .to.not.have.been.calledWith(RemoteConfigCapabilities.ASM_RASP_SQLI) }) }) @@ -373,10 +418,14 @@ describe('Remote Config index', () => { .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, false) expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, false) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SSRF, false) + expect(rc.updateCapabilities) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_RASP_SQLI, false) - expect(rc.off).to.have.been.calledWith('ASM_DATA') - expect(rc.off).to.have.been.calledWith('ASM_DD') - expect(rc.off).to.have.been.calledWith('ASM') + expect(rc.removeProductHandler).to.have.been.calledWith('ASM_DATA') + expect(rc.removeProductHandler).to.have.been.calledWith('ASM_DD') + expect(rc.removeProductHandler).to.have.been.calledWith('ASM') expect(rc.off).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) }) }) diff --git a/packages/dd-trace/test/appsec/remote_config/manager.spec.js b/packages/dd-trace/test/appsec/remote_config/manager.spec.js index 8e5fdc6b516..f9aea97ce08 100644 --- a/packages/dd-trace/test/appsec/remote_config/manager.spec.js +++ b/packages/dd-trace/test/appsec/remote_config/manager.spec.js @@ -149,30 +149,31 @@ describe('RemoteConfigManager', () => { }) }) - describe('on/off', () => { + describe('setProductHandler/removeProductHandler', () => { it('should update the product list and autostart or autostop', () => { - expect(rc.on('ASM_FEATURES', noop)).to.equal(rc) + expect(rc.scheduler.start).to.not.have.been.called + + rc.setProductHandler('ASM_FEATURES', noop) expect(rc.state.client.products).to.deep.equal(['ASM_FEATURES']) - expect(rc.scheduler.start).to.have.been.calledOnce + expect(rc.scheduler.start).to.have.been.called - rc.on('ASM_DATA', noop) - rc.on('ASM_DD', noop) + rc.setProductHandler('ASM_DATA', noop) + rc.setProductHandler('ASM_DD', noop) expect(rc.state.client.products).to.deep.equal(['ASM_FEATURES', 'ASM_DATA', 'ASM_DD']) - expect(rc.scheduler.start).to.have.been.calledThrice - expect(rc.off('ASM_FEATURES', noop)).to.equal(rc) + rc.removeProductHandler('ASM_FEATURES') expect(rc.state.client.products).to.deep.equal(['ASM_DATA', 'ASM_DD']) - rc.off('ASM_DATA', noop) + rc.removeProductHandler('ASM_DATA') expect(rc.scheduler.stop).to.not.have.been.called - rc.off('ASM_DD', noop) + rc.removeProductHandler('ASM_DD') - expect(rc.scheduler.stop).to.have.been.calledOnce + expect(rc.scheduler.stop).to.have.been.called expect(rc.state.client.products).to.be.empty }) }) @@ -552,63 +553,101 @@ describe('RemoteConfigManager', () => { }) describe('dispatch', () => { - beforeEach(() => { - sinon.stub(rc, 'emit') - }) - - it('should call emit for each config, catch errors, and update the state', () => { - rc.emit.onFirstCall().returns(true) - rc.emit.onSecondCall().throws(new Error('Unable to apply config')) - 
rc.emit.onThirdCall().returns(true) - - const list = [ - { - id: 'asm_features', - path: 'datadog/42/ASM_FEATURES/confId/config', - product: 'ASM_FEATURES', - apply_state: UNACKNOWLEDGED, - apply_error: '', - file: { asm: { enabled: true } } - }, - { - id: 'asm_data', - path: 'datadog/42/ASM_DATA/confId/config', - product: 'ASM_DATA', - apply_state: UNACKNOWLEDGED, - apply_error: '', - file: { data: [1, 2, 3] } - }, - { - id: 'asm_dd', - path: 'datadog/42/ASM_DD/confId/config', - product: 'ASM_DD', + it('should call registered handler for each config, catch errors, and update the state', (done) => { + const syncGoodNonAckHandler = sinon.spy() + const syncBadNonAckHandler = sinon.spy(() => { throw new Error('sync fn') }) + const asyncGoodHandler = sinon.spy(async () => {}) + const asyncBadHandler = sinon.spy(async () => { throw new Error('async fn') }) + const syncGoodAckHandler = sinon.spy((action, conf, id, ack) => { ack() }) + const syncBadAckHandler = sinon.spy((action, conf, id, ack) => { ack(new Error('sync ack fn')) }) + const asyncGoodAckHandler = sinon.spy((action, conf, id, ack) => { setImmediate(ack) }) + const asyncBadAckHandler = sinon.spy((action, conf, id, ack) => { + setImmediate(ack.bind(null, new Error('async ack fn'))) + }) + const unackHandler = sinon.spy((action, conf, id, ack) => {}) + + rc.setProductHandler('PRODUCT_0', syncGoodNonAckHandler) + rc.setProductHandler('PRODUCT_1', syncBadNonAckHandler) + rc.setProductHandler('PRODUCT_2', asyncGoodHandler) + rc.setProductHandler('PRODUCT_3', asyncBadHandler) + rc.setProductHandler('PRODUCT_4', syncGoodAckHandler) + rc.setProductHandler('PRODUCT_5', syncBadAckHandler) + rc.setProductHandler('PRODUCT_6', asyncGoodAckHandler) + rc.setProductHandler('PRODUCT_7', asyncBadAckHandler) + rc.setProductHandler('PRODUCT_8', unackHandler) + + const list = [] + for (let i = 0; i < 9; i++) { + list[i] = { + id: `id_${i}`, + path: `datadog/42/PRODUCT_${i}/confId/config`, + product: `PRODUCT_${i}`, apply_state: UNACKNOWLEDGED, apply_error: '', - file: { rules: [4, 5, 6] } + file: { index: i } } - ] + } rc.dispatch(list, 'apply') - expect(rc.emit).to.have.been.calledThrice - expect(rc.emit.firstCall).to.have.been - .calledWithExactly('ASM_FEATURES', 'apply', { asm: { enabled: true } }, 'asm_features') - expect(rc.emit.secondCall).to.have.been.calledWithExactly('ASM_DATA', 'apply', { data: [1, 2, 3] }, 'asm_data') - expect(rc.emit.thirdCall).to.have.been.calledWithExactly('ASM_DD', 'apply', { rules: [4, 5, 6] }, 'asm_dd') + expect(syncGoodNonAckHandler).to.have.been.calledOnceWithExactly('apply', list[0].file, list[0].id) + expect(syncBadNonAckHandler).to.have.been.calledOnceWithExactly('apply', list[1].file, list[1].id) + expect(asyncGoodHandler).to.have.been.calledOnceWithExactly('apply', list[2].file, list[2].id) + expect(asyncBadHandler).to.have.been.calledOnceWithExactly('apply', list[3].file, list[3].id) + assertAsyncHandlerCallArguments(syncGoodAckHandler, 'apply', list[4].file, list[4].id) + assertAsyncHandlerCallArguments(syncBadAckHandler, 'apply', list[5].file, list[5].id) + assertAsyncHandlerCallArguments(asyncGoodAckHandler, 'apply', list[6].file, list[6].id) + assertAsyncHandlerCallArguments(asyncBadAckHandler, 'apply', list[7].file, list[7].id) + assertAsyncHandlerCallArguments(unackHandler, 'apply', list[8].file, list[8].id) expect(list[0].apply_state).to.equal(ACKNOWLEDGED) expect(list[0].apply_error).to.equal('') expect(list[1].apply_state).to.equal(ERROR) - expect(list[1].apply_error).to.equal('Error: Unable to apply 
config') - expect(list[2].apply_state).to.equal(ACKNOWLEDGED) + expect(list[1].apply_error).to.equal('Error: sync fn') + expect(list[2].apply_state).to.equal(UNACKNOWLEDGED) expect(list[2].apply_error).to.equal('') + expect(list[3].apply_state).to.equal(UNACKNOWLEDGED) + expect(list[3].apply_error).to.equal('') + expect(list[4].apply_state).to.equal(ACKNOWLEDGED) + expect(list[4].apply_error).to.equal('') + expect(list[5].apply_state).to.equal(ERROR) + expect(list[5].apply_error).to.equal('Error: sync ack fn') + expect(list[6].apply_state).to.equal(UNACKNOWLEDGED) + expect(list[6].apply_error).to.equal('') + expect(list[7].apply_state).to.equal(UNACKNOWLEDGED) + expect(list[7].apply_error).to.equal('') + expect(list[8].apply_state).to.equal(UNACKNOWLEDGED) + expect(list[8].apply_error).to.equal('') + + for (let i = 0; i < list.length; i++) { + expect(rc.appliedConfigs.get(`datadog/42/PRODUCT_${i}/confId/config`)).to.equal(list[i]) + } + + setImmediate(() => { + expect(list[2].apply_state).to.equal(ACKNOWLEDGED) + expect(list[2].apply_error).to.equal('') + expect(list[3].apply_state).to.equal(ERROR) + expect(list[3].apply_error).to.equal('Error: async fn') + expect(list[6].apply_state).to.equal(ACKNOWLEDGED) + expect(list[6].apply_error).to.equal('') + expect(list[7].apply_state).to.equal(ERROR) + expect(list[7].apply_error).to.equal('Error: async ack fn') + expect(list[8].apply_state).to.equal(UNACKNOWLEDGED) + expect(list[8].apply_error).to.equal('') + done() + }) - expect(rc.appliedConfigs.get('datadog/42/ASM_FEATURES/confId/config')).to.equal(list[0]) - expect(rc.appliedConfigs.get('datadog/42/ASM_DATA/confId/config')).to.equal(list[1]) - expect(rc.appliedConfigs.get('datadog/42/ASM_DD/confId/config')).to.equal(list[2]) + function assertAsyncHandlerCallArguments (handler, ...expectedArgs) { + expect(handler).to.have.been.calledOnceWith(...expectedArgs) + expect(handler.args[0].length).to.equal(expectedArgs.length + 1) + expect(handler.args[0][handler.args[0].length - 1]).to.be.a('function') + } }) it('should delete config from state when action is unapply', () => { + const handler = sinon.spy() + rc.setProductHandler('ASM_FEATURES', handler) + rc.appliedConfigs.set('datadog/42/ASM_FEATURES/confId/config', { id: 'asm_data', path: 'datadog/42/ASM_FEATURES/confId/config', @@ -620,8 +659,7 @@ describe('RemoteConfigManager', () => { rc.dispatch([rc.appliedConfigs.get('datadog/42/ASM_FEATURES/confId/config')], 'unapply') - expect(rc.emit).to.have.been - .calledOnceWithExactly('ASM_FEATURES', 'unapply', { asm: { enabled: true } }, 'asm_data') + expect(handler).to.have.been.calledOnceWithExactly('unapply', { asm: { enabled: true } }, 'asm_data') expect(rc.appliedConfigs).to.be.empty }) }) diff --git a/packages/dd-trace/test/appsec/waf/index.spec.js b/packages/dd-trace/test/appsec/waf/index.spec.js index 9ca7ec95161..816b3fe89c6 100644 --- a/packages/dd-trace/test/appsec/waf/index.spec.js +++ b/packages/dd-trace/test/appsec/waf/index.spec.js @@ -7,6 +7,14 @@ const Reporter = require('../../../src/appsec/reporter') const web = require('../../../src/plugins/util/web') describe('WAF Manager', () => { + const knownAddresses = new Set([ + 'server.io.net.url', + 'server.request.headers.no_cookies', + 'server.request.uri.raw', + 'processor.address', + 'server.request.body', + 'waf.context.processor' + ]) let waf, WAFManager let DDWAF let config @@ -26,6 +34,7 @@ describe('WAF Manager', () => { loaded: ['rule_1'], failed: [] } } + DDWAF.prototype.knownAddresses = knownAddresses WAFManager = 
proxyquire('../../../src/appsec/waf/waf_manager', { '@datadog/native-appsec': { DDWAF } diff --git a/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js index b422c5eeda6..cffe9718ee2 100644 --- a/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js +++ b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js @@ -1,14 +1,21 @@ 'use strict' +const proxyquire = require('proxyquire') const WAFContextWrapper = require('../../../src/appsec/waf/waf_context_wrapper') const addresses = require('../../../src/appsec/addresses') +const { wafRunFinished } = require('../../../src/appsec/channels') describe('WAFContextWrapper', () => { + const knownAddresses = new Set([ + addresses.HTTP_INCOMING_QUERY, + addresses.HTTP_INCOMING_GRAPHQL_RESOLVER + ]) + it('Should send HTTP_INCOMING_QUERY only once', () => { const ddwafContext = { run: sinon.stub() } - const wafContextWrapper = new WAFContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0') + const wafContextWrapper = new WAFContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0', knownAddresses) const payload = { persistent: { @@ -22,11 +29,11 @@ describe('WAFContextWrapper', () => { expect(ddwafContext.run).to.have.been.calledOnceWithExactly(payload, 1000) }) - it('Should send ephemeral addreses every time', () => { + it('Should send ephemeral addresses every time', () => { const ddwafContext = { run: sinon.stub() } - const wafContextWrapper = new WAFContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0') + const wafContextWrapper = new WAFContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0', knownAddresses) const payload = { persistent: { @@ -50,4 +57,101 @@ describe('WAFContextWrapper', () => { } }, 1000) }) + + it('Should ignore run without known addresses', () => { + const ddwafContext = { + run: sinon.stub() + } + const wafContextWrapper = new WAFContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0', knownAddresses) + + const payload = { + persistent: { + 'persistent-unknown-address': { key: 'value' } + }, + ephemeral: { + 'ephemeral-unknown-address': { key: 'value' } + } + } + + wafContextWrapper.run(payload) + + expect(ddwafContext.run).to.have.not.been.called + }) + + it('should publish the payload in the dc channel', () => { + const ddwafContext = { + run: sinon.stub().returns([]) + } + const wafContextWrapper = new WAFContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0', knownAddresses) + const payload = { + persistent: { + [addresses.HTTP_INCOMING_QUERY]: { key: 'value' } + }, + ephemeral: { + [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: { anotherKey: 'anotherValue' } + } + } + const finishedCallback = sinon.stub() + + wafRunFinished.subscribe(finishedCallback) + wafContextWrapper.run(payload) + wafRunFinished.unsubscribe(finishedCallback) + + expect(finishedCallback).to.be.calledOnceWith({ payload }) + }) + + describe('Disposal context check', () => { + let log + let ddwafContext + let wafContextWrapper + + beforeEach(() => { + log = { + warn: sinon.stub() + } + + ddwafContext = { + run: sinon.stub() + } + + const ProxiedWafContextWrapper = proxyquire('../../../src/appsec/waf/waf_context_wrapper', { + '../../log': log + }) + + wafContextWrapper = new ProxiedWafContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0', knownAddresses) + }) + + afterEach(() => { + sinon.restore() + }) + + it('Should call run if context is not disposed', () => { + ddwafContext.disposed = false + + const payload = { + persistent: { + [addresses.HTTP_INCOMING_QUERY]: { key: 'value' } + } + 
} + + wafContextWrapper.run(payload) + + sinon.assert.calledOnce(ddwafContext.run) + }) + + it('Should not call run and log a warn if context is disposed', () => { + ddwafContext.disposed = true + + const payload = { + persistent: { + [addresses.HTTP_INCOMING_QUERY]: { key: 'value' } + } + } + + wafContextWrapper.run(payload) + + sinon.assert.notCalled(ddwafContext.run) + sinon.assert.calledOnceWithExactly(log.warn, 'Calling run on a disposed context') + }) + }) }) diff --git a/packages/dd-trace/test/appsec/waf/waf_manager.spec.js b/packages/dd-trace/test/appsec/waf/waf_manager.spec.js new file mode 100644 index 00000000000..ebb7d371049 --- /dev/null +++ b/packages/dd-trace/test/appsec/waf/waf_manager.spec.js @@ -0,0 +1,33 @@ +'use strict' + +const proxyquire = require('proxyquire') + +describe('WAFManager', () => { + let WAFManager, WAFContextWrapper, DDWAF + const knownAddresses = new Set() + + beforeEach(() => { + DDWAF = sinon.stub() + DDWAF.prototype.constructor.version = sinon.stub() + DDWAF.prototype.knownAddresses = knownAddresses + DDWAF.prototype.diagnostics = {} + DDWAF.prototype.createContext = sinon.stub() + + WAFContextWrapper = sinon.stub() + WAFManager = proxyquire('../../../src/appsec/waf/waf_manager', { + './waf_context_wrapper': WAFContextWrapper, + '@datadog/native-appsec': { DDWAF } + }) + }) + + describe('getWAFContext', () => { + it('should construct WAFContextWrapper with knownAddresses', () => { + const wafManager = new WAFManager({}, {}) + + wafManager.getWAFContext({}) + + const any = sinon.match.any + sinon.assert.calledOnceWithMatch(WAFContextWrapper, any, any, any, any, knownAddresses) + }) + }) +}) diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index cd9ae1d661a..ca4d8b142d3 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -219,6 +219,7 @@ describe('Config', () => { expect(config).to.have.property('reportHostname', false) expect(config).to.have.property('scope', undefined) expect(config).to.have.property('logLevel', 'debug') + expect(config).to.have.property('dynamicInstrumentationEnabled', false) expect(config).to.have.property('traceId128BitGenerationEnabled', true) expect(config).to.have.property('traceId128BitLoggingEnabled', false) expect(config).to.have.property('spanAttributeSchema', 'v0') @@ -294,6 +295,7 @@ describe('Config', () => { { name: 'dogstatsd.hostname', value: '127.0.0.1', origin: 'calculated' }, { name: 'dogstatsd.port', value: '8125', origin: 'default' }, { name: 'dsmEnabled', value: false, origin: 'default' }, + { name: 'dynamicInstrumentationEnabled', value: false, origin: 'default' }, { name: 'env', value: undefined, origin: 'default' }, { name: 'experimental.enableGetRumData', value: false, origin: 'default' }, { name: 'experimental.exporter', value: undefined, origin: 'default' }, @@ -321,6 +323,7 @@ describe('Config', () => { { name: 'isGitUploadEnabled', value: false, origin: 'default' }, { name: 'isIntelligentTestRunnerEnabled', value: false, origin: 'default' }, { name: 'isManualApiEnabled', value: false, origin: 'default' }, + { name: 'ciVisibilityTestSessionName', value: '', origin: 'default' }, { name: 'logInjection', value: false, origin: 'default' }, { name: 'lookup', value: undefined, origin: 'default' }, { name: 'openAiLogsEnabled', value: false, origin: 'default' }, @@ -424,6 +427,7 @@ describe('Config', () => { process.env.DD_RUNTIME_METRICS_ENABLED = 'true' process.env.DD_TRACE_REPORT_HOSTNAME = 'true' process.env.DD_ENV = 
'test' + process.env.DD_DYNAMIC_INSTRUMENTATION_ENABLED = 'true' process.env.DD_TRACE_GLOBAL_TAGS = 'foo:bar,baz:qux' process.env.DD_TRACE_SAMPLE_RATE = '0.5' process.env.DD_TRACE_RATE_LIMIT = '-1' @@ -505,6 +509,7 @@ describe('Config', () => { expect(config).to.have.property('clientIpHeader', 'x-true-client-ip') expect(config).to.have.property('runtimeMetrics', true) expect(config).to.have.property('reportHostname', true) + expect(config).to.have.property('dynamicInstrumentationEnabled', true) expect(config).to.have.property('env', 'test') expect(config).to.have.property('sampleRate', 0.5) expect(config).to.have.property('traceId128BitGenerationEnabled', true) @@ -601,6 +606,7 @@ describe('Config', () => { { name: 'clientIpHeader', value: 'x-true-client-ip', origin: 'env_var' }, { name: 'dogstatsd.hostname', value: 'dsd-agent', origin: 'env_var' }, { name: 'dogstatsd.port', value: '5218', origin: 'env_var' }, + { name: 'dynamicInstrumentationEnabled', value: true, origin: 'env_var' }, { name: 'env', value: 'test', origin: 'env_var' }, { name: 'experimental.enableGetRumData', value: true, origin: 'env_var' }, { name: 'experimental.exporter', value: 'log', origin: 'env_var' }, @@ -760,6 +766,7 @@ describe('Config', () => { }, experimental: { b3: true, + dynamicInstrumentationEnabled: true, traceparent: true, runtimeId: true, exporter: 'log', @@ -797,6 +804,7 @@ describe('Config', () => { expect(config).to.have.nested.property('dogstatsd.port', '5218') expect(config).to.have.property('service', 'service') expect(config).to.have.property('version', '0.1.0') + expect(config).to.have.property('dynamicInstrumentationEnabled', true) expect(config).to.have.property('env', 'test') expect(config).to.have.property('sampleRate', 0.5) expect(config).to.have.property('logger', logger) @@ -868,6 +876,7 @@ describe('Config', () => { { name: 'clientIpHeader', value: 'x-true-client-ip', origin: 'code' }, { name: 'dogstatsd.hostname', value: 'agent-dsd', origin: 'code' }, { name: 'dogstatsd.port', value: '5218', origin: 'code' }, + { name: 'dynamicInstrumentationEnabled', value: true, origin: 'code' }, { name: 'env', value: 'test', origin: 'code' }, { name: 'experimental.enableGetRumData', value: true, origin: 'code' }, { name: 'experimental.exporter', value: 'log', origin: 'code' }, @@ -1038,6 +1047,7 @@ describe('Config', () => { process.env.DD_RUNTIME_METRICS_ENABLED = 'true' process.env.DD_TRACE_REPORT_HOSTNAME = 'true' process.env.DD_ENV = 'test' + process.env.DD_DYNAMIC_INSTRUMENTATION_ENABLED = 'true' process.env.DD_API_KEY = '123' process.env.DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = 'v0' process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = 'false' @@ -1111,6 +1121,7 @@ describe('Config', () => { }, experimental: { b3: false, + dynamicInstrumentationEnabled: false, traceparent: false, runtimeId: false, exporter: 'agent', @@ -1166,6 +1177,7 @@ describe('Config', () => { expect(config).to.have.property('flushMinSpans', 500) expect(config).to.have.property('service', 'test') expect(config).to.have.property('version', '1.0.0') + expect(config).to.have.property('dynamicInstrumentationEnabled', false) expect(config).to.have.property('env', 'development') expect(config).to.have.property('clientIpEnabled', true) expect(config).to.have.property('clientIpHeader', 'x-true-client-ip') @@ -1797,6 +1809,7 @@ describe('Config', () => { delete process.env.DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED delete process.env.DD_CIVISIBILITY_FLAKY_RETRY_ENABLED delete process.env.DD_CIVISIBILITY_FLAKY_RETRY_COUNT + delete 
process.env.DD_TEST_SESSION_NAME delete process.env.JEST_WORKER_ID options = {} }) @@ -1822,14 +1835,14 @@ describe('Config', () => { const config = new Config(options) expect(config).to.have.property('isIntelligentTestRunnerEnabled', false) }) - it('should disable manual testing API by default', () => { + it('should enable manual testing API by default', () => { const config = new Config(options) - expect(config).to.have.property('isManualApiEnabled', false) + expect(config).to.have.property('isManualApiEnabled', true) }) - it('should enable manual testing API if DD_CIVISIBILITY_MANUAL_API_ENABLED is passed', () => { - process.env.DD_CIVISIBILITY_MANUAL_API_ENABLED = 'true' + it('should disable manual testing API if DD_CIVISIBILITY_MANUAL_API_ENABLED is set to false', () => { + process.env.DD_CIVISIBILITY_MANUAL_API_ENABLED = 'false' const config = new Config(options) - expect(config).to.have.property('isManualApiEnabled', true) + expect(config).to.have.property('isManualApiEnabled', false) }) it('should disable memcached command tagging by default', () => { const config = new Config(options) @@ -1881,6 +1894,11 @@ describe('Config', () => { const config = new Config(options) expect(config).to.have.property('flakyTestRetriesCount', 5) }) + it('should set the session name if DD_TEST_SESSION_NAME is set', () => { + process.env.DD_TEST_SESSION_NAME = 'my-test-session' + const config = new Config(options) + expect(config).to.have.property('ciVisibilityTestSessionName', 'my-test-session') + }) }) context('ci visibility mode is not enabled', () => { it('should not activate intelligent test runner or git metadata upload', () => { @@ -2000,4 +2018,83 @@ describe('Config', () => { } })).to.have.nested.property('appsec.apiSecurity.requestSampling', 0.1) }) + + context('payload tagging', () => { + let env + + const staticConfig = require('../src/payload-tagging/config/aws') + + beforeEach(() => { + env = process.env + }) + + afterEach(() => { + process.env = env + }) + + it('defaults', () => { + const taggingConfig = new Config().cloudPayloadTagging + expect(taggingConfig).to.have.property('requestsEnabled', false) + expect(taggingConfig).to.have.property('responsesEnabled', false) + expect(taggingConfig).to.have.property('maxDepth', 10) + }) + + it('enabling requests with no additional filter', () => { + process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = 'all' + const taggingConfig = new Config().cloudPayloadTagging + expect(taggingConfig).to.have.property('requestsEnabled', true) + expect(taggingConfig).to.have.property('responsesEnabled', false) + expect(taggingConfig).to.have.property('maxDepth', 10) + const awsRules = taggingConfig.rules.aws + for (const [serviceName, service] of Object.entries(awsRules)) { + expect(service.request).to.deep.equal(staticConfig[serviceName].request) + } + }) + + it('enabling requests with an additional filter', () => { + process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = '$.foo.bar' + const taggingConfig = new Config().cloudPayloadTagging + expect(taggingConfig).to.have.property('requestsEnabled', true) + expect(taggingConfig).to.have.property('responsesEnabled', false) + expect(taggingConfig).to.have.property('maxDepth', 10) + const awsRules = taggingConfig.rules.aws + for (const [, service] of Object.entries(awsRules)) { + expect(service.request).to.include('$.foo.bar') + } + }) + + it('enabling responses with no additional filter', () => { + process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = 'all' + const taggingConfig = new Config().cloudPayloadTagging 
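+ // response-only mode: request tagging must stay disabled while the response rules become active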
+ expect(taggingConfig).to.have.property('requestsEnabled', false) + expect(taggingConfig).to.have.property('responsesEnabled', true) + expect(taggingConfig).to.have.property('maxDepth', 10) + const awsRules = taggingConfig.rules.aws + for (const [serviceName, service] of Object.entries(awsRules)) { + expect(service.response).to.deep.equal(staticConfig[serviceName].response) + } + }) + + it('enabling responses with an additional filter', () => { + process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = '$.foo.bar' + const taggingConfig = new Config().cloudPayloadTagging + expect(taggingConfig).to.have.property('requestsEnabled', false) + expect(taggingConfig).to.have.property('responsesEnabled', true) + expect(taggingConfig).to.have.property('maxDepth', 10) + const awsRules = taggingConfig.rules.aws + for (const [, service] of Object.entries(awsRules)) { + expect(service.response).to.include('$.foo.bar') + } + }) + + it('overriding max depth', () => { + process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = 'all' + process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = 'all' + process.env.DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH = 7 + const taggingConfig = new Config().cloudPayloadTagging + expect(taggingConfig).to.have.property('requestsEnabled', true) + expect(taggingConfig).to.have.property('responsesEnabled', true) + expect(taggingConfig).to.have.property('maxDepth', 7) + }) + }) }) diff --git a/packages/dd-trace/test/datastreams/schemas/schema_builder.spec.js b/packages/dd-trace/test/datastreams/schemas/schema_builder.spec.js new file mode 100644 index 00000000000..db602ef83aa --- /dev/null +++ b/packages/dd-trace/test/datastreams/schemas/schema_builder.spec.js @@ -0,0 +1,57 @@ +'use strict' + +require('../../setup/tap') + +const { SchemaBuilder } = require('../../../src/datastreams/schemas/schema_builder') +const { expect } = require('chai') + +class Iterator { + iterateOverSchema (builder) { + builder.addProperty('person', 'name', false, 'string', 'name of the person', null, null, null) + builder.addProperty('person', 'phone_numbers', true, 'string', null, null, null, null) + builder.addProperty('person', 'person_name', false, 'string', null, null, null, null) + builder.addProperty('person', 'address', false, 'object', null, '#/components/schemas/address', null, null) + builder.addProperty('address', 'zip', false, 'number', null, null, 'int', null) + builder.addProperty('address', 'street', false, 'string', null, null, null, null) + } +} + +describe('SchemaBuilder', () => { + it('should convert schema correctly to JSON', () => { + const builder = new SchemaBuilder(new Iterator()) + + const shouldExtractPerson = builder.shouldExtractSchema('person', 0) + const shouldExtractAddress = builder.shouldExtractSchema('address', 1) + const shouldExtractPerson2 = builder.shouldExtractSchema('person', 0) + const shouldExtractTooDeep = builder.shouldExtractSchema('city', 11) + const schema = builder.build() + + const expectedSchema = { + components: { + schemas: { + person: { + properties: { + name: { description: 'name of the person', type: 'string' }, + phone_numbers: { items: { type: 'string' }, type: 'array' }, + person_name: { type: 'string' }, + address: { $ref: '#/components/schemas/address', type: 'object' } + }, + type: 'object' + }, + address: { + properties: { zip: { format: 'int', type: 'number' }, street: { type: 'string' } }, + type: 'object' + } + } + }, + openapi: '3.0.0' + } + + expect(JSON.parse(schema.definition)).to.deep.equal(expectedSchema) + 
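// the id should be stable for identical definitions (assumed here to be a hash derived from the definition); the literal below matches this exact schema +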
expect(schema.id).to.equal('9510078321201428652') + expect(shouldExtractPerson).to.be.true + expect(shouldExtractAddress).to.be.true + expect(shouldExtractPerson2).to.be.false + expect(shouldExtractTooDeep).to.be.false + }) +}) diff --git a/packages/dd-trace/test/datastreams/schemas/schema_sampler.spec.js b/packages/dd-trace/test/datastreams/schemas/schema_sampler.spec.js new file mode 100644 index 00000000000..80e288a66b6 --- /dev/null +++ b/packages/dd-trace/test/datastreams/schemas/schema_sampler.spec.js @@ -0,0 +1,39 @@ +'use strict' + +require('../../setup/tap') + +const { SchemaSampler } = require('../../../src/datastreams/schemas/schema_sampler') +const { expect } = require('chai') + +describe('SchemaSampler', () => { + it('samples with correct weights', () => { + const currentTimeMs = 100000 + const sampler = new SchemaSampler() + + const canSample1 = sampler.canSample(currentTimeMs) + const weight1 = sampler.trySample(currentTimeMs) + + const canSample2 = sampler.canSample(currentTimeMs + 1000) + const weight2 = sampler.trySample(currentTimeMs + 1000) + + const canSample3 = sampler.canSample(currentTimeMs + 2000) + const weight3 = sampler.trySample(currentTimeMs + 2000) + + const canSample4 = sampler.canSample(currentTimeMs + 30000) + const weight4 = sampler.trySample(currentTimeMs + 30000) + + const canSample5 = sampler.canSample(currentTimeMs + 30001) + const weight5 = sampler.trySample(currentTimeMs + 30001) + + expect(canSample1).to.be.true + expect(weight1).to.equal(1) + expect(canSample2).to.be.false + expect(weight2).to.equal(0) + expect(canSample3).to.be.false + expect(weight3).to.equal(0) + expect(canSample4).to.be.true + expect(weight4).to.equal(3) + expect(canSample5).to.be.false + expect(weight5).to.equal(0) + }) +}) diff --git a/packages/dd-trace/test/debugger/devtools_client/status.spec.js b/packages/dd-trace/test/debugger/devtools_client/status.spec.js new file mode 100644 index 00000000000..728279c7eca --- /dev/null +++ b/packages/dd-trace/test/debugger/devtools_client/status.spec.js @@ -0,0 +1,102 @@ +'use strict' + +require('../../setup/tap') + +const ddsource = 'dd_debugger' +const service = 'my-service' +const runtimeId = 'my-runtime-id' + +describe('diagnostic message http request caching', () => { + let statusproxy, request + + const acks = [ + ['ackReceived', 'RECEIVED'], + ['ackInstalled', 'INSTALLED'], + ['ackEmitting', 'EMITTING'], + ['ackError', 'ERROR', new Error('boom')] + ] + + beforeEach(() => { + request = sinon.spy() + request['@noCallThru'] = true + + statusproxy = proxyquire('../src/debugger/devtools_client/status', { + './config': { service, runtimeId, '@noCallThru': true }, + '../../exporters/common/request': request + }) + }) + + for (const [ackFnName, status, err] of acks) { + describe(ackFnName, () => { + let ackFn, exception + + beforeEach(() => { + if (err) { + ackFn = statusproxy[ackFnName].bind(null, err) + // Use `JSON.stringify` to remove any fields that are `undefined` + exception = JSON.parse(JSON.stringify({ + type: err.code, + message: err.message, + stacktrace: err.stack + })) + } else { + ackFn = statusproxy[ackFnName] + exception = undefined + } + }) + + it('should only call once if no change', () => { + ackFn({ id: 'foo', version: 0 }) + expect(request).to.have.been.calledOnce + assertRequestData(request, { probeId: 'foo', version: 0, status, exception }) + + ackFn({ id: 'foo', version: 0 }) + expect(request).to.have.been.calledOnce + }) + + it('should call again if version changes', () => { + ackFn({ id: 'foo', version: 0 }) + 
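// the first ack behaves as before: it sends a request and seeds the cache +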
expect(request).to.have.been.calledOnce + assertRequestData(request, { probeId: 'foo', version: 0, status, exception }) + + ackFn({ id: 'foo', version: 1 }) + expect(request).to.have.been.calledTwice + assertRequestData(request, { probeId: 'foo', version: 1, status, exception }) + }) + + it('should call again if probeId changes', () => { + ackFn({ id: 'foo', version: 0 }) + expect(request).to.have.been.calledOnce + assertRequestData(request, { probeId: 'foo', version: 0, status, exception }) + + ackFn({ id: 'bar', version: 0 }) + expect(request).to.have.been.calledTwice + assertRequestData(request, { probeId: 'bar', version: 0, status, exception }) + }) + }) + } +}) + +function assertRequestData (request, { probeId, version, status, exception }) { + const payload = getFormPayload(request) + const diagnostics = { probeId, runtimeId, version, status } + + // Error requests will also contain an `exception` property + if (exception) diagnostics.exception = exception + + expect(payload).to.deep.equal({ ddsource, service, debugger: { diagnostics } }) + + const opts = getRequestOptions(request) + expect(opts).to.have.property('method', 'POST') + expect(opts).to.have.property('path', '/debugger/v1/diagnostics') +} + +function getRequestOptions (request) { + return request.lastCall.args[1] +} + +function getFormPayload (request) { + const form = request.lastCall.args[0] + const payload = form._data[form._data.length - 2] // the last element is an empty line + return JSON.parse(payload) +} diff --git a/packages/dd-trace/test/exporters/common/request.spec.js b/packages/dd-trace/test/exporters/common/request.spec.js index a94eca0382f..55bcb603a27 100644 --- a/packages/dd-trace/test/exporters/common/request.spec.js +++ b/packages/dd-trace/test/exporters/common/request.spec.js @@ -311,6 +311,36 @@ describe('request', function () { }) }) + it('should calculate correct Content-Length header for multi-byte characters', (done) => { + const sandbox = sinon.createSandbox() + sandbox.spy(http, 'request') + + const body = 'æøå' + const charLength = body.length + const byteLength = Buffer.byteLength(body, 'utf-8') + + expect(charLength).to.be.below(byteLength) + + nock('http://test:123').post('/').reply(200, 'OK') + + request( + body, + { + host: 'test', + port: 123, + method: 'POST', + headers: { 'Content-Type': 'text/plain; charset=utf-8' } + }, + (err, res) => { + expect(res).to.equal('OK') + const { headers } = http.request.getCall(0).args[0] + sandbox.restore() + expect(headers['Content-Length']).to.equal(byteLength) + done(err) + } + ) + }) + describe('when intercepting http', () => { const sandbox = sinon.createSandbox() diff --git a/packages/dd-trace/test/payload-tagging/index.spec.js b/packages/dd-trace/test/payload-tagging/index.spec.js new file mode 100644 index 00000000000..a4f4da8108e --- /dev/null +++ b/packages/dd-trace/test/payload-tagging/index.spec.js @@ -0,0 +1,220 @@ +const { + PAYLOAD_TAG_REQUEST_PREFIX, + PAYLOAD_TAG_RESPONSE_PREFIX +} = require('../../src/constants') +const { tagsFromObject } = require('../../src/payload-tagging/tagging') +const { computeTags } = require('../../src/payload-tagging') + +const { expect } = require('chai') + +const defaultOpts = { maxDepth: 10, prefix: 'http.payload' } + +describe('Payload tagger', () => { + describe('tag count cutoff', () => { + it('should generate many tags when not reaching the cap', () => { + const belowCap = 200 + const input = { foo: Object.fromEntries([...Array(belowCap).keys()].map(i => [i, i])) } + const tagCount = 
Object.entries(tagsFromObject(input, defaultOpts)).length + expect(tagCount).to.equal(belowCap) + }) + + it('should stop generating tags once the cap is reached', () => { + const aboveCap = 759 + const input = { foo: Object.fromEntries([...Array(aboveCap).keys()].map(i => [i, i])) } + const tagCount = Object.entries(tagsFromObject(input, defaultOpts)).length + expect(tagCount).to.not.equal(aboveCap) + expect(tagCount).to.equal(758) + }) + }) + + describe('best-effort redacting of keys', () => { + it('should redact disallowed keys', () => { + const input = { + foo: { + bar: { + token: 'tokenpleaseredact', + authorization: 'pleaseredact', + valid: 'valid' + }, + baz: { + password: 'shouldgo', + 'x-authorization': 'shouldbegone', + data: 'shouldstay' + } + } + } + const tags = tagsFromObject(input, defaultOpts) + expect(tags).to.deep.equal({ + 'http.payload.foo.bar.token': 'redacted', + 'http.payload.foo.bar.authorization': 'redacted', + 'http.payload.foo.bar.valid': 'valid', + 'http.payload.foo.baz.password': 'redacted', + 'http.payload.foo.baz.x-authorization': 'redacted', + 'http.payload.foo.baz.data': 'shouldstay' + }) + }) + + it('should redact banned keys even if they are objects', () => { + const input = { + foo: { + authorization: { + token: 'tokenpleaseredact', + authorization: 'pleaseredact', + valid: 'valid' + }, + baz: { + password: 'shouldgo', + 'x-authorization': 'shouldbegone', + data: 'shouldstay' + } + } + } + const tags = tagsFromObject(input, defaultOpts) + expect(tags).to.deep.equal({ + 'http.payload.foo.authorization': 'redacted', + 'http.payload.foo.baz.password': 'redacted', + 'http.payload.foo.baz.x-authorization': 'redacted', + 'http.payload.foo.baz.data': 'shouldstay' + }) + }) + }) + + describe('escaping', () => { + it('should escape `.` characters in individual keys', () => { + const input = { 'foo.bar': { baz: 'quux' } } + const tags = tagsFromObject(input, defaultOpts) + expect(tags).to.deep.equal({ + 'http.payload.foo\\.bar.baz': 'quux' + }) + }) + }) + + describe('parsing', () => { + it('should transform null values to "null" string', () => { + const input = { foo: 'bar', baz: null } + const tags = tagsFromObject(input, defaultOpts) + expect(tags).to.deep.equal({ + 'http.payload.foo': 'bar', + 'http.payload.baz': 'null' + }) + }) + + it('should transform undefined values to "undefined" string', () => { + const input = { foo: 'bar', baz: undefined } + const tags = tagsFromObject(input, defaultOpts) + expect(tags).to.deep.equal({ + 'http.payload.foo': 'bar', + 'http.payload.baz': 'undefined' + }) + }) + + it('should transform boolean values to strings', () => { + const input = { foo: true, bar: false } + const tags = tagsFromObject(input, defaultOpts) + expect(tags).to.deep.equal({ + 'http.payload.foo': 'true', + 'http.payload.bar': 'false' + }) + }) + + it('should decode buffers as UTF-8', () => { + const input = { foo: Buffer.from('bar') } + const tags = tagsFromObject(input, defaultOpts) + expect(tags).to.deep.equal({ 'http.payload.foo': 'bar' }) + }) + + it('should provide tags from simple JSON objects, casting to strings where necessary', () => { + const input = { + foo: { bar: { baz: 1, quux: 2 } }, + asimplestring: 'isastring', + anullvalue: null, + anundefined: undefined + } + const tags = tagsFromObject(input, defaultOpts) + expect(tags).to.deep.equal({ + 'http.payload.foo.bar.baz': '1', + 'http.payload.foo.bar.quux': '2', + 'http.payload.asimplestring': 'isastring', + 'http.payload.anullvalue': 'null', + 'http.payload.anundefined': 'undefined' + }) + }) 
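+ // arrays are flattened below using the element index as a path segment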
+
+    it('should index tags when encountering arrays', () => {
+      const input = { foo: { bar: { list: ['v0', 'v1', 'v2'] } } }
+      const tags = tagsFromObject(input, defaultOpts)
+      expect(tags).to.deep.equal({
+        'http.payload.foo.bar.list.0': 'v0',
+        'http.payload.foo.bar.list.1': 'v1',
+        'http.payload.foo.bar.list.2': 'v2'
+      })
+    })
+
+    it('should not replace a real value at max depth', () => {
+      const input = {
+        1: { 2: { 3: { 4: { 5: { 6: { 7: { 8: { 9: { 10: 11 } } } } } } } } }
+      }
+      const tags = tagsFromObject(input, defaultOpts)
+      expect(tags).to.deep.equal({ 'http.payload.1.2.3.4.5.6.7.8.9.10': '11' })
+    })
+
+    it('should truncate paths beyond max depth', () => {
+      const input = {
+        1: { 2: { 3: { 4: { 5: { 6: { 7: { 8: { 9: { 10: { 11: 'too much' } } } } } } } } } }
+      }
+      const tags = tagsFromObject(input, defaultOpts)
+      expect(tags).to.deep.equal({ 'http.payload.1.2.3.4.5.6.7.8.9.10': 'truncated' })
+    })
+  })
+})
+
+describe('Tagging orchestration', () => {
+  it('should use the request config when given the request prefix', () => {
+    const config = {
+      request: ['$.request'],
+      response: ['$.response'],
+      expand: []
+    }
+    const input = {
+      request: 'foo',
+      response: 'bar'
+    }
+    const tags = computeTags(config, input, { maxDepth: 10, prefix: PAYLOAD_TAG_REQUEST_PREFIX })
+    expect(tags).to.have.property(`${PAYLOAD_TAG_REQUEST_PREFIX}.request`, 'redacted')
+    expect(tags).to.have.property(`${PAYLOAD_TAG_REQUEST_PREFIX}.response`, 'bar')
+  })
+
+  it('should use the response config when given the response prefix', () => {
+    const config = {
+      request: ['$.request'],
+      response: ['$.response'],
+      expand: []
+    }
+    const input = {
+      request: 'foo',
+      response: 'bar'
+    }
+    const tags = computeTags(config, input, { maxDepth: 10, prefix: PAYLOAD_TAG_RESPONSE_PREFIX })
+    expect(tags).to.have.property(`${PAYLOAD_TAG_RESPONSE_PREFIX}.response`, 'redacted')
+    expect(tags).to.have.property(`${PAYLOAD_TAG_RESPONSE_PREFIX}.request`, 'foo')
+  })
+
+  it('should apply expansion rules', () => {
+    const config = {
+      request: [],
+      response: [],
+      expand: ['$.request', '$.response', '$.invalid']
+    }
+    const input = {
+      request: '{ "foo": "bar" }',
+      response: '{ "baz": "quux" }',
+      invalid: '{ invalid JSON }',
+      untargeted: '{ "foo": "bar" }'
+    }
+    const tags = computeTags(config, input, { maxDepth: 10, prefix: 'foo' })
+    expect(tags).to.have.property('foo.request.foo', 'bar')
+    expect(tags).to.have.property('foo.response.baz', 'quux')
+    expect(tags).to.have.property('foo.invalid', '{ invalid JSON }')
+    expect(tags).to.have.property('foo.untargeted', '{ "foo": "bar" }')
+  })
+})
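A quick orientation for reviewers of payload_tagging.spec.js: the spec above pins down the whole surface of the new payload-tagging helpers. The sketch below restates that tested behaviour as a usage example; it assumes the module paths and signatures exactly as the spec requires them (`tagsFromObject(object, { maxDepth, prefix })` and `computeTags(config, object, { maxDepth, prefix })`) and is not documentation of a public tracer API.

// Sketch only: paths and signatures are those exercised by payload_tagging.spec.js.
const { tagsFromObject } = require('../src/payload-tagging/tagging')
const { computeTags } = require('../src/payload-tagging')
const { PAYLOAD_TAG_REQUEST_PREFIX } = require('../src/constants')

// Flattens a payload into span tags: nested keys join with '.', literal dots
// in keys are escaped, values are stringified ('null', 'undefined', 'true'),
// buffers decode as UTF-8, known-sensitive key names become 'redacted',
// traversal truncates at maxDepth, and generation stops at the tag cap
// (758 in the tests above).
const tags = tagsFromObject(
  { auth: { token: 'hunter2' }, 'a.b': { ok: true } },
  { maxDepth: 10, prefix: 'http.payload' }
)
// => { 'http.payload.auth.token': 'redacted', 'http.payload.a\.b.ok': 'true' }

// computeTags selects the request or response redaction rule set based on the
// prefix, after first applying 'expand' rules that parse embedded JSON strings.
const requestTags = computeTags(
  { request: ['$.auth'], response: [], expand: ['$.body'] },
  { auth: 'secret', body: '{ "foo": "bar" }' },
  { maxDepth: 10, prefix: PAYLOAD_TAG_REQUEST_PREFIX }
)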
diff --git a/packages/dd-trace/test/plugins/externals.json b/packages/dd-trace/test/plugins/externals.json
index 80b3b2147f2..eddbe0f887c 100644
--- a/packages/dd-trace/test/plugins/externals.json
+++ b/packages/dd-trace/test/plugins/externals.json
@@ -266,7 +266,7 @@
   "mocha": [
     {
       "name": "mocha",
-      "versions": [">=5.2.0"]
+      "versions": [">=5.2.0", ">=8.0.0"]
     },
     {
       "name": "mocha-each",
@@ -337,6 +337,10 @@
     {
      "name": "pg-native",
      "versions": ["3.0.0"]
+    },
+    {
+      "name": "express",
+      "versions": [">=4"]
     }
   ],
   "pino": [
@@ -362,6 +366,12 @@
       "versions": ["^4"]
     }
   ],
+  "rhea": [
+    {
+      "name": "amqp10",
+      "versions": ["^3"]
+    }
+  ],
   "sequelize": [
     {
       "name": "express",
diff --git a/packages/dd-trace/test/plugins/util/env.spec.js b/packages/dd-trace/test/plugins/util/env.spec.js
index d3cd7bf47e3..5a799897df4 100644
--- a/packages/dd-trace/test/plugins/util/env.spec.js
+++ b/packages/dd-trace/test/plugins/util/env.spec.js
@@ -9,7 +9,8 @@ const {
   OS_PLATFORM,
   OS_VERSION,
   RUNTIME_NAME,
-  RUNTIME_VERSION
+  RUNTIME_VERSION,
+  DD_HOST_CPU_COUNT
 } = require('../../../src/plugins/util/env')

 describe('env', () => {
@@ -22,7 +23,8 @@ describe('env', () => {
       [OS_ARCHITECTURE]: process.arch,
       [OS_PLATFORM]: process.platform,
       [RUNTIME_NAME]: 'node',
-      [OS_VERSION]: os.release()
+      [OS_VERSION]: os.release(),
+      [DD_HOST_CPU_COUNT]: os.cpus().length
     }
   )
 })
diff --git a/packages/dd-trace/test/profiling/exporters/agent.spec.js b/packages/dd-trace/test/profiling/exporters/agent.spec.js
index a1878f0579f..b318456eebd 100644
--- a/packages/dd-trace/test/profiling/exporters/agent.spec.js
+++ b/packages/dd-trace/test/profiling/exporters/agent.spec.js
@@ -81,7 +81,6 @@ describe('exporters/agent', function () {
         expect(req.files[0]).to.have.property('size', req.files[0].buffer.length)

         const event = JSON.parse(req.files[0].buffer.toString())
-        process._rawDebug(JSON.stringify(event))
         expect(event).to.have.property('attachments')
         expect(event.attachments).to.have.lengthOf(2)
         expect(event.attachments[0]).to.equal('wall.pprof')
@@ -270,7 +269,7 @@ describe('exporters/agent', function () {
       try {
         await exporter.export({ profiles, start, end, tags })
       } catch (err) {
-        expect(err.message).to.match(/^Profiler agent export back-off period expired$/)
+        expect(err.message).to.match(/^HTTP Error 500$/)
         failed = true
       }
       expect(failed).to.be.true
diff --git a/packages/dd-trace/test/proxy.spec.js b/packages/dd-trace/test/proxy.spec.js
index b7df0c6a647..a21e2f4226a 100644
--- a/packages/dd-trace/test/proxy.spec.js
+++ b/packages/dd-trace/test/proxy.spec.js
@@ -1,7 +1,5 @@
 'use strict'

-const EventEmitter = require('events')
-
 require('./setup/tap')

 describe('TracerProxy', () => {
@@ -28,6 +26,7 @@ describe('TracerProxy', () => {
   let pluginManager
   let flare
   let remoteConfig
+  let handlers
   let rc
   let dogStatsD
   let noopDogStatsDClient
@@ -170,7 +169,11 @@ describe('TracerProxy', () => {
       enable: sinon.stub()
     }

-    rc = new EventEmitter()
+    handlers = new Map()
+    rc = {
+      setProductHandler (product, handler) { handlers.set(product, handler) },
+      removeProductHandler (product) { handlers.delete(product) }
+    }

     remoteConfig.enable.returns(rc)

@@ -253,7 +256,7 @@ describe('TracerProxy', () => {

         proxy.init()

-        rc.emit('APM_TRACING', 'apply', { lib_config: conf })
+        handlers.get('APM_TRACING')('apply', { lib_config: conf })

         expect(config.configure).to.have.been.calledWith(conf)
         expect(tracer.configure).to.have.been.calledWith(config)
@@ -265,7 +268,7 @@ describe('TracerProxy', () => {

         proxy.init()

-        rc.emit('AGENT_CONFIG', 'apply', {
+        handlers.get('AGENT_CONFIG')('apply', {
           config: {
             log_level: logLevel
           },
@@ -285,7 +288,7 @@ describe('TracerProxy', () => {

         proxy.init()

-        rc.emit('AGENT_TASK', 'apply', {
+        handlers.get('AGENT_TASK')('apply', {
           args: task,
           task_type: 'tracer_flare',
           uuid: 'd53fc8a4-8820-47a2-aa7d-d565582feb81'
@@ -305,8 +308,8 @@ describe('TracerProxy', () => {

         proxy.init()

-        rc.emit('AGENT_CONFIG', 'apply', conf)
-        rc.emit('AGENT_CONFIG', 'unapply', conf)
+        handlers.get('AGENT_CONFIG')('apply', conf)
+        handlers.get('AGENT_CONFIG')('unapply', conf)

         expect(flare.disable).to.have.been.called
       })
@@ -328,12 +331,12 @@ describe('TracerProxy', () => {
         expect(iast.enable).to.not.have.been.called

         let conf = { tracing_enabled: false }
-        rc.emit('APM_TRACING', 'apply', { lib_config: conf })
+        handlers.get('APM_TRACING')('apply', { lib_config: conf })
         expect(appsec.disable).to.not.have.been.called
         expect(iast.disable).to.not.have.been.called

         conf = { tracing_enabled: true }
-        rc.emit('APM_TRACING', 'apply', { lib_config: conf })
+        handlers.get('APM_TRACING')('apply', { lib_config: conf })
         expect(DatadogTracer).to.have.been.calledOnce
         expect(AppsecSdk).to.have.been.calledOnce
         expect(appsec.enable).to.not.have.been.called
@@ -364,12 +367,12 @@ describe('TracerProxy', () => {
         expect(iast.enable).to.have.been.calledOnceWithExactly(config, tracer)

         let conf = { tracing_enabled: false }
-        rc.emit('APM_TRACING', 'apply', { lib_config: conf })
+        handlers.get('APM_TRACING')('apply', { lib_config: conf })
         expect(appsec.disable).to.have.been.called
         expect(iast.disable).to.have.been.called

         conf = { tracing_enabled: true }
-        rc.emit('APM_TRACING', 'apply', { lib_config: conf })
+        handlers.get('APM_TRACING')('apply', { lib_config: conf })
         expect(appsec.enable).to.have.been.calledTwice
         expect(appsec.enable.secondCall).to.have.been.calledWithExactly(config)
         expect(iast.enable).to.have.been.calledTwice
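A note on the proxy.spec.js change above: the remote-config client no longer behaves like an EventEmitter, so the test replaces it with an object exposing setProductHandler/removeProductHandler and drives registered callbacks directly. The stub pattern can be factored into a reusable helper, sketched below under the assumption that only the two methods the proxy actually calls are needed; `makeRemoteConfigStub` and `fire` are illustrative names, not APIs of the real client.

// Sketch of the Map-backed stub used in proxy.spec.js above.
function makeRemoteConfigStub () {
  const handlers = new Map()
  return {
    rc: {
      setProductHandler (product, handler) { handlers.set(product, handler) },
      removeProductHandler (product) { handlers.delete(product) }
    },
    // Invoke a registered product callback the way the RC client would.
    fire (product, action, payload) {
      const handler = handlers.get(product)
      if (handler) handler(action, payload)
    }
  }
}

// Usage in a spec:
//   const { rc, fire } = makeRemoteConfigStub()
//   remoteConfig.enable.returns(rc)
//   fire('APM_TRACING', 'apply', { lib_config: conf })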
"https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== -"@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": +"@jridgewell/trace-mapping@^0.3.17": version "0.3.20" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.20.tgz#72e45707cf240fa6b081d0366f8265b0cd10197f" integrity "sha1-cuRXB88kD6awgdA2b4JlsM0QGX8= sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q==" @@ -526,6 +526,24 @@ "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" +"@jridgewell/trace-mapping@^0.3.9": + version "0.3.15" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.15.tgz" + integrity sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jsep-plugin/assignment@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@jsep-plugin/assignment/-/assignment-1.2.1.tgz#07277bdd7862451a865d391e2142efba33f46c9b" + integrity sha512-gaHqbubTi29aZpVbBlECRpmdia+L5/lh2BwtIJTmtxdbecEyyX/ejAOg7eQDGNvGOUmPY7Z2Yxdy9ioyH/VJeA== + +"@jsep-plugin/regex@^1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@jsep-plugin/regex/-/regex-1.0.3.tgz#3aeaa2e5fa45d89de116aeafbfa41c95935b7f6d" + integrity sha512-XfZgry4DwEZvSFtS/6Y+R48D7qJYJK6R9/yJFyUFHCIUMEEHuJ4X95TDgJp5QkmzfLYvapMPzskV5HpIDrREug== + "@nodelib/fs.scandir@2.1.5": version "2.1.5" resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" @@ -1531,7 +1549,7 @@ deep-eql@^4.1.2: deep-is@^0.1.3: version "0.1.4" - resolved "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== default-require-extensions@^3.0.0: @@ -1968,7 +1986,7 @@ espree@^9.6.0, espree@^9.6.1: esprima@^4.0.0, esprima@~4.0.0: version "4.0.1" - resolved "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esquery@^1.4.2: @@ -2064,7 +2082,7 @@ fast-json-stable-stringify@^2.0.0: fast-levenshtein@^2.0.6: version "2.0.6" - resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== fastq@^1.6.0: @@ -2537,10 +2555,10 @@ import-fresh@^3.2.1: parent-module "^1.0.0" resolve-from "^4.0.0" -import-in-the-middle@^1.8.1: - version "1.8.1" - resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.8.1.tgz#8b51c2cc631b64e53e958d7048d2d9463ce628f8" - integrity sha512-yhRwoHtiLGvmSozNOALgjRPFI6uYsds60EoMqqnXyyv+JOIW/BrrLejuTGBt+bq0T5tLzOHrN0T7xYTm4Qt/ng== +import-in-the-middle@1.11.2: + version "1.11.2" + resolved 
"https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.11.2.tgz#dd848e72b63ca6cd7c34df8b8d97fc9baee6174f" + integrity sha512-gK6Rr6EykBcc6cVWRSBR5TWf8nn6hZMYSRYqCcHa0l0d1fPK7JSYo6+Mlmck76jIX9aL/IZ71c06U2VpFwl1zA== dependencies: acorn "^8.8.2" acorn-import-attributes "^1.9.5" @@ -2949,6 +2967,11 @@ js-yaml@^3.13.1: argparse "^1.0.7" esprima "^4.0.0" +jsep@^1.3.8: + version "1.3.8" + resolved "https://registry.yarnpkg.com/jsep/-/jsep-1.3.8.tgz#facb6eb908d085d71d950bd2b24b757c7b8a46d7" + integrity sha512-qofGylTGgYj9gZFsHuyWAN4jr35eJ66qJCK4eKDnldohuUoQFbU3iZn2zjvEbd9wOAhP9Wx5DsAAduTyE1PSWQ== + jsesc@^2.5.1: version "2.5.2" resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" @@ -2981,6 +3004,15 @@ json5@^2.2.3: resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== +jsonpath-plus@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/jsonpath-plus/-/jsonpath-plus-9.0.0.tgz#bb8703ee481531142bca8dee9a42fe72b8358a7f" + integrity sha512-bqE77VIDStrOTV/czspZhTn+o27Xx9ZJRGVkdVShEtPoqsIx5yALv3lWVU6y+PqYvWPJNWE7ORCQheQkEe0DDA== + dependencies: + "@jsep-plugin/assignment" "^1.2.1" + "@jsep-plugin/regex" "^1.0.3" + jsep "^1.3.8" + jszip@^3.5.0: version "3.10.1" resolved "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz" @@ -3622,11 +3654,16 @@ path-parse@^1.0.7: resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -path-to-regexp@0.1.7, path-to-regexp@^0.1.2: +path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== +path-to-regexp@^0.1.10: + version "0.1.10" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.10.tgz#67e9108c5c0551b9e5326064387de4763c4d5f8b" + integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w== + path-to-regexp@^1.7.0: version "1.8.0" resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz" @@ -3950,6 +3987,11 @@ reusify@^1.0.4: resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== +rfdc@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.1.tgz#2b6d4df52dffe8bb346992a10ea9451f24373a8f" + integrity sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg== + rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" resolved "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz" @@ -4166,7 +4208,7 @@ source-map-support@^0.5.16: source-map@^0.6.0, source-map@^0.6.1: version "0.6.1" - resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@^0.7.4: