diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3d682f6baa..4c36850207 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -131,7 +131,7 @@ jobs: build_buddies_images: ${{ contains(github.event.pull_request.labels.*.name, 'build-buddies-images') }} build_proxy_image: ${{ contains(github.event.pull_request.labels.*.name, 'build-proxy-image') }} build_lib_injection_app_images: ${{ contains(github.event.pull_request.labels.*.name, 'build-lib-injection-app-images') }} - _experimental_parametric_job_count: ${{ matrix.version == 'dev' && 2 || 1 }} # test both use cases + parametric_job_count: ${{ matrix.version == 'dev' && 2 || 1 }} # test both use cases skip_empty_scenarios: true system_tests_docker_mode: @@ -157,7 +157,7 @@ jobs: runs-on: ubuntu-latest needs: - system_tests - if: always() + if: '!cancelled()' steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 diff --git a/.github/workflows/compute-workflow-parameters.yml b/.github/workflows/compute-workflow-parameters.yml index 5dbf715d83..372b45da53 100644 --- a/.github/workflows/compute-workflow-parameters.yml +++ b/.github/workflows/compute-workflow-parameters.yml @@ -15,7 +15,7 @@ on: description: "Comma-separated list of scenarios groups to run" type: string default: "" - _experimental_parametric_job_count: + parametric_job_count: description: "*EXPERIMENTAL* : How many jobs should be used to run PARAMETRIC scenario" default: 1 required: false @@ -55,9 +55,9 @@ on: externalprocessing_scenarios: description: "" value: ${{ jobs.main.outputs.externalprocessing_scenarios }} - _experimental_parametric_job_matrix: + parametric_job_matrix: description: "" - value: ${{ jobs.main.outputs._experimental_parametric_job_matrix }} + value: ${{ jobs.main.outputs.parametric_job_matrix }} jobs: main: @@ -73,7 +73,8 @@ jobs: opentelemetry_weblogs: ${{ steps.main.outputs.opentelemetry_weblogs }} parametric_scenarios: ${{ steps.main.outputs.parametric_scenarios }} 
externalprocessing_scenarios: ${{ steps.main.outputs.externalprocessing_scenarios }} - _experimental_parametric_job_matrix: ${{ steps.main.outputs._experimental_parametric_job_matrix }} + parametric_job_matrix: ${{ steps.main.outputs.parametric_job_matrix }} + parametric_job_count: ${{ steps.main.outputs.parametric_job_count }} steps: - name: Checkout uses: actions/checkout@v4 @@ -88,7 +89,7 @@ jobs: python utils/scripts/compute-workflow-parameters.py ${{ inputs.library }} \ -s "${{ inputs.scenarios }}" \ -g "${{ inputs.scenarios_groups }}" \ + --parametric-job-count ${{ inputs.parametric_job_count }} \ --ci-environment "${{ inputs._ci_environment }}" >> $GITHUB_OUTPUT env: PYTHONPATH: "." - _EXPERIMENTAL_PARAMETRIC_JOB_COUNT: ${{ inputs._experimental_parametric_job_count }} diff --git a/.github/workflows/run-parametric.yml b/.github/workflows/run-parametric.yml index 5cc526789e..0eba0e8cea 100644 --- a/.github/workflows/run-parametric.yml +++ b/.github/workflows/run-parametric.yml @@ -16,15 +16,25 @@ on: default: 'custom' required: false type: string - _experimental_job_count: + job_count: description: "How many job should be spawned for the parametric test" default: 1 required: false type: number - _experimental_job_matrix: + job_matrix: # github action syntax is not very powerfull, it require a job to compute this. 
# => save on job, by asking the caller to compute this list - description: "Job matrix, JSON array of number from 1 to _experimental_job_count" + description: "Job matrix, JSON array of number from 1 to job_count" + default: '[1]' + required: false + type: string + _experimental_job_count: + description: "DEPRECATED" + default: 1 + required: false + type: number + _experimental_job_matrix: + description: "DEPRECATED" default: '[1]' required: false type: string @@ -39,7 +49,7 @@ jobs: strategy: fail-fast: false matrix: - job_instance: ${{ fromJson( inputs._experimental_job_matrix ) }} + job_instance: ${{ fromJson( inputs.job_matrix ) }} env: SYSTEM_TESTS_REPORT_ENVIRONMENT: ${{ inputs.ci_environment }} SYSTEM_TESTS_REPORT_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} @@ -57,7 +67,7 @@ jobs: name: ${{ inputs.binaries_artifact }} path: binaries/ - name: Run PARAMETRIC scenario - run: ./run.sh PARAMETRIC -L ${{ inputs.library }} --splits=${{ inputs._experimental_job_count }} --group=${{ matrix.job_instance }} + run: ./run.sh PARAMETRIC -L ${{ inputs.library }} --splits=${{ inputs.job_count }} --group=${{ matrix.job_instance }} - name: Compress logs id: compress_logs if: always() diff --git a/.github/workflows/system-tests.yml b/.github/workflows/system-tests.yml index 5288c4a189..3b4ef146e4 100644 --- a/.github/workflows/system-tests.yml +++ b/.github/workflows/system-tests.yml @@ -56,8 +56,13 @@ on: default: false required: false type: boolean + parametric_job_count: + description: "How many jobs should be used to run PARAMETRIC scenario" + default: 1 + required: false + type: number _experimental_parametric_job_count: - description: "*EXPERIMENTAL* : How many jobs should be used to run PARAMETRIC scenario" + description: "*DEPRECATED*" default: 1 required: false type: number @@ -70,7 +75,7 @@ jobs: library: ${{ inputs.library }} scenarios: ${{ inputs.scenarios }} scenarios_groups: ${{ inputs.scenarios_groups }} - 
_experimental_parametric_job_count: ${{ inputs._experimental_parametric_job_count }} + parametric_job_count: ${{ inputs.parametric_job_count }} _ci_environment: ${{ inputs.ci_environment }} parametric: @@ -83,8 +88,8 @@ jobs: library: ${{ inputs.library }} binaries_artifact: ${{ inputs.binaries_artifact }} ci_environment: ${{ inputs.ci_environment }} - _experimental_job_count: ${{ inputs._experimental_parametric_job_count }} - _experimental_job_matrix: ${{ needs.compute_parameters.outputs._experimental_parametric_job_matrix }} + job_count: ${{ inputs.parametric_job_count }} + job_matrix: ${{ needs.compute_parameters.outputs.parametric_job_matrix }} graphql: needs: diff --git a/.vscode/settings.json b/.vscode/settings.json index 750c534a07..7239911026 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -8,5 +8,10 @@ ], "python.testing.pytestEnabled": true, "ruff.enable": true, + "ruff.interpreter": [ + "${workspaceFolder}/venv/bin/python", + "-m", + "ruff" + ], "pylint.ignorePatterns": ["*"] } diff --git a/docs/execute/binaries.md b/docs/execute/binaries.md index a38003046d..bb54ea33ae 100644 --- a/docs/execute/binaries.md +++ b/docs/execute/binaries.md @@ -34,6 +34,12 @@ Create a file `golang-load-from-go-get` under the `binaries` directory that spec * `gopkg.in/DataDog/dd-trace-go.v1@v1.67.0` Test the 1.67.0 release * `gopkg.in/DataDog/dd-trace-go.v1@` Test un-merged changes +To change Orchestrion version, create a file `orchestrion-load-from-go-get` under the `binaries` directory that specifies the target build. The content of this file will be installed by the weblog or parametric app via `go get` when the test image is built. 
+* Content example: + * `github.com/DataDog/orchestrion@main` Test the main branch + * `github.com/DataDog/orchestrion@v1.1.0` Test the 1.1.0 release + * `github.com/DataDog/orchestrion@` Test un-merged changes + ## Java library Follow these steps to run Parametric tests with a custom Java Tracer version: diff --git a/manifests/cpp.yml b/manifests/cpp.yml index 8185d0cc4d..84f55829f9 100644 --- a/manifests/cpp.yml +++ b/manifests/cpp.yml @@ -145,7 +145,10 @@ tests/: test_inferred_proxy.py: Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature test_otel_drop_in.py: - Test_Otel_Drop_In: missing_feature + Test_Otel_Drop_In: irrelevant (library does not implement OpenTelemetry) + otel/: + test_context_propagation.py: + Test_Otel_Context_Propagation_Default_Propagator_Api: irrelevant (library does not implement OpenTelemetry) parametric/: test_128_bit_traceids.py: Test_128_Bit_Traceids: v1.0.1.dev @@ -261,8 +264,7 @@ tests/: test_distributed.py: Test_DistributedHttp: missing_feature Test_Span_Links_Flags_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) - Test_Span_Links_From_Conflicting_Contexts: missing_feature - Test_Span_Links_From_Conflicting_Contexts_Datadog_Precedence: missing_feature + Test_Span_Links_From_Conflicting_Contexts: missing_feature (baggage should be implemented and conflicting trace contexts should generate span link) Test_Span_Links_Omit_Tracestate_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) test_graphql.py: missing_feature test_identify.py: irrelevant diff --git a/manifests/dotnet.yml b/manifests/dotnet.yml index 0f4825c6f3..9b4ee0d979 100644 --- a/manifests/dotnet.yml +++ b/manifests/dotnet.yml @@ -409,6 +409,9 @@ tests/: Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature + otel/: + test_context_propagation.py: + Test_Otel_Context_Propagation_Default_Propagator_Api: v3.9.0 parametric/: 
test_config_consistency.py: Test_Config_Dogstatsd: missing_feature (does not support hostname) @@ -512,8 +515,7 @@ tests/: test_distributed.py: Test_DistributedHttp: missing_feature Test_Span_Links_Flags_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) - Test_Span_Links_From_Conflicting_Contexts: missing_feature - Test_Span_Links_From_Conflicting_Contexts_Datadog_Precedence: missing_feature + Test_Span_Links_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) Test_Span_Links_Omit_Tracestate_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) test_graphql.py: missing_feature test_identify.py: @@ -522,6 +524,10 @@ tests/: Test_Propagate_Legacy: v2.26.0 test_ipv6.py: missing_feature (APMAPI-869) test_library_conf.py: + Test_ExtractBehavior_Default: missing_feature (conflicting trace contexts should generate span link) + Test_ExtractBehavior_Ignore: missing_feature (extract behavior not implemented) + Test_ExtractBehavior_Restart: missing_feature (extract behavior not implemented) + Test_ExtractBehavior_Restart_With_Extract_First: missing_feature (extract behavior not implemented) Test_HeaderTags: v2.27.0 Test_HeaderTags_Colon_Leading: v2.1.0 Test_HeaderTags_Colon_Trailing: v3.0.0 diff --git a/manifests/golang.yml b/manifests/golang.yml index 93560674bd..ff78eb7e7e 100644 --- a/manifests/golang.yml +++ b/manifests/golang.yml @@ -22,6 +22,7 @@ tests/: Test_API_Security_Sampling_Rate: '*': v1.60.0 net-http: irrelevant (net-http doesn't handle path params) + net-http-orchestrion: irrelevant (net-http doesn't handle path params) Test_API_Security_Sampling_With_Delay: missing_feature test_schemas.py: Test_Scanners: missing_feature @@ -32,6 +33,7 @@ tests/: Test_Schema_Request_Path_Parameters: '*': v1.60.0 net-http: irrelevant (net-http cannot list path params) + net-http-orchestrion: irrelevant (net-http cannot list path params) 
Test_Schema_Request_Query_Parameters: v1.60.0 Test_Schema_Response_Body: missing_feature Test_Schema_Response_Body_env_var: missing_feature @@ -217,6 +219,7 @@ tests/: '*': v1.36.0 gin: v1.37.0 net-http: irrelevant (net-http doesn't handle path params) + net-http-orchestrion: irrelevant (net-http doesn't handle path params) Test_ResponseStatus: '*': v1.36.0 gin: v1.37.0 @@ -337,21 +340,27 @@ tests/: Test_Blocking_request_cookies: '*': v1.51.0 net-http: irrelevant + net-http-orchestrion: irrelevant Test_Blocking_request_headers: '*': v1.51.0 net-http: irrelevant + net-http-orchestrion: irrelevant Test_Blocking_request_method: '*': v1.51.0 net-http: irrelevant + net-http-orchestrion: irrelevant Test_Blocking_request_path_params: '*': v1.51.0 net-http: irrelevant + net-http-orchestrion: irrelevant Test_Blocking_request_query: '*': v1.51.0 net-http: irrelevant + net-http-orchestrion: irrelevant Test_Blocking_request_uri: '*': v1.51.0 net-http: irrelevant + net-http-orchestrion: irrelevant Test_Blocking_response_headers: missing_feature Test_Blocking_response_status: missing_feature Test_Blocking_user_id: v1.51.0 @@ -457,21 +466,26 @@ tests/: Test_Kinesis_PROPAGATION_VIA_MESSAGE_ATTRIBUTES: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + net-http-orchestrion: missing_feature (Endpoint not implemented) test_rabbitmq.py: Test_RabbitMQ_Trace_Context_Propagation: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + net-http-orchestrion: missing_feature (Endpoint not implemented) test_sns_to_sqs.py: Test_SNS_Propagation: "*": irrelevant net-http: missing_feature + net-http-orchestrion: missing_feature (Endpoint not implemented) test_sqs.py: Test_SQS_PROPAGATION_VIA_AWS_XRAY_HEADERS: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + net-http-orchestrion: missing_feature (Endpoint not implemented) Test_SQS_PROPAGATION_VIA_MESSAGE_ATTRIBUTES: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + 
net-http-orchestrion: missing_feature (Endpoint not implemented) test_db_integrations_sql.py: Test_MsSql: missing_feature Test_MySql: missing_feature @@ -492,31 +506,44 @@ tests/: Test_DsmKinesis: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + net-http-orchestrion: missing_feature (Endpoint not implemented) Test_DsmRabbitmq: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + net-http-orchestrion: missing_feature (Endpoint not implemented) Test_DsmRabbitmq_FanoutExchange: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + net-http-orchestrion: missing_feature (Endpoint not implemented) Test_DsmRabbitmq_TopicExchange: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + net-http-orchestrion: missing_feature (Endpoint not implemented) Test_DsmSNS: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + net-http-orchestrion: missing_feature (Endpoint not implemented) Test_DsmSQS: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + net-http-orchestrion: missing_feature (Endpoint not implemented) Test_Dsm_Manual_Checkpoint_Inter_Process: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + net-http-orchestrion: missing_feature (Endpoint not implemented) Test_Dsm_Manual_Checkpoint_Intra_Process: "*": irrelevant net-http: missing_feature (Endpoint not implemented) + net-http-orchestrion: missing_feature (Endpoint not implemented) test_inferred_proxy.py: Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature + otel/: + test_context_propagation.py: + Test_Otel_Context_Propagation_Default_Propagator_Api: + '*': incomplete_test_app (endpoint not implemented) + net-http: v1.70.1 parametric/: test_config_consistency.py: Test_Config_Dogstatsd: v1.72.0-dev @@ -592,7 +619,9 @@ tests/: Test_Config_ClientTagQueryString_Configured: v1.72.0-dev Test_Config_ClientTagQueryString_Empty: 
v1.72.0-dev Test_Config_HttpClientErrorStatuses_Default: v1.69.0 - Test_Config_HttpClientErrorStatuses_FeatureFlagCustom: v1.69.0 + Test_Config_HttpClientErrorStatuses_FeatureFlagCustom: + '*': v1.69.0 + net-http-orchestrion: v1.72.0-dev Test_Config_HttpServerErrorStatuses_Default: v1.67.0 Test_Config_HttpServerErrorStatuses_FeatureFlagCustom: "*": v1.69.0 @@ -617,7 +646,6 @@ tests/: Test_DistributedHttp: missing_feature Test_Span_Links_Flags_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) Test_Span_Links_From_Conflicting_Contexts: missing_feature - Test_Span_Links_From_Conflicting_Contexts_Datadog_Precedence: missing_feature Test_Span_Links_Omit_Tracestate_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) Test_Synthetics_APM_Datadog: bug (APMAPI-901) # the incoming headers are considered invalid test_graphql.py: missing_feature @@ -626,6 +654,10 @@ tests/: Test_Propagate: v1.48.0-rc.1 Test_Propagate_Legacy: v1.41.0 test_library_conf.py: + Test_ExtractBehavior_Default: missing_feature (baggage should be implemented and conflicting trace contexts should generate span link in v1.71.0) + Test_ExtractBehavior_Ignore: missing_feature (extract behavior not implemented) + Test_ExtractBehavior_Restart: missing_feature (extract behavior not implemented) + Test_ExtractBehavior_Restart_With_Extract_First: missing_feature (extract behavior not implemented) Test_HeaderTags: v1.53.0 Test_HeaderTags_Colon_Leading: v1.53.0 Test_HeaderTags_Colon_Trailing: v1.70.0 diff --git a/manifests/java.yml b/manifests/java.yml index 516ad33e20..156b2df8b5 100644 --- a/manifests/java.yml +++ b/manifests/java.yml @@ -1597,6 +1597,11 @@ tests/: spring-boot: v1.39.0 test_sql.py: Test_Sql: bug (APMAPI-729) + otel/: + test_context_propagation.py: + Test_Otel_Context_Propagation_Default_Propagator_Api: + '*': incomplete_test_app (endpoint not implemented) + spring-boot: v1.39.0 parametric/: test_config_consistency.py: 
Test_Config_Dogstatsd: missing_feature (default hostname is inconsistent) @@ -1728,7 +1733,6 @@ tests/: Test_DistributedHttp: missing_feature Test_Span_Links_Flags_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) Test_Span_Links_From_Conflicting_Contexts: v1.43.0 - Test_Span_Links_From_Conflicting_Contexts_Datadog_Precedence: v1.43.0 Test_Span_Links_Omit_Tracestate_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) test_graphql.py: missing_feature test_identify.py: @@ -1737,6 +1741,10 @@ tests/: Test_Propagate_Legacy: missing_feature test_ipv6.py: missing_feature (APMAPI-869) test_library_conf.py: + Test_ExtractBehavior_Default: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) + Test_ExtractBehavior_Ignore: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) + Test_ExtractBehavior_Restart: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) + Test_ExtractBehavior_Restart_With_Extract_First: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) Test_HeaderTags: missing_feature Test_HeaderTags_Colon_Leading: v0.102.0 Test_HeaderTags_Colon_Trailing: v0.102.0 diff --git a/manifests/nodejs.yml b/manifests/nodejs.yml index 322fc67948..e93ba7c908 100644 --- a/manifests/nodejs.yml +++ b/manifests/nodejs.yml @@ -787,6 +787,12 @@ tests/: express5: *ref_5_26_0 test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature + otel/: + test_context_propagation.py: + Test_Otel_Context_Propagation_Default_Propagator_Api: + '*': incomplete_test_app (endpoint not implemented) + express4: *ref_5_26_0 + express5: *ref_5_26_0 parametric/: test_128_bit_traceids.py: Test_128_Bit_Traceids: *ref_3_0_0 @@ -908,7 +914,6 @@ tests/: Test_DistributedHttp: missing_feature Test_Span_Links_Flags_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) 
Test_Span_Links_From_Conflicting_Contexts: v5.27.0 - Test_Span_Links_From_Conflicting_Contexts_Datadog_Precedence: v5.27.0 Test_Span_Links_Omit_Tracestate_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) Test_Synthetics_APM_Datadog: '*': *ref_5_25_0 @@ -924,6 +929,10 @@ tests/: Test_Propagate: *ref_3_2_0 Test_Propagate_Legacy: *ref_3_2_0 test_library_conf.py: + Test_ExtractBehavior_Default: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) + Test_ExtractBehavior_Ignore: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) + Test_ExtractBehavior_Restart: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) + Test_ExtractBehavior_Restart_With_Extract_First: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) Test_HeaderTags: *ref_4_11_0 Test_HeaderTags_Colon_Leading: *ref_4_11_0 Test_HeaderTags_Colon_Trailing: *ref_4_11_0 diff --git a/manifests/php.yml b/manifests/php.yml index c270c562e9..52b0509978 100644 --- a/manifests/php.yml +++ b/manifests/php.yml @@ -167,17 +167,17 @@ tests/: Test_Cmdi_UrlQuery: missing_feature Test_Cmdi_Waf_Version: v1.6.2 test_lfi.py: - Test_Lfi_BodyJson: missing_feature - Test_Lfi_BodyUrlEncoded: missing_feature - Test_Lfi_BodyXml: missing_feature - Test_Lfi_Capability: missing_feature - Test_Lfi_Mandatory_SpanTags: missing_feature - Test_Lfi_Optional_SpanTags: missing_feature - Test_Lfi_RC_CustomAction: missing_feature + Test_Lfi_BodyJson: v1.6.2 + Test_Lfi_BodyUrlEncoded: v1.6.2 + Test_Lfi_BodyXml: v1.6.2 + Test_Lfi_Capability: v1.7.0 + Test_Lfi_Mandatory_SpanTags: v1.6.2 + Test_Lfi_Optional_SpanTags: v1.6.2 + Test_Lfi_RC_CustomAction: missing_feature # It works but missing this APPSEC-56457 Test_Lfi_Rules_Version: v1.6.2 - Test_Lfi_StackTrace: missing_feature + Test_Lfi_StackTrace: v1.6.2 Test_Lfi_Telemetry: missing_feature - Test_Lfi_UrlQuery: missing_feature + Test_Lfi_UrlQuery: v1.6.2 
Test_Lfi_Waf_Version: v1.6.2 test_shi.py: Test_Shi_BodyJson: missing_feature @@ -205,17 +205,17 @@ tests/: Test_Sqli_UrlQuery: missing_feature Test_Sqli_Waf_Version: v1.6.2 test_ssrf.py: - Test_Ssrf_BodyJson: missing_feature - Test_Ssrf_BodyUrlEncoded: missing_feature - Test_Ssrf_BodyXml: missing_feature - Test_Ssrf_Capability: missing_feature - Test_Ssrf_Mandatory_SpanTags: missing_feature - Test_Ssrf_Optional_SpanTags: missing_feature - Test_Ssrf_Rules_Version: v1.6.2 - Test_Ssrf_StackTrace: missing_feature + Test_Ssrf_BodyJson: v1.7.0 + Test_Ssrf_BodyUrlEncoded: v1.7.0 + Test_Ssrf_BodyXml: v1.7.0 + Test_Ssrf_Capability: v1.7.0 + Test_Ssrf_Mandatory_SpanTags: v1.7.0 + Test_Ssrf_Optional_SpanTags: v1.7.0 + Test_Ssrf_Rules_Version: v1.7.0 + Test_Ssrf_StackTrace: v1.7.0 Test_Ssrf_Telemetry: missing_feature - Test_Ssrf_UrlQuery: missing_feature - Test_Ssrf_Waf_Version: v1.6.2 + Test_Ssrf_UrlQuery: v1.7.0 + Test_Ssrf_Waf_Version: v1.7.0 waf/: test_addresses.py: Test_BodyJson: v0.98.1 # TODO what is the earliest version? 
@@ -382,6 +382,9 @@ tests/: Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature + otel/: + test_context_propagation.py: + Test_Otel_Context_Propagation_Default_Propagator_Api: incomplete_test_app (endpoint not implemented) parametric/: test_128_bit_traceids.py: Test_128_Bit_Traceids: v0.84.0 @@ -493,7 +496,6 @@ tests/: Test_DistributedHttp: missing_feature Test_Span_Links_Flags_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) Test_Span_Links_From_Conflicting_Contexts: missing_feature - Test_Span_Links_From_Conflicting_Contexts_Datadog_Precedence: missing_feature Test_Span_Links_Omit_Tracestate_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) test_graphql.py: missing_feature test_identify.py: @@ -501,6 +503,10 @@ tests/: Test_Propagate: v0.85.0 Test_Propagate_Legacy: v0.85.0 test_library_conf.py: + Test_ExtractBehavior_Default: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) + Test_ExtractBehavior_Ignore: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) + Test_ExtractBehavior_Restart: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) + Test_ExtractBehavior_Restart_With_Extract_First: incomplete_test_app (/make_distant_call endpoint is not correctly implemented) Test_HeaderTags: v0.68.2 Test_HeaderTags_Colon_Leading: v0.74.0 Test_HeaderTags_Colon_Trailing: v0.74.0 diff --git a/manifests/python.yml b/manifests/python.yml index ad04bc8eb8..339ddf1fd9 100644 --- a/manifests/python.yml +++ b/manifests/python.yml @@ -764,6 +764,11 @@ tests/: Test_AWS_API_Gateway_Inferred_Span_Creation: missing_feature test_otel_drop_in.py: Test_Otel_Drop_In: missing_feature + otel/: + test_context_propagation.py: + Test_Otel_Context_Propagation_Default_Propagator_Api: + '*': incomplete_test_app (endpoint not implemented) + flask-poc: v2.19.0 
parametric/: test_128_bit_traceids.py: Test_128_Bit_Traceids: v2.6.0 @@ -913,16 +918,19 @@ tests/: Test_DistributedHttp: '*': missing_feature (Missing on weblog) flask-poc: v1.5.0-rc2 # actual version unknown - Test_Span_Links_Flags_From_Conflicting_Contexts: v2.17.0 + Test_Span_Links_Flags_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) Test_Span_Links_From_Conflicting_Contexts: v2.17.0 - Test_Span_Links_From_Conflicting_Contexts_Datadog_Precedence: v2.17.0 - Test_Span_Links_Omit_Tracestate_From_Conflicting_Contexts: v2.17.0 + Test_Span_Links_Omit_Tracestate_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) test_graphql.py: missing_feature test_identify.py: Test_Basic: v1.5.0-rc1 Test_Propagate: v1.9.0 Test_Propagate_Legacy: v1.5.0-rc1 test_library_conf.py: + Test_ExtractBehavior_Default: missing_feature (trace context with only baggage should still be propagated) + Test_ExtractBehavior_Ignore: missing_feature (extract behavior not implemented) + Test_ExtractBehavior_Restart: missing_feature (extract behavior not implemented) + Test_ExtractBehavior_Restart_With_Extract_First: missing_feature (extract behavior not implemented) Test_HeaderTags: v0.53 Test_HeaderTags_Colon_Leading: v1.2.1 # actual version unknown Test_HeaderTags_Colon_Trailing: v2.8.0 diff --git a/manifests/ruby.yml b/manifests/ruby.yml index 2226134854..a11008de44 100644 --- a/manifests/ruby.yml +++ b/manifests/ruby.yml @@ -405,6 +405,11 @@ tests/: TestK8sLibInjectioProfilingClusterEnabled: missing_feature TestK8sLibInjectioProfilingClusterOverride: missing_feature TestK8sLibInjectioProfilingDisabledByDefault: missing_feature + otel/: + test_context_propagation.py: + Test_Otel_Context_Propagation_Default_Propagator_Api: + '*': incomplete_test_app (endpoint not implemented) + rails70: v2.0.0 parametric/: test_config_consistency.py: Test_Config_Dogstatsd: missing_feature @@ -515,7 +520,6 @@ tests/: 
Test_DistributedHttp: missing_feature Test_Span_Links_Flags_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) Test_Span_Links_From_Conflicting_Contexts: missing_feature - Test_Span_Links_From_Conflicting_Contexts_Datadog_Precedence: missing_feature Test_Span_Links_Omit_Tracestate_From_Conflicting_Contexts: missing_feature (implementation specs have not been determined) test_graphql.py: missing_feature test_identify.py: @@ -524,6 +528,10 @@ tests/: Test_Propagate_Legacy: missing_feature test_ipv6.py: missing_feature (APMAPI-869) test_library_conf.py: + Test_ExtractBehavior_Default: missing_feature (baggage should be implemented and conflicting trace contexts should generate span link) + Test_ExtractBehavior_Ignore: missing_feature (extract behavior not implemented) + Test_ExtractBehavior_Restart: missing_feature (extract behavior not implemented) + Test_ExtractBehavior_Restart_With_Extract_First: missing_feature (extract behavior not implemented) Test_HeaderTags: v1.13.0 Test_HeaderTags_Colon_Leading: v1.13.0 Test_HeaderTags_Colon_Trailing: v1.13.0 diff --git a/pyproject.toml b/pyproject.toml index 0a01e3280a..008e14298e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,14 @@ [project] name = 'system_tests' version = '0.0.1' +dynamic = ["dependencies"] [tool.setuptools] packages = ["tests", "utils", "manifests"] +[tool.setuptools.dynamic] +dependencies = {file = ["requirements.txt"]} + [tool.pytest.ini_options] addopts = "--json-report --json-report-indent=2 --color=yes --no-header --junitxml=reportJunit.xml -r Xf" testpaths = [ @@ -92,13 +96,7 @@ ignore = [ "PLR0915", # too many statements, may be replaced by a higher default value "PLR1714", "PLR2004", - "PTH100", - "PTH110", - "PTH113", - "PTH116", "PTH118", - "PTH120", - "PTH122", # os.path.splitext(), but not really easier to read ? 
"PTH123", # `open()` should be replaced by `Path.open()` "RUF012", "S202", diff --git a/requirements.txt b/requirements.txt index 734a865429..a297d876bc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ -click==7.1.2 # TODO update this requests==2.28.1 pytest==7.1.3 pytest-json-report==1.5.0 diff --git a/run.sh b/run.sh index b5e5e5ee6b..2fd5227a87 100755 --- a/run.sh +++ b/run.sh @@ -328,6 +328,11 @@ function main() { run_mode='direct' fi + # check if runner is installed and up to date + if [[ "${run_mode}" == "direct" ]] && ! is_using_nix && ! diff requirements.txt venv/requirements.txt; then + ./build.sh -i runner + fi + # ensure environment if [[ "${run_mode}" == "docker" ]] || is_using_nix; then : # no venv needed diff --git a/tests/auto_inject/test_auto_inject_install.py b/tests/auto_inject/test_auto_inject_install.py index 636cb64bb2..71b01f0121 100644 --- a/tests/auto_inject/test_auto_inject_install.py +++ b/tests/auto_inject/test_auto_inject_install.py @@ -48,7 +48,11 @@ def test_profiling(self, virtual_machine): @scenarios.host_auto_injection_install_script_profiling class TestHostAutoInjectInstallScriptProfiling(base.AutoInjectBaseTest): @parametrize_virtual_machines( - bugs=[{"vm_cpu": "arm64", "weblog_variant": "test-app-dotnet", "reason": "PROF-10783"}] + bugs=[ + {"vm_cpu": "arm64", "weblog_variant": "test-app-dotnet", "reason": "PROF-10783"}, + {"vm_name": "Ubuntu_24_amd64", "weblog-variant": "test-app-nodejs", "reason": "PROF-11264"}, + {"vm_name": "Ubuntu_24_arm64", "weblog-variant": "test-app-nodejs", "reason": "PROF-11264"}, + ] ) def test_profiling(self, virtual_machine): logger.info(f"Launching test_install for : [{virtual_machine.name}]...") diff --git a/tests/debugger/test_debugger_expression_language.py b/tests/debugger/test_debugger_expression_language.py index 47f2c81355..2a98ce39bd 100644 --- a/tests/debugger/test_debugger_expression_language.py +++ b/tests/debugger/test_debugger_expression_language.py @@ -62,11 +62,11 
@@ def _validate_expression_language_messages(self, expected_message_map): ############ test ############ ############ access variables ############ def setup_expression_language_access_variables(self): + language, method = self.get_tracer()["language"], "Expression" message_map, probes = self._create_expression_probes( - methodName="Expression", + methodName=method, expressions=[ ["Accessing input", "asd", Dsl("ref", "inputValue")], - ["Accessing return", ".*Great success number 3", Dsl("ref", "@return")], ["Accessing local", 3, Dsl("ref", "localValue")], ["Accessing complex object int", 1, Dsl("getmember", [Dsl("ref", "testStruct"), "IntValue"])], ["Accessing complex object double", 1.1, Dsl("getmember", [Dsl("ref", "testStruct"), "DoubleValue"])], @@ -90,8 +90,8 @@ def setup_expression_language_access_variables(self): 2, Dsl("index", [Dsl("getmember", [Dsl("ref", "testStruct"), "Dictionary"]), "two"]), ], - ["Accessing duration", r"\d+(\.\d+)?", Dsl("ref", "@duration")], ], + lines=self._method_and_language_to_line_number(method, language), ) self.message_map = message_map @@ -100,11 +100,30 @@ def setup_expression_language_access_variables(self): def test_expression_language_access_variables(self): self._assert(expected_response=200) + def setup_expression_language_contextual_variables(self): + message_map, probes = self._create_expression_probes( + methodName="Expression", + expressions=[ + ["Accessing return", ".*Great success number 3", Dsl("ref", "@return")], + ["Accessing duration", r"\d+(\.\d+)?", Dsl("ref", "@duration")], + ], + # We only capture @return and @duration in the context of a method probe. 
+ lines=[], + ) + + self.message_map = message_map + self._setup(probes, "/debugger/expression?inputValue=asd") + + def test_expression_language_contextual_variables(self): + self._assert(expected_response=200) + ############ access exception ############ def setup_expression_language_access_exception(self): + language, method = self.get_tracer()["language"], "ExpressionException" message_map, probes = self._create_expression_probes( - methodName="ExpressionException", + methodName=method, expressions=[["Accessing exception", ".*Hello from exception", Dsl("ref", "@exception")]], + lines=self._method_and_language_to_line_number(method, language), ) self.message_map = message_map @@ -115,8 +134,9 @@ def test_expression_language_access_exception(self): ############ comparison operators ############ def setup_expression_language_comparison_operators(self): + language, method = self.get_tracer()["language"], "ExpressionOperators" message_map, probes = self._create_expression_probes( - methodName="ExpressionOperators", + methodName=method, expressions=[ ["intValue eq 5", True, Dsl("eq", [Dsl("ref", "intValue"), 5])], ["intValue ne 0", True, Dsl("ne", [Dsl("ref", "intValue"), 0])], @@ -166,6 +186,7 @@ def setup_expression_language_comparison_operators(self): ["strValue le a", False, Dsl("le", [Dsl("ref", "strValue"), "a"])], ["strValue ge z", False, Dsl("ge", [Dsl("ref", "strValue"), "z"])], ], + lines=self._method_and_language_to_line_number(method, language), ) self.message_map = message_map @@ -176,8 +197,9 @@ def test_expression_language_comparison_operators(self): ############ intance of ############ def setup_expression_language_instance_of(self): + language, method = self.get_tracer()["language"], "ExpressionOperators" message_map, probes = self._create_expression_probes( - methodName="ExpressionOperators", + methodName=method, expressions=[ ["intValue instanceof int", True, Dsl("instanceof", [Dsl("ref", "intValue"), self._get_type("int")])], [ @@ -217,6 +239,7 @@ 
def setup_expression_language_instance_of(self): ], ["pii instanceof string", False, Dsl("instanceof", [Dsl("ref", "pii"), self._get_type("string")])], ], + lines=self._method_and_language_to_line_number(method, language), ) self.message_map = message_map @@ -228,8 +251,9 @@ def test_expression_language_instance_of(self): ############ logical operators ############ def setup_expression_language_logical_operators(self): + language, method = self.get_tracer()["language"], "ExpressionOperators" message_map, probes = self._create_expression_probes( - methodName="ExpressionOperators", + methodName=method, expressions=[ [ "intValue eq 5 and strValue ne 5", @@ -254,6 +278,7 @@ def setup_expression_language_logical_operators(self): ], ["not intValue eq 10", False, Dsl("not", Dsl("eq", [Dsl("ref", "intValue"), 5]))], ], + lines=self._method_and_language_to_line_number(method, language), ) self.message_map = message_map @@ -265,8 +290,9 @@ def test_expression_language_logical_operators(self): ############ string operations ############ def setup_expression_language_string_operations(self): + language, method = self.get_tracer()["language"], "StringOperations" message_map, probes = self._create_expression_probes( - methodName="StringOperations", + methodName=method, expressions=[ ##### isempty ["strValue isEmpty", False, Dsl("isEmpty", Dsl("ref", "strValue"))], @@ -300,6 +326,7 @@ def setup_expression_language_string_operations(self): ["emptyString matches empty", True, Dsl("matches", [Dsl("ref", "emptyString"), ""])], ["emptyString matches some", False, Dsl("matches", [Dsl("ref", "emptyString"), "foo"])], ], + lines=self._method_and_language_to_line_number(method, language), ) self.message_map = message_map @@ -315,8 +342,9 @@ def test_expression_language_string_operations(self): ## all collection are filled with incremented number values (e.g at the [0] = 0; [1] = 1) def setup_expression_language_collection_operations(self): + language, method = 
self.get_tracer()["language"], "CollectionOperations" message_map, probes = self._create_expression_probes( - methodName="CollectionOperations", + methodName=method, expressions=[ ##### len ["Array0 len", 0, Dsl("len", Dsl("ref", "a0"))], @@ -374,6 +402,7 @@ def setup_expression_language_collection_operations(self): Dsl("len", Dsl("filter", [Dsl("ref", "l5"), Dsl("lt", [Dsl("ref", "@it"), 2])])), ], ], + lines=self._method_and_language_to_line_number(method, language), ) self.message_map = message_map @@ -384,8 +413,9 @@ def test_expression_language_collection_operations(self): self._assert(expected_response=200) def setup_expression_language_hash_operations(self): + language, method = self.get_tracer()["language"], "CollectionOperations" message_map, probes = self._create_expression_probes( - methodName="CollectionOperations", + methodName=method, expressions=[ ## at the app there are 3 types of collections are created - array, list and hash. ## the number at the end of variable means the length of the collection @@ -517,6 +547,7 @@ def setup_expression_language_hash_operations(self): ), ], ], + lines=self._method_and_language_to_line_number(method, language), ) self.message_map = message_map @@ -529,13 +560,15 @@ def test_expression_language_hash_operations(self): ############ nulls ############ def setup_expression_language_nulls_true(self): + language, method = self.get_tracer()["language"], "Nulls" message_map, probes = self._create_expression_probes( - methodName="Nulls", + methodName=method, expressions=[ ["intValue eq null", True, Dsl("eq", [Dsl("ref", "intValue"), None])], ["strValue eq null", True, Dsl("eq", [Dsl("ref", "strValue"), None])], ["pii eq null", True, Dsl("eq", [Dsl("ref", "pii"), None])], ], + lines=self._method_and_language_to_line_number(method, language), ) self.message_map = message_map @@ -546,13 +579,15 @@ def test_expression_language_nulls_true(self): self._assert(expected_response=200) def setup_expression_language_nulls_false(self): 
+ language, method = self.get_tracer()["language"], "Nulls" message_map, probes = self._create_expression_probes( - methodName="Nulls", + methodName=method, expressions=[ ["intValue eq null", False, Dsl("eq", [Dsl("ref", "intValue"), None])], ["strValue eq null", False, Dsl("eq", [Dsl("ref", "strValue"), None])], ["pii eq null", False, Dsl("eq", [Dsl("ref", "pii"), None])], ], + lines=self._method_and_language_to_line_number(method, language), ) self.message_map = message_map @@ -614,28 +649,53 @@ def _get_hash_value_property_name(self): else: return "value" - def _create_expression_probes(self, methodName, expressions): + def _method_and_language_to_line_number(self, method, language): + """ + _method_and_language_to_line_number returns the respective line number given the method and language + """ + return { + "Expression": {"java": [71], "dotnet": [74], "python": [72]}, + # The `@exception` variable is not available in the context of line probes. + "ExpressionException": {}, + "ExpressionOperators": {"java": [82], "dotnet": [90], "python": [87]}, + "StringOperations": {"java": [87], "dotnet": [97], "python": [96]}, + "CollectionOperations": {"java": [114], "dotnet": [114], "python": [123]}, + "Nulls": {"java": [130], "dotnet": [127], "python": [136]}, + }.get(method, {}).get(language, []) + + def _create_expression_probes(self, methodName, expressions, lines=[]): probes = [] expected_message_map = {} - - for expression in expressions: - expression_to_test, expected_result, dsl = expression - message = f"Expression to test: '{expression_to_test}'. Result is: " - - if isinstance(expected_result, bool): - expected_result = "[Tt]rue" if expected_result else "[Ff]alse" - elif isinstance(expected_result, str) and expected_result and expected_result != "": - expected_result = f"[']?{expected_result}[']?" 
- else: - expected_result = str(expected_result) - - probe = debugger.read_probes("expression_probe_base")[0] - probe["id"] = debugger.generate_probe_id("log") - probe["where"]["methodName"] = methodName - probe["segments"] = Segment().add_str(message).add_dsl(dsl).to_dict() - probes.append(probe) - - expected_message_map[probe["id"]] = message + expected_result + prob_types = ["method"] + if len(lines) > 0: + prob_types.append("line") + + for probe_type in prob_types: + for expression in expressions: + expression_to_test, expected_result, dsl = expression + message = f"Expression to test: '{expression_to_test}'. Result is: " + + if isinstance(expected_result, bool): + expected_result = "[Tt]rue" if expected_result else "[Ff]alse" + elif isinstance(expected_result, str) and expected_result and expected_result != "": + expected_result = f"[']?{expected_result}[']?" + else: + expected_result = str(expected_result) + + probe = debugger.read_probes("expression_probe_base")[0] + probe["id"] = debugger.generate_probe_id("log") + if probe_type == "method": + probe["where"]["methodName"] = methodName + if probe_type == "line": + del probe["where"]["methodName"] + probe["where"]["lines"] = lines + probe["where"]["sourceFile"] = "ACTUAL_SOURCE_FILE" + probe["where"]["typeName"] = None + + probe["segments"] = Segment().add_str(message).add_dsl(dsl).to_dict() + probes.append(probe) + + expected_message_map[probe["id"]] = message + expected_result return expected_message_map, probes diff --git a/tests/otel/test_context_propagation.py b/tests/otel/test_context_propagation.py new file mode 100644 index 0000000000..32d579e171 --- /dev/null +++ b/tests/otel/test_context_propagation.py @@ -0,0 +1,41 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2024 Datadog, Inc. 
+ +import json +from utils import weblog, interfaces, scenarios, features, incomplete_test_app + + +@features.otel_propagators_api +@scenarios.apm_tracing_e2e_otel +class Test_Otel_Context_Propagation_Default_Propagator_Api: + def setup_propagation_extract(self): + extract_headers = { + "traceparent": "00-11111111111111110000000000000002-000000000000000a-01", + "tracestate": "dd=s:2;p:000000000000000a,foo=1", + "baggage": "foo=1", + } + self.r = weblog.get("/otel_drop_in_default_propagator_extract", headers=extract_headers) + + @incomplete_test_app(library="nodejs", reason="Node.js extract endpoint doesn't seem to be working.") + @incomplete_test_app(library="ruby", reason="Ruby extract seems to fail even though it should be supported") + def test_propagation_extract(self): + content = json.loads(self.r.text) + + assert content["trace_id"] == 2 + assert content["span_id"] == 10 + assert content["tracestate"] and not content["tracestate"].isspace() + # assert content["baggage"] and not content["baggage"].isspace() + + def setup_propagation_inject(self): + inject_headers = { + "baggage": "foo=2", + } + self.r = weblog.get("/otel_drop_in_default_propagator_inject") + + @incomplete_test_app(library="nodejs", reason="Node.js inject endpoint doesn't seem to be working.") + def test_propagation_inject(self): + content = json.loads(self.r.text) + + assert content["traceparent"] and not content["traceparent"].isspace() + # assert content["baggage"] and not content["baggage"].isspace() diff --git a/tests/test_distributed.py b/tests/test_distributed.py index 095d374504..ea149d2e14 100644 --- a/tests/test_distributed.py +++ b/tests/test_distributed.py @@ -28,7 +28,7 @@ def test_main(self): assert "x-datadog-trace-id" not in data["request_headers"] -@scenarios.tracing_config_nondefault +@scenarios.default @features.w3c_headers_injection_and_extraction @bug( context.library < "java@1.44.0" and context.weblog_variant == "spring-boot-3-native", @@ -43,12 +43,12 @@ class 
Test_Span_Links_From_Conflicting_Contexts: def setup_span_links_from_conflicting_contexts(self): extract_headers = { - "traceparent": "00-11111111111111110000000000000002-000000003ade68b1-01", - "tracestate": "dd=s:2;p:000000000000000a,foo=1", "x-datadog-parent-id": "10", "x-datadog-trace-id": "2", "x-datadog-tags": "_dd.p.tid=2222222222222222", "x-datadog-sampling-priority": "2", + "traceparent": "00-11111111111111110000000000000002-000000003ade68b1-01", + "tracestate": "dd=s:2;p:000000000000000a,foo=1", "x-b3-traceid": "11111111111111110000000000000003", "x-b3-spanid": "a2fb4a1d1a96d312", "x-b3-sampled": "0", @@ -62,24 +62,18 @@ def test_span_links_from_conflicting_contexts(self): for _, _, span in interfaces.library.get_spans(self.req, full_trace=True) if _retrieve_span_links(span) is not None and span["trace_id"] == 2 - and span["parent_id"] == 987654321 # Only fetch the trace that is related to the header extractions + and span["parent_id"] == 10 # Only fetch the trace that is related to the header extractions ] assert len(trace) == 1 span = trace[0] links = _retrieve_span_links(span) - assert len(links) == 2 + assert len(links) == 1 link1 = links[0] assert link1["trace_id"] == 2 - assert link1["span_id"] == 10 - assert link1["attributes"] == {"reason": "terminated_context", "context_headers": "datadog"} - assert link1["trace_id_high"] == 2459565876494606882 - - link2 = links[1] - assert link2["trace_id"] == 3 - assert link2["span_id"] == 11744061942159299346 - assert link2["attributes"] == {"reason": "terminated_context", "context_headers": "b3multi"} - assert link2["trace_id_high"] == 1229782938247303441 + assert link1["span_id"] == 987654321 + assert link1["attributes"] == {"reason": "terminated_context", "context_headers": "tracecontext"} + assert link1["trace_id_high"] == 1229782938247303441 """Datadog and tracecontext headers, trace-id does match, Datadog is primary context we want to make sure there's no span link since they match""" @@ -135,59 +129,7 
@@ def test_no_span_links_from_invalid_trace_id(self): assert len(trace) == 0 -@scenarios.tracing_config_nondefault_2 -@features.w3c_headers_injection_and_extraction -@bug( - context.library < "java@1.44.0" and context.weblog_variant == "spring-boot-3-native", - reason="APMAPI-928", - force_skip=True, -) -class Test_Span_Links_From_Conflicting_Contexts_Datadog_Precedence: - """Verify headers containing conflicting trace context information are added as span links with Datadog headers taking precedence""" - - def setup_span_links_from_conflicting_contexts_datadog_precedence(self): - extract_headers = { - "traceparent": "00-11111111111111110000000000000001-000000003ade68b1-01", - "tracestate": "dd=s:2;t.tid:1111111111111111,foo=1", - "x-datadog-trace-id": "4", - "x-datadog-parent-id": "987654323", - "x-datadog-sampling-priority": "2", - "x-datadog-tags": "_dd.p.tid=1111111111111111", - "x-b3-traceid": "11111111111111110000000000000003", - "x-b3-spanid": "a2fb4a1d1a96d312", - "x-b3-sampled": "1", - } - - self.req = weblog.get("/make_distant_call", params={"url": "http://weblog:7777"}, headers=extract_headers) - - def test_span_links_from_conflicting_contexts_datadog_precedence(self): - trace = [ - span - for _, _, span in interfaces.library.get_spans(self.req, full_trace=True) - if _retrieve_span_links(span) is not None - and span["trace_id"] == 4 - and span["parent_id"] == 987654323 # Only fetch the trace that is related to the header extractions - ] - - assert len(trace) == 1 - span = trace[0] - links = _retrieve_span_links(span) - assert len(links) == 2 - link1 = links[0] - assert link1["trace_id"] == 1 - assert link1["span_id"] == 987654321 - assert link1["attributes"] == {"reason": "terminated_context", "context_headers": "tracecontext"} - assert link1["tracestate"] == "dd=s:2;t.tid:1111111111111111,foo=1" - assert link1["trace_id_high"] == 1229782938247303441 - - link2 = links[1] - assert link2["trace_id"] == 3 - assert link2["span_id"] == 11744061942159299346 - 
assert link2["attributes"] == {"reason": "terminated_context", "context_headers": "b3multi"} - assert link2["trace_id_high"] == 1229782938247303441 - - -@scenarios.tracing_config_nondefault +@scenarios.default @features.w3c_headers_injection_and_extraction @bug( context.library < "java@1.44.0" and context.weblog_variant == "spring-boot-3-native", @@ -205,9 +147,6 @@ def setup_span_links_flags_from_conflicting_contexts(self): "x-datadog-trace-id": "2", "x-datadog-tags": "_dd.p.tid=2222222222222222", "x-datadog-sampling-priority": "2", - "x-b3-traceid": "11111111111111110000000000000003", - "x-b3-spanid": "a2fb4a1d1a96d312", - "x-b3-sampled": "0", } self.req = weblog.get("/make_distant_call", params={"url": "http://weblog:7777"}, headers=extract_headers) @@ -231,11 +170,8 @@ def test_span_links_flags_from_conflicting_contexts(self): link1 = span_links[0] assert link1["flags"] == 1 | TRACECONTEXT_FLAGS_SET - link2 = span_links[1] - assert link2["flags"] == 0 | TRACECONTEXT_FLAGS_SET - -@scenarios.tracing_config_nondefault +@scenarios.default @features.w3c_headers_injection_and_extraction @bug( context.library < "java@1.44.0" and context.weblog_variant == "spring-boot-3-native", @@ -249,10 +185,9 @@ def setup_span_links_omit_tracestate_from_conflicting_contexts(self): extract_headers = { "traceparent": "00-11111111111111110000000000000002-000000003ade68b1-01", "tracestate": "dd=s:2;p:000000000000000a,foo=1", - "x-datadog-parent-id": "10", - "x-datadog-trace-id": "2", - "x-datadog-tags": "_dd.p.tid=2222222222222222", - "x-datadog-sampling-priority": "2", + "x-b3-traceid": "22222222222222220000000000000002", + "x-b3-spanid": "000000000000000a", + "x-b3-sampled": "1", } self.req = weblog.get("/make_distant_call", params={"url": "http://weblog:7777"}, headers=extract_headers) diff --git a/tests/test_library_conf.py b/tests/test_library_conf.py index 95a1211103..e2d112bf3e 100644 --- a/tests/test_library_conf.py +++ b/tests/test_library_conf.py @@ -2,10 +2,11 @@ # This 
product includes software developed at Datadog (https://www.datadoghq.com/). # Copyright 2021 Datadog, Inc. -from utils import weblog, interfaces, scenarios, features +from utils import weblog, interfaces, scenarios, features, missing_feature from utils._context.header_tag_vars import * from utils import remote_config as rc import json +import pprint # basic / legacy tests, just tests user-agent can be received as a tag @@ -250,3 +251,424 @@ def get_rc_params(self, header_tags): } id = hash(json.dumps(config)) return f"datadog/2/APM_TRACING/{id}/config", config + + +# The Datadog specific tracecontext flags to mark flags are set +TRACECONTEXT_FLAGS_SET = 1 << 31 + + +def retrieve_span_links(span): + if span.get("spanLinks") is not None: + return span["spanLinks"] + + if span["meta"].get("_dd.span_links") is None: + return None + + # Convert span_links tags into msgpack v0.4 format + json_links = json.loads(span["meta"].get("_dd.span_links")) + links = [] + for json_link in json_links: + link = {} + link["traceID"] = int(json_link["trace_id"][-16:], base=16) + link["spanID"] = int(json_link["span_id"], base=16) + if len(json_link["trace_id"]) > 16: + link["traceIDHigh"] = int(json_link["trace_id"][:16], base=16) + if "attributes" in json_link: + link["attributes"] = json_link.get("attributes") + if "tracestate" in json_link: + link["tracestate"] = json_link.get("tracestate") + elif "trace_state" in json_link: + link["tracestate"] = json_link.get("trace_state") + if "flags" in json_link: + link["flags"] = json_link.get("flags") | TRACECONTEXT_FLAGS_SET + else: + link["flags"] = 0 + links.append(link) + return links + + +@scenarios.default +@features.context_propagation_extract_behavior +class Test_ExtractBehavior_Default: + def setup_single_tracecontext(self): + self.r = weblog.get( + "/make_distant_call", + params={"url": "http://weblog:7777/"}, + headers={ + "x-datadog-trace-id": "1", + "x-datadog-parent-id": "1", + "x-datadog-sampling-priority": "2", + 
"x-datadog-tags": "_dd.p.tid=1111111111111111,_dd.p.dm=-4", + "traceparent": "00-11111111111111110000000000000001-0000000000000001-01", + "tracestate": "dd=s:2;t.dm:-4,foo=1", + "baggage": "key1=value1", + }, + ) + + @missing_feature( + library="cpp", + reason="baggage is not implemented, also remove DD_TRACE_PROPAGATION_STYLE_EXTRACT workaround in containers.py", + ) + def test_single_tracecontext(self): + interfaces.library.assert_trace_exists(self.r) + spans = interfaces.agent.get_spans_list(self.r) + assert len(spans) == 1, "Agent received the incorrect amount of spans" + + # Test the extracted span context + span = spans[0] + assert span.get("traceID") == "1" + assert span.get("parentID") == "1" + assert retrieve_span_links(span) is None + + # Test the next outbound span context + assert self.r.status_code == 200 + data = json.loads(self.r.text) + assert data is not None + + assert data["request_headers"]["x-datadog-trace-id"] == "1" + assert "_dd.p.tid=1111111111111111" in data["request_headers"]["x-datadog-tags"] + assert "key1=value1" in data["request_headers"]["baggage"] + + def setup_multiple_tracecontexts(self): + self.r = weblog.get( + "/make_distant_call", + params={"url": "http://weblog:7777/"}, + headers={ + "x-datadog-trace-id": "2", + "x-datadog-parent-id": "2", + "x-datadog-sampling-priority": "2", + "x-datadog-tags": "_dd.p.tid=1111111111111111,_dd.p.dm=-4", + "traceparent": "00-12345678901234567890123456789012-1234567890123456-01", + "baggage": "key1=value1", + }, + ) + + @missing_feature( + library="cpp", + reason="baggage is not implemented, also remove DD_TRACE_PROPAGATION_STYLE_EXTRACT workaround in containers.py", + ) + def test_multiple_tracecontexts(self): + interfaces.library.assert_trace_exists(self.r) + spans = interfaces.agent.get_spans_list(self.r) + assert len(spans) == 1, "Agent received the incorrect amount of spans" + + # Test the extracted span context + span = spans[0] + assert span.get("traceID") == "2" + assert 
span.get("parentID") == "2" + + # Test the extracted span links: One span link per conflicting trace context + span_links = retrieve_span_links(span) + assert len(span_links) == 1 + + # Assert the W3C Trace Context (conflicting trace context) span link + link = span_links[0] + assert int(link["traceID"]) == 8687463697196027922 # int(0x7890123456789012) + assert int(link["spanID"]) == 1311768467284833366 # int (0x1234567890123456) + assert int(link["traceIDHigh"]) == 1311768467284833366 # int(0x1234567890123456) + assert link["attributes"] == {"reason": "terminated_context", "context_headers": "tracecontext"} + + # Test the next outbound span context + assert self.r.status_code == 200 + data = json.loads(self.r.text) + assert data is not None + + assert data["request_headers"]["x-datadog-trace-id"] == "2" + assert "_dd.p.tid=1111111111111111" in data["request_headers"]["x-datadog-tags"] + assert "key1=value1" in data["request_headers"]["baggage"] + + +@scenarios.tracing_config_nondefault +@features.context_propagation_extract_behavior +class Test_ExtractBehavior_Restart: + def setup_single_tracecontext(self): + self.r = weblog.get( + "/make_distant_call", + params={"url": "http://weblog:7777/"}, + headers={ + "x-datadog-trace-id": "1", + "x-datadog-parent-id": "1", + "x-datadog-sampling-priority": "2", + "x-datadog-tags": "_dd.p.tid=1111111111111111,_dd.p.dm=-4", + "traceparent": "00-11111111111111110000000000000001-0000000000000001-01", + "tracestate": "dd=s:2;t.dm:-4,foo=1", + "baggage": "key1=value1", + }, + ) + + @missing_feature( + library="cpp", + reason="baggage is not implemented, also remove DD_TRACE_PROPAGATION_STYLE_EXTRACT workaround in containers.py", + ) + def test_single_tracecontext(self): + interfaces.library.assert_trace_exists(self.r) + spans = interfaces.agent.get_spans_list(self.r) + assert len(spans) == 1, "Agent received the incorrect amount of spans" + + # Test the extracted span context + span = spans[0] + assert span.get("traceID") != "1" + 
assert span.get("parentID") is None + + # Test the extracted span links: One span link for the incoming (Datadog trace context). + # In the case that span links are generated for conflicting trace contexts, those span links + # are not included in the new trace context + span_links = retrieve_span_links(span) + assert len(span_links) == 1 + + # Assert the Datadog (restarted) span link + link = span_links[0] + assert int(link["traceID"]) == 1 + assert int(link["spanID"]) == 1 + assert int(link["traceIDHigh"]) == 1229782938247303441 + assert link["attributes"] == {"reason": "propagation_behavior_extract", "context_headers": "datadog"} + + # Test the next outbound span context + assert self.r.status_code == 200 + data = json.loads(self.r.text) + assert data is not None + + assert data["request_headers"]["x-datadog-trace-id"] != "1" + assert "_dd.p.tid=1111111111111111" not in data["request_headers"]["x-datadog-tags"] + assert "key1=value1" in data["request_headers"]["baggage"] + + def setup_multiple_tracecontexts(self): + self.r = weblog.get( + "/make_distant_call", + params={"url": "http://weblog:7777/"}, + headers={ + "x-datadog-trace-id": "1", + "x-datadog-parent-id": "1", + "x-datadog-sampling-priority": "2", + "x-datadog-tags": "_dd.p.tid=1111111111111111,_dd.p.dm=-4", + "traceparent": "00-12345678901234567890123456789012-1234567890123456-01", + "baggage": "key1=value1", + }, + ) + + @missing_feature( + library="cpp", + reason="baggage is not implemented, also remove DD_TRACE_PROPAGATION_STYLE_EXTRACT workaround in containers.py", + ) + def test_multiple_tracecontexts(self): + interfaces.library.assert_trace_exists(self.r) + spans = interfaces.agent.get_spans_list(self.r) + assert len(spans) == 1, "Agent received the incorrect amount of spans" + + # Test the extracted span context + span = spans[0] + assert ( + span.get("traceID") != "1" and span.get("traceID") != "8687463697196027922" # Lower 64-bits of traceparent + ) + assert span.get("parentID") is None + + # 
Test the extracted span links: One span link for the incoming (Datadog trace context). + # In the case that span links are generated for conflicting trace contexts, those span links + # are not included in the new trace context + span_links = retrieve_span_links(span) + assert len(span_links) == 1 + + # Assert the Datadog (restarted) span link + link = span_links[0] + assert int(link["traceID"]) == 1 + assert int(link["spanID"]) == 1 + assert int(link["traceIDHigh"]) == 1229782938247303441 + assert link["attributes"] == {"reason": "propagation_behavior_extract", "context_headers": "datadog"} + + # Test the next outbound span context + assert self.r.status_code == 200 + data = json.loads(self.r.text) + assert data is not None + + assert data["request_headers"]["x-datadog-trace-id"] != "1" + assert "_dd.p.tid=1111111111111111" not in data["request_headers"]["x-datadog-tags"] + assert "key1=value1" in data["request_headers"]["baggage"] + + +@scenarios.tracing_config_nondefault_2 +@features.context_propagation_extract_behavior +class Test_ExtractBehavior_Ignore: + def setup_single_tracecontext(self): + self.r = weblog.get( + "/make_distant_call", + params={"url": "http://weblog:7777/"}, + headers={ + "x-datadog-trace-id": "1", + "x-datadog-parent-id": "1", + "x-datadog-sampling-priority": "2", + "x-datadog-tags": "_dd.p.tid=1111111111111111,_dd.p.dm=-4", + "traceparent": "00-11111111111111110000000000000001-0000000000000001-01", + "tracestate": "dd=s:1;t.dm:-4,foo=1", + "baggage": "key1=value1", + }, + ) + + @missing_feature( + library="cpp", + reason="baggage is not implemented, also remove DD_TRACE_PROPAGATION_STYLE_EXTRACT workaround in containers.py", + ) + def test_single_tracecontext(self): + interfaces.library.assert_trace_exists(self.r) + spans = interfaces.agent.get_spans_list(self.r) + assert len(spans) == 1, "Agent received the incorrect amount of spans" + + # Test the local span context + span = spans[0] + assert span.get("traceID") != "1" + assert 
span.get("parentID") is None + assert retrieve_span_links(span) is None + + # Test the next outbound span context + assert self.r.status_code == 200 + data = json.loads(self.r.text) + assert data is not None + + assert data["request_headers"]["x-datadog-trace-id"] != "1" + assert "_dd.p.tid=1111111111111111" not in data["request_headers"]["x-datadog-tags"] + assert "baggage" not in data["request_headers"] + + def setup_multiple_tracecontexts(self): + self.r = weblog.get( + "/make_distant_call", + params={"url": "http://weblog:7777/"}, + headers={ + "x-datadog-trace-id": "2", + "x-datadog-parent-id": "2", + "x-datadog-sampling-priority": "2", + "x-datadog-tags": "_dd.p.tid=1111111111111111,_dd.p.dm=-4", + "traceparent": "00-12345678901234567890123456789012-1234567890123456-01", + "baggage": "key1=value1", + }, + ) + + @missing_feature( + library="cpp", + reason="baggage is not implemented, also remove DD_TRACE_PROPAGATION_STYLE_EXTRACT workaround in containers.py", + ) + def test_multiple_tracecontexts(self): + interfaces.library.assert_trace_exists(self.r) + spans = interfaces.agent.get_spans_list(self.r) + assert len(spans) == 1, "Agent received the incorrect amount of spans" + + # Test the local span context + span = spans[0] + assert ( + span.get("traceID") != "1" and span.get("traceID") != "8687463697196027922" # Lower 64-bits of traceparent + ) + assert span.get("parentID") is None + assert retrieve_span_links(span) is None + + # Test the next outbound span context + assert self.r.status_code == 200 + data = json.loads(self.r.text) + assert data is not None + + assert data["request_headers"]["x-datadog-trace-id"] != "2" + assert "_dd.p.tid=1111111111111111" not in data["request_headers"]["x-datadog-tags"] + assert "baggage" not in data["request_headers"] + + +@scenarios.tracing_config_nondefault_3 +@features.context_propagation_extract_behavior +class Test_ExtractBehavior_Restart_With_Extract_First: + def setup_single_tracecontext(self): + self.r = weblog.get( 
+ "/make_distant_call", + params={"url": "http://weblog:7777/"}, + headers={ + "x-datadog-trace-id": "1", + "x-datadog-parent-id": "1", + "x-datadog-sampling-priority": "2", + "x-datadog-tags": "_dd.p.tid=1111111111111111,_dd.p.dm=-4", + "traceparent": "00-11111111111111110000000000000001-0000000000000001-01", + "tracestate": "dd=s:2;t.dm:-4,foo=1", + "baggage": "key1=value1", + }, + ) + + @missing_feature( + library="cpp", + reason="baggage is not implemented, also remove DD_TRACE_PROPAGATION_STYLE_EXTRACT workaround in containers.py", + ) + def test_single_tracecontext(self): + interfaces.library.assert_trace_exists(self.r) + spans = interfaces.agent.get_spans_list(self.r) + assert len(spans) == 1, "Agent received the incorrect amount of spans" + + # Test the extracted span context + span = spans[0] + assert span.get("traceID") != "1" + assert span.get("parentID") is None + + # Test the extracted span links: One span link for the incoming (Datadog trace context). + # In the case that span links are generated for conflicting trace contexts, those span links + # are not included in the new trace context + span_links = retrieve_span_links(span) + assert len(span_links) == 1 + + # Assert the Datadog (restarted) span link + link = span_links[0] + assert int(link["traceID"]) == 1 + assert int(link["spanID"]) == 1 + assert int(link["traceIDHigh"]) == 1229782938247303441 + assert link["attributes"] == {"reason": "propagation_behavior_extract", "context_headers": "datadog"} + + # Test the next outbound span context + assert self.r.status_code == 200 + data = json.loads(self.r.text) + assert data is not None + + assert data["request_headers"]["x-datadog-trace-id"] != "1" + assert "_dd.p.tid=1111111111111111" not in data["request_headers"]["x-datadog-tags"] + assert "key1=value1" in data["request_headers"]["baggage"] + + def setup_multiple_tracecontexts(self): + self.r = weblog.get( + "/make_distant_call", + params={"url": "http://weblog:7777/"}, + headers={ + 
"x-datadog-trace-id": "1", + "x-datadog-parent-id": "1", + "x-datadog-sampling-priority": "2", + "x-datadog-tags": "_dd.p.tid=1111111111111111,_dd.p.dm=-4", + "traceparent": "00-12345678901234567890123456789012-1234567890123456-01", + "baggage": "key1=value1", + }, + ) + + @missing_feature( + library="cpp", + reason="baggage is not implemented, also remove DD_TRACE_PROPAGATION_STYLE_EXTRACT workaround in containers.py", + ) + def test_multiple_tracecontexts(self): + interfaces.library.assert_trace_exists(self.r) + spans = interfaces.agent.get_spans_list(self.r) + assert len(spans) == 1, "Agent received the incorrect amount of spans" + + # Test the extracted span context + span = spans[0] + assert ( + span.get("traceID") != "1" and span.get("traceID") != "8687463697196027922" # Lower 64-bits of traceparent + ) + assert span.get("parentID") is None + + # Test the extracted span links: One span link for the incoming (Datadog trace context). + # In the case that span links are generated for conflicting trace contexts, those span links + # are not included in the new trace context + span_links = retrieve_span_links(span) + assert len(span_links) == 1 + + # Assert the Datadog (restarted) span link + link = span_links[0] + assert int(link["traceID"]) == 1 + assert int(link["spanID"]) == 1 + assert int(link["traceIDHigh"]) == 1229782938247303441 + + # Test the next outbound span context + assert self.r.status_code == 200 + data = json.loads(self.r.text) + assert data is not None + + assert data["request_headers"]["x-datadog-trace-id"] != "1" + assert "_dd.p.tid=1111111111111111" not in data["request_headers"]["x-datadog-tags"] + assert "key1=value1" in data["request_headers"]["baggage"] diff --git a/tests/test_sampling_rates.py b/tests/test_sampling_rates.py index dc3624cd98..bb7628fef7 100644 --- a/tests/test_sampling_rates.py +++ b/tests/test_sampling_rates.py @@ -66,6 +66,7 @@ def setup_sampling_rates(self): @flaky(context.weblog_variant == "spring-boot-3-native", 
reason="APMAPI-736") @flaky(library="golang", reason="APMAPI-736") @flaky(library="ruby", reason="APMAPI-736") + @flaky(library="nodejs", reason="APMAPI-1120") def test_sampling_rates(self): """Basic test""" interfaces.library.assert_all_traces_requests_forwarded(self.paths) diff --git a/tests/test_semantic_conventions.py b/tests/test_semantic_conventions.py index b18aad50ba..f296bb5936 100644 --- a/tests/test_semantic_conventions.py +++ b/tests/test_semantic_conventions.py @@ -36,6 +36,7 @@ "graphql-go": "graphql-go/graphql", "jersey-grizzly2": {"jakarta-rs.request": "jakarta-rs-controller", "grizzly.request": ["grizzly", "jakarta-rs"]}, "net-http": "net/http", + "net-http-orchestrion": "net/http", "sinatra": {"rack.request": "rack"}, "spring-boot": { "servlet.request": "tomcat-server", diff --git a/tests/test_standard_tags.py b/tests/test_standard_tags.py index b085a147ac..6b286a72b5 100644 --- a/tests/test_standard_tags.py +++ b/tests/test_standard_tags.py @@ -209,7 +209,7 @@ def test_route(self): if context.library == "nodejs": tags["http.route"] = "/sample_rate_route/:i" if context.library == "golang": - if context.weblog_variant == "net-http": + if "net-http" in context.weblog_variant: # net/http doesn't support parametrized routes but a path catches anything down the tree. tags["http.route"] = "/sample_rate_route/" if context.weblog_variant in ("gin", "echo", "uds-echo"): diff --git a/utils/_context/_scenarios/__init__.py b/utils/_context/_scenarios/__init__.py index f90cb0ba82..62af784157 100644 --- a/utils/_context/_scenarios/__init__.py +++ b/utils/_context/_scenarios/__init__.py @@ -525,7 +525,8 @@ class _Scenarios: "DD_TRACE_KAFKA_ENABLED": "false", # most common endpoint and integration (missing for PHP). "DD_TRACE_KAFKAJS_ENABLED": "false", # In Node the integration is kafkajs. 
"DD_TRACE_PDO_ENABLED": "false", # Use PDO for PHP, - "DD_TRACE_PROPAGATION_STYLE_EXTRACT": "tracecontext,datadog,b3multi", + "DD_TRACE_PROPAGATION_STYLE_EXTRACT": "datadog,tracecontext,b3multi,baggage", + "DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT": "restart", "DD_LOGS_INJECTION": "true", }, appsec_enabled=False, # disable ASM to test non asm client ip tagging @@ -545,7 +546,8 @@ class _Scenarios: "DD_TRACE_PDO_ENABLED": "true", # Use PDO for PHP "DD_TRACE_CLIENT_IP_HEADER": "custom-ip-header", "DD_TRACE_CLIENT_IP_ENABLED": "true", - "DD_TRACE_PROPAGATION_STYLE_EXTRACT": "datadog,tracecontext,b3multi", + "DD_TRACE_PROPAGATION_STYLE_EXTRACT": "datadog,tracecontext,b3multi,baggage", + "DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT": "ignore", }, include_kafka=True, include_postgres_db=True, @@ -557,6 +559,9 @@ class _Scenarios: weblog_env={ "DD_TRACE_HTTP_CLIENT_TAG_QUERY_STRING": "false", "DD_TRACE_CLIENT_IP_HEADER": "custom-ip-header", + "DD_TRACE_PROPAGATION_STYLE_EXTRACT": "datadog,tracecontext,b3multi,baggage", + "DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT": "restart", + "DD_TRACE_PROPAGATION_EXTRACT_FIRST": "true", "DD_LOGS_INJECTION": "true", "DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED": "false", }, diff --git a/utils/_context/_scenarios/auto_injection.py b/utils/_context/_scenarios/auto_injection.py index ed366cc085..843b681490 100644 --- a/utils/_context/_scenarios/auto_injection.py +++ b/utils/_context/_scenarios/auto_injection.py @@ -1,6 +1,7 @@ -import os -import json import copy +import json +import os +from pathlib import Path from utils._context.library_version import LibraryVersion from utils.tools import logger from utils.virtual_machine.utils import get_tested_apps_vms, generate_gitlab_pipeline @@ -275,10 +276,10 @@ def _check_test_environment(self): base_folder = "utils/build/virtual_machine" weblog_provision_file = f"{base_folder}/weblogs/{self._library.library}/provision_{self._weblog}.yml" - assert os.path.isfile(weblog_provision_file), f"Weblog Provision file 
not found: {weblog_provision_file}" + assert Path(weblog_provision_file).is_file(), f"Weblog Provision file not found: {weblog_provision_file}" provision_file = f"{base_folder}/provisions/{self.vm_provision_name}/provision.yml" - assert os.path.isfile(provision_file), f"Provision file not found: {provision_file}" + assert Path(provision_file).is_file(), f"Provision file not found: {provision_file}" assert os.getenv("DD_API_KEY_ONBOARDING") is not None, "DD_API_KEY_ONBOARDING is not set" assert os.getenv("DD_APP_KEY_ONBOARDING") is not None, "DD_APP_KEY_ONBOARDING is not set" diff --git a/utils/_context/_scenarios/core.py b/utils/_context/_scenarios/core.py index 8386fe63c3..435f5afc90 100644 --- a/utils/_context/_scenarios/core.py +++ b/utils/_context/_scenarios/core.py @@ -19,7 +19,6 @@ class ScenarioGroup(Enum): IPV6 = "ipv6" LIB_INJECTION = "lib-injection" OPEN_TELEMETRY = "open-telemetry" - PARAMETRIC = "parametric" PROFILING = "profiling" SAMPLING = "sampling" ONBOARDING = "onboarding" @@ -51,7 +50,7 @@ def __init__(self, name, github_workflow, doc, scenario_groups=None) -> None: self.replay = False self.doc = doc self.rc_api_enabled = False - self.github_workflow = github_workflow + self.github_workflow = github_workflow # TODO: rename this to workflow, as it may not be a github workflow self.scenario_groups = scenario_groups or [] self.scenario_groups = list(set(self.scenario_groups)) # removes duplicates diff --git a/utils/_context/_scenarios/parametric.py b/utils/_context/_scenarios/parametric.py index 1d780e3c65..d834896ad5 100644 --- a/utils/_context/_scenarios/parametric.py +++ b/utils/_context/_scenarios/parametric.py @@ -120,7 +120,7 @@ def __init__(self, name, doc) -> None: name, doc=doc, github_workflow="parametric", - scenario_groups=[ScenarioGroup.ALL, ScenarioGroup.PARAMETRIC, ScenarioGroup.PARAMETRIC], + scenario_groups=[ScenarioGroup.ALL], ) self._parametric_tests_confs = ParametricScenario.PersistentParametricTestConf(self) @@ -215,7 +215,7 @@ 
def _build_apm_test_server_image(self) -> str: apm_test_server_definition: APMLibraryTestServer = self.apm_test_server_definition log_path = f"{self.host_log_folder}/outputs/docker_build_log.log" - Path.mkdir(os.path.dirname(log_path), exist_ok=True, parents=True) + Path.mkdir(Path(log_path).parent, exist_ok=True, parents=True) # Write dockerfile to the build directory # Note that this needs to be done as the context cannot be @@ -378,7 +378,7 @@ def node_library_factory() -> APMLibraryTestServer: with open("./binaries/nodejs-load-from-local", encoding="utf-8") as f: path = f.read().strip(" \r\n") source = os.path.join(_get_base_directory(), path) - volumes[os.path.abspath(source)] = "/volumes/dd-trace-js" + volumes[Path(source).resolve()] = "/volumes/dd-trace-js" except FileNotFoundError: logger.info("No local dd-trace-js found, do not mount any volume") diff --git a/utils/_context/containers.py b/utils/_context/containers.py index 08316ecf7c..c17a4aee0e 100644 --- a/utils/_context/containers.py +++ b/utils/_context/containers.py @@ -822,11 +822,28 @@ def configure(self, replay): self.appsec_rules_file = (self.image.env | self.environment).get("DD_APPSEC_RULES", None) + # Workaround: Once the dd-trace-go fix is merged that avoids a go panic for + # DD_TRACE_PROPAGATION_EXTRACT_FIRST=true when context propagation fails, + # we can remove the DD_TRACE_PROPAGATION_EXTRACT_FIRST=false override + if library == "golang": + self.environment["DD_TRACE_PROPAGATION_EXTRACT_FIRST"] = "false" + + # Workaround: We may want to define baggage in our list of propagators, but the cpp library + # has strict checks on tracer startup that will fail to launch the application + # when it encounters unfamiliar configurations. 
Override the configuration that the cpp + # weblog container sees so we can still run tests + if library == "cpp": + extract_config = self.environment.get("DD_TRACE_PROPAGATION_STYLE_EXTRACT") + if extract_config and "baggage" in extract_config: + self.environment["DD_TRACE_PROPAGATION_STYLE_EXTRACT"] = extract_config.replace("baggage", "").strip( + "," + ) + if library == "nodejs": try: with open("./binaries/nodejs-load-from-local", encoding="utf-8") as f: path = f.read().strip(" \r\n") - self.kwargs["volumes"][os.path.abspath(path)] = { + self.kwargs["volumes"][Path(path).resolve()] = { "bind": "/volumes/dd-trace-js", "mode": "ro", } @@ -1073,7 +1090,7 @@ def start(self, network: Network) -> Container: # _otel_config_host_path is mounted in the container, and depending on umask, # it might have no read permissions for other users, which is required within # the container. So set them here. - prev_mode = os.stat(self._otel_config_host_path).st_mode + prev_mode = Path(self._otel_config_host_path).stat().st_mode new_mode = prev_mode | stat.S_IROTH if prev_mode != new_mode: Path(self._otel_config_host_path).chmod(new_mode) diff --git a/utils/_context/virtual_machines.py b/utils/_context/virtual_machines.py index d0d1c72c6f..35084db399 100644 --- a/utils/_context/virtual_machines.py +++ b/utils/_context/virtual_machines.py @@ -146,7 +146,7 @@ def get_ip(self): def _load_runtime_from_logs(self): """Load the runtime version from the test_components.log""" vms_tested_components_file = f"{context.scenario.host_log_folder}/tested_components.log" - if os.path.isfile(vms_tested_components_file): + if Path(vms_tested_components_file).is_file(): # Get the machine ip machine_ip = self.get_ip() # Read the file line by line looking for line with the ip @@ -168,7 +168,7 @@ def _load_ip_from_logs(self): """Load the ip address from the logs""" vms_desc_file = f"{context.scenario.host_log_folder}/vms_desc.log" logger.info(f"Loading ip for {self.name} from {vms_desc_file}") - if 
os.path.isfile(vms_desc_file): + if Path(vms_desc_file).is_file(): with open(vms_desc_file) as f: for line in f: if self.name in line: @@ -178,7 +178,7 @@ def _load_ip_from_logs(self): def get_log_folder(self): vm_folder = f"{context.scenario.host_log_folder}/{self.name}" - if not os.path.exists(vm_folder): + if not Path(vm_folder).exists(): Path.mkdir(vm_folder) return vm_folder diff --git a/utils/_features.py b/utils/_features.py index c165472189..ff70aaca8b 100644 --- a/utils/_features.py +++ b/utils/_features.py @@ -2345,6 +2345,15 @@ def envoy_external_processing(test_object): pytest.mark.features(feature_id=350)(test_object) return test_object + @staticmethod + def context_propagation_extract_behavior(test_object): + """Context Propagation: Extraction behavior can be configured at the service level + + https://feature-parity.us1.prod.dog/#/?feature=353 + """ + pytest.mark.features(feature_id=343)(test_object) + return test_object + @staticmethod def iast_sink_email_html_injection(test_object): """IAST Sink: Email HTML injection @@ -2372,5 +2381,14 @@ def debugger_code_origins(test_object): pytest.mark.features(feature_id=360)(test_object) return test_object + @staticmethod + def otel_propagators_api(test_object): + """OpenTelemetry Propagators API + + https://feature-parity.us1.prod.dog/#/?feature=361 + """ + pytest.mark.features(feature_id=361)(test_object) + return test_object + features = _Features() diff --git a/utils/build/build.sh b/utils/build/build.sh index 86ee5c3d66..e71c9be53b 100755 --- a/utils/build/build.sh +++ b/utils/build/build.sh @@ -155,9 +155,11 @@ build() { fi fi source venv/bin/activate - python -m pip install --upgrade pip wheel + python -m pip install --upgrade pip setuptools==75.8.0 fi - pip install -r requirements.txt + python -m pip install -e . 
+ cp requirements.txt venv/requirements.txt + elif [[ $IMAGE_NAME == runner ]] && [[ $DOCKER_MODE == 1 ]]; then docker buildx build \ diff --git a/utils/build/docker/dotnet/weblog/Controllers/LoginController.cs b/utils/build/docker/dotnet/weblog/Controllers/AccountController.cs similarity index 57% rename from utils/build/docker/dotnet/weblog/Controllers/LoginController.cs rename to utils/build/docker/dotnet/weblog/Controllers/AccountController.cs index afc4601533..3abc4b7fb1 100644 --- a/utils/build/docker/dotnet/weblog/Controllers/LoginController.cs +++ b/utils/build/docker/dotnet/weblog/Controllers/AccountController.cs @@ -1,113 +1,155 @@ -#if DDTRACE_2_23_0_OR_GREATER - -#nullable enable -using System; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Amazon.SecurityToken.Model; -using Datadog.Trace.AppSec; -using Microsoft.AspNetCore.Identity; -using Microsoft.AspNetCore.Mvc; -using weblog.Models; - -namespace weblog; - -[Route("login")] -public class LoginController : Controller -{ - private readonly SignInManager _signInManager; - private readonly UserManager _userManager; - private readonly IUserStore _userStore; - - public LoginController(SignInManager signInManager, UserManager userManager, - IUserStore userStore) - { - _signInManager = signInManager; - _userManager = userManager; - _userStore = userStore; - } - - [HttpGet("")] - public async Task IndexGet([FromQuery] LoginModel? loginQuery) - { - if (loginQuery?.Auth == "basic") - { - if (loginQuery is { SdkEvent: "success" }) - { - EventTrackingSdk.TrackUserLoginSuccessEvent(loginQuery.SdkUser); - } - else if (loginQuery is { SdkEvent: "failure" }) - { - EventTrackingSdk.TrackUserLoginFailureEvent(loginQuery.SdkUser, loginQuery.SdkUserExists ?? 
false); - } - - var authorizationHeader = this.Request.Headers["Authorization"][0]; - - if(authorizationHeader is not null) - { - var authBase64Decoded = Encoding.UTF8.GetString( - Convert.FromBase64String(authorizationHeader.Replace("Basic ", "", - StringComparison.OrdinalIgnoreCase))); - var authSplit = authBase64Decoded.Split(new[] { ':' }, 2); - var result = await _signInManager.PasswordSignInAsync(authSplit[0], authSplit[1], false, lockoutOnFailure: false); - if (result.Succeeded) - { - return Content("Successfully login as " + authSplit[0]); - } - } - - Response.StatusCode = 401; - return Content("Invalid login attempt"); - } - - if (User.Identity?.IsAuthenticated == true) - { - return Content($"Logged in as{User.Identity.Name}"); - } - - return Content("Logged in"); - } - - [HttpPost] - public async Task Index(LoginModel model) - { - if (ModelState.IsValid) - { - if (model is { SdkEvent: "success" }) - { - EventTrackingSdk.TrackUserLoginSuccessEvent(model.SdkUser); - } - else if (model is { SdkEvent: "failure" }) - { - EventTrackingSdk.TrackUserLoginFailureEvent(model.SdkUser, model.SdkUserExists ?? 
false); - } - - if(model is { UserName: not null, Password: not null }) - { - // This doesn't count login failures towards account lockout - // To enable password failures to trigger account lockout, set lockoutOnFailure: true - var result = await _signInManager.PasswordSignInAsync(model.UserName, model.Password, false, - lockoutOnFailure: false); - if (result.Succeeded) - { - return Content("Successfully login as " + model.UserName); - } - } - - Response.StatusCode = 401; - return Content("Invalid login attempt"); - } - - // If we got this far, something failed, redisplay form - return RedirectToAction(nameof(Index)); - } - - [HttpPost("logout")] - public IActionResult LogOut() - { - _signInManager.SignOutAsync(); - return RedirectToAction(nameof(Index)); - } -} -#endif +#if DDTRACE_2_23_0_OR_GREATER + +#nullable enable +using System; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Amazon.SecurityToken.Model; +using Datadog.Trace.AppSec; +using Microsoft.AspNetCore.Identity; +using Microsoft.AspNetCore.Mvc; +using weblog.Models; + +namespace weblog; + +public class AccountController : Controller +{ + private readonly SignInManager _signInManager; + private readonly UserManager _userManager; + private readonly IUserStore _userStore; + + /// + public AccountController(SignInManager signInManager, UserManager userManager, + IUserStore userStore) + { + _signInManager = signInManager; + _userManager = userManager; + _userStore = userStore; + } + + [HttpGet("login")] + public async Task IndexGet([FromQuery] LoginModel? loginQuery) + { + if (loginQuery?.Auth == "basic") + { + if (loginQuery is { SdkEvent: "success" }) + { + EventTrackingSdk.TrackUserLoginSuccessEvent(loginQuery.SdkUser); + } + else if (loginQuery is { SdkEvent: "failure" }) + { + EventTrackingSdk.TrackUserLoginFailureEvent(loginQuery.SdkUser, loginQuery.SdkUserExists ?? 
false); + } + + var authorizationHeader = this.Request.Headers["Authorization"][0]; + + if (authorizationHeader is not null) + { + var authBase64Decoded = Encoding.UTF8.GetString( + Convert.FromBase64String(authorizationHeader.Replace("Basic ", "", + StringComparison.OrdinalIgnoreCase))); + var authSplit = authBase64Decoded.Split(new[] { ':' }, 2); + var result = + await _signInManager.PasswordSignInAsync(authSplit[0], authSplit[1], false, + lockoutOnFailure: false); + if (result.Succeeded) + { + return Content("Successfully login as " + authSplit[0]); + } + } + + Response.StatusCode = 401; + return Content("Invalid login attempt"); + } + + if (User.Identity?.IsAuthenticated == true) + { + return Content($"Logged in as{User.Identity.Name}"); + } + + return Content("Logged in"); + } + + [HttpPost("login")] + public async Task Index(LoginModel model) + { + if (ModelState.IsValid) + { + if (string.IsNullOrEmpty(model.SdkTrigger) || model.SdkTrigger == "before") + { + if (model is { SdkEvent: "success" }) + { + EventTrackingSdk.TrackUserLoginSuccessEvent(model.SdkUser); + } + else if (model is { SdkEvent: "failure" }) + { + EventTrackingSdk.TrackUserLoginFailureEvent(model.SdkUser, model.SdkUserExists ?? false); + } + } + + if (model is { UserName: not null, Password: not null }) + { + // This doesn't count login failures towards account lockout + // To enable password failures to trigger account lockout, set lockoutOnFailure: true + var result = await _signInManager.PasswordSignInAsync(model.UserName, model.Password, false, + lockoutOnFailure: false); + + if (model.SdkTrigger == "after") + { + if (model is { SdkEvent: "success" }) + { + EventTrackingSdk.TrackUserLoginSuccessEvent(model.SdkUser); + } + else if (model is { SdkEvent: "failure" }) + { + EventTrackingSdk.TrackUserLoginFailureEvent(model.SdkUser, model.SdkUserExists ?? 
false); + } + } + + if (result.Succeeded) + { + return Content("Successfully login as " + model.UserName); + } + } + + Response.StatusCode = 401; + return Content("Invalid login attempt"); + } + + // If we got this far, something failed, redisplay form + return RedirectToAction(nameof(Index)); + } + + [HttpPost("signup")] + public async Task Signup(LoginModel model) + { + if (ModelState.IsValid) + { + if (model is { UserName: not null, Password: not null }) + { + var user = new IdentityUser { UserName = model.UserName, Id = "new-user" }; + var result = await _userManager.CreateAsync(user, model.Password); + + if (result.Succeeded) + { + return Content("Successfully registered as " + model.UserName); + } + } + + Response.StatusCode = 401; + return Content("Invalid signup attempt"); + } + + // If we got this far, something failed, redisplay form + return RedirectToAction(nameof(Index)); + } + + [HttpPost("logout")] + public IActionResult LogOut() + { + _signInManager.SignOutAsync(); + return RedirectToAction(nameof(Index)); + } +} +#endif \ No newline at end of file diff --git a/utils/build/docker/dotnet/weblog/Controllers/SessionController.cs b/utils/build/docker/dotnet/weblog/Controllers/SessionController.cs index a38b7a49ec..8842ae7e04 100644 --- a/utils/build/docker/dotnet/weblog/Controllers/SessionController.cs +++ b/utils/build/docker/dotnet/weblog/Controllers/SessionController.cs @@ -1,4 +1,5 @@ #if DDTRACE_2_7_0_OR_GREATER +using System; using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc.Formatters; using Microsoft.AspNetCore.Mvc.ModelBinding; @@ -17,7 +18,8 @@ public class SessionController : Controller [HttpGet("new")] public IActionResult New() { - return Content($"Session created"); + HttpContext.Session.Set(Guid.NewGuid().ToString(), [1, 2, 3, 4, 5]); + return Content(HttpContext.Session.Id); } [HttpGet("user")] diff --git a/utils/build/docker/dotnet/weblog/Endpoints/MakeDistantCallEndpoint.cs 
b/utils/build/docker/dotnet/weblog/Endpoints/MakeDistantCallEndpoint.cs index 69e32ab005..a1c459ca4f 100644 --- a/utils/build/docker/dotnet/weblog/Endpoints/MakeDistantCallEndpoint.cs +++ b/utils/build/docker/dotnet/weblog/Endpoints/MakeDistantCallEndpoint.cs @@ -1,6 +1,7 @@ using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Http; using System.Collections.Generic; +using System.Linq; using System.Text.Json.Serialization; namespace weblog @@ -28,9 +29,9 @@ private class EndpointResponse [JsonPropertyName("status_code")] public int StatusCode { get; set; } [JsonPropertyName("request_headers")] - public IEnumerable>>? RequestHeaders { get; set; } + public Dictionary? RequestHeaders { get; set; } [JsonPropertyName("response_headers")] - public IEnumerable>>? ResponseHeaders { get; set; } + public Dictionary? ResponseHeaders { get; set; } } public void Register(Microsoft.AspNetCore.Routing.IEndpointRouteBuilder routeBuilder) @@ -53,8 +54,8 @@ public void Register(Microsoft.AspNetCore.Routing.IEndpointRouteBuilder routeBui { Url = parameters.Url, StatusCode = (int)response.StatusCode, - RequestHeaders = response.RequestMessage?.Headers, - ResponseHeaders = response.Headers, + RequestHeaders = response.RequestMessage?.Headers.Select(kvp => new KeyValuePair(kvp.Key, kvp.Value.First())).ToDictionary(), + ResponseHeaders = response.Headers.Select(kvp => new KeyValuePair(kvp.Key, kvp.Value.First())).ToDictionary(), }; await context.Response.WriteAsJsonAsync(endpointResponse); diff --git a/utils/build/docker/dotnet/weblog/Endpoints/OtelDropInEndpoint.cs b/utils/build/docker/dotnet/weblog/Endpoints/OtelDropInEndpoint.cs new file mode 100644 index 0000000000..a26b950ec0 --- /dev/null +++ b/utils/build/docker/dotnet/weblog/Endpoints/OtelDropInEndpoint.cs @@ -0,0 +1,49 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text.Json; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Http; +using OpenTelemetry; 
+using OpenTelemetry.Context.Propagation; + +namespace weblog +{ + public class OtelDropInEndpoint : ISystemTestEndpoint + { + public void Register(Microsoft.AspNetCore.Routing.IEndpointRouteBuilder routeBuilder) + { + routeBuilder.MapGet("/otel_drop_in_default_propagator_extract", async context => + { + var parentContext = OpenTelemetryInstrumentation.Propagator.Extract(default, context.Request.Headers, (carrier, key) => + { + return carrier.TryGetValue(key, out var value) && value.Count >= 1 ? new[] { value[0] } : null; + }); + + var ddTraceId = Convert.ToUInt64(parentContext.ActivityContext.TraceId.ToHexString().Substring(16), 16); + var ddSpanId = Convert.ToUInt64(parentContext.ActivityContext.SpanId.ToHexString(), 16); + + var data = new + { + trace_id = ddTraceId, + span_id = ddSpanId, + tracestate = parentContext.ActivityContext.TraceState, + baggage = parentContext.Baggage + }; + + await context.Response.WriteAsync(JsonSerializer.Serialize(data)); + }); + + routeBuilder.MapGet("/otel_drop_in_default_propagator_inject", async context => + { + var headersDict = new Dictionary(); + OpenTelemetryInstrumentation.Propagator.Inject(new PropagationContext(Activity.Current.Context, Baggage.Current), headersDict, (carrier, key, value) => + { + carrier[key] = value; + }); + + await context.Response.WriteAsync(JsonSerializer.Serialize(headersDict)); + }); + } + } +} diff --git a/utils/build/docker/dotnet/weblog/Models/LoginModel.cs b/utils/build/docker/dotnet/weblog/Models/LoginModel.cs index b10b82cde7..7bf1e00ff3 100644 --- a/utils/build/docker/dotnet/weblog/Models/LoginModel.cs +++ b/utils/build/docker/dotnet/weblog/Models/LoginModel.cs @@ -20,4 +20,6 @@ public class LoginModel [FromQuery(Name = "sdk_user")] public string? SdkUser { get; set; } [FromQuery(Name = "sdk_user_exists")] public bool? SdkUserExists { get; set; } + + [FromQuery(Name = "sdk_trigger")] public string? 
SdkTrigger { get; set; } } diff --git a/utils/build/docker/dotnet/weblog/OpenTelemetryInstrumentation.cs b/utils/build/docker/dotnet/weblog/OpenTelemetryInstrumentation.cs new file mode 100644 index 0000000000..a2797162b5 --- /dev/null +++ b/utils/build/docker/dotnet/weblog/OpenTelemetryInstrumentation.cs @@ -0,0 +1,10 @@ +using System.Diagnostics; +using OpenTelemetry.Context.Propagation; + +namespace weblog +{ + public static class OpenTelemetryInstrumentation + { + public static TextMapPropagator Propagator { get; } = Propagators.DefaultTextMapPropagator; + } +} diff --git a/utils/build/docker/dotnet/weblog/app.csproj b/utils/build/docker/dotnet/weblog/app.csproj index b17f904def..1e5f4cd0c7 100644 --- a/utils/build/docker/dotnet/weblog/app.csproj +++ b/utils/build/docker/dotnet/weblog/app.csproj @@ -44,5 +44,6 @@ + diff --git a/utils/build/docker/golang/app/internal/common/http_client_default.go b/utils/build/docker/golang/app/internal/common/http_client_default.go new file mode 100644 index 0000000000..62b7c98cff --- /dev/null +++ b/utils/build/docker/golang/app/internal/common/http_client_default.go @@ -0,0 +1,18 @@ +// Unless explicitly stated otherwise all files in this repository are licensed +// under the Apache License Version 2.0. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2024 Datadog, Inc. 
+ +//go:build !orchestrion + +package common + +import ( + "net/http" + + httptrace "gopkg.in/DataDog/dd-trace-go.v1/contrib/net/http" +) + +func httpClient() *http.Client { + return httptrace.WrapClient(http.DefaultClient, httptrace.RTWithPropagation(true)) +} diff --git a/utils/build/docker/golang/app/internal/common/http_client_orchestrion.go b/utils/build/docker/golang/app/internal/common/http_client_orchestrion.go new file mode 100644 index 0000000000..a079523784 --- /dev/null +++ b/utils/build/docker/golang/app/internal/common/http_client_orchestrion.go @@ -0,0 +1,16 @@ +// Unless explicitly stated otherwise all files in this repository are licensed +// under the Apache License Version 2.0. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2024 Datadog, Inc. + +//go:build orchestrion + +package common + +import ( + "net/http" +) + +func httpClient() *http.Client { + return http.DefaultClient +} diff --git a/utils/build/docker/golang/app/internal/common/standalone_asm.go b/utils/build/docker/golang/app/internal/common/standalone_asm.go index 28bed3c060..a0c4fb8e9e 100644 --- a/utils/build/docker/golang/app/internal/common/standalone_asm.go +++ b/utils/build/docker/golang/app/internal/common/standalone_asm.go @@ -11,12 +11,10 @@ import ( "log" "net/http" "strings" - - httptrace "gopkg.in/DataDog/dd-trace-go.v1/contrib/net/http" ) func Requestdownstream(w http.ResponseWriter, r *http.Request) { - client := httptrace.WrapClient(http.DefaultClient, httptrace.RTWithPropagation(true)) + client := httpClient() req, _ := http.NewRequest(http.MethodGet, "http://127.0.0.1:7777/returnheaders", nil) req = req.WithContext(r.Context()) res, err := client.Do(req) diff --git a/utils/build/docker/golang/app/net-http-orchestrion/main.go b/utils/build/docker/golang/app/net-http-orchestrion/main.go new file mode 100644 index 0000000000..5e1147becb --- /dev/null +++ b/utils/build/docker/golang/app/net-http-orchestrion/main.go @@ 
-0,0 +1,649 @@ +package main + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "log" + "math/rand" + "net/http" + "net/http/httptest" + "os" + "os/signal" + "strconv" + "strings" + "syscall" + "time" + + "weblog/internal/common" + "weblog/internal/grpc" + "weblog/internal/rasp" + + "github.com/Shopify/sarama" + "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/propagation" + "go.opentelemetry.io/otel/trace" + "gopkg.in/DataDog/dd-trace-go.v1/appsec" + "gopkg.in/DataDog/dd-trace-go.v1/datastreams" + "gopkg.in/DataDog/dd-trace-go.v1/ddtrace/opentelemetry" + "gopkg.in/DataDog/dd-trace-go.v1/ddtrace/tracer" +) + +func main() { + mux := http.NewServeMux() + + mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + // "/" is the default route when the others don't match + // cf. documentation at https://pkg.go.dev/net/http#ServeMux + // Therefore, we need to check the URL path to only handle the `/` case + if r.URL.Path != "/" { + w.WriteHeader(http.StatusNotFound) + return + } + w.WriteHeader(http.StatusOK) + }) + + mux.HandleFunc("/stats-unique", func(w http.ResponseWriter, r *http.Request) { + if c := r.URL.Query().Get("code"); c != "" { + if code, err := strconv.Atoi(c); err == nil { + w.WriteHeader(code) + return + } + } + w.WriteHeader(http.StatusOK) + }) + + mux.HandleFunc("/healthcheck", func(w http.ResponseWriter, r *http.Request) { + + healthCheck, err := common.GetHealtchCheck() + if err != nil { + http.Error(w, "Can't get JSON data", http.StatusInternalServerError) + } + + jsonData, err := json.Marshal(healthCheck) + if err != nil { + http.Error(w, "Can't build JSON data", http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + w.Write(jsonData) + }) + + mux.HandleFunc("/waf", func(w http.ResponseWriter, r *http.Request) { + body, err := common.ParseBody(r) + if err == nil 
{ + appsec.MonitorParsedHTTPBody(r.Context(), body) + } + w.Write([]byte("Hello, WAF!\n")) + }) + + mux.HandleFunc("/waf/", func(w http.ResponseWriter, r *http.Request) { + body, err := common.ParseBody(r) + if err == nil { + appsec.MonitorParsedHTTPBody(r.Context(), body) + } + write(w, r, []byte("Hello, WAF!")) + }) + + mux.HandleFunc("/users", func(w http.ResponseWriter, r *http.Request) { + userId := r.URL.Query().Get("user") + if err := appsec.SetUser(r.Context(), userId); err != nil { + return + } + w.Write([]byte("Hello, user!")) + }) + + mux.HandleFunc("/sample_rate_route/", func(w http.ResponseWriter, r *http.Request) { + // net/http mux doesn't support advanced patterns, but the given prefix will match any /sample_rate_route/{i} + w.Write([]byte("OK")) + }) + + mux.HandleFunc("/tag_value/{tag_value}/{status_code}", func(w http.ResponseWriter, r *http.Request) { + tag := r.PathValue("tag_value") + status, _ := strconv.Atoi(r.PathValue("status_code")) + span, _ := tracer.SpanFromContext(r.Context()) + span.SetTag("appsec.events.system_tests_appsec_event.value", tag) + for key, values := range r.URL.Query() { + for _, value := range values { + w.Header().Add(key, value) + } + } + w.WriteHeader(status) + w.Write([]byte("Value tagged")) + + switch { + case r.Header.Get("Content-Type") == "application/json": + body, _ := io.ReadAll(r.Body) + var bodyMap map[string]any + if err := json.Unmarshal(body, &bodyMap); err == nil { + appsec.MonitorParsedHTTPBody(r.Context(), bodyMap) + } + case r.ParseForm() == nil: + appsec.MonitorParsedHTTPBody(r.Context(), r.PostForm) + } + }) + + mux.HandleFunc("/status", func(w http.ResponseWriter, r *http.Request) { + if c := r.URL.Query().Get("code"); c != "" { + if code, err := strconv.Atoi(c); err == nil { + w.WriteHeader(code) + } + } + w.Write([]byte("OK")) + }) + + mux.HandleFunc("/make_distant_call", func(w http.ResponseWriter, r *http.Request) { + url := r.URL.Query().Get("url") + if url == "" { + w.Write([]byte("OK")) + 
return + } + + req, _ := http.NewRequestWithContext(r.Context(), http.MethodGet, url, nil) + res, err := http.DefaultClient.Do(req) + if err != nil { + log.Fatalln("client.Do", err) + } + + defer res.Body.Close() + + requestHeaders := make(map[string]string, len(req.Header)) + for key, values := range req.Header { + requestHeaders[key] = strings.Join(values, ",") + } + + responseHeaders := make(map[string]string, len(res.Header)) + for key, values := range res.Header { + responseHeaders[key] = strings.Join(values, ",") + } + + jsonResponse, err := json.Marshal(struct { + URL string `json:"url"` + StatusCode int `json:"status_code"` + RequestHeaders map[string]string `json:"request_headers"` + ResponseHeaders map[string]string `json:"response_headers"` + }{URL: url, StatusCode: res.StatusCode, RequestHeaders: requestHeaders, ResponseHeaders: responseHeaders}) + if err != nil { + log.Fatalln(err) + } + w.Header().Set("Content-Type", "application/json") + w.Write(jsonResponse) + }) + + mux.HandleFunc("/headers", headers) + mux.HandleFunc("/headers/", headers) + + identify := func(w http.ResponseWriter, r *http.Request) { + if span, ok := tracer.SpanFromContext(r.Context()); ok { + tracer.SetUser( + span, "usr.id", tracer.WithUserEmail("usr.email"), + tracer.WithUserName("usr.name"), tracer.WithUserSessionID("usr.session_id"), + tracer.WithUserRole("usr.role"), tracer.WithUserScope("usr.scope"), + ) + } + w.Write([]byte("Hello, identify!")) + } + mux.HandleFunc("/identify/", identify) + mux.HandleFunc("/identify", identify) + mux.HandleFunc("/identify-propagate", func(w http.ResponseWriter, r *http.Request) { + if span, ok := tracer.SpanFromContext(r.Context()); ok { + tracer.SetUser(span, "usr.id", tracer.WithPropagation()) + } + w.Write([]byte("Hello, identify-propagate!")) + }) + + mux.HandleFunc("/kafka/produce", func(w http.ResponseWriter, r *http.Request) { + var message = "Test" + + topic := r.URL.Query().Get("topic") + if len(topic) == 0 { + 
w.Write([]byte("missing param 'topic'")) + w.WriteHeader(422) + return + } + + _, _, err := kafkaProduce(topic, message) + if err != nil { + w.Write([]byte(err.Error())) + w.WriteHeader(500) + return + } + + w.Write([]byte("OK")) + w.WriteHeader(200) + }) + + mux.HandleFunc("/kafka/consume", func(w http.ResponseWriter, r *http.Request) { + topic := r.URL.Query().Get("topic") + if len(topic) == 0 { + w.Write([]byte("missing param 'topic'")) + w.WriteHeader(422) + return + } + + timeout, err := strconv.ParseInt(r.URL.Query().Get("timeout"), 10, 0) + if err != nil { + timeout = 20 + } + + message, status, err := kafkaConsume(topic, timeout) + if err != nil { + panic(err) + } + + w.Write([]byte(message)) + w.WriteHeader(status) + }) + + mux.HandleFunc("/user_login_success_event", func(w http.ResponseWriter, r *http.Request) { + uquery := r.URL.Query() + uid := "system_tests_user" + if q := uquery.Get("event_user_id"); q != "" { + uid = q + } + appsec.TrackUserLoginSuccessEvent(r.Context(), uid, map[string]string{"metadata0": "value0", "metadata1": "value1"}) + }) + + mux.HandleFunc("/user_login_failure_event", func(w http.ResponseWriter, r *http.Request) { + uquery := r.URL.Query() + uid := "system_tests_user" + if q := uquery.Get("event_user_id"); q != "" { + uid = q + } + exists := true + if q := uquery.Get("event_user_exists"); q != "" { + parsed, err := strconv.ParseBool(q) + if err != nil { + exists = parsed + } + } + appsec.TrackUserLoginFailureEvent(r.Context(), uid, exists, map[string]string{"metadata0": "value0", "metadata1": "value1"}) + }) + + mux.HandleFunc("/custom_event", func(w http.ResponseWriter, r *http.Request) { + uquery := r.URL.Query() + name := "system_tests_event" + if q := uquery.Get("event_name"); q != "" { + name = q + } + appsec.TrackCustomEvent(r.Context(), name, map[string]string{"metadata0": "value0", "metadata1": "value1"}) + }) + + //orchestrion:ignore + mux.HandleFunc("/e2e_otel_span", func(w http.ResponseWriter, r *http.Request) { + 
parentName := r.URL.Query().Get("parentName") + childName := r.URL.Query().Get("childName") + + tags := []attribute.KeyValue{} + // We need to propagate the user agent header to retain the mapping between the system-tests/weblog request id + // and the traces/spans that will be generated below, so that we can reference to them in our tests. + // See https://github.com/DataDog/system-tests/blob/2d6ae4d5bf87d55855afd36abf36ee710e7d8b3c/utils/interfaces/_core.py#L156 + userAgent := r.UserAgent() + tags = append(tags, attribute.String("http.useragent", userAgent)) + + if r.URL.Query().Get("shouldIndex") == "1" { + tags = append(tags, + attribute.Int("_dd.filter.kept", 1), + attribute.String("_dd.filter.id", "system_tests_e2e"), + ) + } + + p := opentelemetry.NewTracerProvider() + oteltracer := p.Tracer("") + otel.SetTracerProvider(p) + otel.SetTextMapPropagator(propagation.TraceContext{}) + defer p.ForceFlush(time.Second, func(ok bool) {}) + + // Parent span will have the following traits : + // - spanId of 10000 + // - tags {'attributes':'values'} + // - tags necessary to retain the mapping between the system-tests/weblog request id and the traces/spans + // - error tag with 'testing_end_span_options' message + parentCtx, parentSpan := oteltracer.Start(opentelemetry.ContextWithStartOptions(context.Background(), + tracer.WithSpanID(10000)), parentName, + trace.WithAttributes(tags...)) + parentSpan.SetAttributes(attribute.String("attributes", "values")) + opentelemetry.EndOptions(parentSpan, tracer.WithError(errors.New("testing_end_span_options"))) + + // Child span will have the following traits : + // - tags necessary to retain the mapping between the system-tests/weblog request id and the traces/spans + // - duration of one second + // - span kind of SpanKind - Internal + start := time.Now() + _, childSpan := oteltracer.Start(parentCtx, childName, trace.WithTimestamp(start), trace.WithAttributes(tags...), trace.WithSpanKind(trace.SpanKindInternal)) + 
childSpan.End(trace.WithTimestamp(start.Add(time.Second))) + parentSpan.End() + + w.Write([]byte("OK")) + }) + + //orchestrion:ignore + mux.HandleFunc("/e2e_otel_span/mixed_contrib", func(w http.ResponseWriter, r *http.Request) { + parentName := r.URL.Query().Get("parentName") + + tags := []attribute.KeyValue{} + // We need to propagate the user agent header to retain the mapping between the system-tests/weblog request id + // and the traces/spans that will be generated below, so that we can reference to them in our tests. + // See https://github.com/DataDog/system-tests/blob/2d6ae4d5bf87d55855afd36abf36ee710e7d8b3c/utils/interfaces/_core.py#L156 + userAgent := r.UserAgent() + tags = append(tags, attribute.String("http.useragent", userAgent)) + + if r.URL.Query().Get("shouldIndex") == "1" { + tags = append(tags, + attribute.Int("_dd.filter.kept", 1), + attribute.String("_dd.filter.id", "system_tests_e2e"), + ) + } + + p := opentelemetry.NewTracerProvider() + tracer := p.Tracer("") + otel.SetTracerProvider(p) + otel.SetTextMapPropagator(propagation.TraceContext{}) + defer p.ForceFlush(time.Second, func(ok bool) {}) + + parentCtx, parentSpan := tracer.Start(context.Background(), parentName, trace.WithAttributes(tags...)) + + h := otelhttp.NewHandler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedSpan := trace.SpanFromContext(r.Context()) + // Need to propagate the user agent header to retain the mapping between + // the system-tests/weblog request id and the traces/spans + receivedSpan.SetAttributes(tags...) 
+ if receivedSpan.SpanContext().TraceID() != parentSpan.SpanContext().TraceID() { + log.Fatalln("error in distributed tracing: Datadog OTel API and Otel net/http package span are not connected") + w.WriteHeader(500) + return + } + }), "testOperation") + testServer := httptest.NewServer(h) + defer testServer.Close() + + // Need to propagate the user agent header to retain the mapping between + // the system-tests/weblog request id and the traces/spans + c := http.Client{Transport: otelhttp.NewTransport(nil, otelhttp.WithSpanOptions(trace.WithAttributes(tags...)))} + req, err := http.NewRequestWithContext(parentCtx, http.MethodGet, testServer.URL, nil) + if err != nil { + log.Fatalln(err) + w.WriteHeader(500) + return + } + resp, err := c.Do(req) + _ = resp.Body.Close() // Need to close body to cause otel span to end + if err != nil { + log.Fatalln(err) + w.WriteHeader(500) + return + } + parentSpan.End() + + w.Write([]byte("OK")) + }) + + mux.HandleFunc("/read_file", func(w http.ResponseWriter, r *http.Request) { + path := r.URL.Query().Get("file") + content, err := os.ReadFile(path) + + if err != nil { + log.Fatalln(err) + w.WriteHeader(500) + return + } + w.Write([]byte(content)) + }) + + mux.HandleFunc("/dsm", func(w http.ResponseWriter, r *http.Request) { + var message = "Test DSM Context Propagation" + + integration := r.URL.Query().Get("integration") + if len(integration) == 0 { + w.WriteHeader(422) + w.Write([]byte("missing param 'integration'")) + return + } + + if integration == "kafka" { + queue := r.URL.Query().Get("queue") + if len(queue) == 0 { + w.WriteHeader(422) + w.Write([]byte("missing param 'queue' for kafka dsm")) + return + } + + _, _, err := kafkaProduce(queue, message) + if err != nil { + w.WriteHeader(500) + w.Write([]byte(err.Error())) + return + } + + timeout, err := strconv.ParseInt(r.URL.Query().Get("timeout"), 10, 0) + if err != nil { + timeout = 20 + } + + _, _, err = kafkaConsume(queue, timeout) + if err != nil { + w.WriteHeader(500) + 
w.Write([]byte(err.Error())) + return + } + } + + w.WriteHeader(200) + w.Write([]byte("ok")) + }) + + mux.HandleFunc("/dsm/inject", func(w http.ResponseWriter, r *http.Request) { + topic := r.URL.Query().Get("topic") + if len(topic) == 0 { + w.WriteHeader(422) + w.Write([]byte("missing param 'topic'")) + return + } + intType := r.URL.Query().Get("integration") + if len(intType) == 0 { + w.WriteHeader(422) + w.Write([]byte("missing param 'integration'")) + return + } + + edges := []string{"direction:out", "topic:" + topic, "type:" + intType} + carrier := make(carrier) + ctx := context.Background() + ctx, ok := tracer.SetDataStreamsCheckpoint(ctx, edges...) + if !ok { + w.WriteHeader(422) + w.Write([]byte("failed to create DSM checkpoint")) + return + } + datastreams.InjectToBase64Carrier(ctx, carrier) + + jsonData, err := json.Marshal(carrier) + if err != nil { + w.WriteHeader(422) + w.Write([]byte("failed to convert carrier to JSON")) + return + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + w.Write(jsonData) + }) + + mux.HandleFunc("/dsm/extract", func(w http.ResponseWriter, r *http.Request) { + topic := r.URL.Query().Get("topic") + if len(topic) == 0 { + w.WriteHeader(422) + w.Write([]byte("missing param 'topic'")) + return + } + intType := r.URL.Query().Get("integration") + if len(intType) == 0 { + w.WriteHeader(422) + w.Write([]byte("missing param 'integration'")) + return + } + rawCtx := r.URL.Query().Get("ctx") + if len(rawCtx) == 0 { + w.WriteHeader(422) + w.Write([]byte("missing param 'ctx'")) + return + } + carrier := make(carrier) + err := json.Unmarshal([]byte(rawCtx), &carrier) + if err != nil { + w.WriteHeader(422) + w.Write([]byte("failed to parse JSON")) + return + } + + edges := []string{"direction:in", "topic:" + topic, "type:" + intType} + ctx := datastreams.ExtractFromBase64Carrier(context.Background(), carrier) + _, ok := tracer.SetDataStreamsCheckpoint(ctx, edges...) 
+ if !ok { + w.WriteHeader(422) + w.Write([]byte("failed to create DSM checkpoint")) + return + } + + w.WriteHeader(200) + w.Write([]byte("ok")) + }) + + mux.HandleFunc("/session/new", func(w http.ResponseWriter, r *http.Request) { + sessionID := strconv.Itoa(rand.Int()) + w.Header().Add("Set-Cookie", "session="+sessionID+"; Path=/; Max-Age=3600; Secure; HttpOnly") + }) + + mux.HandleFunc("/session/user", func(w http.ResponseWriter, r *http.Request) { + user := r.URL.Query().Get("sdk_user") + cookie, err := r.Cookie("session") + if err != nil { + w.WriteHeader(500) + w.Write([]byte("missing session cookie")) + } + appsec.TrackUserLoginSuccessEvent(r.Context(), user, map[string]string{}, tracer.WithUserSessionID(cookie.Value)) + }) + + mux.HandleFunc("/requestdownstream", common.Requestdownstream) + mux.HandleFunc("/returnheaders", common.Returnheaders) + + mux.HandleFunc("/rasp/lfi", rasp.LFI) + mux.HandleFunc("/rasp/ssrf", rasp.SSRF) + mux.HandleFunc("/rasp/sqli", rasp.SQLi) + + srv := &http.Server{ + Addr: ":7777", + Handler: mux, + } + + common.InitDatadog() + go grpc.ListenAndServe() + go func() { + if err := srv.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) { + log.Fatal(err) + } + }() + + c := make(chan os.Signal, 1) + signal.Notify(c, syscall.SIGTERM) + <-c + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + if err := srv.Shutdown(ctx); err != nil { + log.Fatalf("HTTP shutdown error: %v", err) + } +} + +type carrier map[string]string + +func (c carrier) Set(key, val string) { + c[key] = val +} + +func (c carrier) ForeachKey(handler func(key, val string) error) error { + for k, v := range c { + if err := handler(k, v); err != nil { + return err + } + } + return nil +} + +//dd:span span.name:child.span +func write(w http.ResponseWriter, _ *http.Request, d []byte) { + w.Write(d) +} + +func headers(w http.ResponseWriter, r *http.Request) { + //Data used for header content is irrelevant here, only header 
presence is checked + w.Header().Set("content-type", "text/plain") + w.Header().Set("content-length", "42") + w.Header().Set("content-language", "en-US") + w.Write([]byte("Hello, headers!")) +} + +func kafkaProduce(topic, message string) (int32, int64, error) { + var server = "kafka:9092" + + cfg := sarama.NewConfig() + cfg.Producer.Return.Successes = true + + producer, err := sarama.NewSyncProducer([]string{server}, cfg) + if err != nil { + return 0, 0, err + } + defer producer.Close() + + msg := &sarama.ProducerMessage{ + Topic: topic, + Partition: 0, + Value: sarama.StringEncoder(message), + } + + partition, offset, err := producer.SendMessage(msg) + if err != nil { + return 0, 0, err + } + + log.Printf("PRODUCER SENT MESSAGE TO (partition offset): %d %d", partition, offset) + return partition, offset, nil +} + +func kafkaConsume(topic string, timeout int64) (string, int, error) { + var server = "kafka:9092" + cfg := sarama.NewConfig() + + consumer, err := sarama.NewConsumer([]string{server}, cfg) + if err != nil { + return "", 0, err + } + defer consumer.Close() + + partitionConsumer, err := consumer.ConsumePartition(topic, 0, sarama.OffsetOldest) + if err != nil { + return "", 0, err + } + defer partitionConsumer.Close() + + timeOutTimer := time.NewTimer(time.Duration(timeout) * time.Second) + defer timeOutTimer.Stop() + log.Printf("CONSUMING MESSAGES from topic: %s", topic) + for { + select { + case receivedMsg := <-partitionConsumer.Messages(): + responseOutput := fmt.Sprintf("Consumed message.\n\tOffset: %s\n\tMessage: %s\n", fmt.Sprint(receivedMsg.Offset), string(receivedMsg.Value)) + log.Print(responseOutput) + return responseOutput, 200, nil + case <-timeOutTimer.C: + timedOutMessage := "TimeOut" + log.Print(timedOutMessage) + return timedOutMessage, 408, nil + } + } +} diff --git a/utils/build/docker/golang/app/net-http/main.go b/utils/build/docker/golang/app/net-http/main.go index ebbfd3dff2..497d05f01e 100644 --- 
a/utils/build/docker/golang/app/net-http/main.go +++ b/utils/build/docker/golang/app/net-http/main.go @@ -32,6 +32,7 @@ import ( "go.opentelemetry.io/otel/propagation" "go.opentelemetry.io/otel/trace" oteltrace "go.opentelemetry.io/otel/trace" + otelbaggage "go.opentelemetry.io/otel/baggage" "gopkg.in/DataDog/dd-trace-go.v1/appsec" httptrace "gopkg.in/DataDog/dd-trace-go.v1/contrib/net/http" @@ -537,6 +538,79 @@ func main() { w.Write([]byte("ok")) }) + mux.HandleFunc("/otel_drop_in_default_propagator_extract", func(w http.ResponseWriter, r *http.Request) { + // Differing from other languages, the user must set the text map propagator because dd-trace-go + // doesn't automatically instrument at runtime (not including Orchestrion) + otel.SetTextMapPropagator(propagation.NewCompositeTextMapPropagator(propagation.TraceContext{}, propagation.Baggage{})) + + httpCarrier := HttpCarrier{header: r.Header} + + propagator := otel.GetTextMapPropagator() + ctx := propagator.Extract(r.Context(), httpCarrier) + + spanContext := oteltrace.SpanContextFromContext(ctx) + baggage := otelbaggage.FromContext(ctx) + + base := 16 + bitSize := 64 + result := make(map[string]any, 4) + + num, err := strconv.ParseInt(spanContext.TraceID().String()[16:], base, bitSize) + if err == nil { + result["trace_id"] = num + } + + num, err = strconv.ParseInt(spanContext.SpanID().String(), base, bitSize) + if err == nil { + result["span_id"] = num + } + + result["tracestate"] = spanContext.TraceState().String() + result["baggage"] = baggage.String() + + jsonData, err := json.Marshal(result) + if err != nil { + w.WriteHeader(422) + w.Write([]byte("failed to convert carrier to JSON")) + return + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + w.Write(jsonData) + }) + + mux.HandleFunc("/otel_drop_in_default_propagator_inject", func(w http.ResponseWriter, r *http.Request) { + // Differing from other languages, the user must set the text map propagator because dd-trace-go + // 
doesn't automatically instrument at runtime (not including Orchestrion) + otel.SetTextMapPropagator(propagation.NewCompositeTextMapPropagator(propagation.TraceContext{}, propagation.Baggage{})) + + ctx := context.Background() + p := ddotel.NewTracerProvider() + tracer := p.Tracer("") + otel.SetTracerProvider(p) + + _, span := tracer.Start(ddotel.ContextWithStartOptions(ctx), "main") + newCtx := oteltrace.ContextWithSpan(ctx, span) + + propagator := otel.GetTextMapPropagator() + mapCarrier := make(MapCarrier) + propagator.Inject(newCtx, mapCarrier) + + jsonData, err := json.Marshal(mapCarrier) + span.End() + + if err != nil { + w.WriteHeader(422) + w.Write([]byte("failed to convert carrier to JSON")) + return + } + + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + w.Write(jsonData) + }) + mux.HandleFunc("/session/new", func(w http.ResponseWriter, r *http.Request) { sessionID := strconv.Itoa(rand.Int()) w.Header().Add("Set-Cookie", "session="+sessionID+"; Path=/; Max-Age=3600; Secure; HttpOnly") @@ -598,6 +672,44 @@ func (c carrier) ForeachKey(handler func(key, val string) error) error { return nil } +type MapCarrier map[string]string + +func (c MapCarrier) Get(key string) string { + return c[key] +} + +func (c MapCarrier) Set(key, val string) { + c[key] = val +} + +func (c MapCarrier) Keys() []string { + keys := make([]string, 0, len(c)) + for k := range c { + keys = append(keys, k) + } + return keys +} + +type HttpCarrier struct { + header http.Header +} + +func (c HttpCarrier) Get(key string) string { + return c.header.Get(key) +} + +func (c HttpCarrier) Set(key, val string) { + c.header.Set(key, val) +} + +func (c HttpCarrier) Keys() []string { + keys := make([]string, 0, len(c.header)) + for k := range c.header { + keys = append(keys, k) + } + return keys +} + func write(w http.ResponseWriter, r *http.Request, d []byte) { span, _ := ddtracer.StartSpanFromContext(r.Context(), "child.span") defer span.Finish() diff --git 
a/utils/build/docker/golang/install_ddtrace.sh b/utils/build/docker/golang/install_ddtrace.sh index ab12a42b72..7a254255eb 100755 --- a/utils/build/docker/golang/install_ddtrace.sh +++ b/utils/build/docker/golang/install_ddtrace.sh @@ -9,6 +9,8 @@ if [ -e "/binaries/dd-trace-go" ]; then elif [ -e "/binaries/golang-load-from-go-get" ]; then echo "Install from go get -d $(cat /binaries/golang-load-from-go-get)" go get -v -d "$(cat /binaries/golang-load-from-go-get)" + # Pin that version with a `replace` directive so nothing else can override it. + go mod edit -replace "gopkg.in/DataDog/dd-trace-go.v1=$(cat /binaries/golang-load-from-go-get)" else echo "Installing production dd-trace-version" @@ -20,10 +22,9 @@ go mod tidy # Read the library version out of the version.go file lib_mod_dir=$(go list -f '{{.Dir}}' -m gopkg.in/DataDog/dd-trace-go.v1) -version=$(sed -nrE 's#.*"v(.*)".*#\1#p' $lib_mod_dir/internal/version/version.go) # Parse the version string content "v.*" -echo $version > SYSTEM_TESTS_LIBRARY_VERSION +version=$(sed -nrE 's#.*"v(.*)".*#\1#p' "${lib_mod_dir}/internal/version/version.go") # Parse the version string content "v.*" +echo "${version}" > SYSTEM_TESTS_LIBRARY_VERSION -rules_mod_dir=$(go list -f '{{.Dir}}' -m github.com/DataDog/appsec-internal-go) - - -echo "dd-trace-go version: $(cat /app/SYSTEM_TESTS_LIBRARY_VERSION)" +# Output the version of dd-trace-go (per go.mod, as well as the built-in tag). 
+echo "dd-trace-go go.mod version: $(go list -f '{{ .Version }}' -m gopkg.in/DataDog/dd-trace-go.v1)" +echo "dd-trace-go tag: ${version}" diff --git a/utils/build/docker/golang/install_orchestrion.sh b/utils/build/docker/golang/install_orchestrion.sh new file mode 100755 index 0000000000..88d9f303c6 --- /dev/null +++ b/utils/build/docker/golang/install_orchestrion.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +set -euv + +if [ -e "/binaries/orchestrion" ]; then + echo "Install from folder /binaries/orchestrion" + go mod edit -replace github.com/DataDog/orchestrion=/binaries/orchestrion + go -C /binaries/orchestrion build -o "$(go env GOPATH)/bin/orchestrion" . +elif [ -e "/binaries/orchestrion-load-from-go-get" ]; then + echo "Install from go get -d $(cat /binaries/orchestrion-load-from-go-get)" + go install "$(cat /binaries/orchestrion-load-from-go-get)" +else + echo "Installing production orchestrion" + go install github.com/DataDog/orchestrion@latest +fi + +orchestrion pin + +output="$(orchestrion version)" +version="${output#"orchestrion "}" +echo "$version" > SYSTEM_TESTS_ORCHESTRION_VERSION + +echo "orchestrion version: $(cat /app/SYSTEM_TESTS_ORCHESTRION_VERSION)" diff --git a/utils/build/docker/golang/net-http-orchestrion.Dockerfile b/utils/build/docker/golang/net-http-orchestrion.Dockerfile new file mode 100644 index 0000000000..a1f32026b9 --- /dev/null +++ b/utils/build/docker/golang/net-http-orchestrion.Dockerfile @@ -0,0 +1,42 @@ +FROM golang:1.22 AS build + +# print important lib versions +RUN go version && curl --version + +# download go dependencies +RUN mkdir -p /app +COPY utils/build/docker/golang/app/go.mod utils/build/docker/golang/app/go.sum /app/ +WORKDIR /app +RUN go mod download && go mod verify + +# copy the app code +COPY utils/build/docker/golang/app /app + +# download the proper tracer version +COPY utils/build/docker/golang/install_*.sh binaries* /binaries/ +RUN /binaries/install_ddtrace.sh && /binaries/install_orchestrion.sh + +RUN orchestrion go 
build -v -tags appsec,orchestrion -o weblog ./net-http-orchestrion + +# ============================================================================== + +FROM golang:1.22 + +COPY --from=build /app/weblog /app/weblog +COPY --from=build /app/SYSTEM_TESTS_LIBRARY_VERSION /app/SYSTEM_TESTS_LIBRARY_VERSION + +WORKDIR /app + +RUN printf "#!/bin/bash\nexec ./weblog" > app.sh +RUN chmod +x app.sh +CMD ["./app.sh"] + +# Datadog setup +ENV DD_LOGGING_RATE="0" \ + DD_TRACE_HEADER_TAGS="user-agent" \ + DD_DATA_STREAMS_ENABLED="true" \ + # Set up the environment so the profiler starts appropriately... + DD_ENV="system-tests" \ + DD_SERVICE="weblog" \ + DD_VERSION="1.0" \ + DD_PROFILING_ENABLED="true" diff --git a/utils/build/docker/java/spring-boot/src/main/java/com/datadoghq/system_tests/springboot/App.java b/utils/build/docker/java/spring-boot/src/main/java/com/datadoghq/system_tests/springboot/App.java index 0767b37c0d..4bbabf87ff 100644 --- a/utils/build/docker/java/spring-boot/src/main/java/com/datadoghq/system_tests/springboot/App.java +++ b/utils/build/docker/java/spring-boot/src/main/java/com/datadoghq/system_tests/springboot/App.java @@ -32,13 +32,19 @@ import org.springframework.web.server.ResponseStatusException; +import io.opentelemetry.api.baggage.Baggage; import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.SpanBuilder; import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.propagation.ContextPropagators; +import io.opentelemetry.context.propagation.TextMapGetter; +import io.opentelemetry.context.propagation.TextMapPropagator; import io.opentelemetry.context.propagation.TextMapSetter; import io.opentelemetry.context.Scope; +import io.opentelemetry.context.Context; import io.opentracing.Span; import io.opentracing.util.GlobalTracer; import ognl.Ognl; @@ -1094,6 +1100,60 
@@ public String otelDropInSpan() { return "OK"; } + @RequestMapping("/otel_drop_in_default_propagator_extract") + public String otelDropInDefaultPropagatorExtract(@RequestHeader Map headers) throws com.fasterxml.jackson.core.JsonProcessingException { + ContextPropagators propagators = GlobalOpenTelemetry.getPropagators(); + TextMapPropagator textMapPropagator = propagators.getTextMapPropagator(); + + Context extractedContext = textMapPropagator.extract(Context.current(), headers, new TextMapGetter>() { + @Override + public Iterable keys(Map map) { + return map.keySet(); + } + + @Override + public String get(Map map, String key) { + return map.get(key); + } + }); + + io.opentelemetry.api.trace.SpanContext spanContext = io.opentelemetry.api.trace.Span.fromContext(extractedContext).getSpanContext(); + Long ddTraceId = Long.parseLong(spanContext.getTraceId().substring(16), 16); + Long ddSpanId = Long.parseLong(spanContext.getSpanId(), 16); + + Map map = new HashMap<>(); + map.put("trace_id", ddTraceId); + map.put("span_id", ddSpanId); + map.put("tracestate", spanContext.getTraceState().asMap().toString()); + map.put("baggage", Baggage.fromContext(extractedContext).asMap().toString()); + + // Convert headers map to JSON string + ObjectMapper mapper = new ObjectMapper(); + String jsonString = mapper.writeValueAsString(map); + + return jsonString; + } + + @RequestMapping("/otel_drop_in_default_propagator_inject") + public String otelDropInDefaultPropagatorInject() throws com.fasterxml.jackson.core.JsonProcessingException { + ContextPropagators propagators = GlobalOpenTelemetry.getPropagators(); + TextMapPropagator textMapPropagator = propagators.getTextMapPropagator(); + + Map map = new HashMap<>(); + textMapPropagator.inject(Context.current(), map, new TextMapSetter>() { + @Override + public void set(Map map, String key, String value) { + map.put(key, value); + } + }); + + // Convert headers map to JSON string + ObjectMapper mapper = new ObjectMapper(); + String 
jsonString = mapper.writeValueAsString(map); + + return jsonString; + } + @GetMapping(value = "/requestdownstream") public String requestdownstream(HttpServletResponse response) throws IOException { String url = "http://localhost:7777/returnheaders"; diff --git a/utils/build/docker/nodejs/express/app.js b/utils/build/docker/nodejs/express/app.js index 2ac3da2728..1e573030df 100644 --- a/utils/build/docker/nodejs/express/app.js +++ b/utils/build/docker/nodejs/express/app.js @@ -12,6 +12,7 @@ const fs = require('fs') const passport = require('passport') const crypto = require('crypto') const pino = require('pino') +const api = require('@opentelemetry/api') const iast = require('./iast') const dsm = require('./dsm') @@ -429,6 +430,29 @@ app.get('/db', async (req, res) => { } }) +app.get('/otel_drop_in_default_propagator_extract', (req, res) => { + const ctx = api.propagation.extract(api.context.active(), req.headers) + const spanContext = api.trace.getSpan(ctx).spanContext() + + const result = {} + result.trace_id = parseInt(spanContext.traceId.substring(16), 16) + result.span_id = parseInt(spanContext.spanId, 16) + result.tracestate = spanContext.traceState.serialize() + // result.baggage = api.propagation.getBaggage(spanContext).toString() + + res.json(result) +}) + +app.get('/otel_drop_in_default_propagator_inject', (req, res) => { + const tracer = api.trace.getTracer('my-application', '0.1.0') + const span = tracer.startSpan('main') + const result = {} + + api.propagation.inject( + api.trace.setSpanContext(api.ROOT_CONTEXT, span.spanContext()), result, api.defaultTextMapSetter) + res.json(result) +}) + app.post('/shell_execution', (req, res) => { const options = { shell: !!req?.body?.options?.shell } const reqArgs = req?.body?.args diff --git a/utils/build/docker/nodejs/express/package-lock.json b/utils/build/docker/nodejs/express/package-lock.json index 32cf5b06e4..30bb493c07 100644 --- a/utils/build/docker/nodejs/express/package-lock.json +++ 
b/utils/build/docker/nodejs/express/package-lock.json @@ -9,6 +9,8 @@ "version": "1.0.0", "license": "ISC", "dependencies": { + "@opentelemetry/api": "^1.9.0", + "@opentelemetry/core": "^1.30.1", "amqplib": "^0.10.3", "aws-sdk": "^2.1530.0", "axios": "1.2.3", @@ -544,6 +546,36 @@ "node": ">= 8" } }, + "node_modules/@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/@opentelemetry/core": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.30.1.tgz", + "integrity": "sha512-OOCM2C/QIURhJMuKaekP3TRBxBKxG/TWWA0TL2J6nXUtDnuCtccy49LUJF8xPFXMX+0LMcxFpCo8M9cGY1W6rQ==", + "dependencies": { + "@opentelemetry/semantic-conventions": "1.28.0" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "@opentelemetry/api": ">=1.0.0 <1.10.0" + } + }, + "node_modules/@opentelemetry/core/node_modules/@opentelemetry/semantic-conventions": { + "version": "1.28.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.28.0.tgz", + "integrity": "sha512-lp4qAiMTD4sNWW4DbKLBkfiMZ4jbAboJIGOQr5DvciMRI494OapieI9qiODpOt0XBr1LjIDy1xAGAnVs5supTA==", + "engines": { + "node": ">=14" + } + }, "node_modules/@sinonjs/commons": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", @@ -5758,6 +5790,26 @@ "fastq": "^1.6.0" } }, + "@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==" + }, + "@opentelemetry/core": { + "version": "1.30.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-1.30.1.tgz", + "integrity": 
"sha512-OOCM2C/QIURhJMuKaekP3TRBxBKxG/TWWA0TL2J6nXUtDnuCtccy49LUJF8xPFXMX+0LMcxFpCo8M9cGY1W6rQ==", + "requires": { + "@opentelemetry/semantic-conventions": "1.28.0" + }, + "dependencies": { + "@opentelemetry/semantic-conventions": { + "version": "1.28.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.28.0.tgz", + "integrity": "sha512-lp4qAiMTD4sNWW4DbKLBkfiMZ4jbAboJIGOQr5DvciMRI494OapieI9qiODpOt0XBr1LjIDy1xAGAnVs5supTA==" + } + } + }, "@sinonjs/commons": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", diff --git a/utils/build/docker/nodejs/express/package.json b/utils/build/docker/nodejs/express/package.json index df490866ae..a20e37761b 100644 --- a/utils/build/docker/nodejs/express/package.json +++ b/utils/build/docker/nodejs/express/package.json @@ -10,6 +10,8 @@ "author": "", "license": "ISC", "dependencies": { + "@opentelemetry/api": "^1.9.0", + "@opentelemetry/core": "^1.30.1", "amqplib": "^0.10.3", "aws-sdk": "^2.1530.0", "axios": "1.2.3", diff --git a/utils/build/docker/php/app.sh b/utils/build/docker/php/app.sh new file mode 100644 index 0000000000..d513691838 --- /dev/null +++ b/utils/build/docker/php/app.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +#Nothing to do here. 
This file is to allow --sleep execution diff --git a/utils/build/docker/php/common/php.ini b/utils/build/docker/php/common/php.ini index 0b5d91ee3b..83d41153e6 100644 --- a/utils/build/docker/php/common/php.ini +++ b/utils/build/docker/php/common/php.ini @@ -10,6 +10,7 @@ extension=mysqli.so extension=pdo.so extension=pdo_mysql.so extension=pdo_pgsql.so +extension=simplexml.so error_log=/var/log/system-tests/php_error.log error_reporting=2147483647 display_errors=0 diff --git a/utils/build/docker/php/common/rasp/lfi.php b/utils/build/docker/php/common/rasp/lfi.php index fbf6fe7e2d..5b13daec6f 100644 --- a/utils/build/docker/php/common/rasp/lfi.php +++ b/utils/build/docker/php/common/rasp/lfi.php @@ -1 +1,23 @@ - 4.0" diff --git a/utils/build/docker/ruby/rails70/Gemfile.lock b/utils/build/docker/ruby/rails70/Gemfile.lock index b89e5065f2..9f55d48399 100644 --- a/utils/build/docker/ruby/rails70/Gemfile.lock +++ b/utils/build/docker/ruby/rails70/Gemfile.lock @@ -168,6 +168,18 @@ GEM racc (~> 1.4) nokogiri (1.13.1-x86_64-linux) racc (~> 1.4) + opentelemetry-api (1.4.0) + opentelemetry-common (0.21.0) + opentelemetry-api (~> 1.0) + opentelemetry-registry (0.3.1) + opentelemetry-api (~> 1.1) + opentelemetry-sdk (1.6.0) + opentelemetry-api (~> 1.1) + opentelemetry-common (~> 0.20) + opentelemetry-registry (~> 0.2) + opentelemetry-semantic_conventions + opentelemetry-semantic_conventions (1.10.1) + opentelemetry-api (~> 1.0) orm_adapter (0.5.0) pry (0.14.1) coderay (~> 1.1) @@ -257,11 +269,11 @@ GEM zeitwerk (2.5.4) PLATFORMS - aarch64-linux-gnu + aarch64-linux arm64-darwin ruby x86_64-darwin - x86_64-linux-gnu + x86_64-linux DEPENDENCIES bootsnap @@ -271,6 +283,8 @@ DEPENDENCIES devise importmap-rails jbuilder + opentelemetry-api + opentelemetry-sdk pry puma (~> 5.0) rails (~> 7.0.1) diff --git a/utils/build/docker/ruby/rails70/app/controllers/system_test_controller.rb b/utils/build/docker/ruby/rails70/app/controllers/system_test_controller.rb index 53ec7001e5..9d8fd1bb76 
100644 --- a/utils/build/docker/ruby/rails70/app/controllers/system_test_controller.rb +++ b/utils/build/docker/ruby/rails70/app/controllers/system_test_controller.rb @@ -2,6 +2,7 @@ require 'datadog/kit/appsec/events' require 'kafka' +require 'opentelemetry' class SystemTestController < ApplicationController skip_before_action :verify_authenticity_token @@ -250,4 +251,34 @@ def return_headers end render json: JSON.generate(request_headers), content_type: 'application/json' end + + def otel_drop_in_default_propagator_extract + # The extract operation succeeds with a custom OpenTelemetry propagator, but not with the default one. + # To see this, uncomment the next line, and use that propagator to do the context extraction + # propagator = OpenTelemetry::Context::Propagation::CompositeTextMapPropagator.compose_propagators([OpenTelemetry::Trace::Propagation::TraceContext.text_map_propagator, OpenTelemetry::Baggage::Propagation.text_map_propagator]) + context = OpenTelemetry.propagation.extract(request.headers) + + span_context = OpenTelemetry::Trace.current_span(context).context + + baggage = OpenTelemetry::Baggage.raw_entries() + baggage_str = "" + baggage.each_pair do |key, value| + baggage_str << value << ',' + end + baggage_str.chop! 
+ + result = {} + result["trace_id"] = span_context.hex_trace_id.from(16).to_i(16) + result["span_id"] = span_context.hex_span_id.to_i(16) + result["tracestate"] = span_context.tracestate.to_s + result["baggage"] = baggage_str + + render json: JSON.generate(result), content_type: 'application/json' + end + + def otel_drop_in_default_propagator_inject + headers = {} + OpenTelemetry.propagation.inject(headers) + render json: JSON.generate(headers), content_type: 'application/json' + end end diff --git a/utils/build/docker/ruby/rails70/config/initializers/datadog.rb b/utils/build/docker/ruby/rails70/config/initializers/datadog.rb index e7079ecc33..08ecc3be10 100644 --- a/utils/build/docker/ruby/rails70/config/initializers/datadog.rb +++ b/utils/build/docker/ruby/rails70/config/initializers/datadog.rb @@ -1,7 +1,13 @@ +require 'opentelemetry/sdk' +require 'datadog/opentelemetry' + Datadog.configure do |c| c.diagnostics.debug = true end +::OpenTelemetry::SDK.configure do |_c| +end + # Send non-web init event if defined?(Datadog::Tracing) diff --git a/utils/build/docker/ruby/rails70/config/routes.rb b/utils/build/docker/ruby/rails70/config/routes.rb index 5624e4b3e1..46da54cdfb 100644 --- a/utils/build/docker/ruby/rails70/config/routes.rb +++ b/utils/build/docker/ruby/rails70/config/routes.rb @@ -48,6 +48,9 @@ get '/requestdownstream' => 'system_test#request_downstream' get '/returnheaders' => 'system_test#return_headers' + get '/otel_drop_in_default_propagator_extract' => 'system_test#otel_drop_in_default_propagator_extract' + get '/otel_drop_in_default_propagator_inject' => 'system_test#otel_drop_in_default_propagator_inject' + get '/debugger/init' => 'debugger#init' get '/debugger/pii' => 'debugger#pii' get '/debugger/log' => 'debugger#log_probe' diff --git a/utils/build/docker/runner.Dockerfile b/utils/build/docker/runner.Dockerfile index 9582f9e2c9..bb3e27e0fc 100644 --- a/utils/build/docker/runner.Dockerfile +++ b/utils/build/docker/runner.Dockerfile @@ -3,8 +3,11 
@@ FROM python:3.12 RUN mkdir /app WORKDIR /app +RUN mkdir tests/ manifests/ + COPY utils/build/docker/python/parametric/requirements.txt utils/build/docker/python/parametric/requirements.txt COPY requirements.txt . +COPY pyproject.toml . COPY build.sh . COPY utils/build/build.sh utils/build/build.sh RUN mkdir -p /app/utils/build/docker && ./build.sh -i runner @@ -21,7 +24,6 @@ COPY manifests /app/manifests # toplevel things COPY conftest.py /app/ -COPY pyproject.toml /app/ COPY run.sh /app/ CMD ./run.sh diff --git a/utils/interfaces/_core.py b/utils/interfaces/_core.py index fdb47919fa..ae4e51841a 100644 --- a/utils/interfaces/_core.py +++ b/utils/interfaces/_core.py @@ -6,7 +6,7 @@ import json from os import listdir -from os.path import isfile, join +from os.path import join from pathlib import Path import re import shutil @@ -109,7 +109,7 @@ def check_deserialization_errors(self): def load_data_from_logs(self): for filename in sorted(listdir(self.log_folder)): file_path = join(self.log_folder, filename) - if isfile(file_path): + if Path(file_path).is_file(): with open(file_path, encoding="utf-8") as f: data = json.load(f) diff --git a/utils/interfaces/_logs.py b/utils/interfaces/_logs.py index cff94e38db..905f850443 100644 --- a/utils/interfaces/_logs.py +++ b/utils/interfaces/_logs.py @@ -5,8 +5,9 @@ """Check data that are sent to logs file on weblog""" import json -import re import os +from pathlib import Path +import re from utils._context.core import context from utils.tools import logger @@ -219,7 +220,7 @@ def _get_files(self): for f in files: filename = os.path.join(f"{context.scenario.host_log_folder}/docker/weblog/logs/", f) - if os.path.isfile(filename) and re.search(r"dotnet-tracer-managed-dotnet-\d+(_\d+)?.log", filename): + if Path(filename).is_file() and re.search(r"dotnet-tracer-managed-dotnet-\d+(_\d+)?.log", filename): result.append(filename) return result diff --git a/utils/interfaces/_schemas_validators.py 
b/utils/interfaces/_schemas_validators.py index 3f9055647f..d5b87f98a7 100644 --- a/utils/interfaces/_schemas_validators.py +++ b/utils/interfaces/_schemas_validators.py @@ -117,9 +117,9 @@ def _main(): for folder in folders: path = f"{folder}/interfaces/{interface}" - if not os.path.exists(path): + if not Path(path).exists(): continue - files = [file for file in os.listdir(path) if os.path.isfile(os.path.join(path, file))] + files = [file for file in os.listdir(path) if Path(os.path.join(path, file)).is_file()] for file in files: with open(os.path.join(path, file), encoding="utf-8") as f: data = json.load(f) diff --git a/utils/onboarding/debug_vm.py b/utils/onboarding/debug_vm.py index 6b794d6225..c913edd2f1 100644 --- a/utils/onboarding/debug_vm.py +++ b/utils/onboarding/debug_vm.py @@ -113,7 +113,9 @@ def _print_app_tracer_host_logs(sshClient, file_to_write): def _print_app_tracer_host_dotnet_logs(sshClient, file_to_write): """App tracer logs for dotnet (dotnet tracer doesn't write debug tracer in stdout)""" - file_to_write_dotnet = os.path.splitext(file_to_write)[0] + "_dotnet.log" + path = Path(file_to_write) + root = path.parent / path.stem + file_to_write_dotnet = f"{root}_dotnet.log" _, stdout_dotnet, _ = sshClient.exec_command("sudo find /var/log/datadog/dotnet/ -type f | xargs tail -n +1") _write_to_debug_file(stdout_dotnet, file_to_write_dotnet) diff --git a/utils/onboarding/weblog_interface.py b/utils/onboarding/weblog_interface.py index 84e4b0e1f8..ff137ed74f 100644 --- a/utils/onboarding/weblog_interface.py +++ b/utils/onboarding/weblog_interface.py @@ -1,6 +1,7 @@ import time from random import randint import os +from pathlib import Path import requests @@ -56,7 +57,7 @@ def make_internal_get_request(stdin_file, vm_port): sleep 1 done""" script_name = "request_weblog.sh" - shared_folder = os.path.dirname(os.path.abspath(stdin_file)) + shared_folder = Path(Path(stdin_file).resolve()).parent # Write the script in the shared folder with 
open(os.path.join(shared_folder, script_name), "w", encoding="utf-8") as file: @@ -69,9 +70,9 @@ def make_internal_get_request(stdin_file, vm_port): # Wait for the script to finish start = time.time() - while os.stat(stdin_file).st_size != 0 and time.time() - start < (timeout + 5): + while Path(stdin_file).stat().st_size != 0 and time.time() - start < (timeout + 5): time.sleep(1) - if os.stat(stdin_file).st_size != 0: + if Path(stdin_file).stat().st_size != 0: raise TimeoutError("Timed out waiting for weblog ready") return generated_uuid diff --git a/utils/scripts/compute-workflow-parameters.py b/utils/scripts/compute-workflow-parameters.py index 19a5ae5cc2..74d43bddc0 100644 --- a/utils/scripts/compute-workflow-parameters.py +++ b/utils/scripts/compute-workflow-parameters.py @@ -1,6 +1,6 @@ +from collections import defaultdict import argparse import json -import os from utils._context._scenarios import get_all_scenarios, ScenarioGroup @@ -59,7 +59,7 @@ def get_endtoend_weblogs(library, ci_environment: str) -> list[str]: weblogs = { "cpp": ["nginx"], "dotnet": ["poc", "uds"], - "golang": ["chi", "echo", "gin", "net-http", "uds-echo"], + "golang": ["chi", "echo", "gin", "net-http", "uds-echo", "net-http-orchestrion"], "java": [ "akka-http", "jersey-grizzly2", @@ -114,27 +114,38 @@ def get_opentelemetry_weblogs(library) -> list[str]: return weblogs[library] -def main(language: str, scenarios: str, groups: str, ci_environment: str) -> None: - scenario_map = get_github_workflow_map(scenarios.split(","), groups.split(",")) +def _print_output(result: dict[str, dict], output_format: str) -> None: + if output_format == "github": + for workflow_name, workflow in result.items(): + for parameter, value in workflow.items(): + print(f"{workflow_name}_{parameter}={json.dumps(value)}") + else: + raise ValueError(f"Invalid format: {output_format}") - for github_workflow, scenario_list in scenario_map.items(): - print(f"{github_workflow}_scenarios={json.dumps(scenario_list)}") - 
endtoend_weblogs = get_endtoend_weblogs(language, ci_environment) - print(f"endtoend_weblogs={json.dumps(endtoend_weblogs)}") +def main( + language: str, scenarios: str, groups: str, parametric_job_count: int, ci_environment: str, output_format: str +) -> None: + result = defaultdict(dict) + # this data structure is a dict where: + # the key is the workflow identifier + # the value is also a dict, where the key/value pair is the parameter name/value. + scenario_map = get_github_workflow_map(scenarios.split(","), groups.split(",")) - graphql_weblogs = get_graphql_weblogs(language) - print(f"graphql_weblogs={json.dumps(graphql_weblogs)}") + for github_workflow, scenario_list in scenario_map.items(): + result[github_workflow]["scenarios"] = scenario_list - opentelemetry_weblogs = get_opentelemetry_weblogs(language) - print(f"opentelemetry_weblogs={json.dumps(opentelemetry_weblogs)}") + result["endtoend"]["weblogs"] = get_endtoend_weblogs(language, ci_environment) + result["graphql"]["weblogs"] = get_graphql_weblogs(language) + result["opentelemetry"]["weblogs"] = get_opentelemetry_weblogs(language) + result["parametric"]["job_count"] = parametric_job_count + result["parametric"]["job_matrix"] = list(range(1, parametric_job_count + 1)) - _experimental_parametric_job_count = int(os.environ.get("_EXPERIMENTAL_PARAMETRIC_JOB_COUNT", "1")) - print(f"_experimental_parametric_job_matrix={list(range(1, _experimental_parametric_job_count + 1))!s}") + _print_output(result, output_format) if __name__ == "__main__": - parser = argparse.ArgumentParser(prog="get-github-parameters", description="Get scenarios and weblog to run") + parser = argparse.ArgumentParser(prog="get-github-parameters", description="Get scenarios and weblogs to run") parser.add_argument( "language", type=str, @@ -142,10 +153,31 @@ def main(language: str, scenarios: str, groups: str, ci_environment: str) -> Non choices=["cpp", "dotnet", "python", "ruby", "golang", "java", "nodejs", "php"], ) + 
parser.add_argument( + "--format", + "-f", + type=str, + help="Select the output format", + choices=["github"], + default="github", + ) + parser.add_argument("--scenarios", "-s", type=str, help="Scenarios to run", default="") parser.add_argument("--groups", "-g", type=str, help="Scenario groups to run", default="") + + # workflow specific parameters + parser.add_argument("--parametric-job-count", type=int, help="How many jobs must run parametric scenario", default=1) + + # Misc parser.add_argument("--ci-environment", type=str, help="Used internally in system-tests CI", default="custom") args = parser.parse_args() - main(language=args.language, scenarios=args.scenarios, groups=args.groups, ci_environment=args.ci_environment) + main( + language=args.language, + scenarios=args.scenarios, + groups=args.groups, + ci_environment=args.ci_environment, + output_format=args.format, + parametric_job_count=args.parametric_job_count, + ) diff --git a/utils/scripts/compute_impacted_scenario.py b/utils/scripts/compute_impacted_scenario.py index 2ce3c7015e..077aa5e6c4 100644 --- a/utils/scripts/compute_impacted_scenario.py +++ b/utils/scripts/compute_impacted_scenario.py @@ -41,7 +41,7 @@ def handle_labels(self, labels: list[str]) -> None: if "run-open-telemetry-scenarios" in labels: self.add_scenario_group(ScenarioGroup.OPEN_TELEMETRY.value) if "run-parametric-scenario" in labels: - self.add_scenario_group(ScenarioGroup.PARAMETRIC.value) + self.add_scenario(scenarios.parametric.name) if "run-graphql-scenarios" in labels: self.add_scenario_group(ScenarioGroup.GRAPHQL.value) if "run-docker-ssi-scenarios" in labels: @@ -140,7 +140,7 @@ def main() -> None: r"\.circleci/.*": None, # nothing to do r"\.vscode/.*": None, # nothing to do ## .github folder - r"\.github/workflows/run-parametric\.yml": ScenarioGroup.PARAMETRIC, + r"\.github/workflows/run-parametric\.yml": scenarios.parametric, r"\.github/workflows/run-lib-injection\.yml": ScenarioGroup.LIB_INJECTION, 
r"\.github/workflows/run-docker-ssi\.yml": ScenarioGroup.DOCKER_SSI, r"\.github/workflows/run-graphql\.yml": ScenarioGroup.GRAPHQL, @@ -166,10 +166,10 @@ def main() -> None: r"utils/_context/_scenarios/auto_injection\.py": None, r"utils/_context/virtual_machine\.py": None, #### Parametric case - r"utils/build/docker/\w+/parametric/.*": ScenarioGroup.PARAMETRIC, - r"utils/_context/_scenarios/parametric\.py": ScenarioGroup.PARAMETRIC, - r"utils/parametric/.*": ScenarioGroup.PARAMETRIC, - r"utils/scripts/parametric/.*": ScenarioGroup.PARAMETRIC, + r"utils/build/docker/\w+/parametric/.*": scenarios.parametric, + r"utils/_context/_scenarios/parametric\.py": scenarios.parametric, + r"utils/parametric/.*": scenarios.parametric, + r"utils/scripts/parametric/.*": scenarios.parametric, #### Docker SSI case r"utils/docker_ssi/.*": ScenarioGroup.DOCKER_SSI, ### other scenarios def diff --git a/utils/scripts/get-nightly-logs.py b/utils/scripts/get-nightly-logs.py index a239b0ea8d..f9984c678f 100644 --- a/utils/scripts/get-nightly-logs.py +++ b/utils/scripts/get-nightly-logs.py @@ -2,6 +2,7 @@ import logging import io import os +from pathlib import Path import sys import tarfile from typing import Any @@ -74,7 +75,7 @@ def download_artifact(session: requests.Session, artifact: dict, output_dir: str z.extractall(output_dir) for file in os.listdir(output_dir): - if file.endswith(".tar.gz") and os.path.isfile(os.path.join(output_dir, file)): + if file.endswith(".tar.gz") and Path(os.path.join(output_dir, file)).is_file(): with tarfile.open(os.path.join(output_dir, file), "r:gz") as t: t.extractall(output_dir, filter=lambda tar_info, _: tar_info) diff --git a/utils/scripts/load-binary.sh b/utils/scripts/load-binary.sh index cad0eb4f85..07555895b0 100755 --- a/utils/scripts/load-binary.sh +++ b/utils/scripts/load-binary.sh @@ -221,6 +221,9 @@ elif [ "$TARGET" = "golang" ]; then echo "Using ghcr.io/datadog/dd-trace-go/service-extensions-callout:dev" echo 
"ghcr.io/datadog/dd-trace-go/service-extensions-callout:dev" > golang-service-extensions-callout-image + echo "Using github.com/DataDog/orchestrion@main" + echo "github.com/DataDog/orchestrion@main" > orchestrion-load-from-go-get + elif [ "$TARGET" = "cpp" ]; then assert_version_is_dev # get_circleci_artifact "gh/DataDog/dd-opentracing-cpp" "build_test_deploy" "build" "TBD" diff --git a/utils/scripts/markdown_logs.py b/utils/scripts/markdown_logs.py index ce40afec9d..a69d2fb1b3 100644 --- a/utils/scripts/markdown_logs.py +++ b/utils/scripts/markdown_logs.py @@ -1,6 +1,7 @@ -import os -import json import collections +import json +import os +from pathlib import Path def table_row(*args: list[str]) -> None: @@ -12,7 +13,7 @@ def main() -> None: all_outcomes = {"passed": "βœ…", "xpassed": "πŸ‡", "skipped": "⏸️", "failed": "❌"} for x in os.listdir("."): - if x.startswith("logs") and os.path.isfile(f"{x}/report.json"): + if x.startswith("logs") and Path(f"{x}/report.json").is_file(): result[x] = collections.defaultdict(int) with open(f"{x}/report.json") as f: data = json.load(f) diff --git a/utils/scripts/merge_gitlab_aws_pipelines.py b/utils/scripts/merge_gitlab_aws_pipelines.py index 0b1411bf0e..047bdb8991 100644 --- a/utils/scripts/merge_gitlab_aws_pipelines.py +++ b/utils/scripts/merge_gitlab_aws_pipelines.py @@ -1,6 +1,6 @@ import yaml import argparse -import os.path +from pathlib import Path def main() -> None: @@ -12,7 +12,7 @@ def main() -> None: with open(args.input) as f: pipeline = yaml.safe_load(f) - if os.path.exists(args.output): + if Path(args.output).exists(): # If final file exists, merge the stages and jobs with open(args.output) as f: final_pipeline = yaml.safe_load(f)