From 9a43533dca0b1a0b2b61e82e45fb0f4ab0800d97 Mon Sep 17 00:00:00 2001 From: "api-clients-generation-pipeline[bot]" <54105614+api-clients-generation-pipeline[bot]@users.noreply.github.com> Date: Fri, 17 Apr 2026 10:05:13 +0200 Subject: [PATCH 1/2] Regenerate client from commit fc2584f of spec repo (#3917) Co-authored-by: ci.datadog-api-spec --- .generator/schemas/v2/openapi.yaml | 594 +++++++++++++++++- .../model_apm_dependency_stat_name.go | 76 +++ .../model_apm_dependency_stats_data_source.go | 64 ++ .../model_apm_dependency_stats_query.go | 414 ++++++++++++ .../model_apm_metrics_data_source.go | 64 ++ api/datadogV2/model_apm_metrics_query.go | 500 +++++++++++++++ api/datadogV2/model_apm_metrics_span_kind.go | 72 +++ api/datadogV2/model_apm_metrics_stat.go | 94 +++ api/datadogV2/model_apm_resource_stat_name.go | 86 +++ .../model_apm_resource_stats_data_source.go | 64 ++ .../model_apm_resource_stats_query.go | 420 +++++++++++++ api/datadogV2/model_container_data_source.go | 64 ++ api/datadogV2/model_container_scalar_query.go | 403 ++++++++++++ .../model_container_timeseries_query.go | 360 +++++++++++ api/datadogV2/model_events_data_source.go | 28 +- api/datadogV2/model_events_scalar_query.go | 2 +- .../model_events_timeseries_query.go | 2 +- api/datadogV2/model_metrics_scalar_query.go | 2 +- .../model_metrics_timeseries_query.go | 2 +- api/datadogV2/model_process_data_source.go | 64 ++ api/datadogV2/model_process_scalar_query.go | 403 ++++++++++++ .../model_process_timeseries_query.go | 360 +++++++++++ api/datadogV2/model_scalar_query.go | 196 +++++- api/datadogV2/model_slo_data_source.go | 64 ++ api/datadogV2/model_slo_query.go | 329 ++++++++++ api/datadogV2/model_slos_group_mode.go | 66 ++ api/datadogV2/model_slos_measure.go | 82 +++ api/datadogV2/model_slos_query_type.go | 68 ++ api/datadogV2/model_timeseries_query.go | 196 +++++- .../v2/metrics/QueryScalarData_1479548882.go | 63 ++ .../v2/metrics/QueryScalarData_1904811219.go | 60 ++ 
.../v2/metrics/QueryScalarData_2298288525.go | 59 ++ .../v2/metrics/QueryScalarData_2312509843.go | 63 ++ .../v2/metrics/QueryScalarData_2398494003.go | 63 ++ .../v2/metrics/QueryScalarData_2533499017.go | 61 ++ .../v2/metrics/QueryScalarData_2757564916.go | 63 ++ .../v2/metrics/QueryScalarData_3210877526.go | 63 ++ .../v2/metrics/QueryScalarData_3246660196.go | 63 ++ .../v2/metrics/QueryScalarData_3470073355.go | 63 ++ .../v2/metrics/QueryScalarData_3740015316.go | 63 ++ .../v2/metrics/QueryScalarData_394862343.go | 63 ++ .../v2/metrics/QueryScalarData_397220765.go | 63 ++ .../v2/metrics/QueryScalarData_420944803.go | 63 ++ .../v2/metrics/QueryScalarData_4230617918.go | 61 ++ .../v2/metrics/QueryScalarData_4257291081.go | 63 ++ .../v2/metrics/QueryScalarData_779493885.go | 61 ++ .../v2/metrics/QueryScalarData_891952130.go | 59 ++ .../v2/metrics/QueryScalarData_922754919.go | 63 ++ .../metrics/QueryTimeseriesData_1080761370.go | 64 ++ .../metrics/QueryTimeseriesData_108927825.go | 60 ++ .../metrics/QueryTimeseriesData_1116544040.go | 64 ++ .../metrics/QueryTimeseriesData_123149143.go | 64 ++ .../metrics/QueryTimeseriesData_1606557647.go | 64 ++ .../metrics/QueryTimeseriesData_1639521432.go | 64 ++ .../metrics/QueryTimeseriesData_2159746306.go | 61 ++ .../metrics/QueryTimeseriesData_2186419469.go | 64 ++ .../metrics/QueryTimeseriesData_2649955681.go | 64 ++ .../metrics/QueryTimeseriesData_2673679719.go | 64 ++ .../metrics/QueryTimeseriesData_2884575435.go | 64 ++ .../metrics/QueryTimeseriesData_3174309318.go | 59 ++ .../metrics/QueryTimeseriesData_3442090283.go | 64 ++ .../metrics/QueryTimeseriesData_3535807425.go | 62 ++ .../metrics/QueryTimeseriesData_4028506518.go | 64 ++ .../metrics/QueryTimeseriesData_4246412951.go | 61 ++ .../metrics/QueryTimeseriesData_475733751.go | 64 ++ .../metrics/QueryTimeseriesData_597826488.go | 64 ++ .../metrics/QueryTimeseriesData_847716941.go | 62 ++ ...ats_data_source_returns_OK_response.freeze | 1 + 
...stats_data_source_returns_OK_response.yaml | 25 + ...e_and_span_kind_returns_OK_response.freeze | 1 + ...rce_and_span_kind_returns_OK_response.yaml | 22 + ...ics_data_source_returns_OK_response.freeze | 1 + ...trics_data_source_returns_OK_response.yaml | 22 + ...ats_data_source_returns_OK_response.freeze | 1 + ...stats_data_source_returns_OK_response.yaml | 22 + ...dit_data_source_returns_OK_response.freeze | 1 + ...audit_data_source_returns_OK_response.yaml | 22 + ...nes_data_source_returns_OK_response.freeze | 1 + ...lines_data_source_returns_OK_response.yaml | 22 + ...sts_data_source_returns_OK_response.freeze | 1 + ...tests_data_source_returns_OK_response.yaml | 22 + ...ner_data_source_returns_OK_response.freeze | 1 + ...ainer_data_source_returns_OK_response.yaml | 22 + ...nts_data_source_returns_OK_response.freeze | 1 + ...vents_data_source_returns_OK_response.yaml | 22 + ...ogs_data_source_returns_OK_response.freeze | 1 + ..._logs_data_source_returns_OK_response.yaml | 22 + ...ork_data_source_returns_OK_response.freeze | 1 + ...twork_data_source_returns_OK_response.yaml | 22 + ...nts_data_source_returns_OK_response.freeze | 1 + ...vents_data_source_returns_OK_response.yaml | 22 + ...ess_data_source_returns_OK_response.freeze | 1 + ...ocess_data_source_returns_OK_response.yaml | 22 + ...ics_data_source_returns_OK_response.freeze | 1 + ...ytics_data_source_returns_OK_response.yaml | 22 + ...les_data_source_returns_OK_response.freeze | 1 + ...files_data_source_returns_OK_response.yaml | 22 + ...rum_data_source_returns_OK_response.freeze | 1 + ...h_rum_data_source_returns_OK_response.yaml | 22 + ...als_data_source_returns_OK_response.freeze | 1 + ...gnals_data_source_returns_OK_response.yaml | 22 + ...slo_data_source_returns_OK_response.freeze | 1 + ...h_slo_data_source_returns_OK_response.yaml | 22 + ...ans_data_source_returns_OK_response.freeze | 1 + ...spans_data_source_returns_OK_response.yaml | 22 + ...ats_data_source_returns_OK_response.freeze | 1 + 
...stats_data_source_returns_OK_response.yaml | 24 + ...e_and_span_kind_returns_OK_response.freeze | 1 + ...rce_and_span_kind_returns_OK_response.yaml | 24 + ...ics_data_source_returns_OK_response.freeze | 1 + ...trics_data_source_returns_OK_response.yaml | 24 + ...ats_data_source_returns_OK_response.freeze | 1 + ...stats_data_source_returns_OK_response.yaml | 24 + ...dit_data_source_returns_OK_response.freeze | 1 + ...audit_data_source_returns_OK_response.yaml | 24 + ...nes_data_source_returns_OK_response.freeze | 1 + ...lines_data_source_returns_OK_response.yaml | 24 + ...sts_data_source_returns_OK_response.freeze | 1 + ...tests_data_source_returns_OK_response.yaml | 24 + ...ner_data_source_returns_OK_response.freeze | 1 + ...ainer_data_source_returns_OK_response.yaml | 24 + ...nts_data_source_returns_OK_response.freeze | 1 + ...vents_data_source_returns_OK_response.yaml | 24 + ...ogs_data_source_returns_OK_response.freeze | 1 + ..._logs_data_source_returns_OK_response.yaml | 24 + ...ork_data_source_returns_OK_response.freeze | 1 + ...twork_data_source_returns_OK_response.yaml | 24 + ...nts_data_source_returns_OK_response.freeze | 1 + ...vents_data_source_returns_OK_response.yaml | 24 + ...ess_data_source_returns_OK_response.freeze | 1 + ...ocess_data_source_returns_OK_response.yaml | 24 + ...ics_data_source_returns_OK_response.freeze | 1 + ...ytics_data_source_returns_OK_response.yaml | 24 + ...les_data_source_returns_OK_response.freeze | 1 + ...files_data_source_returns_OK_response.yaml | 24 + ...rum_data_source_returns_OK_response.freeze | 1 + ...h_rum_data_source_returns_OK_response.yaml | 24 + ...als_data_source_returns_OK_response.freeze | 1 + ...gnals_data_source_returns_OK_response.yaml | 24 + ...slo_data_source_returns_OK_response.freeze | 1 + ...h_slo_data_source_returns_OK_response.yaml | 24 + ...ans_data_source_returns_OK_response.freeze | 1 + ...spans_data_source_returns_OK_response.yaml | 24 + tests/scenarios/features/v2/metrics.feature | 342 
++++++++++ 144 files changed, 8758 insertions(+), 15 deletions(-) create mode 100644 api/datadogV2/model_apm_dependency_stat_name.go create mode 100644 api/datadogV2/model_apm_dependency_stats_data_source.go create mode 100644 api/datadogV2/model_apm_dependency_stats_query.go create mode 100644 api/datadogV2/model_apm_metrics_data_source.go create mode 100644 api/datadogV2/model_apm_metrics_query.go create mode 100644 api/datadogV2/model_apm_metrics_span_kind.go create mode 100644 api/datadogV2/model_apm_metrics_stat.go create mode 100644 api/datadogV2/model_apm_resource_stat_name.go create mode 100644 api/datadogV2/model_apm_resource_stats_data_source.go create mode 100644 api/datadogV2/model_apm_resource_stats_query.go create mode 100644 api/datadogV2/model_container_data_source.go create mode 100644 api/datadogV2/model_container_scalar_query.go create mode 100644 api/datadogV2/model_container_timeseries_query.go create mode 100644 api/datadogV2/model_process_data_source.go create mode 100644 api/datadogV2/model_process_scalar_query.go create mode 100644 api/datadogV2/model_process_timeseries_query.go create mode 100644 api/datadogV2/model_slo_data_source.go create mode 100644 api/datadogV2/model_slo_query.go create mode 100644 api/datadogV2/model_slos_group_mode.go create mode 100644 api/datadogV2/model_slos_measure.go create mode 100644 api/datadogV2/model_slos_query_type.go create mode 100644 examples/v2/metrics/QueryScalarData_1479548882.go create mode 100644 examples/v2/metrics/QueryScalarData_1904811219.go create mode 100644 examples/v2/metrics/QueryScalarData_2298288525.go create mode 100644 examples/v2/metrics/QueryScalarData_2312509843.go create mode 100644 examples/v2/metrics/QueryScalarData_2398494003.go create mode 100644 examples/v2/metrics/QueryScalarData_2533499017.go create mode 100644 examples/v2/metrics/QueryScalarData_2757564916.go create mode 100644 examples/v2/metrics/QueryScalarData_3210877526.go create mode 100644 
examples/v2/metrics/QueryScalarData_3246660196.go create mode 100644 examples/v2/metrics/QueryScalarData_3470073355.go create mode 100644 examples/v2/metrics/QueryScalarData_3740015316.go create mode 100644 examples/v2/metrics/QueryScalarData_394862343.go create mode 100644 examples/v2/metrics/QueryScalarData_397220765.go create mode 100644 examples/v2/metrics/QueryScalarData_420944803.go create mode 100644 examples/v2/metrics/QueryScalarData_4230617918.go create mode 100644 examples/v2/metrics/QueryScalarData_4257291081.go create mode 100644 examples/v2/metrics/QueryScalarData_779493885.go create mode 100644 examples/v2/metrics/QueryScalarData_891952130.go create mode 100644 examples/v2/metrics/QueryScalarData_922754919.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_1080761370.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_108927825.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_1116544040.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_123149143.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_1606557647.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_1639521432.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_2159746306.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_2186419469.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_2649955681.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_2673679719.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_2884575435.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_3174309318.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_3442090283.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_3535807425.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_4028506518.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_4246412951.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_475733751.go create mode 100644 
examples/v2/metrics/QueryTimeseriesData_597826488.go create mode 100644 examples/v2/metrics/QueryTimeseriesData_847716941.go create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_audit_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_audit_data_source_returns_OK_response.yaml create mode 100644 
tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_tests_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_tests_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_container_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_container_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_events_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_events_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_logs_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_logs_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_network_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_network_data_source_returns_OK_response.yaml create mode 100644 
tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_on_call_events_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_on_call_events_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_process_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_process_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_product_analytics_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_product_analytics_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_profiles_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_profiles_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_rum_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_rum_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_security_signals_data_source_returns_OK_response.freeze create mode 100644 
tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_security_signals_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_slo_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_slo_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_spans_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_spans_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_returns_OK_response.yaml create mode 100644 
tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_audit_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_audit_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_tests_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_tests_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_container_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_container_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_events_data_source_returns_OK_response.freeze create mode 100644 
tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_events_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_logs_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_logs_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_network_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_network_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_on_call_events_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_on_call_events_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_process_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_process_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_product_analytics_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_product_analytics_data_source_returns_OK_response.yaml create mode 100644 
tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_profiles_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_profiles_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_rum_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_rum_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_security_signals_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_security_signals_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_slo_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_slo_data_source_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_spans_data_source_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_spans_data_source_returns_OK_response.yaml diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index 816ac84aac5..67ea97d1d67 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -4068,6 +4068,293 @@ components: example: "90646597-5fdb-4a17-a240-647003f8c028" format: uuid type: string + 
ApmDependencyStatName: + description: The APM dependency statistic to query. + enum: + - avg_duration + - avg_root_duration + - avg_spans_per_trace + - error_rate + - pct_exec_time + - pct_of_traces + - total_traces_count + example: avg_duration + type: string + x-enum-varnames: + - AVG_DURATION + - AVG_ROOT_DURATION + - AVG_SPANS_PER_TRACE + - ERROR_RATE + - PCT_EXEC_TIME + - PCT_OF_TRACES + - TOTAL_TRACES_COUNT + ApmDependencyStatsDataSource: + default: apm_dependency_stats + description: A data source for APM dependency statistics queries. + enum: + - apm_dependency_stats + example: apm_dependency_stats + type: string + x-enum-varnames: + - APM_DEPENDENCY_STATS + ApmDependencyStatsQuery: + description: >- + A query for APM dependency statistics between services, such as call latency and error rates. + properties: + data_source: + $ref: "#/components/schemas/ApmDependencyStatsDataSource" + env: + description: The environment to query. + example: prod + type: string + is_upstream: + description: Determines whether stats for upstream or downstream dependencies should be queried. + example: true + type: boolean + name: + description: The variable name for use in formulas. + example: query1 + type: string + operation_name: + description: The APM operation name. + example: web.request + type: string + primary_tag_name: + description: The name of the second primary tag used within APM; required when `primary_tag_value` is specified. See https://docs.datadoghq.com/tracing/guide/setting_primary_tags_to_scope/#add-a-second-primary-tag-in-datadog. + example: datacenter + type: string + primary_tag_value: + description: Filter APM data by the second primary tag. `primary_tag_name` must also be specified. + example: us-east-1 + type: string + resource_name: + description: The resource name to filter by. + example: "GET /api/v2/users" + type: string + service: + description: The service name to filter by. 
+ example: web-store + type: string + stat: + $ref: "#/components/schemas/ApmDependencyStatName" + required: + - data_source + - name + - env + - operation_name + - resource_name + - service + - stat + type: object + ApmMetricsDataSource: + default: apm_metrics + description: A data source for APM metrics queries. + enum: + - apm_metrics + example: apm_metrics + type: string + x-enum-varnames: + - APM_METRICS + ApmMetricsQuery: + description: >- + A query for APM trace metrics such as hits, errors, and latency percentiles, aggregated across services. + properties: + data_source: + $ref: "#/components/schemas/ApmMetricsDataSource" + group_by: + description: Optional fields to group the query results by. + items: + description: A field to group results by. + example: service + type: string + type: array + name: + description: The variable name for use in formulas. + example: query1 + type: string + operation_mode: + description: Optional operation mode to aggregate across operation names. + example: "primary" + type: string + operation_name: + description: Name of operation on service. If not provided, the primary operation name is used. + example: web.request + type: string + peer_tags: + description: Tags to query for a specific downstream entity (peer.service, peer.db_instance, peer.s3, peer.s3.bucket, etc.). + items: + description: A peer tag value. + example: "peer.service:my-service" + type: string + type: array + query_filter: + description: Additional filters for the query using metrics query syntax (for example, env, primary_tag). + example: "env:prod" + type: string + resource_hash: + description: The resource hash for exact matching. + example: "abc123" + type: string + resource_name: + description: The full name of a specific resource to filter by. + example: "GET /api/v1/users" + type: string + service: + description: The service name to filter by. 
+ example: web-store + type: string + span_kind: + $ref: "#/components/schemas/ApmMetricsSpanKind" + stat: + $ref: "#/components/schemas/ApmMetricsStat" + required: + - data_source + - name + - stat + type: object + ApmMetricsSpanKind: + description: Describes the relationship between the span, its parents, and its children in a trace. + enum: + - consumer + - server + - client + - producer + - internal + example: server + type: string + x-enum-varnames: + - CONSUMER + - SERVER + - CLIENT + - PRODUCER + - INTERNAL + ApmMetricsStat: + description: The APM metric statistic to query. + enum: + - error_rate + - errors + - errors_per_second + - hits + - hits_per_second + - apdex + - latency_avg + - latency_max + - latency_p50 + - latency_p75 + - latency_p90 + - latency_p95 + - latency_p99 + - latency_p999 + - latency_distribution + - total_time + example: latency_p99 + type: string + x-enum-varnames: + - ERROR_RATE + - ERRORS + - ERRORS_PER_SECOND + - HITS + - HITS_PER_SECOND + - APDEX + - LATENCY_AVG + - LATENCY_MAX + - LATENCY_P50 + - LATENCY_P75 + - LATENCY_P90 + - LATENCY_P95 + - LATENCY_P99 + - LATENCY_P999 + - LATENCY_DISTRIBUTION + - TOTAL_TIME + ApmResourceStatName: + description: The APM resource statistic to query. + enum: + - error_rate + - errors + - hits + - latency_avg + - latency_max + - latency_p50 + - latency_p75 + - latency_p90 + - latency_p95 + - latency_p99 + - latency_distribution + - total_time + example: latency_p95 + type: string + x-enum-varnames: + - ERROR_RATE + - ERRORS + - HITS + - LATENCY_AVG + - LATENCY_MAX + - LATENCY_P50 + - LATENCY_P75 + - LATENCY_P90 + - LATENCY_P95 + - LATENCY_P99 + - LATENCY_DISTRIBUTION + - TOTAL_TIME + ApmResourceStatsDataSource: + default: apm_resource_stats + description: A data source for APM resource statistics queries. 
+ enum: + - apm_resource_stats + example: apm_resource_stats + type: string + x-enum-varnames: + - APM_RESOURCE_STATS + ApmResourceStatsQuery: + description: >- + A query for APM resource statistics such as latency, error rate, and hit count, grouped by resource name. + properties: + data_source: + $ref: "#/components/schemas/ApmResourceStatsDataSource" + env: + description: The environment to query. + example: prod + type: string + group_by: + description: Tag keys to group results by. + items: + description: A tag key to group by. + example: resource_name + type: string + type: array + name: + description: The variable name for use in formulas. + example: query1 + type: string + operation_name: + description: The APM operation name. + example: web.request + type: string + primary_tag_name: + description: Name of the second primary tag used within APM. Required when `primary_tag_value` is specified. See https://docs.datadoghq.com/tracing/guide/setting_primary_tags_to_scope/#add-a-second-primary-tag-in-datadog + example: datacenter + type: string + primary_tag_value: + description: Value of the second primary tag by which to filter APM data. `primary_tag_name` must also be specified. + example: us-east-1 + type: string + resource_name: + description: The resource name to filter by. + example: "Admin::ProductsController#create" + type: string + service: + description: The service name to filter by. + example: web-store + type: string + stat: + $ref: "#/components/schemas/ApmResourceStatName" + required: + - data_source + - name + - env + - service + - stat + type: object ApmRetentionFilterType: default: apm_retention_filter description: The type of the resource. @@ -14182,6 +14469,15 @@ components: type: string type: array type: object + ContainerDataSource: + default: container + description: A data source for container-level infrastructure metrics. 
+ enum: + - container + example: container + type: string + x-enum-varnames: + - CONTAINER ContainerGroup: description: Container group object. properties: @@ -14593,6 +14889,82 @@ components: type: string x-enum-varnames: - CURSOR_LIMIT + ContainerScalarQuery: + description: A query for container-level metrics such as CPU and memory usage. + properties: + aggregator: + $ref: "#/components/schemas/MetricsAggregator" + data_source: + $ref: "#/components/schemas/ContainerDataSource" + is_normalized_cpu: + description: Whether CPU metrics should be normalized by core count. + type: boolean + limit: + description: Maximum number of results to return. + format: int64 + type: integer + metric: + description: The container metric to query. + example: process.stat.container.cpu.total_pct + type: string + name: + description: The variable name for use in formulas. + example: query1 + type: string + sort: + $ref: "#/components/schemas/QuerySortOrder" + tag_filters: + description: Tag filters to narrow down containers. + items: + description: A tag filter value. + example: "env:prod" + type: string + type: array + text_filter: + description: A full-text search filter to match container names. + type: string + required: + - data_source + - name + - metric + type: object + ContainerTimeseriesQuery: + description: A query for container-level metrics such as CPU and memory usage. + properties: + data_source: + $ref: "#/components/schemas/ContainerDataSource" + is_normalized_cpu: + description: Whether CPU metrics should be normalized by core count. + type: boolean + limit: + description: Maximum number of results to return. + format: int64 + type: integer + metric: + description: The container metric to query. + example: process.stat.container.cpu.total_pct + type: string + name: + description: The variable name for use in formulas. 
+ example: query1 + type: string + sort: + $ref: "#/components/schemas/QuerySortOrder" + tag_filters: + description: Tag filters to narrow down containers. + items: + description: A tag filter value. + example: "env:prod" + type: string + type: array + text_filter: + description: A full-text search filter to match container names. + type: string + required: + - data_source + - name + - metric + type: object ContainerType: default: container description: Type of container. @@ -25234,13 +25606,35 @@ components: description: A data source that is powered by the Events Platform. enum: - logs + - spans + - network - rum + - security_signals + - profiles + - audit + - events + - ci_tests + - ci_pipelines + - incident_analytics + - product_analytics + - on_call_events - dora example: logs type: string x-enum-varnames: - LOGS + - SPANS + - NETWORK - RUM + - SECURITY_SIGNALS + - PROFILES + - AUDIT + - EVENTS + - CI_TESTS + - CI_PIPELINES + - INCIDENT_ANALYTICS + - PRODUCT_ANALYTICS + - ON_CALL_EVENTS - DORA EventsGroupBy: description: A dimension on which to split a query's results. @@ -25409,7 +25803,8 @@ components: type: string type: object EventsScalarQuery: - description: An individual scalar events query. + description: >- + An individual scalar query for logs, RUM, traces, CI pipelines, security signals, and other event-based data sources. Use this query type for any data source powered by the Events Platform. See the data_source field for the full list of supported sources. properties: compute: $ref: "#/components/schemas/EventsCompute" @@ -25461,7 +25856,8 @@ components: - ALPHABETICAL - MEASURE EventsTimeseriesQuery: - description: An individual timeseries events query. + description: >- + An individual timeseries query for logs, RUM, traces, CI pipelines, security signals, and other event-based data sources. Use this query type for any data source powered by the Events Platform. See the data_source field for the full list of supported sources. 
properties: compute: $ref: "#/components/schemas/EventsCompute" @@ -42587,7 +42983,7 @@ components: type: string type: object MetricsScalarQuery: - description: An individual scalar metrics query. + description: A query against Datadog custom metrics or Cloud Cost data sources. properties: aggregator: $ref: "#/components/schemas/MetricsAggregator" @@ -42606,7 +43002,7 @@ components: - aggregator type: object MetricsTimeseriesQuery: - description: An individual timeseries metrics query. + description: A query against Datadog custom metrics or Cloud Cost data sources. properties: data_source: $ref: "#/components/schemas/MetricsDataSource" @@ -53269,6 +53665,54 @@ components: required: - type type: object + ProcessDataSource: + default: process + description: A data source for process-level infrastructure metrics. + enum: + - process + example: process + type: string + x-enum-varnames: + - PROCESS + ProcessScalarQuery: + description: A query for host-level process metrics such as CPU and memory usage. + properties: + aggregator: + $ref: "#/components/schemas/MetricsAggregator" + data_source: + $ref: "#/components/schemas/ProcessDataSource" + is_normalized_cpu: + description: Whether CPU metrics should be normalized by core count. + type: boolean + limit: + description: Maximum number of results to return. + format: int64 + type: integer + metric: + description: The process metric to query. + example: process.stat.cpu.total_pct + type: string + name: + description: The variable name for use in formulas. + example: query1 + type: string + sort: + $ref: "#/components/schemas/QuerySortOrder" + tag_filters: + description: Tag filters to narrow down processes. + items: + description: A tag filter value. + example: "env:prod" + type: string + type: array + text_filter: + description: A full-text search filter to match process names or commands. 
+ type: string + required: + - data_source + - name + - metric + type: object ProcessSummariesMeta: description: Response metadata object. properties: @@ -53355,6 +53799,43 @@ components: type: string x-enum-varnames: - PROCESS + ProcessTimeseriesQuery: + description: A query for host-level process metrics such as CPU and memory usage. + properties: + data_source: + $ref: "#/components/schemas/ProcessDataSource" + is_normalized_cpu: + description: Whether CPU metrics should be normalized by core count. + type: boolean + limit: + description: Maximum number of results to return. + format: int64 + type: integer + metric: + description: The process metric to query. + example: process.stat.cpu.total_pct + type: string + name: + description: The variable name for use in formulas. + example: query1 + type: string + sort: + $ref: "#/components/schemas/QuerySortOrder" + tag_filters: + description: Tag filters to narrow down processes. + items: + description: A tag filter value. + example: "env:prod" + type: string + type: array + text_filter: + description: A full-text search filter to match process names or commands. + type: string + required: + - data_source + - name + - metric + type: object ProductAnalyticsAnalyticsQuery: description: The analytics query definition containing a base query, compute rule, and optional grouping. properties: @@ -59563,6 +60044,12 @@ components: oneOf: - $ref: "#/components/schemas/MetricsScalarQuery" - $ref: "#/components/schemas/EventsScalarQuery" + - $ref: "#/components/schemas/ApmResourceStatsQuery" + - $ref: "#/components/schemas/ApmMetricsQuery" + - $ref: "#/components/schemas/ApmDependencyStatsQuery" + - $ref: "#/components/schemas/SloQuery" + - $ref: "#/components/schemas/ProcessScalarQuery" + - $ref: "#/components/schemas/ContainerScalarQuery" ScalarResponse: description: A message containing the response to a scalar query. 
properties: @@ -67446,6 +67933,51 @@ components: required: - slackTrigger type: object + SloDataSource: + default: slo + description: A data source for SLO queries. + enum: + - slo + example: slo + type: string + x-enum-varnames: + - SLO + SloQuery: + description: A query for SLO status, error budget, and burn rate metrics. + example: + additional_query_filters: "*" + data_source: "slo" + group_mode: "overall" + measure: "good_events" + name: "my_slo" + slo_id: "12345678910" + slo_query_type: "metric" + properties: + additional_query_filters: + description: Additional filters applied to the SLO query. + example: "host:host_a,env:prod" + type: string + data_source: + $ref: "#/components/schemas/SloDataSource" + group_mode: + $ref: "#/components/schemas/SlosGroupMode" + measure: + $ref: "#/components/schemas/SlosMeasure" + name: + description: The variable name for use in formulas. + example: query1 + type: string + slo_id: + description: The unique identifier of the SLO to query. + example: "a]b123c45de6f78g90h" + type: string + slo_query_type: + $ref: "#/components/schemas/SlosQueryType" + required: + - data_source + - slo_id + - measure + type: object SloReportCreateRequest: description: The SLO report request body. properties: @@ -67553,6 +68085,54 @@ components: type: string x-enum-varnames: - SLO_STATUS + SlosGroupMode: + description: How SLO results are grouped in the response. + enum: + - overall + - components + example: overall + type: string + x-enum-varnames: + - OVERALL + - COMPONENTS + SlosMeasure: + description: The SLO measurement to retrieve. 
+ enum: + - good_events + - bad_events + - slo_status + - error_budget_remaining + - error_budget_remaining_history + - error_budget_burndown + - burn_rate + - slo_status_history + - good_minutes + - bad_minutes + example: slo_status + type: string + x-enum-varnames: + - GOOD_EVENTS + - BAD_EVENTS + - SLO_STATUS + - ERROR_BUDGET_REMAINING + - ERROR_BUDGET_REMAINING_HISTORY + - ERROR_BUDGET_BURNDOWN + - BURN_RATE + - SLO_STATUS_HISTORY + - GOOD_MINUTES + - BAD_MINUTES + SlosQueryType: + description: The type of SLO definition being queried. + enum: + - metric + - time_slice + - monitor + example: metric + type: string + x-enum-varnames: + - METRIC + - TIME_SLICE + - MONITOR Snapshot: description: A single heatmap snapshot resource returned by create or update operations. properties: @@ -74623,6 +75203,12 @@ components: oneOf: - $ref: "#/components/schemas/MetricsTimeseriesQuery" - $ref: "#/components/schemas/EventsTimeseriesQuery" + - $ref: "#/components/schemas/ApmResourceStatsQuery" + - $ref: "#/components/schemas/ApmMetricsQuery" + - $ref: "#/components/schemas/ApmDependencyStatsQuery" + - $ref: "#/components/schemas/SloQuery" + - $ref: "#/components/schemas/ProcessTimeseriesQuery" + - $ref: "#/components/schemas/ContainerTimeseriesQuery" TimeseriesResponse: description: A message containing the response to a timeseries query. properties: diff --git a/api/datadogV2/model_apm_dependency_stat_name.go b/api/datadogV2/model_apm_dependency_stat_name.go new file mode 100644 index 00000000000..23242830378 --- /dev/null +++ b/api/datadogV2/model_apm_dependency_stat_name.go @@ -0,0 +1,76 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. 
+ +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ApmDependencyStatName The APM dependency statistic to query. +type ApmDependencyStatName string + +// List of ApmDependencyStatName. +const ( + APMDEPENDENCYSTATNAME_AVG_DURATION ApmDependencyStatName = "avg_duration" + APMDEPENDENCYSTATNAME_AVG_ROOT_DURATION ApmDependencyStatName = "avg_root_duration" + APMDEPENDENCYSTATNAME_AVG_SPANS_PER_TRACE ApmDependencyStatName = "avg_spans_per_trace" + APMDEPENDENCYSTATNAME_ERROR_RATE ApmDependencyStatName = "error_rate" + APMDEPENDENCYSTATNAME_PCT_EXEC_TIME ApmDependencyStatName = "pct_exec_time" + APMDEPENDENCYSTATNAME_PCT_OF_TRACES ApmDependencyStatName = "pct_of_traces" + APMDEPENDENCYSTATNAME_TOTAL_TRACES_COUNT ApmDependencyStatName = "total_traces_count" +) + +var allowedApmDependencyStatNameEnumValues = []ApmDependencyStatName{ + APMDEPENDENCYSTATNAME_AVG_DURATION, + APMDEPENDENCYSTATNAME_AVG_ROOT_DURATION, + APMDEPENDENCYSTATNAME_AVG_SPANS_PER_TRACE, + APMDEPENDENCYSTATNAME_ERROR_RATE, + APMDEPENDENCYSTATNAME_PCT_EXEC_TIME, + APMDEPENDENCYSTATNAME_PCT_OF_TRACES, + APMDEPENDENCYSTATNAME_TOTAL_TRACES_COUNT, +} + +// GetAllowedValues reeturns the list of possible values. +func (v *ApmDependencyStatName) GetAllowedValues() []ApmDependencyStatName { + return allowedApmDependencyStatNameEnumValues +} + +// UnmarshalJSON deserializes the given payload. +func (v *ApmDependencyStatName) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = ApmDependencyStatName(value) + return nil +} + +// NewApmDependencyStatNameFromValue returns a pointer to a valid ApmDependencyStatName +// for the value passed as argument, or an error if the value passed is not allowed by the enum. 
+func NewApmDependencyStatNameFromValue(v string) (*ApmDependencyStatName, error) { + ev := ApmDependencyStatName(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for ApmDependencyStatName: valid values are %v", v, allowedApmDependencyStatNameEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. +func (v ApmDependencyStatName) IsValid() bool { + for _, existing := range allowedApmDependencyStatNameEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to ApmDependencyStatName value. +func (v ApmDependencyStatName) Ptr() *ApmDependencyStatName { + return &v +} diff --git a/api/datadogV2/model_apm_dependency_stats_data_source.go b/api/datadogV2/model_apm_dependency_stats_data_source.go new file mode 100644 index 00000000000..a7253087546 --- /dev/null +++ b/api/datadogV2/model_apm_dependency_stats_data_source.go @@ -0,0 +1,64 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ApmDependencyStatsDataSource A data source for APM dependency statistics queries. +type ApmDependencyStatsDataSource string + +// List of ApmDependencyStatsDataSource. +const ( + APMDEPENDENCYSTATSDATASOURCE_APM_DEPENDENCY_STATS ApmDependencyStatsDataSource = "apm_dependency_stats" +) + +var allowedApmDependencyStatsDataSourceEnumValues = []ApmDependencyStatsDataSource{ + APMDEPENDENCYSTATSDATASOURCE_APM_DEPENDENCY_STATS, +} + +// GetAllowedValues reeturns the list of possible values. 
+func (v *ApmDependencyStatsDataSource) GetAllowedValues() []ApmDependencyStatsDataSource { + return allowedApmDependencyStatsDataSourceEnumValues +} + +// UnmarshalJSON deserializes the given payload. +func (v *ApmDependencyStatsDataSource) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = ApmDependencyStatsDataSource(value) + return nil +} + +// NewApmDependencyStatsDataSourceFromValue returns a pointer to a valid ApmDependencyStatsDataSource +// for the value passed as argument, or an error if the value passed is not allowed by the enum. +func NewApmDependencyStatsDataSourceFromValue(v string) (*ApmDependencyStatsDataSource, error) { + ev := ApmDependencyStatsDataSource(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for ApmDependencyStatsDataSource: valid values are %v", v, allowedApmDependencyStatsDataSourceEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. +func (v ApmDependencyStatsDataSource) IsValid() bool { + for _, existing := range allowedApmDependencyStatsDataSourceEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to ApmDependencyStatsDataSource value. +func (v ApmDependencyStatsDataSource) Ptr() *ApmDependencyStatsDataSource { + return &v +} diff --git a/api/datadogV2/model_apm_dependency_stats_query.go b/api/datadogV2/model_apm_dependency_stats_query.go new file mode 100644 index 00000000000..8b2f872cf97 --- /dev/null +++ b/api/datadogV2/model_apm_dependency_stats_query.go @@ -0,0 +1,414 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. 
+ +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ApmDependencyStatsQuery A query for APM dependency statistics between services, such as call latency and error rates. +type ApmDependencyStatsQuery struct { + // A data source for APM dependency statistics queries. + DataSource ApmDependencyStatsDataSource `json:"data_source"` + // The environment to query. + Env string `json:"env"` + // Determines whether stats for upstream or downstream dependencies should be queried. + IsUpstream *bool `json:"is_upstream,omitempty"` + // The variable name for use in formulas. + Name string `json:"name"` + // The APM operation name. + OperationName string `json:"operation_name"` + // The name of the second primary tag used within APM; required when `primary_tag_value` is specified. See https://docs.datadoghq.com/tracing/guide/setting_primary_tags_to_scope/#add-a-second-primary-tag-in-datadog. + PrimaryTagName *string `json:"primary_tag_name,omitempty"` + // Filter APM data by the second primary tag. `primary_tag_name` must also be specified. + PrimaryTagValue *string `json:"primary_tag_value,omitempty"` + // The resource name to filter by. + ResourceName string `json:"resource_name"` + // The service name to filter by. + Service string `json:"service"` + // The APM dependency statistic to query. + Stat ApmDependencyStatName `json:"stat"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewApmDependencyStatsQuery instantiates a new ApmDependencyStatsQuery object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. 
+func NewApmDependencyStatsQuery(dataSource ApmDependencyStatsDataSource, env string, name string, operationName string, resourceName string, service string, stat ApmDependencyStatName) *ApmDependencyStatsQuery { + this := ApmDependencyStatsQuery{} + this.DataSource = dataSource + this.Env = env + this.Name = name + this.OperationName = operationName + this.ResourceName = resourceName + this.Service = service + this.Stat = stat + return &this +} + +// NewApmDependencyStatsQueryWithDefaults instantiates a new ApmDependencyStatsQuery object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. +func NewApmDependencyStatsQueryWithDefaults() *ApmDependencyStatsQuery { + this := ApmDependencyStatsQuery{} + var dataSource ApmDependencyStatsDataSource = APMDEPENDENCYSTATSDATASOURCE_APM_DEPENDENCY_STATS + this.DataSource = dataSource + return &this +} + +// GetDataSource returns the DataSource field value. +func (o *ApmDependencyStatsQuery) GetDataSource() ApmDependencyStatsDataSource { + if o == nil { + var ret ApmDependencyStatsDataSource + return ret + } + return o.DataSource +} + +// GetDataSourceOk returns a tuple with the DataSource field value +// and a boolean to check if the value has been set. +func (o *ApmDependencyStatsQuery) GetDataSourceOk() (*ApmDependencyStatsDataSource, bool) { + if o == nil { + return nil, false + } + return &o.DataSource, true +} + +// SetDataSource sets field value. +func (o *ApmDependencyStatsQuery) SetDataSource(v ApmDependencyStatsDataSource) { + o.DataSource = v +} + +// GetEnv returns the Env field value. +func (o *ApmDependencyStatsQuery) GetEnv() string { + if o == nil { + var ret string + return ret + } + return o.Env +} + +// GetEnvOk returns a tuple with the Env field value +// and a boolean to check if the value has been set. 
+func (o *ApmDependencyStatsQuery) GetEnvOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Env, true +} + +// SetEnv sets field value. +func (o *ApmDependencyStatsQuery) SetEnv(v string) { + o.Env = v +} + +// GetIsUpstream returns the IsUpstream field value if set, zero value otherwise. +func (o *ApmDependencyStatsQuery) GetIsUpstream() bool { + if o == nil || o.IsUpstream == nil { + var ret bool + return ret + } + return *o.IsUpstream +} + +// GetIsUpstreamOk returns a tuple with the IsUpstream field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmDependencyStatsQuery) GetIsUpstreamOk() (*bool, bool) { + if o == nil || o.IsUpstream == nil { + return nil, false + } + return o.IsUpstream, true +} + +// HasIsUpstream returns a boolean if a field has been set. +func (o *ApmDependencyStatsQuery) HasIsUpstream() bool { + return o != nil && o.IsUpstream != nil +} + +// SetIsUpstream gets a reference to the given bool and assigns it to the IsUpstream field. +func (o *ApmDependencyStatsQuery) SetIsUpstream(v bool) { + o.IsUpstream = &v +} + +// GetName returns the Name field value. +func (o *ApmDependencyStatsQuery) GetName() string { + if o == nil { + var ret string + return ret + } + return o.Name +} + +// GetNameOk returns a tuple with the Name field value +// and a boolean to check if the value has been set. +func (o *ApmDependencyStatsQuery) GetNameOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Name, true +} + +// SetName sets field value. +func (o *ApmDependencyStatsQuery) SetName(v string) { + o.Name = v +} + +// GetOperationName returns the OperationName field value. +func (o *ApmDependencyStatsQuery) GetOperationName() string { + if o == nil { + var ret string + return ret + } + return o.OperationName +} + +// GetOperationNameOk returns a tuple with the OperationName field value +// and a boolean to check if the value has been set. 
+func (o *ApmDependencyStatsQuery) GetOperationNameOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.OperationName, true +} + +// SetOperationName sets field value. +func (o *ApmDependencyStatsQuery) SetOperationName(v string) { + o.OperationName = v +} + +// GetPrimaryTagName returns the PrimaryTagName field value if set, zero value otherwise. +func (o *ApmDependencyStatsQuery) GetPrimaryTagName() string { + if o == nil || o.PrimaryTagName == nil { + var ret string + return ret + } + return *o.PrimaryTagName +} + +// GetPrimaryTagNameOk returns a tuple with the PrimaryTagName field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmDependencyStatsQuery) GetPrimaryTagNameOk() (*string, bool) { + if o == nil || o.PrimaryTagName == nil { + return nil, false + } + return o.PrimaryTagName, true +} + +// HasPrimaryTagName returns a boolean if a field has been set. +func (o *ApmDependencyStatsQuery) HasPrimaryTagName() bool { + return o != nil && o.PrimaryTagName != nil +} + +// SetPrimaryTagName gets a reference to the given string and assigns it to the PrimaryTagName field. +func (o *ApmDependencyStatsQuery) SetPrimaryTagName(v string) { + o.PrimaryTagName = &v +} + +// GetPrimaryTagValue returns the PrimaryTagValue field value if set, zero value otherwise. +func (o *ApmDependencyStatsQuery) GetPrimaryTagValue() string { + if o == nil || o.PrimaryTagValue == nil { + var ret string + return ret + } + return *o.PrimaryTagValue +} + +// GetPrimaryTagValueOk returns a tuple with the PrimaryTagValue field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmDependencyStatsQuery) GetPrimaryTagValueOk() (*string, bool) { + if o == nil || o.PrimaryTagValue == nil { + return nil, false + } + return o.PrimaryTagValue, true +} + +// HasPrimaryTagValue returns a boolean if a field has been set. 
+func (o *ApmDependencyStatsQuery) HasPrimaryTagValue() bool { + return o != nil && o.PrimaryTagValue != nil +} + +// SetPrimaryTagValue gets a reference to the given string and assigns it to the PrimaryTagValue field. +func (o *ApmDependencyStatsQuery) SetPrimaryTagValue(v string) { + o.PrimaryTagValue = &v +} + +// GetResourceName returns the ResourceName field value. +func (o *ApmDependencyStatsQuery) GetResourceName() string { + if o == nil { + var ret string + return ret + } + return o.ResourceName +} + +// GetResourceNameOk returns a tuple with the ResourceName field value +// and a boolean to check if the value has been set. +func (o *ApmDependencyStatsQuery) GetResourceNameOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.ResourceName, true +} + +// SetResourceName sets field value. +func (o *ApmDependencyStatsQuery) SetResourceName(v string) { + o.ResourceName = v +} + +// GetService returns the Service field value. +func (o *ApmDependencyStatsQuery) GetService() string { + if o == nil { + var ret string + return ret + } + return o.Service +} + +// GetServiceOk returns a tuple with the Service field value +// and a boolean to check if the value has been set. +func (o *ApmDependencyStatsQuery) GetServiceOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Service, true +} + +// SetService sets field value. +func (o *ApmDependencyStatsQuery) SetService(v string) { + o.Service = v +} + +// GetStat returns the Stat field value. +func (o *ApmDependencyStatsQuery) GetStat() ApmDependencyStatName { + if o == nil { + var ret ApmDependencyStatName + return ret + } + return o.Stat +} + +// GetStatOk returns a tuple with the Stat field value +// and a boolean to check if the value has been set. +func (o *ApmDependencyStatsQuery) GetStatOk() (*ApmDependencyStatName, bool) { + if o == nil { + return nil, false + } + return &o.Stat, true +} + +// SetStat sets field value. 
+func (o *ApmDependencyStatsQuery) SetStat(v ApmDependencyStatName) { + o.Stat = v +} + +// MarshalJSON serializes the struct using spec logic. +func (o ApmDependencyStatsQuery) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + toSerialize["data_source"] = o.DataSource + toSerialize["env"] = o.Env + if o.IsUpstream != nil { + toSerialize["is_upstream"] = o.IsUpstream + } + toSerialize["name"] = o.Name + toSerialize["operation_name"] = o.OperationName + if o.PrimaryTagName != nil { + toSerialize["primary_tag_name"] = o.PrimaryTagName + } + if o.PrimaryTagValue != nil { + toSerialize["primary_tag_value"] = o.PrimaryTagValue + } + toSerialize["resource_name"] = o.ResourceName + toSerialize["service"] = o.Service + toSerialize["stat"] = o.Stat + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. 
+func (o *ApmDependencyStatsQuery) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + DataSource *ApmDependencyStatsDataSource `json:"data_source"` + Env *string `json:"env"` + IsUpstream *bool `json:"is_upstream,omitempty"` + Name *string `json:"name"` + OperationName *string `json:"operation_name"` + PrimaryTagName *string `json:"primary_tag_name,omitempty"` + PrimaryTagValue *string `json:"primary_tag_value,omitempty"` + ResourceName *string `json:"resource_name"` + Service *string `json:"service"` + Stat *ApmDependencyStatName `json:"stat"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.DataSource == nil { + return fmt.Errorf("required field data_source missing") + } + if all.Env == nil { + return fmt.Errorf("required field env missing") + } + if all.Name == nil { + return fmt.Errorf("required field name missing") + } + if all.OperationName == nil { + return fmt.Errorf("required field operation_name missing") + } + if all.ResourceName == nil { + return fmt.Errorf("required field resource_name missing") + } + if all.Service == nil { + return fmt.Errorf("required field service missing") + } + if all.Stat == nil { + return fmt.Errorf("required field stat missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"data_source", "env", "is_upstream", "name", "operation_name", "primary_tag_name", "primary_tag_value", "resource_name", "service", "stat"}) + } else { + return err + } + + hasInvalidField := false + if !all.DataSource.IsValid() { + hasInvalidField = true + } else { + o.DataSource = *all.DataSource + } + o.Env = *all.Env + o.IsUpstream = all.IsUpstream + o.Name = *all.Name + o.OperationName = *all.OperationName + o.PrimaryTagName = all.PrimaryTagName + o.PrimaryTagValue = all.PrimaryTagValue + o.ResourceName = *all.ResourceName + 
o.Service = *all.Service + if !all.Stat.IsValid() { + hasInvalidField = true + } else { + o.Stat = *all.Stat + } + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_apm_metrics_data_source.go b/api/datadogV2/model_apm_metrics_data_source.go new file mode 100644 index 00000000000..b01cae6ebbc --- /dev/null +++ b/api/datadogV2/model_apm_metrics_data_source.go @@ -0,0 +1,64 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ApmMetricsDataSource A data source for APM metrics queries. +type ApmMetricsDataSource string + +// List of ApmMetricsDataSource. +const ( + APMMETRICSDATASOURCE_APM_METRICS ApmMetricsDataSource = "apm_metrics" +) + +var allowedApmMetricsDataSourceEnumValues = []ApmMetricsDataSource{ + APMMETRICSDATASOURCE_APM_METRICS, +} + +// GetAllowedValues reeturns the list of possible values. +func (v *ApmMetricsDataSource) GetAllowedValues() []ApmMetricsDataSource { + return allowedApmMetricsDataSourceEnumValues +} + +// UnmarshalJSON deserializes the given payload. +func (v *ApmMetricsDataSource) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = ApmMetricsDataSource(value) + return nil +} + +// NewApmMetricsDataSourceFromValue returns a pointer to a valid ApmMetricsDataSource +// for the value passed as argument, or an error if the value passed is not allowed by the enum. 
+func NewApmMetricsDataSourceFromValue(v string) (*ApmMetricsDataSource, error) { + ev := ApmMetricsDataSource(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for ApmMetricsDataSource: valid values are %v", v, allowedApmMetricsDataSourceEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. +func (v ApmMetricsDataSource) IsValid() bool { + for _, existing := range allowedApmMetricsDataSourceEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to ApmMetricsDataSource value. +func (v ApmMetricsDataSource) Ptr() *ApmMetricsDataSource { + return &v +} diff --git a/api/datadogV2/model_apm_metrics_query.go b/api/datadogV2/model_apm_metrics_query.go new file mode 100644 index 00000000000..535b6f295bf --- /dev/null +++ b/api/datadogV2/model_apm_metrics_query.go @@ -0,0 +1,500 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ApmMetricsQuery A query for APM trace metrics such as hits, errors, and latency percentiles, aggregated across services. +type ApmMetricsQuery struct { + // A data source for APM metrics queries. + DataSource ApmMetricsDataSource `json:"data_source"` + // Optional fields to group the query results by. + GroupBy []string `json:"group_by,omitempty"` + // The variable name for use in formulas. + Name string `json:"name"` + // Optional operation mode to aggregate across operation names. + OperationMode *string `json:"operation_mode,omitempty"` + // Name of operation on service. If not provided, the primary operation name is used. 
+ OperationName *string `json:"operation_name,omitempty"` + // Tags to query for a specific downstream entity (peer.service, peer.db_instance, peer.s3, peer.s3.bucket, etc.). + PeerTags []string `json:"peer_tags,omitempty"` + // Additional filters for the query using metrics query syntax (for example, env, primary_tag). + QueryFilter *string `json:"query_filter,omitempty"` + // The resource hash for exact matching. + ResourceHash *string `json:"resource_hash,omitempty"` + // The full name of a specific resource to filter by. + ResourceName *string `json:"resource_name,omitempty"` + // The service name to filter by. + Service *string `json:"service,omitempty"` + // Describes the relationship between the span, its parents, and its children in a trace. + SpanKind *ApmMetricsSpanKind `json:"span_kind,omitempty"` + // The APM metric statistic to query. + Stat ApmMetricsStat `json:"stat"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewApmMetricsQuery instantiates a new ApmMetricsQuery object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewApmMetricsQuery(dataSource ApmMetricsDataSource, name string, stat ApmMetricsStat) *ApmMetricsQuery { + this := ApmMetricsQuery{} + this.DataSource = dataSource + this.Name = name + this.Stat = stat + return &this +} + +// NewApmMetricsQueryWithDefaults instantiates a new ApmMetricsQuery object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. 
+func NewApmMetricsQueryWithDefaults() *ApmMetricsQuery { + this := ApmMetricsQuery{} + var dataSource ApmMetricsDataSource = APMMETRICSDATASOURCE_APM_METRICS + this.DataSource = dataSource + return &this +} + +// GetDataSource returns the DataSource field value. +func (o *ApmMetricsQuery) GetDataSource() ApmMetricsDataSource { + if o == nil { + var ret ApmMetricsDataSource + return ret + } + return o.DataSource +} + +// GetDataSourceOk returns a tuple with the DataSource field value +// and a boolean to check if the value has been set. +func (o *ApmMetricsQuery) GetDataSourceOk() (*ApmMetricsDataSource, bool) { + if o == nil { + return nil, false + } + return &o.DataSource, true +} + +// SetDataSource sets field value. +func (o *ApmMetricsQuery) SetDataSource(v ApmMetricsDataSource) { + o.DataSource = v +} + +// GetGroupBy returns the GroupBy field value if set, zero value otherwise. +func (o *ApmMetricsQuery) GetGroupBy() []string { + if o == nil || o.GroupBy == nil { + var ret []string + return ret + } + return o.GroupBy +} + +// GetGroupByOk returns a tuple with the GroupBy field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmMetricsQuery) GetGroupByOk() (*[]string, bool) { + if o == nil || o.GroupBy == nil { + return nil, false + } + return &o.GroupBy, true +} + +// HasGroupBy returns a boolean if a field has been set. +func (o *ApmMetricsQuery) HasGroupBy() bool { + return o != nil && o.GroupBy != nil +} + +// SetGroupBy gets a reference to the given []string and assigns it to the GroupBy field. +func (o *ApmMetricsQuery) SetGroupBy(v []string) { + o.GroupBy = v +} + +// GetName returns the Name field value. +func (o *ApmMetricsQuery) GetName() string { + if o == nil { + var ret string + return ret + } + return o.Name +} + +// GetNameOk returns a tuple with the Name field value +// and a boolean to check if the value has been set. 
+func (o *ApmMetricsQuery) GetNameOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Name, true +} + +// SetName sets field value. +func (o *ApmMetricsQuery) SetName(v string) { + o.Name = v +} + +// GetOperationMode returns the OperationMode field value if set, zero value otherwise. +func (o *ApmMetricsQuery) GetOperationMode() string { + if o == nil || o.OperationMode == nil { + var ret string + return ret + } + return *o.OperationMode +} + +// GetOperationModeOk returns a tuple with the OperationMode field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmMetricsQuery) GetOperationModeOk() (*string, bool) { + if o == nil || o.OperationMode == nil { + return nil, false + } + return o.OperationMode, true +} + +// HasOperationMode returns a boolean if a field has been set. +func (o *ApmMetricsQuery) HasOperationMode() bool { + return o != nil && o.OperationMode != nil +} + +// SetOperationMode gets a reference to the given string and assigns it to the OperationMode field. +func (o *ApmMetricsQuery) SetOperationMode(v string) { + o.OperationMode = &v +} + +// GetOperationName returns the OperationName field value if set, zero value otherwise. +func (o *ApmMetricsQuery) GetOperationName() string { + if o == nil || o.OperationName == nil { + var ret string + return ret + } + return *o.OperationName +} + +// GetOperationNameOk returns a tuple with the OperationName field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmMetricsQuery) GetOperationNameOk() (*string, bool) { + if o == nil || o.OperationName == nil { + return nil, false + } + return o.OperationName, true +} + +// HasOperationName returns a boolean if a field has been set. +func (o *ApmMetricsQuery) HasOperationName() bool { + return o != nil && o.OperationName != nil +} + +// SetOperationName gets a reference to the given string and assigns it to the OperationName field. 
+func (o *ApmMetricsQuery) SetOperationName(v string) { + o.OperationName = &v +} + +// GetPeerTags returns the PeerTags field value if set, zero value otherwise. +func (o *ApmMetricsQuery) GetPeerTags() []string { + if o == nil || o.PeerTags == nil { + var ret []string + return ret + } + return o.PeerTags +} + +// GetPeerTagsOk returns a tuple with the PeerTags field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmMetricsQuery) GetPeerTagsOk() (*[]string, bool) { + if o == nil || o.PeerTags == nil { + return nil, false + } + return &o.PeerTags, true +} + +// HasPeerTags returns a boolean if a field has been set. +func (o *ApmMetricsQuery) HasPeerTags() bool { + return o != nil && o.PeerTags != nil +} + +// SetPeerTags gets a reference to the given []string and assigns it to the PeerTags field. +func (o *ApmMetricsQuery) SetPeerTags(v []string) { + o.PeerTags = v +} + +// GetQueryFilter returns the QueryFilter field value if set, zero value otherwise. +func (o *ApmMetricsQuery) GetQueryFilter() string { + if o == nil || o.QueryFilter == nil { + var ret string + return ret + } + return *o.QueryFilter +} + +// GetQueryFilterOk returns a tuple with the QueryFilter field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmMetricsQuery) GetQueryFilterOk() (*string, bool) { + if o == nil || o.QueryFilter == nil { + return nil, false + } + return o.QueryFilter, true +} + +// HasQueryFilter returns a boolean if a field has been set. +func (o *ApmMetricsQuery) HasQueryFilter() bool { + return o != nil && o.QueryFilter != nil +} + +// SetQueryFilter gets a reference to the given string and assigns it to the QueryFilter field. +func (o *ApmMetricsQuery) SetQueryFilter(v string) { + o.QueryFilter = &v +} + +// GetResourceHash returns the ResourceHash field value if set, zero value otherwise. 
+func (o *ApmMetricsQuery) GetResourceHash() string { + if o == nil || o.ResourceHash == nil { + var ret string + return ret + } + return *o.ResourceHash +} + +// GetResourceHashOk returns a tuple with the ResourceHash field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmMetricsQuery) GetResourceHashOk() (*string, bool) { + if o == nil || o.ResourceHash == nil { + return nil, false + } + return o.ResourceHash, true +} + +// HasResourceHash returns a boolean if a field has been set. +func (o *ApmMetricsQuery) HasResourceHash() bool { + return o != nil && o.ResourceHash != nil +} + +// SetResourceHash gets a reference to the given string and assigns it to the ResourceHash field. +func (o *ApmMetricsQuery) SetResourceHash(v string) { + o.ResourceHash = &v +} + +// GetResourceName returns the ResourceName field value if set, zero value otherwise. +func (o *ApmMetricsQuery) GetResourceName() string { + if o == nil || o.ResourceName == nil { + var ret string + return ret + } + return *o.ResourceName +} + +// GetResourceNameOk returns a tuple with the ResourceName field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmMetricsQuery) GetResourceNameOk() (*string, bool) { + if o == nil || o.ResourceName == nil { + return nil, false + } + return o.ResourceName, true +} + +// HasResourceName returns a boolean if a field has been set. +func (o *ApmMetricsQuery) HasResourceName() bool { + return o != nil && o.ResourceName != nil +} + +// SetResourceName gets a reference to the given string and assigns it to the ResourceName field. +func (o *ApmMetricsQuery) SetResourceName(v string) { + o.ResourceName = &v +} + +// GetService returns the Service field value if set, zero value otherwise. 
+func (o *ApmMetricsQuery) GetService() string { + if o == nil || o.Service == nil { + var ret string + return ret + } + return *o.Service +} + +// GetServiceOk returns a tuple with the Service field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmMetricsQuery) GetServiceOk() (*string, bool) { + if o == nil || o.Service == nil { + return nil, false + } + return o.Service, true +} + +// HasService returns a boolean if a field has been set. +func (o *ApmMetricsQuery) HasService() bool { + return o != nil && o.Service != nil +} + +// SetService gets a reference to the given string and assigns it to the Service field. +func (o *ApmMetricsQuery) SetService(v string) { + o.Service = &v +} + +// GetSpanKind returns the SpanKind field value if set, zero value otherwise. +func (o *ApmMetricsQuery) GetSpanKind() ApmMetricsSpanKind { + if o == nil || o.SpanKind == nil { + var ret ApmMetricsSpanKind + return ret + } + return *o.SpanKind +} + +// GetSpanKindOk returns a tuple with the SpanKind field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ApmMetricsQuery) GetSpanKindOk() (*ApmMetricsSpanKind, bool) { + if o == nil || o.SpanKind == nil { + return nil, false + } + return o.SpanKind, true +} + +// HasSpanKind returns a boolean if a field has been set. +func (o *ApmMetricsQuery) HasSpanKind() bool { + return o != nil && o.SpanKind != nil +} + +// SetSpanKind gets a reference to the given ApmMetricsSpanKind and assigns it to the SpanKind field. +func (o *ApmMetricsQuery) SetSpanKind(v ApmMetricsSpanKind) { + o.SpanKind = &v +} + +// GetStat returns the Stat field value. +func (o *ApmMetricsQuery) GetStat() ApmMetricsStat { + if o == nil { + var ret ApmMetricsStat + return ret + } + return o.Stat +} + +// GetStatOk returns a tuple with the Stat field value +// and a boolean to check if the value has been set. 
+func (o *ApmMetricsQuery) GetStatOk() (*ApmMetricsStat, bool) { + if o == nil { + return nil, false + } + return &o.Stat, true +} + +// SetStat sets field value. +func (o *ApmMetricsQuery) SetStat(v ApmMetricsStat) { + o.Stat = v +} + +// MarshalJSON serializes the struct using spec logic. +func (o ApmMetricsQuery) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + toSerialize["data_source"] = o.DataSource + if o.GroupBy != nil { + toSerialize["group_by"] = o.GroupBy + } + toSerialize["name"] = o.Name + if o.OperationMode != nil { + toSerialize["operation_mode"] = o.OperationMode + } + if o.OperationName != nil { + toSerialize["operation_name"] = o.OperationName + } + if o.PeerTags != nil { + toSerialize["peer_tags"] = o.PeerTags + } + if o.QueryFilter != nil { + toSerialize["query_filter"] = o.QueryFilter + } + if o.ResourceHash != nil { + toSerialize["resource_hash"] = o.ResourceHash + } + if o.ResourceName != nil { + toSerialize["resource_name"] = o.ResourceName + } + if o.Service != nil { + toSerialize["service"] = o.Service + } + if o.SpanKind != nil { + toSerialize["span_kind"] = o.SpanKind + } + toSerialize["stat"] = o.Stat + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. 
+func (o *ApmMetricsQuery) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + DataSource *ApmMetricsDataSource `json:"data_source"` + GroupBy []string `json:"group_by,omitempty"` + Name *string `json:"name"` + OperationMode *string `json:"operation_mode,omitempty"` + OperationName *string `json:"operation_name,omitempty"` + PeerTags []string `json:"peer_tags,omitempty"` + QueryFilter *string `json:"query_filter,omitempty"` + ResourceHash *string `json:"resource_hash,omitempty"` + ResourceName *string `json:"resource_name,omitempty"` + Service *string `json:"service,omitempty"` + SpanKind *ApmMetricsSpanKind `json:"span_kind,omitempty"` + Stat *ApmMetricsStat `json:"stat"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.DataSource == nil { + return fmt.Errorf("required field data_source missing") + } + if all.Name == nil { + return fmt.Errorf("required field name missing") + } + if all.Stat == nil { + return fmt.Errorf("required field stat missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"data_source", "group_by", "name", "operation_mode", "operation_name", "peer_tags", "query_filter", "resource_hash", "resource_name", "service", "span_kind", "stat"}) + } else { + return err + } + + hasInvalidField := false + if !all.DataSource.IsValid() { + hasInvalidField = true + } else { + o.DataSource = *all.DataSource + } + o.GroupBy = all.GroupBy + o.Name = *all.Name + o.OperationMode = all.OperationMode + o.OperationName = all.OperationName + o.PeerTags = all.PeerTags + o.QueryFilter = all.QueryFilter + o.ResourceHash = all.ResourceHash + o.ResourceName = all.ResourceName + o.Service = all.Service + if all.SpanKind != nil && !all.SpanKind.IsValid() { + hasInvalidField = true + } else { + o.SpanKind = all.SpanKind + } + if !all.Stat.IsValid() 
{ + hasInvalidField = true + } else { + o.Stat = *all.Stat + } + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_apm_metrics_span_kind.go b/api/datadogV2/model_apm_metrics_span_kind.go new file mode 100644 index 00000000000..71212a7790f --- /dev/null +++ b/api/datadogV2/model_apm_metrics_span_kind.go @@ -0,0 +1,72 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ApmMetricsSpanKind Describes the relationship between the span, its parents, and its children in a trace. +type ApmMetricsSpanKind string + +// List of ApmMetricsSpanKind. +const ( + APMMETRICSSPANKIND_CONSUMER ApmMetricsSpanKind = "consumer" + APMMETRICSSPANKIND_SERVER ApmMetricsSpanKind = "server" + APMMETRICSSPANKIND_CLIENT ApmMetricsSpanKind = "client" + APMMETRICSSPANKIND_PRODUCER ApmMetricsSpanKind = "producer" + APMMETRICSSPANKIND_INTERNAL ApmMetricsSpanKind = "internal" +) + +var allowedApmMetricsSpanKindEnumValues = []ApmMetricsSpanKind{ + APMMETRICSSPANKIND_CONSUMER, + APMMETRICSSPANKIND_SERVER, + APMMETRICSSPANKIND_CLIENT, + APMMETRICSSPANKIND_PRODUCER, + APMMETRICSSPANKIND_INTERNAL, +} + +// GetAllowedValues reeturns the list of possible values. +func (v *ApmMetricsSpanKind) GetAllowedValues() []ApmMetricsSpanKind { + return allowedApmMetricsSpanKindEnumValues +} + +// UnmarshalJSON deserializes the given payload. 
+func (v *ApmMetricsSpanKind) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = ApmMetricsSpanKind(value) + return nil +} + +// NewApmMetricsSpanKindFromValue returns a pointer to a valid ApmMetricsSpanKind +// for the value passed as argument, or an error if the value passed is not allowed by the enum. +func NewApmMetricsSpanKindFromValue(v string) (*ApmMetricsSpanKind, error) { + ev := ApmMetricsSpanKind(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for ApmMetricsSpanKind: valid values are %v", v, allowedApmMetricsSpanKindEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. +func (v ApmMetricsSpanKind) IsValid() bool { + for _, existing := range allowedApmMetricsSpanKindEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to ApmMetricsSpanKind value. +func (v ApmMetricsSpanKind) Ptr() *ApmMetricsSpanKind { + return &v +} diff --git a/api/datadogV2/model_apm_metrics_stat.go b/api/datadogV2/model_apm_metrics_stat.go new file mode 100644 index 00000000000..9729c91e5ea --- /dev/null +++ b/api/datadogV2/model_apm_metrics_stat.go @@ -0,0 +1,94 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ApmMetricsStat The APM metric statistic to query. +type ApmMetricsStat string + +// List of ApmMetricsStat. 
+const ( + APMMETRICSSTAT_ERROR_RATE ApmMetricsStat = "error_rate" + APMMETRICSSTAT_ERRORS ApmMetricsStat = "errors" + APMMETRICSSTAT_ERRORS_PER_SECOND ApmMetricsStat = "errors_per_second" + APMMETRICSSTAT_HITS ApmMetricsStat = "hits" + APMMETRICSSTAT_HITS_PER_SECOND ApmMetricsStat = "hits_per_second" + APMMETRICSSTAT_APDEX ApmMetricsStat = "apdex" + APMMETRICSSTAT_LATENCY_AVG ApmMetricsStat = "latency_avg" + APMMETRICSSTAT_LATENCY_MAX ApmMetricsStat = "latency_max" + APMMETRICSSTAT_LATENCY_P50 ApmMetricsStat = "latency_p50" + APMMETRICSSTAT_LATENCY_P75 ApmMetricsStat = "latency_p75" + APMMETRICSSTAT_LATENCY_P90 ApmMetricsStat = "latency_p90" + APMMETRICSSTAT_LATENCY_P95 ApmMetricsStat = "latency_p95" + APMMETRICSSTAT_LATENCY_P99 ApmMetricsStat = "latency_p99" + APMMETRICSSTAT_LATENCY_P999 ApmMetricsStat = "latency_p999" + APMMETRICSSTAT_LATENCY_DISTRIBUTION ApmMetricsStat = "latency_distribution" + APMMETRICSSTAT_TOTAL_TIME ApmMetricsStat = "total_time" +) + +var allowedApmMetricsStatEnumValues = []ApmMetricsStat{ + APMMETRICSSTAT_ERROR_RATE, + APMMETRICSSTAT_ERRORS, + APMMETRICSSTAT_ERRORS_PER_SECOND, + APMMETRICSSTAT_HITS, + APMMETRICSSTAT_HITS_PER_SECOND, + APMMETRICSSTAT_APDEX, + APMMETRICSSTAT_LATENCY_AVG, + APMMETRICSSTAT_LATENCY_MAX, + APMMETRICSSTAT_LATENCY_P50, + APMMETRICSSTAT_LATENCY_P75, + APMMETRICSSTAT_LATENCY_P90, + APMMETRICSSTAT_LATENCY_P95, + APMMETRICSSTAT_LATENCY_P99, + APMMETRICSSTAT_LATENCY_P999, + APMMETRICSSTAT_LATENCY_DISTRIBUTION, + APMMETRICSSTAT_TOTAL_TIME, +} + +// GetAllowedValues reeturns the list of possible values. +func (v *ApmMetricsStat) GetAllowedValues() []ApmMetricsStat { + return allowedApmMetricsStatEnumValues +} + +// UnmarshalJSON deserializes the given payload. 
+func (v *ApmMetricsStat) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = ApmMetricsStat(value) + return nil +} + +// NewApmMetricsStatFromValue returns a pointer to a valid ApmMetricsStat +// for the value passed as argument, or an error if the value passed is not allowed by the enum. +func NewApmMetricsStatFromValue(v string) (*ApmMetricsStat, error) { + ev := ApmMetricsStat(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for ApmMetricsStat: valid values are %v", v, allowedApmMetricsStatEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. +func (v ApmMetricsStat) IsValid() bool { + for _, existing := range allowedApmMetricsStatEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to ApmMetricsStat value. +func (v ApmMetricsStat) Ptr() *ApmMetricsStat { + return &v +} diff --git a/api/datadogV2/model_apm_resource_stat_name.go b/api/datadogV2/model_apm_resource_stat_name.go new file mode 100644 index 00000000000..118df1bc431 --- /dev/null +++ b/api/datadogV2/model_apm_resource_stat_name.go @@ -0,0 +1,86 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ApmResourceStatName The APM resource statistic to query. +type ApmResourceStatName string + +// List of ApmResourceStatName. 
+const ( + APMRESOURCESTATNAME_ERROR_RATE ApmResourceStatName = "error_rate" + APMRESOURCESTATNAME_ERRORS ApmResourceStatName = "errors" + APMRESOURCESTATNAME_HITS ApmResourceStatName = "hits" + APMRESOURCESTATNAME_LATENCY_AVG ApmResourceStatName = "latency_avg" + APMRESOURCESTATNAME_LATENCY_MAX ApmResourceStatName = "latency_max" + APMRESOURCESTATNAME_LATENCY_P50 ApmResourceStatName = "latency_p50" + APMRESOURCESTATNAME_LATENCY_P75 ApmResourceStatName = "latency_p75" + APMRESOURCESTATNAME_LATENCY_P90 ApmResourceStatName = "latency_p90" + APMRESOURCESTATNAME_LATENCY_P95 ApmResourceStatName = "latency_p95" + APMRESOURCESTATNAME_LATENCY_P99 ApmResourceStatName = "latency_p99" + APMRESOURCESTATNAME_LATENCY_DISTRIBUTION ApmResourceStatName = "latency_distribution" + APMRESOURCESTATNAME_TOTAL_TIME ApmResourceStatName = "total_time" +) + +var allowedApmResourceStatNameEnumValues = []ApmResourceStatName{ + APMRESOURCESTATNAME_ERROR_RATE, + APMRESOURCESTATNAME_ERRORS, + APMRESOURCESTATNAME_HITS, + APMRESOURCESTATNAME_LATENCY_AVG, + APMRESOURCESTATNAME_LATENCY_MAX, + APMRESOURCESTATNAME_LATENCY_P50, + APMRESOURCESTATNAME_LATENCY_P75, + APMRESOURCESTATNAME_LATENCY_P90, + APMRESOURCESTATNAME_LATENCY_P95, + APMRESOURCESTATNAME_LATENCY_P99, + APMRESOURCESTATNAME_LATENCY_DISTRIBUTION, + APMRESOURCESTATNAME_TOTAL_TIME, +} + +// GetAllowedValues reeturns the list of possible values. +func (v *ApmResourceStatName) GetAllowedValues() []ApmResourceStatName { + return allowedApmResourceStatNameEnumValues +} + +// UnmarshalJSON deserializes the given payload. +func (v *ApmResourceStatName) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = ApmResourceStatName(value) + return nil +} + +// NewApmResourceStatNameFromValue returns a pointer to a valid ApmResourceStatName +// for the value passed as argument, or an error if the value passed is not allowed by the enum. 
+func NewApmResourceStatNameFromValue(v string) (*ApmResourceStatName, error) { + ev := ApmResourceStatName(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for ApmResourceStatName: valid values are %v", v, allowedApmResourceStatNameEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. +func (v ApmResourceStatName) IsValid() bool { + for _, existing := range allowedApmResourceStatNameEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to ApmResourceStatName value. +func (v ApmResourceStatName) Ptr() *ApmResourceStatName { + return &v +} diff --git a/api/datadogV2/model_apm_resource_stats_data_source.go b/api/datadogV2/model_apm_resource_stats_data_source.go new file mode 100644 index 00000000000..90426ddff7f --- /dev/null +++ b/api/datadogV2/model_apm_resource_stats_data_source.go @@ -0,0 +1,64 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ApmResourceStatsDataSource A data source for APM resource statistics queries. +type ApmResourceStatsDataSource string + +// List of ApmResourceStatsDataSource. +const ( + APMRESOURCESTATSDATASOURCE_APM_RESOURCE_STATS ApmResourceStatsDataSource = "apm_resource_stats" +) + +var allowedApmResourceStatsDataSourceEnumValues = []ApmResourceStatsDataSource{ + APMRESOURCESTATSDATASOURCE_APM_RESOURCE_STATS, +} + +// GetAllowedValues reeturns the list of possible values. +func (v *ApmResourceStatsDataSource) GetAllowedValues() []ApmResourceStatsDataSource { + return allowedApmResourceStatsDataSourceEnumValues +} + +// UnmarshalJSON deserializes the given payload. 
+func (v *ApmResourceStatsDataSource) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = ApmResourceStatsDataSource(value) + return nil +} + +// NewApmResourceStatsDataSourceFromValue returns a pointer to a valid ApmResourceStatsDataSource +// for the value passed as argument, or an error if the value passed is not allowed by the enum. +func NewApmResourceStatsDataSourceFromValue(v string) (*ApmResourceStatsDataSource, error) { + ev := ApmResourceStatsDataSource(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for ApmResourceStatsDataSource: valid values are %v", v, allowedApmResourceStatsDataSourceEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. +func (v ApmResourceStatsDataSource) IsValid() bool { + for _, existing := range allowedApmResourceStatsDataSourceEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to ApmResourceStatsDataSource value. +func (v ApmResourceStatsDataSource) Ptr() *ApmResourceStatsDataSource { + return &v +} diff --git a/api/datadogV2/model_apm_resource_stats_query.go b/api/datadogV2/model_apm_resource_stats_query.go new file mode 100644 index 00000000000..cf1c4853860 --- /dev/null +++ b/api/datadogV2/model_apm_resource_stats_query.go @@ -0,0 +1,420 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ApmResourceStatsQuery A query for APM resource statistics such as latency, error rate, and hit count, grouped by resource name. +type ApmResourceStatsQuery struct { + // A data source for APM resource statistics queries. 
+ DataSource ApmResourceStatsDataSource `json:"data_source"` + // The environment to query. + Env string `json:"env"` + // Tag keys to group results by. + GroupBy []string `json:"group_by,omitempty"` + // The variable name for use in formulas. + Name string `json:"name"` + // The APM operation name. + OperationName *string `json:"operation_name,omitempty"` + // Name of the second primary tag used within APM. Required when `primary_tag_value` is specified. See https://docs.datadoghq.com/tracing/guide/setting_primary_tags_to_scope/#add-a-second-primary-tag-in-datadog + PrimaryTagName *string `json:"primary_tag_name,omitempty"` + // Value of the second primary tag by which to filter APM data. `primary_tag_name` must also be specified. + PrimaryTagValue *string `json:"primary_tag_value,omitempty"` + // The resource name to filter by. + ResourceName *string `json:"resource_name,omitempty"` + // The service name to filter by. + Service string `json:"service"` + // The APM resource statistic to query. + Stat ApmResourceStatName `json:"stat"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewApmResourceStatsQuery instantiates a new ApmResourceStatsQuery object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. 
+func NewApmResourceStatsQuery(dataSource ApmResourceStatsDataSource, env string, name string, service string, stat ApmResourceStatName) *ApmResourceStatsQuery {
+	this := ApmResourceStatsQuery{}
+	this.DataSource = dataSource
+	this.Env = env
+	this.Name = name
+	this.Service = service
+	this.Stat = stat
+	return &this
+}
+
+// NewApmResourceStatsQueryWithDefaults instantiates a new ApmResourceStatsQuery object.
+// This constructor will only assign default values to properties that have it defined,
+// but it doesn't guarantee that properties required by API are set.
+func NewApmResourceStatsQueryWithDefaults() *ApmResourceStatsQuery {
+	this := ApmResourceStatsQuery{}
+	var dataSource ApmResourceStatsDataSource = APMRESOURCESTATSDATASOURCE_APM_RESOURCE_STATS
+	this.DataSource = dataSource
+	return &this
+}
+
+// GetDataSource returns the DataSource field value.
+func (o *ApmResourceStatsQuery) GetDataSource() ApmResourceStatsDataSource {
+	if o == nil {
+		var ret ApmResourceStatsDataSource
+		return ret
+	}
+	return o.DataSource
+}
+
+// GetDataSourceOk returns a tuple with the DataSource field value
+// and a boolean to check if the value has been set.
+func (o *ApmResourceStatsQuery) GetDataSourceOk() (*ApmResourceStatsDataSource, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.DataSource, true
+}
+
+// SetDataSource sets field value.
+func (o *ApmResourceStatsQuery) SetDataSource(v ApmResourceStatsDataSource) {
+	o.DataSource = v
+}
+
+// GetEnv returns the Env field value.
+func (o *ApmResourceStatsQuery) GetEnv() string {
+	if o == nil {
+		var ret string
+		return ret
+	}
+	return o.Env
+}
+
+// GetEnvOk returns a tuple with the Env field value
+// and a boolean to check if the value has been set.
+func (o *ApmResourceStatsQuery) GetEnvOk() (*string, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.Env, true
+}
+
+// SetEnv sets field value.
+func (o *ApmResourceStatsQuery) SetEnv(v string) {
+	o.Env = v
+}
+
+// GetGroupBy returns the GroupBy field value if set, zero value otherwise.
+func (o *ApmResourceStatsQuery) GetGroupBy() []string {
+	if o == nil || o.GroupBy == nil {
+		var ret []string
+		return ret
+	}
+	return o.GroupBy
+}
+
+// GetGroupByOk returns a tuple with the GroupBy field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ApmResourceStatsQuery) GetGroupByOk() (*[]string, bool) {
+	if o == nil || o.GroupBy == nil {
+		return nil, false
+	}
+	return &o.GroupBy, true
+}
+
+// HasGroupBy returns a boolean if a field has been set.
+func (o *ApmResourceStatsQuery) HasGroupBy() bool {
+	return o != nil && o.GroupBy != nil
+}
+
+// SetGroupBy gets a reference to the given []string and assigns it to the GroupBy field.
+func (o *ApmResourceStatsQuery) SetGroupBy(v []string) {
+	o.GroupBy = v
+}
+
+// GetName returns the Name field value.
+func (o *ApmResourceStatsQuery) GetName() string {
+	if o == nil {
+		var ret string
+		return ret
+	}
+	return o.Name
+}
+
+// GetNameOk returns a tuple with the Name field value
+// and a boolean to check if the value has been set.
+func (o *ApmResourceStatsQuery) GetNameOk() (*string, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.Name, true
+}
+
+// SetName sets field value.
+func (o *ApmResourceStatsQuery) SetName(v string) {
+	o.Name = v
+}
+
+// GetOperationName returns the OperationName field value if set, zero value otherwise.
+func (o *ApmResourceStatsQuery) GetOperationName() string {
+	if o == nil || o.OperationName == nil {
+		var ret string
+		return ret
+	}
+	return *o.OperationName
+}
+
+// GetOperationNameOk returns a tuple with the OperationName field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ApmResourceStatsQuery) GetOperationNameOk() (*string, bool) {
+	if o == nil || o.OperationName == nil {
+		return nil, false
+	}
+	return o.OperationName, true
+}
+
+// HasOperationName returns a boolean if a field has been set.
+func (o *ApmResourceStatsQuery) HasOperationName() bool {
+	return o != nil && o.OperationName != nil
+}
+
+// SetOperationName gets a reference to the given string and assigns it to the OperationName field.
+func (o *ApmResourceStatsQuery) SetOperationName(v string) {
+	o.OperationName = &v
+}
+
+// GetPrimaryTagName returns the PrimaryTagName field value if set, zero value otherwise.
+func (o *ApmResourceStatsQuery) GetPrimaryTagName() string {
+	if o == nil || o.PrimaryTagName == nil {
+		var ret string
+		return ret
+	}
+	return *o.PrimaryTagName
+}
+
+// GetPrimaryTagNameOk returns a tuple with the PrimaryTagName field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ApmResourceStatsQuery) GetPrimaryTagNameOk() (*string, bool) {
+	if o == nil || o.PrimaryTagName == nil {
+		return nil, false
+	}
+	return o.PrimaryTagName, true
+}
+
+// HasPrimaryTagName returns a boolean if a field has been set.
+func (o *ApmResourceStatsQuery) HasPrimaryTagName() bool {
+	return o != nil && o.PrimaryTagName != nil
+}
+
+// SetPrimaryTagName gets a reference to the given string and assigns it to the PrimaryTagName field.
+func (o *ApmResourceStatsQuery) SetPrimaryTagName(v string) {
+	o.PrimaryTagName = &v
+}
+
+// GetPrimaryTagValue returns the PrimaryTagValue field value if set, zero value otherwise.
+func (o *ApmResourceStatsQuery) GetPrimaryTagValue() string {
+	if o == nil || o.PrimaryTagValue == nil {
+		var ret string
+		return ret
+	}
+	return *o.PrimaryTagValue
+}
+
+// GetPrimaryTagValueOk returns a tuple with the PrimaryTagValue field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ApmResourceStatsQuery) GetPrimaryTagValueOk() (*string, bool) {
+	if o == nil || o.PrimaryTagValue == nil {
+		return nil, false
+	}
+	return o.PrimaryTagValue, true
+}
+
+// HasPrimaryTagValue returns a boolean if a field has been set.
+func (o *ApmResourceStatsQuery) HasPrimaryTagValue() bool {
+	return o != nil && o.PrimaryTagValue != nil
+}
+
+// SetPrimaryTagValue gets a reference to the given string and assigns it to the PrimaryTagValue field.
+func (o *ApmResourceStatsQuery) SetPrimaryTagValue(v string) {
+	o.PrimaryTagValue = &v
+}
+
+// GetResourceName returns the ResourceName field value if set, zero value otherwise.
+func (o *ApmResourceStatsQuery) GetResourceName() string {
+	if o == nil || o.ResourceName == nil {
+		var ret string
+		return ret
+	}
+	return *o.ResourceName
+}
+
+// GetResourceNameOk returns a tuple with the ResourceName field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ApmResourceStatsQuery) GetResourceNameOk() (*string, bool) {
+	if o == nil || o.ResourceName == nil {
+		return nil, false
+	}
+	return o.ResourceName, true
+}
+
+// HasResourceName returns a boolean if a field has been set.
+func (o *ApmResourceStatsQuery) HasResourceName() bool {
+	return o != nil && o.ResourceName != nil
+}
+
+// SetResourceName gets a reference to the given string and assigns it to the ResourceName field.
+func (o *ApmResourceStatsQuery) SetResourceName(v string) {
+	o.ResourceName = &v
+}
+
+// GetService returns the Service field value.
+func (o *ApmResourceStatsQuery) GetService() string {
+	if o == nil {
+		var ret string
+		return ret
+	}
+	return o.Service
+}
+
+// GetServiceOk returns a tuple with the Service field value
+// and a boolean to check if the value has been set.
+func (o *ApmResourceStatsQuery) GetServiceOk() (*string, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.Service, true
+}
+
+// SetService sets field value.
+func (o *ApmResourceStatsQuery) SetService(v string) {
+	o.Service = v
+}
+
+// GetStat returns the Stat field value.
+func (o *ApmResourceStatsQuery) GetStat() ApmResourceStatName {
+	if o == nil {
+		var ret ApmResourceStatName
+		return ret
+	}
+	return o.Stat
+}
+
+// GetStatOk returns a tuple with the Stat field value
+// and a boolean to check if the value has been set.
+func (o *ApmResourceStatsQuery) GetStatOk() (*ApmResourceStatName, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.Stat, true
+}
+
+// SetStat sets field value.
+func (o *ApmResourceStatsQuery) SetStat(v ApmResourceStatName) {
+	o.Stat = v
+}
+
+// MarshalJSON serializes the struct using spec logic.
+func (o ApmResourceStatsQuery) MarshalJSON() ([]byte, error) {
+	toSerialize := map[string]interface{}{}
+	if o.UnparsedObject != nil {
+		return datadog.Marshal(o.UnparsedObject)
+	}
+	toSerialize["data_source"] = o.DataSource
+	toSerialize["env"] = o.Env
+	if o.GroupBy != nil {
+		toSerialize["group_by"] = o.GroupBy
+	}
+	toSerialize["name"] = o.Name
+	if o.OperationName != nil {
+		toSerialize["operation_name"] = o.OperationName
+	}
+	if o.PrimaryTagName != nil {
+		toSerialize["primary_tag_name"] = o.PrimaryTagName
+	}
+	if o.PrimaryTagValue != nil {
+		toSerialize["primary_tag_value"] = o.PrimaryTagValue
+	}
+	if o.ResourceName != nil {
+		toSerialize["resource_name"] = o.ResourceName
+	}
+	toSerialize["service"] = o.Service
+	toSerialize["stat"] = o.Stat
+
+	for key, value := range o.AdditionalProperties {
+		toSerialize[key] = value
+	}
+	return datadog.Marshal(toSerialize)
+}
+
+// UnmarshalJSON deserializes the given payload.
+func (o *ApmResourceStatsQuery) UnmarshalJSON(bytes []byte) (err error) {
+	all := struct {
+		DataSource      *ApmResourceStatsDataSource `json:"data_source"`
+		Env             *string                     `json:"env"`
+		GroupBy         []string                    `json:"group_by,omitempty"`
+		Name            *string                     `json:"name"`
+		OperationName   *string                     `json:"operation_name,omitempty"`
+		PrimaryTagName  *string                     `json:"primary_tag_name,omitempty"`
+		PrimaryTagValue *string                     `json:"primary_tag_value,omitempty"`
+		ResourceName    *string                     `json:"resource_name,omitempty"`
+		Service         *string                     `json:"service"`
+		Stat            *ApmResourceStatName        `json:"stat"`
+	}{}
+	if err = datadog.Unmarshal(bytes, &all); err != nil {
+		return datadog.Unmarshal(bytes, &o.UnparsedObject)
+	}
+	if all.DataSource == nil {
+		return fmt.Errorf("required field data_source missing")
+	}
+	if all.Env == nil {
+		return fmt.Errorf("required field env missing")
+	}
+	if all.Name == nil {
+		return fmt.Errorf("required field name missing")
+	}
+	if all.Service == nil {
+		return fmt.Errorf("required field service missing")
+	}
+	if all.Stat == nil {
+		return fmt.Errorf("required field stat missing")
+	}
+	additionalProperties := make(map[string]interface{})
+	if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil {
+		datadog.DeleteKeys(additionalProperties, &[]string{"data_source", "env", "group_by", "name", "operation_name", "primary_tag_name", "primary_tag_value", "resource_name", "service", "stat"})
+	} else {
+		return err
+	}
+
+	hasInvalidField := false
+	if !all.DataSource.IsValid() {
+		hasInvalidField = true
+	} else {
+		o.DataSource = *all.DataSource
+	}
+	o.Env = *all.Env
+	o.GroupBy = all.GroupBy
+	o.Name = *all.Name
+	o.OperationName = all.OperationName
+	o.PrimaryTagName = all.PrimaryTagName
+	o.PrimaryTagValue = all.PrimaryTagValue
+	o.ResourceName = all.ResourceName
+	o.Service = *all.Service
+	if !all.Stat.IsValid() {
+		hasInvalidField = true
+	} else {
+		o.Stat = *all.Stat
+	}
+
+	if len(additionalProperties) > 0 {
+		o.AdditionalProperties = additionalProperties
+	}
+
+	if hasInvalidField {
+		return datadog.Unmarshal(bytes, &o.UnparsedObject)
+	}
+
+	return nil
+}
diff --git a/api/datadogV2/model_container_data_source.go b/api/datadogV2/model_container_data_source.go
new file mode 100644
index 00000000000..8e77c417ff9
--- /dev/null
+++ b/api/datadogV2/model_container_data_source.go
@@ -0,0 +1,64 @@
+// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+// This product includes software developed at Datadog (https://www.datadoghq.com/).
+// Copyright 2019-Present Datadog, Inc.
+
+package datadogV2
+
+import (
+	"fmt"
+
+	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
+)
+
+// ContainerDataSource A data source for container-level infrastructure metrics.
+type ContainerDataSource string
+
+// List of ContainerDataSource.
+const (
+	CONTAINERDATASOURCE_CONTAINER ContainerDataSource = "container"
+)
+
+var allowedContainerDataSourceEnumValues = []ContainerDataSource{
+	CONTAINERDATASOURCE_CONTAINER,
+}
+
+// GetAllowedValues returns the list of possible values.
+func (v *ContainerDataSource) GetAllowedValues() []ContainerDataSource {
+	return allowedContainerDataSourceEnumValues
+}
+
+// UnmarshalJSON deserializes the given payload.
+func (v *ContainerDataSource) UnmarshalJSON(src []byte) error {
+	var value string
+	err := datadog.Unmarshal(src, &value)
+	if err != nil {
+		return err
+	}
+	*v = ContainerDataSource(value)
+	return nil
+}
+
+// NewContainerDataSourceFromValue returns a pointer to a valid ContainerDataSource
+// for the value passed as argument, or an error if the value passed is not allowed by the enum.
+func NewContainerDataSourceFromValue(v string) (*ContainerDataSource, error) {
+	ev := ContainerDataSource(v)
+	if ev.IsValid() {
+		return &ev, nil
+	}
+	return nil, fmt.Errorf("invalid value '%v' for ContainerDataSource: valid values are %v", v, allowedContainerDataSourceEnumValues)
+}
+
+// IsValid returns true if the value is valid for the enum, false otherwise.
+func (v ContainerDataSource) IsValid() bool {
+	for _, existing := range allowedContainerDataSourceEnumValues {
+		if existing == v {
+			return true
+		}
+	}
+	return false
+}
+
+// Ptr returns reference to ContainerDataSource value.
+func (v ContainerDataSource) Ptr() *ContainerDataSource {
+	return &v
+}
diff --git a/api/datadogV2/model_container_scalar_query.go b/api/datadogV2/model_container_scalar_query.go
new file mode 100644
index 00000000000..734cf7a8f59
--- /dev/null
+++ b/api/datadogV2/model_container_scalar_query.go
@@ -0,0 +1,403 @@
+// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+// This product includes software developed at Datadog (https://www.datadoghq.com/).
+// Copyright 2019-Present Datadog, Inc.
+
+package datadogV2
+
+import (
+	"fmt"
+
+	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
+)
+
+// ContainerScalarQuery A query for container-level metrics such as CPU and memory usage.
+type ContainerScalarQuery struct {
+	// The type of aggregation that can be performed on metrics-based queries.
+	Aggregator *MetricsAggregator `json:"aggregator,omitempty"`
+	// A data source for container-level infrastructure metrics.
+	DataSource ContainerDataSource `json:"data_source"`
+	// Whether CPU metrics should be normalized by core count.
+	IsNormalizedCpu *bool `json:"is_normalized_cpu,omitempty"`
+	// Maximum number of results to return.
+	Limit *int64 `json:"limit,omitempty"`
+	// The container metric to query.
+	Metric string `json:"metric"`
+	// The variable name for use in formulas.
+	Name string `json:"name"`
+	// Direction of sort.
+	Sort *QuerySortOrder `json:"sort,omitempty"`
+	// Tag filters to narrow down containers.
+	TagFilters []string `json:"tag_filters,omitempty"`
+	// A full-text search filter to match container names.
+	TextFilter *string `json:"text_filter,omitempty"`
+	// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
+	UnparsedObject       map[string]interface{} `json:"-"`
+	AdditionalProperties map[string]interface{} `json:"-"`
+}
+
+// NewContainerScalarQuery instantiates a new ContainerScalarQuery object.
+// This constructor will assign default values to properties that have it defined,
+// and makes sure properties required by API are set, but the set of arguments
+// will change when the set of required properties is changed.
+func NewContainerScalarQuery(dataSource ContainerDataSource, metric string, name string) *ContainerScalarQuery {
+	this := ContainerScalarQuery{}
+	var aggregator MetricsAggregator = METRICSAGGREGATOR_AVG
+	this.Aggregator = &aggregator
+	this.DataSource = dataSource
+	this.Metric = metric
+	this.Name = name
+	var sort QuerySortOrder = QUERYSORTORDER_DESC
+	this.Sort = &sort
+	return &this
+}
+
+// NewContainerScalarQueryWithDefaults instantiates a new ContainerScalarQuery object.
+// This constructor will only assign default values to properties that have it defined,
+// but it doesn't guarantee that properties required by API are set.
+func NewContainerScalarQueryWithDefaults() *ContainerScalarQuery {
+	this := ContainerScalarQuery{}
+	var aggregator MetricsAggregator = METRICSAGGREGATOR_AVG
+	this.Aggregator = &aggregator
+	var dataSource ContainerDataSource = CONTAINERDATASOURCE_CONTAINER
+	this.DataSource = dataSource
+	var sort QuerySortOrder = QUERYSORTORDER_DESC
+	this.Sort = &sort
+	return &this
+}
+
+// GetAggregator returns the Aggregator field value if set, zero value otherwise.
+func (o *ContainerScalarQuery) GetAggregator() MetricsAggregator {
+	if o == nil || o.Aggregator == nil {
+		var ret MetricsAggregator
+		return ret
+	}
+	return *o.Aggregator
+}
+
+// GetAggregatorOk returns a tuple with the Aggregator field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ContainerScalarQuery) GetAggregatorOk() (*MetricsAggregator, bool) {
+	if o == nil || o.Aggregator == nil {
+		return nil, false
+	}
+	return o.Aggregator, true
+}
+
+// HasAggregator returns a boolean if a field has been set.
+func (o *ContainerScalarQuery) HasAggregator() bool {
+	return o != nil && o.Aggregator != nil
+}
+
+// SetAggregator gets a reference to the given MetricsAggregator and assigns it to the Aggregator field.
+func (o *ContainerScalarQuery) SetAggregator(v MetricsAggregator) {
+	o.Aggregator = &v
+}
+
+// GetDataSource returns the DataSource field value.
+func (o *ContainerScalarQuery) GetDataSource() ContainerDataSource {
+	if o == nil {
+		var ret ContainerDataSource
+		return ret
+	}
+	return o.DataSource
+}
+
+// GetDataSourceOk returns a tuple with the DataSource field value
+// and a boolean to check if the value has been set.
+func (o *ContainerScalarQuery) GetDataSourceOk() (*ContainerDataSource, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.DataSource, true
+}
+
+// SetDataSource sets field value.
+func (o *ContainerScalarQuery) SetDataSource(v ContainerDataSource) {
+	o.DataSource = v
+}
+
+// GetIsNormalizedCpu returns the IsNormalizedCpu field value if set, zero value otherwise.
+func (o *ContainerScalarQuery) GetIsNormalizedCpu() bool {
+	if o == nil || o.IsNormalizedCpu == nil {
+		var ret bool
+		return ret
+	}
+	return *o.IsNormalizedCpu
+}
+
+// GetIsNormalizedCpuOk returns a tuple with the IsNormalizedCpu field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ContainerScalarQuery) GetIsNormalizedCpuOk() (*bool, bool) {
+	if o == nil || o.IsNormalizedCpu == nil {
+		return nil, false
+	}
+	return o.IsNormalizedCpu, true
+}
+
+// HasIsNormalizedCpu returns a boolean if a field has been set.
+func (o *ContainerScalarQuery) HasIsNormalizedCpu() bool {
+	return o != nil && o.IsNormalizedCpu != nil
+}
+
+// SetIsNormalizedCpu gets a reference to the given bool and assigns it to the IsNormalizedCpu field.
+func (o *ContainerScalarQuery) SetIsNormalizedCpu(v bool) {
+	o.IsNormalizedCpu = &v
+}
+
+// GetLimit returns the Limit field value if set, zero value otherwise.
+func (o *ContainerScalarQuery) GetLimit() int64 {
+	if o == nil || o.Limit == nil {
+		var ret int64
+		return ret
+	}
+	return *o.Limit
+}
+
+// GetLimitOk returns a tuple with the Limit field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ContainerScalarQuery) GetLimitOk() (*int64, bool) {
+	if o == nil || o.Limit == nil {
+		return nil, false
+	}
+	return o.Limit, true
+}
+
+// HasLimit returns a boolean if a field has been set.
+func (o *ContainerScalarQuery) HasLimit() bool {
+	return o != nil && o.Limit != nil
+}
+
+// SetLimit gets a reference to the given int64 and assigns it to the Limit field.
+func (o *ContainerScalarQuery) SetLimit(v int64) {
+	o.Limit = &v
+}
+
+// GetMetric returns the Metric field value.
+func (o *ContainerScalarQuery) GetMetric() string {
+	if o == nil {
+		var ret string
+		return ret
+	}
+	return o.Metric
+}
+
+// GetMetricOk returns a tuple with the Metric field value
+// and a boolean to check if the value has been set.
+func (o *ContainerScalarQuery) GetMetricOk() (*string, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.Metric, true
+}
+
+// SetMetric sets field value.
+func (o *ContainerScalarQuery) SetMetric(v string) {
+	o.Metric = v
+}
+
+// GetName returns the Name field value.
+func (o *ContainerScalarQuery) GetName() string {
+	if o == nil {
+		var ret string
+		return ret
+	}
+	return o.Name
+}
+
+// GetNameOk returns a tuple with the Name field value
+// and a boolean to check if the value has been set.
+func (o *ContainerScalarQuery) GetNameOk() (*string, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.Name, true
+}
+
+// SetName sets field value.
+func (o *ContainerScalarQuery) SetName(v string) {
+	o.Name = v
+}
+
+// GetSort returns the Sort field value if set, zero value otherwise.
+func (o *ContainerScalarQuery) GetSort() QuerySortOrder {
+	if o == nil || o.Sort == nil {
+		var ret QuerySortOrder
+		return ret
+	}
+	return *o.Sort
+}
+
+// GetSortOk returns a tuple with the Sort field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ContainerScalarQuery) GetSortOk() (*QuerySortOrder, bool) {
+	if o == nil || o.Sort == nil {
+		return nil, false
+	}
+	return o.Sort, true
+}
+
+// HasSort returns a boolean if a field has been set.
+func (o *ContainerScalarQuery) HasSort() bool {
+	return o != nil && o.Sort != nil
+}
+
+// SetSort gets a reference to the given QuerySortOrder and assigns it to the Sort field.
+func (o *ContainerScalarQuery) SetSort(v QuerySortOrder) {
+	o.Sort = &v
+}
+
+// GetTagFilters returns the TagFilters field value if set, zero value otherwise.
+func (o *ContainerScalarQuery) GetTagFilters() []string {
+	if o == nil || o.TagFilters == nil {
+		var ret []string
+		return ret
+	}
+	return o.TagFilters
+}
+
+// GetTagFiltersOk returns a tuple with the TagFilters field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ContainerScalarQuery) GetTagFiltersOk() (*[]string, bool) {
+	if o == nil || o.TagFilters == nil {
+		return nil, false
+	}
+	return &o.TagFilters, true
+}
+
+// HasTagFilters returns a boolean if a field has been set.
+func (o *ContainerScalarQuery) HasTagFilters() bool {
+	return o != nil && o.TagFilters != nil
+}
+
+// SetTagFilters gets a reference to the given []string and assigns it to the TagFilters field.
+func (o *ContainerScalarQuery) SetTagFilters(v []string) {
+	o.TagFilters = v
+}
+
+// GetTextFilter returns the TextFilter field value if set, zero value otherwise.
+func (o *ContainerScalarQuery) GetTextFilter() string {
+	if o == nil || o.TextFilter == nil {
+		var ret string
+		return ret
+	}
+	return *o.TextFilter
+}
+
+// GetTextFilterOk returns a tuple with the TextFilter field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ContainerScalarQuery) GetTextFilterOk() (*string, bool) {
+	if o == nil || o.TextFilter == nil {
+		return nil, false
+	}
+	return o.TextFilter, true
+}
+
+// HasTextFilter returns a boolean if a field has been set.
+func (o *ContainerScalarQuery) HasTextFilter() bool {
+	return o != nil && o.TextFilter != nil
+}
+
+// SetTextFilter gets a reference to the given string and assigns it to the TextFilter field.
+func (o *ContainerScalarQuery) SetTextFilter(v string) {
+	o.TextFilter = &v
+}
+
+// MarshalJSON serializes the struct using spec logic.
+func (o ContainerScalarQuery) MarshalJSON() ([]byte, error) {
+	toSerialize := map[string]interface{}{}
+	if o.UnparsedObject != nil {
+		return datadog.Marshal(o.UnparsedObject)
+	}
+	if o.Aggregator != nil {
+		toSerialize["aggregator"] = o.Aggregator
+	}
+	toSerialize["data_source"] = o.DataSource
+	if o.IsNormalizedCpu != nil {
+		toSerialize["is_normalized_cpu"] = o.IsNormalizedCpu
+	}
+	if o.Limit != nil {
+		toSerialize["limit"] = o.Limit
+	}
+	toSerialize["metric"] = o.Metric
+	toSerialize["name"] = o.Name
+	if o.Sort != nil {
+		toSerialize["sort"] = o.Sort
+	}
+	if o.TagFilters != nil {
+		toSerialize["tag_filters"] = o.TagFilters
+	}
+	if o.TextFilter != nil {
+		toSerialize["text_filter"] = o.TextFilter
+	}
+
+	for key, value := range o.AdditionalProperties {
+		toSerialize[key] = value
+	}
+	return datadog.Marshal(toSerialize)
+}
+
+// UnmarshalJSON deserializes the given payload.
+func (o *ContainerScalarQuery) UnmarshalJSON(bytes []byte) (err error) {
+	all := struct {
+		Aggregator      *MetricsAggregator   `json:"aggregator,omitempty"`
+		DataSource      *ContainerDataSource `json:"data_source"`
+		IsNormalizedCpu *bool                `json:"is_normalized_cpu,omitempty"`
+		Limit           *int64               `json:"limit,omitempty"`
+		Metric          *string              `json:"metric"`
+		Name            *string              `json:"name"`
+		Sort            *QuerySortOrder      `json:"sort,omitempty"`
+		TagFilters      []string             `json:"tag_filters,omitempty"`
+		TextFilter      *string              `json:"text_filter,omitempty"`
+	}{}
+	if err = datadog.Unmarshal(bytes, &all); err != nil {
+		return datadog.Unmarshal(bytes, &o.UnparsedObject)
+	}
+	if all.DataSource == nil {
+		return fmt.Errorf("required field data_source missing")
+	}
+	if all.Metric == nil {
+		return fmt.Errorf("required field metric missing")
+	}
+	if all.Name == nil {
+		return fmt.Errorf("required field name missing")
+	}
+	additionalProperties := make(map[string]interface{})
+	if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil {
+		datadog.DeleteKeys(additionalProperties, &[]string{"aggregator", "data_source", "is_normalized_cpu", "limit", "metric", "name", "sort", "tag_filters", "text_filter"})
+	} else {
+		return err
+	}
+
+	hasInvalidField := false
+	if all.Aggregator != nil && !all.Aggregator.IsValid() {
+		hasInvalidField = true
+	} else {
+		o.Aggregator = all.Aggregator
+	}
+	if !all.DataSource.IsValid() {
+		hasInvalidField = true
+	} else {
+		o.DataSource = *all.DataSource
+	}
+	o.IsNormalizedCpu = all.IsNormalizedCpu
+	o.Limit = all.Limit
+	o.Metric = *all.Metric
+	o.Name = *all.Name
+	if all.Sort != nil && !all.Sort.IsValid() {
+		hasInvalidField = true
+	} else {
+		o.Sort = all.Sort
+	}
+	o.TagFilters = all.TagFilters
+	o.TextFilter = all.TextFilter
+
+	if len(additionalProperties) > 0 {
+		o.AdditionalProperties = additionalProperties
+	}
+
+	if hasInvalidField {
+		return datadog.Unmarshal(bytes, &o.UnparsedObject)
+	}
+
+	return nil
+}
diff --git a/api/datadogV2/model_container_timeseries_query.go b/api/datadogV2/model_container_timeseries_query.go
new file mode 100644
index 00000000000..5f242d08331
--- /dev/null
+++ b/api/datadogV2/model_container_timeseries_query.go
@@ -0,0 +1,360 @@
+// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+// This product includes software developed at Datadog (https://www.datadoghq.com/).
+// Copyright 2019-Present Datadog, Inc.
+
+package datadogV2
+
+import (
+	"fmt"
+
+	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
+)
+
+// ContainerTimeseriesQuery A query for container-level metrics such as CPU and memory usage.
+type ContainerTimeseriesQuery struct {
+	// A data source for container-level infrastructure metrics.
+	DataSource ContainerDataSource `json:"data_source"`
+	// Whether CPU metrics should be normalized by core count.
+	IsNormalizedCpu *bool `json:"is_normalized_cpu,omitempty"`
+	// Maximum number of results to return.
+	Limit *int64 `json:"limit,omitempty"`
+	// The container metric to query.
+	Metric string `json:"metric"`
+	// The variable name for use in formulas.
+	Name string `json:"name"`
+	// Direction of sort.
+	Sort *QuerySortOrder `json:"sort,omitempty"`
+	// Tag filters to narrow down containers.
+	TagFilters []string `json:"tag_filters,omitempty"`
+	// A full-text search filter to match container names.
+	TextFilter *string `json:"text_filter,omitempty"`
+	// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
+	UnparsedObject       map[string]interface{} `json:"-"`
+	AdditionalProperties map[string]interface{} `json:"-"`
+}
+
+// NewContainerTimeseriesQuery instantiates a new ContainerTimeseriesQuery object.
+// This constructor will assign default values to properties that have it defined,
+// and makes sure properties required by API are set, but the set of arguments
+// will change when the set of required properties is changed.
+func NewContainerTimeseriesQuery(dataSource ContainerDataSource, metric string, name string) *ContainerTimeseriesQuery {
+	this := ContainerTimeseriesQuery{}
+	this.DataSource = dataSource
+	this.Metric = metric
+	this.Name = name
+	var sort QuerySortOrder = QUERYSORTORDER_DESC
+	this.Sort = &sort
+	return &this
+}
+
+// NewContainerTimeseriesQueryWithDefaults instantiates a new ContainerTimeseriesQuery object.
+// This constructor will only assign default values to properties that have it defined,
+// but it doesn't guarantee that properties required by API are set.
+func NewContainerTimeseriesQueryWithDefaults() *ContainerTimeseriesQuery {
+	this := ContainerTimeseriesQuery{}
+	var dataSource ContainerDataSource = CONTAINERDATASOURCE_CONTAINER
+	this.DataSource = dataSource
+	var sort QuerySortOrder = QUERYSORTORDER_DESC
+	this.Sort = &sort
+	return &this
+}
+
+// GetDataSource returns the DataSource field value.
+func (o *ContainerTimeseriesQuery) GetDataSource() ContainerDataSource {
+	if o == nil {
+		var ret ContainerDataSource
+		return ret
+	}
+	return o.DataSource
+}
+
+// GetDataSourceOk returns a tuple with the DataSource field value
+// and a boolean to check if the value has been set.
+func (o *ContainerTimeseriesQuery) GetDataSourceOk() (*ContainerDataSource, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.DataSource, true
+}
+
+// SetDataSource sets field value.
+func (o *ContainerTimeseriesQuery) SetDataSource(v ContainerDataSource) {
+	o.DataSource = v
+}
+
+// GetIsNormalizedCpu returns the IsNormalizedCpu field value if set, zero value otherwise.
+func (o *ContainerTimeseriesQuery) GetIsNormalizedCpu() bool {
+	if o == nil || o.IsNormalizedCpu == nil {
+		var ret bool
+		return ret
+	}
+	return *o.IsNormalizedCpu
+}
+
+// GetIsNormalizedCpuOk returns a tuple with the IsNormalizedCpu field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ContainerTimeseriesQuery) GetIsNormalizedCpuOk() (*bool, bool) {
+	if o == nil || o.IsNormalizedCpu == nil {
+		return nil, false
+	}
+	return o.IsNormalizedCpu, true
+}
+
+// HasIsNormalizedCpu returns a boolean if a field has been set.
+func (o *ContainerTimeseriesQuery) HasIsNormalizedCpu() bool {
+	return o != nil && o.IsNormalizedCpu != nil
+}
+
+// SetIsNormalizedCpu gets a reference to the given bool and assigns it to the IsNormalizedCpu field.
+func (o *ContainerTimeseriesQuery) SetIsNormalizedCpu(v bool) {
+	o.IsNormalizedCpu = &v
+}
+
+// GetLimit returns the Limit field value if set, zero value otherwise.
+func (o *ContainerTimeseriesQuery) GetLimit() int64 {
+	if o == nil || o.Limit == nil {
+		var ret int64
+		return ret
+	}
+	return *o.Limit
+}
+
+// GetLimitOk returns a tuple with the Limit field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ContainerTimeseriesQuery) GetLimitOk() (*int64, bool) {
+	if o == nil || o.Limit == nil {
+		return nil, false
+	}
+	return o.Limit, true
+}
+
+// HasLimit returns a boolean if a field has been set.
+func (o *ContainerTimeseriesQuery) HasLimit() bool {
+	return o != nil && o.Limit != nil
+}
+
+// SetLimit gets a reference to the given int64 and assigns it to the Limit field.
+func (o *ContainerTimeseriesQuery) SetLimit(v int64) {
+	o.Limit = &v
+}
+
+// GetMetric returns the Metric field value.
+func (o *ContainerTimeseriesQuery) GetMetric() string {
+	if o == nil {
+		var ret string
+		return ret
+	}
+	return o.Metric
+}
+
+// GetMetricOk returns a tuple with the Metric field value
+// and a boolean to check if the value has been set.
+func (o *ContainerTimeseriesQuery) GetMetricOk() (*string, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.Metric, true
+}
+
+// SetMetric sets field value.
+func (o *ContainerTimeseriesQuery) SetMetric(v string) {
+	o.Metric = v
+}
+
+// GetName returns the Name field value.
+func (o *ContainerTimeseriesQuery) GetName() string {
+	if o == nil {
+		var ret string
+		return ret
+	}
+	return o.Name
+}
+
+// GetNameOk returns a tuple with the Name field value
+// and a boolean to check if the value has been set.
+func (o *ContainerTimeseriesQuery) GetNameOk() (*string, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.Name, true
+}
+
+// SetName sets field value.
+func (o *ContainerTimeseriesQuery) SetName(v string) {
+	o.Name = v
+}
+
+// GetSort returns the Sort field value if set, zero value otherwise.
+func (o *ContainerTimeseriesQuery) GetSort() QuerySortOrder {
+	if o == nil || o.Sort == nil {
+		var ret QuerySortOrder
+		return ret
+	}
+	return *o.Sort
+}
+
+// GetSortOk returns a tuple with the Sort field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ContainerTimeseriesQuery) GetSortOk() (*QuerySortOrder, bool) {
+	if o == nil || o.Sort == nil {
+		return nil, false
+	}
+	return o.Sort, true
+}
+
+// HasSort returns a boolean if a field has been set.
+func (o *ContainerTimeseriesQuery) HasSort() bool {
+	return o != nil && o.Sort != nil
+}
+
+// SetSort gets a reference to the given QuerySortOrder and assigns it to the Sort field.
+func (o *ContainerTimeseriesQuery) SetSort(v QuerySortOrder) {
+	o.Sort = &v
+}
+
+// GetTagFilters returns the TagFilters field value if set, zero value otherwise.
+func (o *ContainerTimeseriesQuery) GetTagFilters() []string {
+	if o == nil || o.TagFilters == nil {
+		var ret []string
+		return ret
+	}
+	return o.TagFilters
+}
+
+// GetTagFiltersOk returns a tuple with the TagFilters field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ContainerTimeseriesQuery) GetTagFiltersOk() (*[]string, bool) {
+	if o == nil || o.TagFilters == nil {
+		return nil, false
+	}
+	return &o.TagFilters, true
+}
+
+// HasTagFilters returns a boolean if a field has been set.
+func (o *ContainerTimeseriesQuery) HasTagFilters() bool {
+	return o != nil && o.TagFilters != nil
+}
+
+// SetTagFilters gets a reference to the given []string and assigns it to the TagFilters field.
+func (o *ContainerTimeseriesQuery) SetTagFilters(v []string) {
+	o.TagFilters = v
+}
+
+// GetTextFilter returns the TextFilter field value if set, zero value otherwise.
+func (o *ContainerTimeseriesQuery) GetTextFilter() string {
+	if o == nil || o.TextFilter == nil {
+		var ret string
+		return ret
+	}
+	return *o.TextFilter
+}
+
+// GetTextFilterOk returns a tuple with the TextFilter field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ContainerTimeseriesQuery) GetTextFilterOk() (*string, bool) {
+	if o == nil || o.TextFilter == nil {
+		return nil, false
+	}
+	return o.TextFilter, true
+}
+
+// HasTextFilter returns a boolean if a field has been set.
+func (o *ContainerTimeseriesQuery) HasTextFilter() bool {
+	return o != nil && o.TextFilter != nil
+}
+
+// SetTextFilter gets a reference to the given string and assigns it to the TextFilter field.
+func (o *ContainerTimeseriesQuery) SetTextFilter(v string) {
+	o.TextFilter = &v
+}
+
+// MarshalJSON serializes the struct using spec logic.
+func (o ContainerTimeseriesQuery) MarshalJSON() ([]byte, error) {
+	toSerialize := map[string]interface{}{}
+	if o.UnparsedObject != nil {
+		return datadog.Marshal(o.UnparsedObject)
+	}
+	toSerialize["data_source"] = o.DataSource
+	if o.IsNormalizedCpu != nil {
+		toSerialize["is_normalized_cpu"] = o.IsNormalizedCpu
+	}
+	if o.Limit != nil {
+		toSerialize["limit"] = o.Limit
+	}
+	toSerialize["metric"] = o.Metric
+	toSerialize["name"] = o.Name
+	if o.Sort != nil {
+		toSerialize["sort"] = o.Sort
+	}
+	if o.TagFilters != nil {
+		toSerialize["tag_filters"] = o.TagFilters
+	}
+	if o.TextFilter != nil {
+		toSerialize["text_filter"] = o.TextFilter
+	}
+
+	for key, value := range o.AdditionalProperties {
+		toSerialize[key] = value
+	}
+	return datadog.Marshal(toSerialize)
+}
+
+// UnmarshalJSON deserializes the given payload.
+func (o *ContainerTimeseriesQuery) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + DataSource *ContainerDataSource `json:"data_source"` + IsNormalizedCpu *bool `json:"is_normalized_cpu,omitempty"` + Limit *int64 `json:"limit,omitempty"` + Metric *string `json:"metric"` + Name *string `json:"name"` + Sort *QuerySortOrder `json:"sort,omitempty"` + TagFilters []string `json:"tag_filters,omitempty"` + TextFilter *string `json:"text_filter,omitempty"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.DataSource == nil { + return fmt.Errorf("required field data_source missing") + } + if all.Metric == nil { + return fmt.Errorf("required field metric missing") + } + if all.Name == nil { + return fmt.Errorf("required field name missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"data_source", "is_normalized_cpu", "limit", "metric", "name", "sort", "tag_filters", "text_filter"}) + } else { + return err + } + + hasInvalidField := false + if !all.DataSource.IsValid() { + hasInvalidField = true + } else { + o.DataSource = *all.DataSource + } + o.IsNormalizedCpu = all.IsNormalizedCpu + o.Limit = all.Limit + o.Metric = *all.Metric + o.Name = *all.Name + if all.Sort != nil && !all.Sort.IsValid() { + hasInvalidField = true + } else { + o.Sort = all.Sort + } + o.TagFilters = all.TagFilters + o.TextFilter = all.TextFilter + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_events_data_source.go b/api/datadogV2/model_events_data_source.go index 9a24a736961..ec06bb6f56c 100644 --- a/api/datadogV2/model_events_data_source.go +++ b/api/datadogV2/model_events_data_source.go @@ -15,14 
+15,36 @@ type EventsDataSource string // List of EventsDataSource. const ( - EVENTSDATASOURCE_LOGS EventsDataSource = "logs" - EVENTSDATASOURCE_RUM EventsDataSource = "rum" - EVENTSDATASOURCE_DORA EventsDataSource = "dora" + EVENTSDATASOURCE_LOGS EventsDataSource = "logs" + EVENTSDATASOURCE_SPANS EventsDataSource = "spans" + EVENTSDATASOURCE_NETWORK EventsDataSource = "network" + EVENTSDATASOURCE_RUM EventsDataSource = "rum" + EVENTSDATASOURCE_SECURITY_SIGNALS EventsDataSource = "security_signals" + EVENTSDATASOURCE_PROFILES EventsDataSource = "profiles" + EVENTSDATASOURCE_AUDIT EventsDataSource = "audit" + EVENTSDATASOURCE_EVENTS EventsDataSource = "events" + EVENTSDATASOURCE_CI_TESTS EventsDataSource = "ci_tests" + EVENTSDATASOURCE_CI_PIPELINES EventsDataSource = "ci_pipelines" + EVENTSDATASOURCE_INCIDENT_ANALYTICS EventsDataSource = "incident_analytics" + EVENTSDATASOURCE_PRODUCT_ANALYTICS EventsDataSource = "product_analytics" + EVENTSDATASOURCE_ON_CALL_EVENTS EventsDataSource = "on_call_events" + EVENTSDATASOURCE_DORA EventsDataSource = "dora" ) var allowedEventsDataSourceEnumValues = []EventsDataSource{ EVENTSDATASOURCE_LOGS, + EVENTSDATASOURCE_SPANS, + EVENTSDATASOURCE_NETWORK, EVENTSDATASOURCE_RUM, + EVENTSDATASOURCE_SECURITY_SIGNALS, + EVENTSDATASOURCE_PROFILES, + EVENTSDATASOURCE_AUDIT, + EVENTSDATASOURCE_EVENTS, + EVENTSDATASOURCE_CI_TESTS, + EVENTSDATASOURCE_CI_PIPELINES, + EVENTSDATASOURCE_INCIDENT_ANALYTICS, + EVENTSDATASOURCE_PRODUCT_ANALYTICS, + EVENTSDATASOURCE_ON_CALL_EVENTS, EVENTSDATASOURCE_DORA, } diff --git a/api/datadogV2/model_events_scalar_query.go b/api/datadogV2/model_events_scalar_query.go index 0da37a18298..ec14faca923 100644 --- a/api/datadogV2/model_events_scalar_query.go +++ b/api/datadogV2/model_events_scalar_query.go @@ -10,7 +10,7 @@ import ( "github.com/DataDog/datadog-api-client-go/v2/api/datadog" ) -// EventsScalarQuery An individual scalar events query. 
+// EventsScalarQuery An individual scalar query for logs, RUM, traces, CI pipelines, security signals, and other event-based data sources. Use this query type for any data source powered by the Events Platform. See the data_source field for the full list of supported sources. type EventsScalarQuery struct { // The instructions for what to compute for this query. Compute EventsCompute `json:"compute"` diff --git a/api/datadogV2/model_events_timeseries_query.go b/api/datadogV2/model_events_timeseries_query.go index edd948f501d..c83f8a5bef8 100644 --- a/api/datadogV2/model_events_timeseries_query.go +++ b/api/datadogV2/model_events_timeseries_query.go @@ -10,7 +10,7 @@ import ( "github.com/DataDog/datadog-api-client-go/v2/api/datadog" ) -// EventsTimeseriesQuery An individual timeseries events query. +// EventsTimeseriesQuery An individual timeseries query for logs, RUM, traces, CI pipelines, security signals, and other event-based data sources. Use this query type for any data source powered by the Events Platform. See the data_source field for the full list of supported sources. type EventsTimeseriesQuery struct { // The instructions for what to compute for this query. Compute EventsCompute `json:"compute"` diff --git a/api/datadogV2/model_metrics_scalar_query.go b/api/datadogV2/model_metrics_scalar_query.go index aaea91d61ea..c264e8b5f2c 100644 --- a/api/datadogV2/model_metrics_scalar_query.go +++ b/api/datadogV2/model_metrics_scalar_query.go @@ -10,7 +10,7 @@ import ( "github.com/DataDog/datadog-api-client-go/v2/api/datadog" ) -// MetricsScalarQuery An individual scalar metrics query. +// MetricsScalarQuery A query against Datadog custom metrics or Cloud Cost data sources. type MetricsScalarQuery struct { // The type of aggregation that can be performed on metrics-based queries. 
Aggregator MetricsAggregator `json:"aggregator"` diff --git a/api/datadogV2/model_metrics_timeseries_query.go b/api/datadogV2/model_metrics_timeseries_query.go index 6fb20e478f6..8b3be924ec7 100644 --- a/api/datadogV2/model_metrics_timeseries_query.go +++ b/api/datadogV2/model_metrics_timeseries_query.go @@ -10,7 +10,7 @@ import ( "github.com/DataDog/datadog-api-client-go/v2/api/datadog" ) -// MetricsTimeseriesQuery An individual timeseries metrics query. +// MetricsTimeseriesQuery A query against Datadog custom metrics or Cloud Cost data sources. type MetricsTimeseriesQuery struct { // A data source that is powered by the Metrics platform. DataSource MetricsDataSource `json:"data_source"` diff --git a/api/datadogV2/model_process_data_source.go b/api/datadogV2/model_process_data_source.go new file mode 100644 index 00000000000..3b705a12fca --- /dev/null +++ b/api/datadogV2/model_process_data_source.go @@ -0,0 +1,64 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ProcessDataSource A data source for process-level infrastructure metrics. +type ProcessDataSource string + +// List of ProcessDataSource. +const ( + PROCESSDATASOURCE_PROCESS ProcessDataSource = "process" +) + +var allowedProcessDataSourceEnumValues = []ProcessDataSource{ + PROCESSDATASOURCE_PROCESS, +} + +// GetAllowedValues returns the list of possible values. +func (v *ProcessDataSource) GetAllowedValues() []ProcessDataSource { + return allowedProcessDataSourceEnumValues +} + +// UnmarshalJSON deserializes the given payload. 
+func (v *ProcessDataSource) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = ProcessDataSource(value) + return nil +} + +// NewProcessDataSourceFromValue returns a pointer to a valid ProcessDataSource +// for the value passed as argument, or an error if the value passed is not allowed by the enum. +func NewProcessDataSourceFromValue(v string) (*ProcessDataSource, error) { + ev := ProcessDataSource(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for ProcessDataSource: valid values are %v", v, allowedProcessDataSourceEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. +func (v ProcessDataSource) IsValid() bool { + for _, existing := range allowedProcessDataSourceEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to ProcessDataSource value. +func (v ProcessDataSource) Ptr() *ProcessDataSource { + return &v +} diff --git a/api/datadogV2/model_process_scalar_query.go b/api/datadogV2/model_process_scalar_query.go new file mode 100644 index 00000000000..e1aa893fde0 --- /dev/null +++ b/api/datadogV2/model_process_scalar_query.go @@ -0,0 +1,403 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ProcessScalarQuery A query for host-level process metrics such as CPU and memory usage. +type ProcessScalarQuery struct { + // The type of aggregation that can be performed on metrics-based queries. + Aggregator *MetricsAggregator `json:"aggregator,omitempty"` + // A data source for process-level infrastructure metrics. 
+ DataSource ProcessDataSource `json:"data_source"` + // Whether CPU metrics should be normalized by core count. + IsNormalizedCpu *bool `json:"is_normalized_cpu,omitempty"` + // Maximum number of results to return. + Limit *int64 `json:"limit,omitempty"` + // The process metric to query. + Metric string `json:"metric"` + // The variable name for use in formulas. + Name string `json:"name"` + // Direction of sort. + Sort *QuerySortOrder `json:"sort,omitempty"` + // Tag filters to narrow down processes. + TagFilters []string `json:"tag_filters,omitempty"` + // A full-text search filter to match process names or commands. + TextFilter *string `json:"text_filter,omitempty"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewProcessScalarQuery instantiates a new ProcessScalarQuery object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewProcessScalarQuery(dataSource ProcessDataSource, metric string, name string) *ProcessScalarQuery { + this := ProcessScalarQuery{} + var aggregator MetricsAggregator = METRICSAGGREGATOR_AVG + this.Aggregator = &aggregator + this.DataSource = dataSource + this.Metric = metric + this.Name = name + var sort QuerySortOrder = QUERYSORTORDER_DESC + this.Sort = &sort + return &this +} + +// NewProcessScalarQueryWithDefaults instantiates a new ProcessScalarQuery object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. 
+func NewProcessScalarQueryWithDefaults() *ProcessScalarQuery { + this := ProcessScalarQuery{} + var aggregator MetricsAggregator = METRICSAGGREGATOR_AVG + this.Aggregator = &aggregator + var dataSource ProcessDataSource = PROCESSDATASOURCE_PROCESS + this.DataSource = dataSource + var sort QuerySortOrder = QUERYSORTORDER_DESC + this.Sort = &sort + return &this +} + +// GetAggregator returns the Aggregator field value if set, zero value otherwise. +func (o *ProcessScalarQuery) GetAggregator() MetricsAggregator { + if o == nil || o.Aggregator == nil { + var ret MetricsAggregator + return ret + } + return *o.Aggregator +} + +// GetAggregatorOk returns a tuple with the Aggregator field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ProcessScalarQuery) GetAggregatorOk() (*MetricsAggregator, bool) { + if o == nil || o.Aggregator == nil { + return nil, false + } + return o.Aggregator, true +} + +// HasAggregator returns a boolean if a field has been set. +func (o *ProcessScalarQuery) HasAggregator() bool { + return o != nil && o.Aggregator != nil +} + +// SetAggregator gets a reference to the given MetricsAggregator and assigns it to the Aggregator field. +func (o *ProcessScalarQuery) SetAggregator(v MetricsAggregator) { + o.Aggregator = &v +} + +// GetDataSource returns the DataSource field value. +func (o *ProcessScalarQuery) GetDataSource() ProcessDataSource { + if o == nil { + var ret ProcessDataSource + return ret + } + return o.DataSource +} + +// GetDataSourceOk returns a tuple with the DataSource field value +// and a boolean to check if the value has been set. +func (o *ProcessScalarQuery) GetDataSourceOk() (*ProcessDataSource, bool) { + if o == nil { + return nil, false + } + return &o.DataSource, true +} + +// SetDataSource sets field value. 
+func (o *ProcessScalarQuery) SetDataSource(v ProcessDataSource) { + o.DataSource = v +} + +// GetIsNormalizedCpu returns the IsNormalizedCpu field value if set, zero value otherwise. +func (o *ProcessScalarQuery) GetIsNormalizedCpu() bool { + if o == nil || o.IsNormalizedCpu == nil { + var ret bool + return ret + } + return *o.IsNormalizedCpu +} + +// GetIsNormalizedCpuOk returns a tuple with the IsNormalizedCpu field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ProcessScalarQuery) GetIsNormalizedCpuOk() (*bool, bool) { + if o == nil || o.IsNormalizedCpu == nil { + return nil, false + } + return o.IsNormalizedCpu, true +} + +// HasIsNormalizedCpu returns a boolean if a field has been set. +func (o *ProcessScalarQuery) HasIsNormalizedCpu() bool { + return o != nil && o.IsNormalizedCpu != nil +} + +// SetIsNormalizedCpu gets a reference to the given bool and assigns it to the IsNormalizedCpu field. +func (o *ProcessScalarQuery) SetIsNormalizedCpu(v bool) { + o.IsNormalizedCpu = &v +} + +// GetLimit returns the Limit field value if set, zero value otherwise. +func (o *ProcessScalarQuery) GetLimit() int64 { + if o == nil || o.Limit == nil { + var ret int64 + return ret + } + return *o.Limit +} + +// GetLimitOk returns a tuple with the Limit field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ProcessScalarQuery) GetLimitOk() (*int64, bool) { + if o == nil || o.Limit == nil { + return nil, false + } + return o.Limit, true +} + +// HasLimit returns a boolean if a field has been set. +func (o *ProcessScalarQuery) HasLimit() bool { + return o != nil && o.Limit != nil +} + +// SetLimit gets a reference to the given int64 and assigns it to the Limit field. +func (o *ProcessScalarQuery) SetLimit(v int64) { + o.Limit = &v +} + +// GetMetric returns the Metric field value. 
+func (o *ProcessScalarQuery) GetMetric() string { + if o == nil { + var ret string + return ret + } + return o.Metric +} + +// GetMetricOk returns a tuple with the Metric field value +// and a boolean to check if the value has been set. +func (o *ProcessScalarQuery) GetMetricOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Metric, true +} + +// SetMetric sets field value. +func (o *ProcessScalarQuery) SetMetric(v string) { + o.Metric = v +} + +// GetName returns the Name field value. +func (o *ProcessScalarQuery) GetName() string { + if o == nil { + var ret string + return ret + } + return o.Name +} + +// GetNameOk returns a tuple with the Name field value +// and a boolean to check if the value has been set. +func (o *ProcessScalarQuery) GetNameOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Name, true +} + +// SetName sets field value. +func (o *ProcessScalarQuery) SetName(v string) { + o.Name = v +} + +// GetSort returns the Sort field value if set, zero value otherwise. +func (o *ProcessScalarQuery) GetSort() QuerySortOrder { + if o == nil || o.Sort == nil { + var ret QuerySortOrder + return ret + } + return *o.Sort +} + +// GetSortOk returns a tuple with the Sort field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ProcessScalarQuery) GetSortOk() (*QuerySortOrder, bool) { + if o == nil || o.Sort == nil { + return nil, false + } + return o.Sort, true +} + +// HasSort returns a boolean if a field has been set. +func (o *ProcessScalarQuery) HasSort() bool { + return o != nil && o.Sort != nil +} + +// SetSort gets a reference to the given QuerySortOrder and assigns it to the Sort field. +func (o *ProcessScalarQuery) SetSort(v QuerySortOrder) { + o.Sort = &v +} + +// GetTagFilters returns the TagFilters field value if set, zero value otherwise. 
+func (o *ProcessScalarQuery) GetTagFilters() []string { + if o == nil || o.TagFilters == nil { + var ret []string + return ret + } + return o.TagFilters +} + +// GetTagFiltersOk returns a tuple with the TagFilters field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ProcessScalarQuery) GetTagFiltersOk() (*[]string, bool) { + if o == nil || o.TagFilters == nil { + return nil, false + } + return &o.TagFilters, true +} + +// HasTagFilters returns a boolean if a field has been set. +func (o *ProcessScalarQuery) HasTagFilters() bool { + return o != nil && o.TagFilters != nil +} + +// SetTagFilters gets a reference to the given []string and assigns it to the TagFilters field. +func (o *ProcessScalarQuery) SetTagFilters(v []string) { + o.TagFilters = v +} + +// GetTextFilter returns the TextFilter field value if set, zero value otherwise. +func (o *ProcessScalarQuery) GetTextFilter() string { + if o == nil || o.TextFilter == nil { + var ret string + return ret + } + return *o.TextFilter +} + +// GetTextFilterOk returns a tuple with the TextFilter field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ProcessScalarQuery) GetTextFilterOk() (*string, bool) { + if o == nil || o.TextFilter == nil { + return nil, false + } + return o.TextFilter, true +} + +// HasTextFilter returns a boolean if a field has been set. +func (o *ProcessScalarQuery) HasTextFilter() bool { + return o != nil && o.TextFilter != nil +} + +// SetTextFilter gets a reference to the given string and assigns it to the TextFilter field. +func (o *ProcessScalarQuery) SetTextFilter(v string) { + o.TextFilter = &v +} + +// MarshalJSON serializes the struct using spec logic. 
+func (o ProcessScalarQuery) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + if o.Aggregator != nil { + toSerialize["aggregator"] = o.Aggregator + } + toSerialize["data_source"] = o.DataSource + if o.IsNormalizedCpu != nil { + toSerialize["is_normalized_cpu"] = o.IsNormalizedCpu + } + if o.Limit != nil { + toSerialize["limit"] = o.Limit + } + toSerialize["metric"] = o.Metric + toSerialize["name"] = o.Name + if o.Sort != nil { + toSerialize["sort"] = o.Sort + } + if o.TagFilters != nil { + toSerialize["tag_filters"] = o.TagFilters + } + if o.TextFilter != nil { + toSerialize["text_filter"] = o.TextFilter + } + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. +func (o *ProcessScalarQuery) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + Aggregator *MetricsAggregator `json:"aggregator,omitempty"` + DataSource *ProcessDataSource `json:"data_source"` + IsNormalizedCpu *bool `json:"is_normalized_cpu,omitempty"` + Limit *int64 `json:"limit,omitempty"` + Metric *string `json:"metric"` + Name *string `json:"name"` + Sort *QuerySortOrder `json:"sort,omitempty"` + TagFilters []string `json:"tag_filters,omitempty"` + TextFilter *string `json:"text_filter,omitempty"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.DataSource == nil { + return fmt.Errorf("required field data_source missing") + } + if all.Metric == nil { + return fmt.Errorf("required field metric missing") + } + if all.Name == nil { + return fmt.Errorf("required field name missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"aggregator", "data_source", 
"is_normalized_cpu", "limit", "metric", "name", "sort", "tag_filters", "text_filter"}) + } else { + return err + } + + hasInvalidField := false + if all.Aggregator != nil && !all.Aggregator.IsValid() { + hasInvalidField = true + } else { + o.Aggregator = all.Aggregator + } + if !all.DataSource.IsValid() { + hasInvalidField = true + } else { + o.DataSource = *all.DataSource + } + o.IsNormalizedCpu = all.IsNormalizedCpu + o.Limit = all.Limit + o.Metric = *all.Metric + o.Name = *all.Name + if all.Sort != nil && !all.Sort.IsValid() { + hasInvalidField = true + } else { + o.Sort = all.Sort + } + o.TagFilters = all.TagFilters + o.TextFilter = all.TextFilter + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_process_timeseries_query.go b/api/datadogV2/model_process_timeseries_query.go new file mode 100644 index 00000000000..e34a4fdc926 --- /dev/null +++ b/api/datadogV2/model_process_timeseries_query.go @@ -0,0 +1,360 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// ProcessTimeseriesQuery A query for host-level process metrics such as CPU and memory usage. +type ProcessTimeseriesQuery struct { + // A data source for process-level infrastructure metrics. + DataSource ProcessDataSource `json:"data_source"` + // Whether CPU metrics should be normalized by core count. + IsNormalizedCpu *bool `json:"is_normalized_cpu,omitempty"` + // Maximum number of results to return. + Limit *int64 `json:"limit,omitempty"` + // The process metric to query. 
+ Metric string `json:"metric"` + // The variable name for use in formulas. + Name string `json:"name"` + // Direction of sort. + Sort *QuerySortOrder `json:"sort,omitempty"` + // Tag filters to narrow down processes. + TagFilters []string `json:"tag_filters,omitempty"` + // A full-text search filter to match process names or commands. + TextFilter *string `json:"text_filter,omitempty"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewProcessTimeseriesQuery instantiates a new ProcessTimeseriesQuery object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewProcessTimeseriesQuery(dataSource ProcessDataSource, metric string, name string) *ProcessTimeseriesQuery { + this := ProcessTimeseriesQuery{} + this.DataSource = dataSource + this.Metric = metric + this.Name = name + var sort QuerySortOrder = QUERYSORTORDER_DESC + this.Sort = &sort + return &this +} + +// NewProcessTimeseriesQueryWithDefaults instantiates a new ProcessTimeseriesQuery object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. +func NewProcessTimeseriesQueryWithDefaults() *ProcessTimeseriesQuery { + this := ProcessTimeseriesQuery{} + var dataSource ProcessDataSource = PROCESSDATASOURCE_PROCESS + this.DataSource = dataSource + var sort QuerySortOrder = QUERYSORTORDER_DESC + this.Sort = &sort + return &this +} + +// GetDataSource returns the DataSource field value. 
+func (o *ProcessTimeseriesQuery) GetDataSource() ProcessDataSource { + if o == nil { + var ret ProcessDataSource + return ret + } + return o.DataSource +} + +// GetDataSourceOk returns a tuple with the DataSource field value +// and a boolean to check if the value has been set. +func (o *ProcessTimeseriesQuery) GetDataSourceOk() (*ProcessDataSource, bool) { + if o == nil { + return nil, false + } + return &o.DataSource, true +} + +// SetDataSource sets field value. +func (o *ProcessTimeseriesQuery) SetDataSource(v ProcessDataSource) { + o.DataSource = v +} + +// GetIsNormalizedCpu returns the IsNormalizedCpu field value if set, zero value otherwise. +func (o *ProcessTimeseriesQuery) GetIsNormalizedCpu() bool { + if o == nil || o.IsNormalizedCpu == nil { + var ret bool + return ret + } + return *o.IsNormalizedCpu +} + +// GetIsNormalizedCpuOk returns a tuple with the IsNormalizedCpu field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ProcessTimeseriesQuery) GetIsNormalizedCpuOk() (*bool, bool) { + if o == nil || o.IsNormalizedCpu == nil { + return nil, false + } + return o.IsNormalizedCpu, true +} + +// HasIsNormalizedCpu returns a boolean if a field has been set. +func (o *ProcessTimeseriesQuery) HasIsNormalizedCpu() bool { + return o != nil && o.IsNormalizedCpu != nil +} + +// SetIsNormalizedCpu gets a reference to the given bool and assigns it to the IsNormalizedCpu field. +func (o *ProcessTimeseriesQuery) SetIsNormalizedCpu(v bool) { + o.IsNormalizedCpu = &v +} + +// GetLimit returns the Limit field value if set, zero value otherwise. +func (o *ProcessTimeseriesQuery) GetLimit() int64 { + if o == nil || o.Limit == nil { + var ret int64 + return ret + } + return *o.Limit +} + +// GetLimitOk returns a tuple with the Limit field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *ProcessTimeseriesQuery) GetLimitOk() (*int64, bool) { + if o == nil || o.Limit == nil { + return nil, false + } + return o.Limit, true +} + +// HasLimit returns a boolean if a field has been set. +func (o *ProcessTimeseriesQuery) HasLimit() bool { + return o != nil && o.Limit != nil +} + +// SetLimit gets a reference to the given int64 and assigns it to the Limit field. +func (o *ProcessTimeseriesQuery) SetLimit(v int64) { + o.Limit = &v +} + +// GetMetric returns the Metric field value. +func (o *ProcessTimeseriesQuery) GetMetric() string { + if o == nil { + var ret string + return ret + } + return o.Metric +} + +// GetMetricOk returns a tuple with the Metric field value +// and a boolean to check if the value has been set. +func (o *ProcessTimeseriesQuery) GetMetricOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Metric, true +} + +// SetMetric sets field value. +func (o *ProcessTimeseriesQuery) SetMetric(v string) { + o.Metric = v +} + +// GetName returns the Name field value. +func (o *ProcessTimeseriesQuery) GetName() string { + if o == nil { + var ret string + return ret + } + return o.Name +} + +// GetNameOk returns a tuple with the Name field value +// and a boolean to check if the value has been set. +func (o *ProcessTimeseriesQuery) GetNameOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Name, true +} + +// SetName sets field value. +func (o *ProcessTimeseriesQuery) SetName(v string) { + o.Name = v +} + +// GetSort returns the Sort field value if set, zero value otherwise. +func (o *ProcessTimeseriesQuery) GetSort() QuerySortOrder { + if o == nil || o.Sort == nil { + var ret QuerySortOrder + return ret + } + return *o.Sort +} + +// GetSortOk returns a tuple with the Sort field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *ProcessTimeseriesQuery) GetSortOk() (*QuerySortOrder, bool) { + if o == nil || o.Sort == nil { + return nil, false + } + return o.Sort, true +} + +// HasSort returns a boolean if a field has been set. +func (o *ProcessTimeseriesQuery) HasSort() bool { + return o != nil && o.Sort != nil +} + +// SetSort gets a reference to the given QuerySortOrder and assigns it to the Sort field. +func (o *ProcessTimeseriesQuery) SetSort(v QuerySortOrder) { + o.Sort = &v +} + +// GetTagFilters returns the TagFilters field value if set, zero value otherwise. +func (o *ProcessTimeseriesQuery) GetTagFilters() []string { + if o == nil || o.TagFilters == nil { + var ret []string + return ret + } + return o.TagFilters +} + +// GetTagFiltersOk returns a tuple with the TagFilters field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ProcessTimeseriesQuery) GetTagFiltersOk() (*[]string, bool) { + if o == nil || o.TagFilters == nil { + return nil, false + } + return &o.TagFilters, true +} + +// HasTagFilters returns a boolean if a field has been set. +func (o *ProcessTimeseriesQuery) HasTagFilters() bool { + return o != nil && o.TagFilters != nil +} + +// SetTagFilters gets a reference to the given []string and assigns it to the TagFilters field. +func (o *ProcessTimeseriesQuery) SetTagFilters(v []string) { + o.TagFilters = v +} + +// GetTextFilter returns the TextFilter field value if set, zero value otherwise. +func (o *ProcessTimeseriesQuery) GetTextFilter() string { + if o == nil || o.TextFilter == nil { + var ret string + return ret + } + return *o.TextFilter +} + +// GetTextFilterOk returns a tuple with the TextFilter field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *ProcessTimeseriesQuery) GetTextFilterOk() (*string, bool) { + if o == nil || o.TextFilter == nil { + return nil, false + } + return o.TextFilter, true +} + +// HasTextFilter returns a boolean if a field has been set. 
+func (o *ProcessTimeseriesQuery) HasTextFilter() bool { + return o != nil && o.TextFilter != nil +} + +// SetTextFilter gets a reference to the given string and assigns it to the TextFilter field. +func (o *ProcessTimeseriesQuery) SetTextFilter(v string) { + o.TextFilter = &v +} + +// MarshalJSON serializes the struct using spec logic. +func (o ProcessTimeseriesQuery) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + toSerialize["data_source"] = o.DataSource + if o.IsNormalizedCpu != nil { + toSerialize["is_normalized_cpu"] = o.IsNormalizedCpu + } + if o.Limit != nil { + toSerialize["limit"] = o.Limit + } + toSerialize["metric"] = o.Metric + toSerialize["name"] = o.Name + if o.Sort != nil { + toSerialize["sort"] = o.Sort + } + if o.TagFilters != nil { + toSerialize["tag_filters"] = o.TagFilters + } + if o.TextFilter != nil { + toSerialize["text_filter"] = o.TextFilter + } + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. 
+func (o *ProcessTimeseriesQuery) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + DataSource *ProcessDataSource `json:"data_source"` + IsNormalizedCpu *bool `json:"is_normalized_cpu,omitempty"` + Limit *int64 `json:"limit,omitempty"` + Metric *string `json:"metric"` + Name *string `json:"name"` + Sort *QuerySortOrder `json:"sort,omitempty"` + TagFilters []string `json:"tag_filters,omitempty"` + TextFilter *string `json:"text_filter,omitempty"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.DataSource == nil { + return fmt.Errorf("required field data_source missing") + } + if all.Metric == nil { + return fmt.Errorf("required field metric missing") + } + if all.Name == nil { + return fmt.Errorf("required field name missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"data_source", "is_normalized_cpu", "limit", "metric", "name", "sort", "tag_filters", "text_filter"}) + } else { + return err + } + + hasInvalidField := false + if !all.DataSource.IsValid() { + hasInvalidField = true + } else { + o.DataSource = *all.DataSource + } + o.IsNormalizedCpu = all.IsNormalizedCpu + o.Limit = all.Limit + o.Metric = *all.Metric + o.Name = *all.Name + if all.Sort != nil && !all.Sort.IsValid() { + hasInvalidField = true + } else { + o.Sort = all.Sort + } + o.TagFilters = all.TagFilters + o.TextFilter = all.TextFilter + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_scalar_query.go b/api/datadogV2/model_scalar_query.go index 4c2b9ad3c59..fc5e376f213 100644 --- a/api/datadogV2/model_scalar_query.go +++ b/api/datadogV2/model_scalar_query.go @@ -10,8 +10,14 @@ import ( // 
ScalarQuery - An individual scalar query to one of the basic Datadog data sources. type ScalarQuery struct { - MetricsScalarQuery *MetricsScalarQuery - EventsScalarQuery *EventsScalarQuery + MetricsScalarQuery *MetricsScalarQuery + EventsScalarQuery *EventsScalarQuery + ApmResourceStatsQuery *ApmResourceStatsQuery + ApmMetricsQuery *ApmMetricsQuery + ApmDependencyStatsQuery *ApmDependencyStatsQuery + SloQuery *SloQuery + ProcessScalarQuery *ProcessScalarQuery + ContainerScalarQuery *ContainerScalarQuery // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct UnparsedObject interface{} @@ -27,6 +33,36 @@ func EventsScalarQueryAsScalarQuery(v *EventsScalarQuery) ScalarQuery { return ScalarQuery{EventsScalarQuery: v} } +// ApmResourceStatsQueryAsScalarQuery is a convenience function that returns ApmResourceStatsQuery wrapped in ScalarQuery. +func ApmResourceStatsQueryAsScalarQuery(v *ApmResourceStatsQuery) ScalarQuery { + return ScalarQuery{ApmResourceStatsQuery: v} +} + +// ApmMetricsQueryAsScalarQuery is a convenience function that returns ApmMetricsQuery wrapped in ScalarQuery. +func ApmMetricsQueryAsScalarQuery(v *ApmMetricsQuery) ScalarQuery { + return ScalarQuery{ApmMetricsQuery: v} +} + +// ApmDependencyStatsQueryAsScalarQuery is a convenience function that returns ApmDependencyStatsQuery wrapped in ScalarQuery. +func ApmDependencyStatsQueryAsScalarQuery(v *ApmDependencyStatsQuery) ScalarQuery { + return ScalarQuery{ApmDependencyStatsQuery: v} +} + +// SloQueryAsScalarQuery is a convenience function that returns SloQuery wrapped in ScalarQuery. +func SloQueryAsScalarQuery(v *SloQuery) ScalarQuery { + return ScalarQuery{SloQuery: v} +} + +// ProcessScalarQueryAsScalarQuery is a convenience function that returns ProcessScalarQuery wrapped in ScalarQuery. 
+func ProcessScalarQueryAsScalarQuery(v *ProcessScalarQuery) ScalarQuery { + return ScalarQuery{ProcessScalarQuery: v} +} + +// ContainerScalarQueryAsScalarQuery is a convenience function that returns ContainerScalarQuery wrapped in ScalarQuery. +func ContainerScalarQueryAsScalarQuery(v *ContainerScalarQuery) ScalarQuery { + return ScalarQuery{ContainerScalarQuery: v} +} + // UnmarshalJSON turns data into one of the pointers in the struct. func (obj *ScalarQuery) UnmarshalJSON(data []byte) error { var err error @@ -65,10 +101,118 @@ func (obj *ScalarQuery) UnmarshalJSON(data []byte) error { obj.EventsScalarQuery = nil } + // try to unmarshal data into ApmResourceStatsQuery + err = datadog.Unmarshal(data, &obj.ApmResourceStatsQuery) + if err == nil { + if obj.ApmResourceStatsQuery != nil && obj.ApmResourceStatsQuery.UnparsedObject == nil { + jsonApmResourceStatsQuery, _ := datadog.Marshal(obj.ApmResourceStatsQuery) + if string(jsonApmResourceStatsQuery) == "{}" { // empty struct + obj.ApmResourceStatsQuery = nil + } else { + match++ + } + } else { + obj.ApmResourceStatsQuery = nil + } + } else { + obj.ApmResourceStatsQuery = nil + } + + // try to unmarshal data into ApmMetricsQuery + err = datadog.Unmarshal(data, &obj.ApmMetricsQuery) + if err == nil { + if obj.ApmMetricsQuery != nil && obj.ApmMetricsQuery.UnparsedObject == nil { + jsonApmMetricsQuery, _ := datadog.Marshal(obj.ApmMetricsQuery) + if string(jsonApmMetricsQuery) == "{}" { // empty struct + obj.ApmMetricsQuery = nil + } else { + match++ + } + } else { + obj.ApmMetricsQuery = nil + } + } else { + obj.ApmMetricsQuery = nil + } + + // try to unmarshal data into ApmDependencyStatsQuery + err = datadog.Unmarshal(data, &obj.ApmDependencyStatsQuery) + if err == nil { + if obj.ApmDependencyStatsQuery != nil && obj.ApmDependencyStatsQuery.UnparsedObject == nil { + jsonApmDependencyStatsQuery, _ := datadog.Marshal(obj.ApmDependencyStatsQuery) + if string(jsonApmDependencyStatsQuery) == "{}" { // empty struct + 
obj.ApmDependencyStatsQuery = nil + } else { + match++ + } + } else { + obj.ApmDependencyStatsQuery = nil + } + } else { + obj.ApmDependencyStatsQuery = nil + } + + // try to unmarshal data into SloQuery + err = datadog.Unmarshal(data, &obj.SloQuery) + if err == nil { + if obj.SloQuery != nil && obj.SloQuery.UnparsedObject == nil { + jsonSloQuery, _ := datadog.Marshal(obj.SloQuery) + if string(jsonSloQuery) == "{}" { // empty struct + obj.SloQuery = nil + } else { + match++ + } + } else { + obj.SloQuery = nil + } + } else { + obj.SloQuery = nil + } + + // try to unmarshal data into ProcessScalarQuery + err = datadog.Unmarshal(data, &obj.ProcessScalarQuery) + if err == nil { + if obj.ProcessScalarQuery != nil && obj.ProcessScalarQuery.UnparsedObject == nil { + jsonProcessScalarQuery, _ := datadog.Marshal(obj.ProcessScalarQuery) + if string(jsonProcessScalarQuery) == "{}" { // empty struct + obj.ProcessScalarQuery = nil + } else { + match++ + } + } else { + obj.ProcessScalarQuery = nil + } + } else { + obj.ProcessScalarQuery = nil + } + + // try to unmarshal data into ContainerScalarQuery + err = datadog.Unmarshal(data, &obj.ContainerScalarQuery) + if err == nil { + if obj.ContainerScalarQuery != nil && obj.ContainerScalarQuery.UnparsedObject == nil { + jsonContainerScalarQuery, _ := datadog.Marshal(obj.ContainerScalarQuery) + if string(jsonContainerScalarQuery) == "{}" { // empty struct + obj.ContainerScalarQuery = nil + } else { + match++ + } + } else { + obj.ContainerScalarQuery = nil + } + } else { + obj.ContainerScalarQuery = nil + } + if match != 1 { // more than 1 match // reset to nil obj.MetricsScalarQuery = nil obj.EventsScalarQuery = nil + obj.ApmResourceStatsQuery = nil + obj.ApmMetricsQuery = nil + obj.ApmDependencyStatsQuery = nil + obj.SloQuery = nil + obj.ProcessScalarQuery = nil + obj.ContainerScalarQuery = nil return datadog.Unmarshal(data, &obj.UnparsedObject) } return nil // exactly one match @@ -84,6 +228,30 @@ func (obj ScalarQuery) 
MarshalJSON() ([]byte, error) { return datadog.Marshal(&obj.EventsScalarQuery) } + if obj.ApmResourceStatsQuery != nil { + return datadog.Marshal(&obj.ApmResourceStatsQuery) + } + + if obj.ApmMetricsQuery != nil { + return datadog.Marshal(&obj.ApmMetricsQuery) + } + + if obj.ApmDependencyStatsQuery != nil { + return datadog.Marshal(&obj.ApmDependencyStatsQuery) + } + + if obj.SloQuery != nil { + return datadog.Marshal(&obj.SloQuery) + } + + if obj.ProcessScalarQuery != nil { + return datadog.Marshal(&obj.ProcessScalarQuery) + } + + if obj.ContainerScalarQuery != nil { + return datadog.Marshal(&obj.ContainerScalarQuery) + } + if obj.UnparsedObject != nil { return datadog.Marshal(obj.UnparsedObject) } @@ -100,6 +268,30 @@ func (obj *ScalarQuery) GetActualInstance() interface{} { return obj.EventsScalarQuery } + if obj.ApmResourceStatsQuery != nil { + return obj.ApmResourceStatsQuery + } + + if obj.ApmMetricsQuery != nil { + return obj.ApmMetricsQuery + } + + if obj.ApmDependencyStatsQuery != nil { + return obj.ApmDependencyStatsQuery + } + + if obj.SloQuery != nil { + return obj.SloQuery + } + + if obj.ProcessScalarQuery != nil { + return obj.ProcessScalarQuery + } + + if obj.ContainerScalarQuery != nil { + return obj.ContainerScalarQuery + } + // all schemas are nil return nil } diff --git a/api/datadogV2/model_slo_data_source.go b/api/datadogV2/model_slo_data_source.go new file mode 100644 index 00000000000..21bf89ccd2d --- /dev/null +++ b/api/datadogV2/model_slo_data_source.go @@ -0,0 +1,64 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SloDataSource A data source for SLO queries. +type SloDataSource string + +// List of SloDataSource. 
+const ( + SLODATASOURCE_SLO SloDataSource = "slo" +) + +var allowedSloDataSourceEnumValues = []SloDataSource{ + SLODATASOURCE_SLO, +} + +// GetAllowedValues reeturns the list of possible values. +func (v *SloDataSource) GetAllowedValues() []SloDataSource { + return allowedSloDataSourceEnumValues +} + +// UnmarshalJSON deserializes the given payload. +func (v *SloDataSource) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = SloDataSource(value) + return nil +} + +// NewSloDataSourceFromValue returns a pointer to a valid SloDataSource +// for the value passed as argument, or an error if the value passed is not allowed by the enum. +func NewSloDataSourceFromValue(v string) (*SloDataSource, error) { + ev := SloDataSource(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for SloDataSource: valid values are %v", v, allowedSloDataSourceEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. +func (v SloDataSource) IsValid() bool { + for _, existing := range allowedSloDataSourceEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to SloDataSource value. +func (v SloDataSource) Ptr() *SloDataSource { + return &v +} diff --git a/api/datadogV2/model_slo_query.go b/api/datadogV2/model_slo_query.go new file mode 100644 index 00000000000..8f8f5db03c1 --- /dev/null +++ b/api/datadogV2/model_slo_query.go @@ -0,0 +1,329 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SloQuery A query for SLO status, error budget, and burn rate metrics. 
+type SloQuery struct { + // Additional filters applied to the SLO query. + AdditionalQueryFilters *string `json:"additional_query_filters,omitempty"` + // A data source for SLO queries. + DataSource SloDataSource `json:"data_source"` + // How SLO results are grouped in the response. + GroupMode *SlosGroupMode `json:"group_mode,omitempty"` + // The SLO measurement to retrieve. + Measure SlosMeasure `json:"measure"` + // The variable name for use in formulas. + Name *string `json:"name,omitempty"` + // The unique identifier of the SLO to query. + SloId string `json:"slo_id"` + // The type of SLO definition being queried. + SloQueryType *SlosQueryType `json:"slo_query_type,omitempty"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewSloQuery instantiates a new SloQuery object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewSloQuery(dataSource SloDataSource, measure SlosMeasure, sloId string) *SloQuery { + this := SloQuery{} + this.DataSource = dataSource + this.Measure = measure + this.SloId = sloId + return &this +} + +// NewSloQueryWithDefaults instantiates a new SloQuery object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. +func NewSloQueryWithDefaults() *SloQuery { + this := SloQuery{} + var dataSource SloDataSource = SLODATASOURCE_SLO + this.DataSource = dataSource + return &this +} + +// GetAdditionalQueryFilters returns the AdditionalQueryFilters field value if set, zero value otherwise. 
+func (o *SloQuery) GetAdditionalQueryFilters() string { + if o == nil || o.AdditionalQueryFilters == nil { + var ret string + return ret + } + return *o.AdditionalQueryFilters +} + +// GetAdditionalQueryFiltersOk returns a tuple with the AdditionalQueryFilters field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *SloQuery) GetAdditionalQueryFiltersOk() (*string, bool) { + if o == nil || o.AdditionalQueryFilters == nil { + return nil, false + } + return o.AdditionalQueryFilters, true +} + +// HasAdditionalQueryFilters returns a boolean if a field has been set. +func (o *SloQuery) HasAdditionalQueryFilters() bool { + return o != nil && o.AdditionalQueryFilters != nil +} + +// SetAdditionalQueryFilters gets a reference to the given string and assigns it to the AdditionalQueryFilters field. +func (o *SloQuery) SetAdditionalQueryFilters(v string) { + o.AdditionalQueryFilters = &v +} + +// GetDataSource returns the DataSource field value. +func (o *SloQuery) GetDataSource() SloDataSource { + if o == nil { + var ret SloDataSource + return ret + } + return o.DataSource +} + +// GetDataSourceOk returns a tuple with the DataSource field value +// and a boolean to check if the value has been set. +func (o *SloQuery) GetDataSourceOk() (*SloDataSource, bool) { + if o == nil { + return nil, false + } + return &o.DataSource, true +} + +// SetDataSource sets field value. +func (o *SloQuery) SetDataSource(v SloDataSource) { + o.DataSource = v +} + +// GetGroupMode returns the GroupMode field value if set, zero value otherwise. +func (o *SloQuery) GetGroupMode() SlosGroupMode { + if o == nil || o.GroupMode == nil { + var ret SlosGroupMode + return ret + } + return *o.GroupMode +} + +// GetGroupModeOk returns a tuple with the GroupMode field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *SloQuery) GetGroupModeOk() (*SlosGroupMode, bool) { + if o == nil || o.GroupMode == nil { + return nil, false + } + return o.GroupMode, true +} + +// HasGroupMode returns a boolean if a field has been set. +func (o *SloQuery) HasGroupMode() bool { + return o != nil && o.GroupMode != nil +} + +// SetGroupMode gets a reference to the given SlosGroupMode and assigns it to the GroupMode field. +func (o *SloQuery) SetGroupMode(v SlosGroupMode) { + o.GroupMode = &v +} + +// GetMeasure returns the Measure field value. +func (o *SloQuery) GetMeasure() SlosMeasure { + if o == nil { + var ret SlosMeasure + return ret + } + return o.Measure +} + +// GetMeasureOk returns a tuple with the Measure field value +// and a boolean to check if the value has been set. +func (o *SloQuery) GetMeasureOk() (*SlosMeasure, bool) { + if o == nil { + return nil, false + } + return &o.Measure, true +} + +// SetMeasure sets field value. +func (o *SloQuery) SetMeasure(v SlosMeasure) { + o.Measure = v +} + +// GetName returns the Name field value if set, zero value otherwise. +func (o *SloQuery) GetName() string { + if o == nil || o.Name == nil { + var ret string + return ret + } + return *o.Name +} + +// GetNameOk returns a tuple with the Name field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *SloQuery) GetNameOk() (*string, bool) { + if o == nil || o.Name == nil { + return nil, false + } + return o.Name, true +} + +// HasName returns a boolean if a field has been set. +func (o *SloQuery) HasName() bool { + return o != nil && o.Name != nil +} + +// SetName gets a reference to the given string and assigns it to the Name field. +func (o *SloQuery) SetName(v string) { + o.Name = &v +} + +// GetSloId returns the SloId field value. 
+func (o *SloQuery) GetSloId() string { + if o == nil { + var ret string + return ret + } + return o.SloId +} + +// GetSloIdOk returns a tuple with the SloId field value +// and a boolean to check if the value has been set. +func (o *SloQuery) GetSloIdOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.SloId, true +} + +// SetSloId sets field value. +func (o *SloQuery) SetSloId(v string) { + o.SloId = v +} + +// GetSloQueryType returns the SloQueryType field value if set, zero value otherwise. +func (o *SloQuery) GetSloQueryType() SlosQueryType { + if o == nil || o.SloQueryType == nil { + var ret SlosQueryType + return ret + } + return *o.SloQueryType +} + +// GetSloQueryTypeOk returns a tuple with the SloQueryType field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *SloQuery) GetSloQueryTypeOk() (*SlosQueryType, bool) { + if o == nil || o.SloQueryType == nil { + return nil, false + } + return o.SloQueryType, true +} + +// HasSloQueryType returns a boolean if a field has been set. +func (o *SloQuery) HasSloQueryType() bool { + return o != nil && o.SloQueryType != nil +} + +// SetSloQueryType gets a reference to the given SlosQueryType and assigns it to the SloQueryType field. +func (o *SloQuery) SetSloQueryType(v SlosQueryType) { + o.SloQueryType = &v +} + +// MarshalJSON serializes the struct using spec logic. 
+func (o SloQuery) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + if o.AdditionalQueryFilters != nil { + toSerialize["additional_query_filters"] = o.AdditionalQueryFilters + } + toSerialize["data_source"] = o.DataSource + if o.GroupMode != nil { + toSerialize["group_mode"] = o.GroupMode + } + toSerialize["measure"] = o.Measure + if o.Name != nil { + toSerialize["name"] = o.Name + } + toSerialize["slo_id"] = o.SloId + if o.SloQueryType != nil { + toSerialize["slo_query_type"] = o.SloQueryType + } + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. +func (o *SloQuery) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + AdditionalQueryFilters *string `json:"additional_query_filters,omitempty"` + DataSource *SloDataSource `json:"data_source"` + GroupMode *SlosGroupMode `json:"group_mode,omitempty"` + Measure *SlosMeasure `json:"measure"` + Name *string `json:"name,omitempty"` + SloId *string `json:"slo_id"` + SloQueryType *SlosQueryType `json:"slo_query_type,omitempty"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.DataSource == nil { + return fmt.Errorf("required field data_source missing") + } + if all.Measure == nil { + return fmt.Errorf("required field measure missing") + } + if all.SloId == nil { + return fmt.Errorf("required field slo_id missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"additional_query_filters", "data_source", "group_mode", "measure", "name", "slo_id", "slo_query_type"}) + } else { + return err + } + + hasInvalidField := false + o.AdditionalQueryFilters = all.AdditionalQueryFilters + 
if !all.DataSource.IsValid() { + hasInvalidField = true + } else { + o.DataSource = *all.DataSource + } + if all.GroupMode != nil && !all.GroupMode.IsValid() { + hasInvalidField = true + } else { + o.GroupMode = all.GroupMode + } + if !all.Measure.IsValid() { + hasInvalidField = true + } else { + o.Measure = *all.Measure + } + o.Name = all.Name + o.SloId = *all.SloId + if all.SloQueryType != nil && !all.SloQueryType.IsValid() { + hasInvalidField = true + } else { + o.SloQueryType = all.SloQueryType + } + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_slos_group_mode.go b/api/datadogV2/model_slos_group_mode.go new file mode 100644 index 00000000000..39f898804e5 --- /dev/null +++ b/api/datadogV2/model_slos_group_mode.go @@ -0,0 +1,66 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SlosGroupMode How SLO results are grouped in the response. +type SlosGroupMode string + +// List of SlosGroupMode. +const ( + SLOSGROUPMODE_OVERALL SlosGroupMode = "overall" + SLOSGROUPMODE_COMPONENTS SlosGroupMode = "components" +) + +var allowedSlosGroupModeEnumValues = []SlosGroupMode{ + SLOSGROUPMODE_OVERALL, + SLOSGROUPMODE_COMPONENTS, +} + +// GetAllowedValues reeturns the list of possible values. +func (v *SlosGroupMode) GetAllowedValues() []SlosGroupMode { + return allowedSlosGroupModeEnumValues +} + +// UnmarshalJSON deserializes the given payload. 
+func (v *SlosGroupMode) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = SlosGroupMode(value) + return nil +} + +// NewSlosGroupModeFromValue returns a pointer to a valid SlosGroupMode +// for the value passed as argument, or an error if the value passed is not allowed by the enum. +func NewSlosGroupModeFromValue(v string) (*SlosGroupMode, error) { + ev := SlosGroupMode(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for SlosGroupMode: valid values are %v", v, allowedSlosGroupModeEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. +func (v SlosGroupMode) IsValid() bool { + for _, existing := range allowedSlosGroupModeEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to SlosGroupMode value. +func (v SlosGroupMode) Ptr() *SlosGroupMode { + return &v +} diff --git a/api/datadogV2/model_slos_measure.go b/api/datadogV2/model_slos_measure.go new file mode 100644 index 00000000000..d2b467544c2 --- /dev/null +++ b/api/datadogV2/model_slos_measure.go @@ -0,0 +1,82 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SlosMeasure The SLO measurement to retrieve. +type SlosMeasure string + +// List of SlosMeasure. 
+const ( + SLOSMEASURE_GOOD_EVENTS SlosMeasure = "good_events" + SLOSMEASURE_BAD_EVENTS SlosMeasure = "bad_events" + SLOSMEASURE_SLO_STATUS SlosMeasure = "slo_status" + SLOSMEASURE_ERROR_BUDGET_REMAINING SlosMeasure = "error_budget_remaining" + SLOSMEASURE_ERROR_BUDGET_REMAINING_HISTORY SlosMeasure = "error_budget_remaining_history" + SLOSMEASURE_ERROR_BUDGET_BURNDOWN SlosMeasure = "error_budget_burndown" + SLOSMEASURE_BURN_RATE SlosMeasure = "burn_rate" + SLOSMEASURE_SLO_STATUS_HISTORY SlosMeasure = "slo_status_history" + SLOSMEASURE_GOOD_MINUTES SlosMeasure = "good_minutes" + SLOSMEASURE_BAD_MINUTES SlosMeasure = "bad_minutes" +) + +var allowedSlosMeasureEnumValues = []SlosMeasure{ + SLOSMEASURE_GOOD_EVENTS, + SLOSMEASURE_BAD_EVENTS, + SLOSMEASURE_SLO_STATUS, + SLOSMEASURE_ERROR_BUDGET_REMAINING, + SLOSMEASURE_ERROR_BUDGET_REMAINING_HISTORY, + SLOSMEASURE_ERROR_BUDGET_BURNDOWN, + SLOSMEASURE_BURN_RATE, + SLOSMEASURE_SLO_STATUS_HISTORY, + SLOSMEASURE_GOOD_MINUTES, + SLOSMEASURE_BAD_MINUTES, +} + +// GetAllowedValues reeturns the list of possible values. +func (v *SlosMeasure) GetAllowedValues() []SlosMeasure { + return allowedSlosMeasureEnumValues +} + +// UnmarshalJSON deserializes the given payload. +func (v *SlosMeasure) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = SlosMeasure(value) + return nil +} + +// NewSlosMeasureFromValue returns a pointer to a valid SlosMeasure +// for the value passed as argument, or an error if the value passed is not allowed by the enum. +func NewSlosMeasureFromValue(v string) (*SlosMeasure, error) { + ev := SlosMeasure(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for SlosMeasure: valid values are %v", v, allowedSlosMeasureEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. 
+func (v SlosMeasure) IsValid() bool { + for _, existing := range allowedSlosMeasureEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to SlosMeasure value. +func (v SlosMeasure) Ptr() *SlosMeasure { + return &v +} diff --git a/api/datadogV2/model_slos_query_type.go b/api/datadogV2/model_slos_query_type.go new file mode 100644 index 00000000000..a05e6aa42ca --- /dev/null +++ b/api/datadogV2/model_slos_query_type.go @@ -0,0 +1,68 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SlosQueryType The type of SLO definition being queried. +type SlosQueryType string + +// List of SlosQueryType. +const ( + SLOSQUERYTYPE_METRIC SlosQueryType = "metric" + SLOSQUERYTYPE_TIME_SLICE SlosQueryType = "time_slice" + SLOSQUERYTYPE_MONITOR SlosQueryType = "monitor" +) + +var allowedSlosQueryTypeEnumValues = []SlosQueryType{ + SLOSQUERYTYPE_METRIC, + SLOSQUERYTYPE_TIME_SLICE, + SLOSQUERYTYPE_MONITOR, +} + +// GetAllowedValues reeturns the list of possible values. +func (v *SlosQueryType) GetAllowedValues() []SlosQueryType { + return allowedSlosQueryTypeEnumValues +} + +// UnmarshalJSON deserializes the given payload. +func (v *SlosQueryType) UnmarshalJSON(src []byte) error { + var value string + err := datadog.Unmarshal(src, &value) + if err != nil { + return err + } + *v = SlosQueryType(value) + return nil +} + +// NewSlosQueryTypeFromValue returns a pointer to a valid SlosQueryType +// for the value passed as argument, or an error if the value passed is not allowed by the enum. 
+func NewSlosQueryTypeFromValue(v string) (*SlosQueryType, error) { + ev := SlosQueryType(v) + if ev.IsValid() { + return &ev, nil + } + return nil, fmt.Errorf("invalid value '%v' for SlosQueryType: valid values are %v", v, allowedSlosQueryTypeEnumValues) +} + +// IsValid return true if the value is valid for the enum, false otherwise. +func (v SlosQueryType) IsValid() bool { + for _, existing := range allowedSlosQueryTypeEnumValues { + if existing == v { + return true + } + } + return false +} + +// Ptr returns reference to SlosQueryType value. +func (v SlosQueryType) Ptr() *SlosQueryType { + return &v +} diff --git a/api/datadogV2/model_timeseries_query.go b/api/datadogV2/model_timeseries_query.go index db176237f6e..0df4e1263a7 100644 --- a/api/datadogV2/model_timeseries_query.go +++ b/api/datadogV2/model_timeseries_query.go @@ -10,8 +10,14 @@ import ( // TimeseriesQuery - An individual timeseries query to one of the basic Datadog data sources. type TimeseriesQuery struct { - MetricsTimeseriesQuery *MetricsTimeseriesQuery - EventsTimeseriesQuery *EventsTimeseriesQuery + MetricsTimeseriesQuery *MetricsTimeseriesQuery + EventsTimeseriesQuery *EventsTimeseriesQuery + ApmResourceStatsQuery *ApmResourceStatsQuery + ApmMetricsQuery *ApmMetricsQuery + ApmDependencyStatsQuery *ApmDependencyStatsQuery + SloQuery *SloQuery + ProcessTimeseriesQuery *ProcessTimeseriesQuery + ContainerTimeseriesQuery *ContainerTimeseriesQuery // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct UnparsedObject interface{} @@ -27,6 +33,36 @@ func EventsTimeseriesQueryAsTimeseriesQuery(v *EventsTimeseriesQuery) Timeseries return TimeseriesQuery{EventsTimeseriesQuery: v} } +// ApmResourceStatsQueryAsTimeseriesQuery is a convenience function that returns ApmResourceStatsQuery wrapped in TimeseriesQuery. 
+func ApmResourceStatsQueryAsTimeseriesQuery(v *ApmResourceStatsQuery) TimeseriesQuery { + return TimeseriesQuery{ApmResourceStatsQuery: v} +} + +// ApmMetricsQueryAsTimeseriesQuery is a convenience function that returns ApmMetricsQuery wrapped in TimeseriesQuery. +func ApmMetricsQueryAsTimeseriesQuery(v *ApmMetricsQuery) TimeseriesQuery { + return TimeseriesQuery{ApmMetricsQuery: v} +} + +// ApmDependencyStatsQueryAsTimeseriesQuery is a convenience function that returns ApmDependencyStatsQuery wrapped in TimeseriesQuery. +func ApmDependencyStatsQueryAsTimeseriesQuery(v *ApmDependencyStatsQuery) TimeseriesQuery { + return TimeseriesQuery{ApmDependencyStatsQuery: v} +} + +// SloQueryAsTimeseriesQuery is a convenience function that returns SloQuery wrapped in TimeseriesQuery. +func SloQueryAsTimeseriesQuery(v *SloQuery) TimeseriesQuery { + return TimeseriesQuery{SloQuery: v} +} + +// ProcessTimeseriesQueryAsTimeseriesQuery is a convenience function that returns ProcessTimeseriesQuery wrapped in TimeseriesQuery. +func ProcessTimeseriesQueryAsTimeseriesQuery(v *ProcessTimeseriesQuery) TimeseriesQuery { + return TimeseriesQuery{ProcessTimeseriesQuery: v} +} + +// ContainerTimeseriesQueryAsTimeseriesQuery is a convenience function that returns ContainerTimeseriesQuery wrapped in TimeseriesQuery. +func ContainerTimeseriesQueryAsTimeseriesQuery(v *ContainerTimeseriesQuery) TimeseriesQuery { + return TimeseriesQuery{ContainerTimeseriesQuery: v} +} + // UnmarshalJSON turns data into one of the pointers in the struct. 
func (obj *TimeseriesQuery) UnmarshalJSON(data []byte) error { var err error @@ -65,10 +101,118 @@ func (obj *TimeseriesQuery) UnmarshalJSON(data []byte) error { obj.EventsTimeseriesQuery = nil } + // try to unmarshal data into ApmResourceStatsQuery + err = datadog.Unmarshal(data, &obj.ApmResourceStatsQuery) + if err == nil { + if obj.ApmResourceStatsQuery != nil && obj.ApmResourceStatsQuery.UnparsedObject == nil { + jsonApmResourceStatsQuery, _ := datadog.Marshal(obj.ApmResourceStatsQuery) + if string(jsonApmResourceStatsQuery) == "{}" { // empty struct + obj.ApmResourceStatsQuery = nil + } else { + match++ + } + } else { + obj.ApmResourceStatsQuery = nil + } + } else { + obj.ApmResourceStatsQuery = nil + } + + // try to unmarshal data into ApmMetricsQuery + err = datadog.Unmarshal(data, &obj.ApmMetricsQuery) + if err == nil { + if obj.ApmMetricsQuery != nil && obj.ApmMetricsQuery.UnparsedObject == nil { + jsonApmMetricsQuery, _ := datadog.Marshal(obj.ApmMetricsQuery) + if string(jsonApmMetricsQuery) == "{}" { // empty struct + obj.ApmMetricsQuery = nil + } else { + match++ + } + } else { + obj.ApmMetricsQuery = nil + } + } else { + obj.ApmMetricsQuery = nil + } + + // try to unmarshal data into ApmDependencyStatsQuery + err = datadog.Unmarshal(data, &obj.ApmDependencyStatsQuery) + if err == nil { + if obj.ApmDependencyStatsQuery != nil && obj.ApmDependencyStatsQuery.UnparsedObject == nil { + jsonApmDependencyStatsQuery, _ := datadog.Marshal(obj.ApmDependencyStatsQuery) + if string(jsonApmDependencyStatsQuery) == "{}" { // empty struct + obj.ApmDependencyStatsQuery = nil + } else { + match++ + } + } else { + obj.ApmDependencyStatsQuery = nil + } + } else { + obj.ApmDependencyStatsQuery = nil + } + + // try to unmarshal data into SloQuery + err = datadog.Unmarshal(data, &obj.SloQuery) + if err == nil { + if obj.SloQuery != nil && obj.SloQuery.UnparsedObject == nil { + jsonSloQuery, _ := datadog.Marshal(obj.SloQuery) + if string(jsonSloQuery) == "{}" { // empty 
struct + obj.SloQuery = nil + } else { + match++ + } + } else { + obj.SloQuery = nil + } + } else { + obj.SloQuery = nil + } + + // try to unmarshal data into ProcessTimeseriesQuery + err = datadog.Unmarshal(data, &obj.ProcessTimeseriesQuery) + if err == nil { + if obj.ProcessTimeseriesQuery != nil && obj.ProcessTimeseriesQuery.UnparsedObject == nil { + jsonProcessTimeseriesQuery, _ := datadog.Marshal(obj.ProcessTimeseriesQuery) + if string(jsonProcessTimeseriesQuery) == "{}" { // empty struct + obj.ProcessTimeseriesQuery = nil + } else { + match++ + } + } else { + obj.ProcessTimeseriesQuery = nil + } + } else { + obj.ProcessTimeseriesQuery = nil + } + + // try to unmarshal data into ContainerTimeseriesQuery + err = datadog.Unmarshal(data, &obj.ContainerTimeseriesQuery) + if err == nil { + if obj.ContainerTimeseriesQuery != nil && obj.ContainerTimeseriesQuery.UnparsedObject == nil { + jsonContainerTimeseriesQuery, _ := datadog.Marshal(obj.ContainerTimeseriesQuery) + if string(jsonContainerTimeseriesQuery) == "{}" { // empty struct + obj.ContainerTimeseriesQuery = nil + } else { + match++ + } + } else { + obj.ContainerTimeseriesQuery = nil + } + } else { + obj.ContainerTimeseriesQuery = nil + } + if match != 1 { // more than 1 match // reset to nil obj.MetricsTimeseriesQuery = nil obj.EventsTimeseriesQuery = nil + obj.ApmResourceStatsQuery = nil + obj.ApmMetricsQuery = nil + obj.ApmDependencyStatsQuery = nil + obj.SloQuery = nil + obj.ProcessTimeseriesQuery = nil + obj.ContainerTimeseriesQuery = nil return datadog.Unmarshal(data, &obj.UnparsedObject) } return nil // exactly one match @@ -84,6 +228,30 @@ func (obj TimeseriesQuery) MarshalJSON() ([]byte, error) { return datadog.Marshal(&obj.EventsTimeseriesQuery) } + if obj.ApmResourceStatsQuery != nil { + return datadog.Marshal(&obj.ApmResourceStatsQuery) + } + + if obj.ApmMetricsQuery != nil { + return datadog.Marshal(&obj.ApmMetricsQuery) + } + + if obj.ApmDependencyStatsQuery != nil { + return 
datadog.Marshal(&obj.ApmDependencyStatsQuery) + } + + if obj.SloQuery != nil { + return datadog.Marshal(&obj.SloQuery) + } + + if obj.ProcessTimeseriesQuery != nil { + return datadog.Marshal(&obj.ProcessTimeseriesQuery) + } + + if obj.ContainerTimeseriesQuery != nil { + return datadog.Marshal(&obj.ContainerTimeseriesQuery) + } + if obj.UnparsedObject != nil { return datadog.Marshal(obj.UnparsedObject) } @@ -100,6 +268,30 @@ func (obj *TimeseriesQuery) GetActualInstance() interface{} { return obj.EventsTimeseriesQuery } + if obj.ApmResourceStatsQuery != nil { + return obj.ApmResourceStatsQuery + } + + if obj.ApmMetricsQuery != nil { + return obj.ApmMetricsQuery + } + + if obj.ApmDependencyStatsQuery != nil { + return obj.ApmDependencyStatsQuery + } + + if obj.SloQuery != nil { + return obj.SloQuery + } + + if obj.ProcessTimeseriesQuery != nil { + return obj.ProcessTimeseriesQuery + } + + if obj.ContainerTimeseriesQuery != nil { + return obj.ContainerTimeseriesQuery + } + // all schemas are nil return nil } diff --git a/examples/v2/metrics/QueryScalarData_1479548882.go b/examples/v2/metrics/QueryScalarData_1479548882.go new file mode 100644 index 00000000000..d93a2fee58c --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_1479548882.go @@ -0,0 +1,63 @@ +// Scalar cross product query with apm_resource_stats data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + 
ApmResourceStatsQuery: &datadogV2.ApmResourceStatsQuery{ + DataSource: datadogV2.APMRESOURCESTATSDATASOURCE_APM_RESOURCE_STATS, + Name: "a", + Env: "staging", + Service: "azure-bill-import", + Stat: datadogV2.APMRESOURCESTATNAME_HITS, + OperationName: datadog.PtrString("cassandra.query"), + GroupBy: []string{ + "resource_name", + }, + PrimaryTagName: datadog.PtrString("datacenter"), + PrimaryTagValue: datadog.PtrString("*"), + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_1904811219.go b/examples/v2/metrics/QueryScalarData_1904811219.go new file mode 100644 index 00000000000..63af662e650 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_1904811219.go @@ -0,0 +1,60 @@ +// Scalar cross product query with apm_metrics data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 
1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + ApmMetricsQuery: &datadogV2.ApmMetricsQuery{ + DataSource: datadogV2.APMMETRICSDATASOURCE_APM_METRICS, + Name: "a", + Stat: datadogV2.APMMETRICSSTAT_HITS, + Service: datadog.PtrString("web-store"), + QueryFilter: datadog.PtrString("env:prod"), + GroupBy: []string{ + "resource_name", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_2298288525.go b/examples/v2/metrics/QueryScalarData_2298288525.go new file mode 100644 index 00000000000..9ce46d265aa --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_2298288525.go @@ -0,0 +1,59 @@ +// Scalar cross product query with slo data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ 
+ SloQuery: &datadogV2.SloQuery{ + DataSource: datadogV2.SLODATASOURCE_SLO, + Name: datadog.PtrString("a"), + SloId: "12345678910", + Measure: datadogV2.SLOSMEASURE_SLO_STATUS, + SloQueryType: datadogV2.SLOSQUERYTYPE_METRIC.Ptr(), + GroupMode: datadogV2.SLOSGROUPMODE_OVERALL.Ptr(), + AdditionalQueryFilters: datadog.PtrString("*"), + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_2312509843.go b/examples/v2/metrics/QueryScalarData_2312509843.go new file mode 100644 index 00000000000..8a2dfc98874 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_2312509843.go @@ -0,0 +1,63 @@ +// Scalar cross product query with rum data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + EventsScalarQuery: 
&datadogV2.EventsScalarQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_RUM, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_2398494003.go b/examples/v2/metrics/QueryScalarData_2398494003.go new file mode 100644 index 00000000000..02017872a79 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_2398494003.go @@ -0,0 +1,63 @@ +// Scalar cross product query with ci_pipelines data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + EventsScalarQuery: &datadogV2.EventsScalarQuery{ + 
DataSource: datadogV2.EVENTSDATASOURCE_CI_PIPELINES, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_2533499017.go b/examples/v2/metrics/QueryScalarData_2533499017.go new file mode 100644 index 00000000000..ded880e3d43 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_2533499017.go @@ -0,0 +1,61 @@ +// Scalar cross product query with apm_dependency_stats data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + ApmDependencyStatsQuery: &datadogV2.ApmDependencyStatsQuery{ + 
DataSource: datadogV2.APMDEPENDENCYSTATSDATASOURCE_APM_DEPENDENCY_STATS, + Name: "a", + Env: "ci", + Service: "cassandra", + Stat: datadogV2.APMDEPENDENCYSTATNAME_AVG_DURATION, + OperationName: "cassandra.query", + ResourceName: "DELETE FROM monitor_history.monitor_state_change_history WHERE org_id = ? AND monitor_id IN ? AND group = ?", + PrimaryTagName: datadog.PtrString("datacenter"), + PrimaryTagValue: datadog.PtrString("edge-eu1.prod.dog"), + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_2757564916.go b/examples/v2/metrics/QueryScalarData_2757564916.go new file mode 100644 index 00000000000..58c34d38b69 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_2757564916.go @@ -0,0 +1,63 @@ +// Scalar cross product query with logs data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, 
+ From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + EventsScalarQuery: &datadogV2.EventsScalarQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_LOGS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_3210877526.go b/examples/v2/metrics/QueryScalarData_3210877526.go new file mode 100644 index 00000000000..36ec9441ace --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_3210877526.go @@ -0,0 +1,63 @@ +// Scalar cross product query with audit data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: 
[]datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + EventsScalarQuery: &datadogV2.EventsScalarQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_AUDIT, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_3246660196.go b/examples/v2/metrics/QueryScalarData_3246660196.go new file mode 100644 index 00000000000..b0f2b6be278 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_3246660196.go @@ -0,0 +1,63 @@ +// Scalar cross product query with spans data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + 
datadogV2.ScalarQuery{ + EventsScalarQuery: &datadogV2.EventsScalarQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_SPANS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_3470073355.go b/examples/v2/metrics/QueryScalarData_3470073355.go new file mode 100644 index 00000000000..c13eb7a92ab --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_3470073355.go @@ -0,0 +1,63 @@ +// Scalar cross product query with on_call_events data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + 
EventsScalarQuery: &datadogV2.EventsScalarQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_ON_CALL_EVENTS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_3740015316.go b/examples/v2/metrics/QueryScalarData_3740015316.go new file mode 100644 index 00000000000..1f50ff81ac7 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_3740015316.go @@ -0,0 +1,63 @@ +// Scalar cross product query with events data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + EventsScalarQuery: 
&datadogV2.EventsScalarQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_EVENTS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_394862343.go b/examples/v2/metrics/QueryScalarData_394862343.go new file mode 100644 index 00000000000..b8824a49032 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_394862343.go @@ -0,0 +1,63 @@ +// Scalar cross product query with security_signals data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + EventsScalarQuery: 
&datadogV2.EventsScalarQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_SECURITY_SIGNALS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_397220765.go b/examples/v2/metrics/QueryScalarData_397220765.go new file mode 100644 index 00000000000..10ee2052926 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_397220765.go @@ -0,0 +1,63 @@ +// Scalar cross product query with ci_tests data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + EventsScalarQuery: 
&datadogV2.EventsScalarQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_CI_TESTS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_420944803.go b/examples/v2/metrics/QueryScalarData_420944803.go new file mode 100644 index 00000000000..2966b4b0754 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_420944803.go @@ -0,0 +1,63 @@ +// Scalar cross product query with profiles data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + EventsScalarQuery: &datadogV2.EventsScalarQuery{ + 
DataSource: datadogV2.EVENTSDATASOURCE_PROFILES, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_4230617918.go b/examples/v2/metrics/QueryScalarData_4230617918.go new file mode 100644 index 00000000000..5daa6d54b0b --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_4230617918.go @@ -0,0 +1,61 @@ +// Scalar cross product query with process data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + ProcessScalarQuery: &datadogV2.ProcessScalarQuery{ + DataSource: 
datadogV2.PROCESSDATASOURCE_PROCESS, + Name: "a", + Metric: "process.stat.cpu.total_pct", + Aggregator: datadogV2.METRICSAGGREGATOR_AVG.Ptr(), + TextFilter: datadog.PtrString(""), + TagFilters: []string{}, + Limit: datadog.PtrInt64(10), + Sort: datadogV2.QUERYSORTORDER_DESC.Ptr(), + IsNormalizedCpu: datadog.PtrBool(false), + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_4257291081.go b/examples/v2/metrics/QueryScalarData_4257291081.go new file mode 100644 index 00000000000..da8f76b2790 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_4257291081.go @@ -0,0 +1,63 @@ +// Scalar cross product query with network data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + EventsScalarQuery: 
&datadogV2.EventsScalarQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_NETWORK, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_779493885.go b/examples/v2/metrics/QueryScalarData_779493885.go new file mode 100644 index 00000000000..07aed7c7f85 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_779493885.go @@ -0,0 +1,61 @@ +// Scalar cross product query with apm_metrics data source and span_kind returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + ApmMetricsQuery: 
&datadogV2.ApmMetricsQuery{ + DataSource: datadogV2.APMMETRICSDATASOURCE_APM_METRICS, + Name: "a", + Stat: datadogV2.APMMETRICSSTAT_HITS, + Service: datadog.PtrString("web-store"), + QueryFilter: datadog.PtrString("env:prod"), + SpanKind: datadogV2.APMMETRICSSPANKIND_SERVER.Ptr(), + GroupBy: []string{ + "resource_name", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_891952130.go b/examples/v2/metrics/QueryScalarData_891952130.go new file mode 100644 index 00000000000..73e9f5ef4a7 --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_891952130.go @@ -0,0 +1,59 @@ +// Scalar cross product query with container data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + ContainerScalarQuery: 
&datadogV2.ContainerScalarQuery{ + DataSource: datadogV2.CONTAINERDATASOURCE_CONTAINER, + Name: "a", + Metric: "process.stat.container.cpu.system_pct", + Aggregator: datadogV2.METRICSAGGREGATOR_AVG.Ptr(), + TagFilters: []string{}, + Limit: datadog.PtrInt64(10), + Sort: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryScalarData_922754919.go b/examples/v2/metrics/QueryScalarData_922754919.go new file mode 100644 index 00000000000..d95ff3ec46f --- /dev/null +++ b/examples/v2/metrics/QueryScalarData_922754919.go @@ -0,0 +1,63 @@ +// Scalar cross product query with product_analytics data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.ScalarFormulaQueryRequest{ + Data: datadogV2.ScalarFormulaRequest{ + Attributes: datadogV2.ScalarFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Queries: []datadogV2.ScalarQuery{ + datadogV2.ScalarQuery{ + EventsScalarQuery: 
&datadogV2.EventsScalarQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_PRODUCT_ANALYTICS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.SCALARFORMULAREQUESTTYPE_SCALAR_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryScalarData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryScalarData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryScalarData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_1080761370.go b/examples/v2/metrics/QueryTimeseriesData_1080761370.go new file mode 100644 index 00000000000..d35a518ced0 --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_1080761370.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with audit data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + 
datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_AUDIT, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_108927825.go b/examples/v2/metrics/QueryTimeseriesData_108927825.go new file mode 100644 index 00000000000..38e53eb3a3e --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_108927825.go @@ -0,0 +1,60 @@ +// Timeseries cross product query with slo data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: 
datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + SloQuery: &datadogV2.SloQuery{ + DataSource: datadogV2.SLODATASOURCE_SLO, + Name: datadog.PtrString("a"), + SloId: "12345678910", + Measure: datadogV2.SLOSMEASURE_SLO_STATUS, + SloQueryType: datadogV2.SLOSQUERYTYPE_METRIC.Ptr(), + GroupMode: datadogV2.SLOSGROUPMODE_OVERALL.Ptr(), + AdditionalQueryFilters: datadog.PtrString("*"), + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_1116544040.go b/examples/v2/metrics/QueryTimeseriesData_1116544040.go new file mode 100644 index 00000000000..5211892e844 --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_1116544040.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with spans data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: 
datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_SPANS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_123149143.go b/examples/v2/metrics/QueryTimeseriesData_123149143.go new file mode 100644 index 00000000000..b1200a3d175 --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_123149143.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with rum data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: 
&datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_RUM, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_1606557647.go b/examples/v2/metrics/QueryTimeseriesData_1606557647.go new file mode 100644 index 00000000000..92e42ce5dac --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_1606557647.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with on_call_events data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: 
datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_ON_CALL_EVENTS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_1639521432.go b/examples/v2/metrics/QueryTimeseriesData_1639521432.go new file mode 100644 index 00000000000..f7725e1b70e --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_1639521432.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with events data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := 
datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_EVENTS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_2159746306.go b/examples/v2/metrics/QueryTimeseriesData_2159746306.go new file mode 100644 index 00000000000..36152bd3e11 --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_2159746306.go @@ -0,0 +1,61 @@ +// Timeseries cross product query with process data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + 
"github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + ProcessTimeseriesQuery: &datadogV2.ProcessTimeseriesQuery{ + DataSource: datadogV2.PROCESSDATASOURCE_PROCESS, + Name: "a", + Metric: "process.stat.cpu.total_pct", + TextFilter: datadog.PtrString(""), + TagFilters: []string{}, + Limit: datadog.PtrInt64(10), + Sort: datadogV2.QUERYSORTORDER_DESC.Ptr(), + IsNormalizedCpu: datadog.PtrBool(false), + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_2186419469.go b/examples/v2/metrics/QueryTimeseriesData_2186419469.go new file mode 100644 index 00000000000..e535f8ee5bf --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_2186419469.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with network data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + 
"fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_NETWORK, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_2649955681.go b/examples/v2/metrics/QueryTimeseriesData_2649955681.go new file mode 100644 index 00000000000..c9c5a0d9db4 --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_2649955681.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with ci_tests data source returns "OK" 
response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_CI_TESTS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_2673679719.go b/examples/v2/metrics/QueryTimeseriesData_2673679719.go new file mode 100644 index 00000000000..58892248462 --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_2673679719.go @@ -0,0 +1,64 @@ +// 
Timeseries cross product query with profiles data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_PROFILES, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_2884575435.go b/examples/v2/metrics/QueryTimeseriesData_2884575435.go new file mode 100644 index 00000000000..5b640d92688 --- /dev/null +++ 
b/examples/v2/metrics/QueryTimeseriesData_2884575435.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with security_signals data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_SECURITY_SIGNALS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_3174309318.go 
b/examples/v2/metrics/QueryTimeseriesData_3174309318.go new file mode 100644 index 00000000000..977453fb23c --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_3174309318.go @@ -0,0 +1,59 @@ +// Timeseries cross product query with container data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + ContainerTimeseriesQuery: &datadogV2.ContainerTimeseriesQuery{ + DataSource: datadogV2.CONTAINERDATASOURCE_CONTAINER, + Name: "a", + Metric: "process.stat.container.cpu.system_pct", + TagFilters: []string{}, + Limit: datadog.PtrInt64(10), + Sort: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git 
a/examples/v2/metrics/QueryTimeseriesData_3442090283.go b/examples/v2/metrics/QueryTimeseriesData_3442090283.go new file mode 100644 index 00000000000..5d001286f77 --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_3442090283.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with product_analytics data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_PRODUCT_ANALYTICS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, 
"Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_3535807425.go b/examples/v2/metrics/QueryTimeseriesData_3535807425.go new file mode 100644 index 00000000000..b908b929073 --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_3535807425.go @@ -0,0 +1,62 @@ +// Timeseries cross product query with apm_metrics data source and span_kind returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + ApmMetricsQuery: &datadogV2.ApmMetricsQuery{ + DataSource: datadogV2.APMMETRICSDATASOURCE_APM_METRICS, + Name: "a", + Stat: datadogV2.APMMETRICSSTAT_HITS, + Service: datadog.PtrString("web-store"), + QueryFilter: datadog.PtrString("env:prod"), + SpanKind: datadogV2.APMMETRICSSPANKIND_SERVER.Ptr(), + GroupBy: []string{ + "resource_name", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: 
%v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_4028506518.go b/examples/v2/metrics/QueryTimeseriesData_4028506518.go new file mode 100644 index 00000000000..0c0fee1efbd --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_4028506518.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with apm_resource_stats data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + ApmResourceStatsQuery: &datadogV2.ApmResourceStatsQuery{ + DataSource: datadogV2.APMRESOURCESTATSDATASOURCE_APM_RESOURCE_STATS, + Name: "a", + Env: "staging", + Service: "azure-bill-import", + Stat: datadogV2.APMRESOURCESTATNAME_HITS, + OperationName: datadog.PtrString("cassandra.query"), + GroupBy: []string{ + "resource_name", + }, + PrimaryTagName: datadog.PtrString("datacenter"), + PrimaryTagValue: datadog.PtrString("*"), + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := 
api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_4246412951.go b/examples/v2/metrics/QueryTimeseriesData_4246412951.go new file mode 100644 index 00000000000..1c4467e69fc --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_4246412951.go @@ -0,0 +1,61 @@ +// Timeseries cross product query with apm_metrics data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + ApmMetricsQuery: &datadogV2.ApmMetricsQuery{ + DataSource: datadogV2.APMMETRICSDATASOURCE_APM_METRICS, + Name: "a", + Stat: datadogV2.APMMETRICSSTAT_HITS, + Service: datadog.PtrString("web-store"), + QueryFilter: datadog.PtrString("env:prod"), + GroupBy: []string{ + "resource_name", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := 
datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_475733751.go b/examples/v2/metrics/QueryTimeseriesData_475733751.go new file mode 100644 index 00000000000..9ba60b8264a --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_475733751.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with ci_pipelines data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_CI_PIPELINES, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := 
datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_597826488.go b/examples/v2/metrics/QueryTimeseriesData_597826488.go new file mode 100644 index 00000000000..fd4153ace10 --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_597826488.go @@ -0,0 +1,64 @@ +// Timeseries cross product query with logs data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + EventsTimeseriesQuery: &datadogV2.EventsTimeseriesQuery{ + DataSource: datadogV2.EVENTSDATASOURCE_LOGS, + Name: datadog.PtrString("a"), + Compute: datadogV2.EventsCompute{ + Aggregation: datadogV2.EVENTSAGGREGATION_COUNT, + }, + Search: &datadogV2.EventsSearch{ + Query: datadog.PtrString("*"), + }, + Indexes: []string{ + "*", + }, + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := 
datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/examples/v2/metrics/QueryTimeseriesData_847716941.go b/examples/v2/metrics/QueryTimeseriesData_847716941.go new file mode 100644 index 00000000000..0a98827ce7d --- /dev/null +++ b/examples/v2/metrics/QueryTimeseriesData_847716941.go @@ -0,0 +1,62 @@ +// Timeseries cross product query with apm_dependency_stats data source returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.TimeseriesFormulaQueryRequest{ + Data: datadogV2.TimeseriesFormulaRequest{ + Attributes: datadogV2.TimeseriesFormulaRequestAttributes{ + Formulas: []datadogV2.QueryFormula{ + { + Formula: "a", + Limit: &datadogV2.FormulaLimit{ + Count: datadog.PtrInt32(10), + Order: datadogV2.QUERYSORTORDER_DESC.Ptr(), + }, + }, + }, + From: 1636625471000, + Interval: datadog.PtrInt64(5000), + Queries: []datadogV2.TimeseriesQuery{ + datadogV2.TimeseriesQuery{ + ApmDependencyStatsQuery: &datadogV2.ApmDependencyStatsQuery{ + DataSource: datadogV2.APMDEPENDENCYSTATSDATASOURCE_APM_DEPENDENCY_STATS, + Name: "a", + Env: "ci", + Service: "cassandra", + Stat: datadogV2.APMDEPENDENCYSTATNAME_AVG_DURATION, + OperationName: "cassandra.query", + ResourceName: "DELETE FROM monitor_history.monitor_state_change_history WHERE org_id = ? 
AND monitor_id IN ? AND group = ?", + PrimaryTagName: datadog.PtrString("datacenter"), + PrimaryTagValue: datadog.PtrString("edge-eu1.prod.dog"), + }}, + }, + To: 1636629071000, + }, + Type: datadogV2.TIMESERIESFORMULAREQUESTTYPE_TIMESERIES_REQUEST, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewMetricsApi(apiClient) + resp, r, err := api.QueryTimeseriesData(ctx, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `MetricsApi.QueryTimeseriesData`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `MetricsApi.QueryTimeseriesData`:\n%s\n", responseContent) +} diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..a290b6c8ad4 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:29.994Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..87d9b126ec3 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.yaml @@ -0,0 +1,25 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658329000,"queries":[{"data_source":"apm_dependency_stats","env":"ci","name":"a","operation_name":"cassandra.query","primary_tag_name":"datacenter","primary_tag_value":"edge-eu1.prod.dog","resource_name":"DELETE FROM monitor_history.monitor_state_change_history WHERE org_id = ? AND monitor_id IN ? AND group = ?","service":"cassandra","stat":"avg_duration"}],"to":1775661929000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: "{\"data\":{\"id\":\"0\",\"type\":\"scalar_response\",\"attributes\":{\"\ + columns\":[{\"name\":\"a\",\"values\":[],\"type\":\"number\",\"meta\":{\"unit\"\ + :[{\"family\":\"time\",\"name\":\"microsecond\",\"plural\":\"microseconds\"\ + ,\"scale_factor\":0.000001,\"short_name\":\"\u03BCs\",\"id\":9},null]}}]}}}" + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.freeze new file mode 100644 index 00000000000..833a68b8b52 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-09T18:41:04.011Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.yaml new file mode 100644 index 00000000000..081dc73f813 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775756464000,"queries":[{"data_source":"apm_metrics","group_by":["resource_name"],"name":"a","query_filter":"env:prod","service":"web-store","span_kind":"server","stat":"hits"}],"to":1775760064000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..936f6336897 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:31.055Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..062c3833791 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_metrics_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658331000,"queries":[{"data_source":"apm_metrics","group_by":["resource_name"],"name":"a","query_filter":"env:prod","service":"web-store","stat":"hits"}],"to":1775661931000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..35a011fa28c --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:31.243Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..c949c88db05 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658331000,"queries":[{"data_source":"apm_resource_stats","env":"staging","group_by":["resource_name"],"name":"a","operation_name":"cassandra.query","primary_tag_name":"datacenter","primary_tag_value":"*","service":"azure-bill-import","stat":"hits"}],"to":1775661931000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_audit_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_audit_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..699160de632 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_audit_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:35.324Z \ No 
newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_audit_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_audit_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..2e013598a76 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_audit_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290795000,"queries":[{"compute":{"aggregation":"count"},"data_source":"audit","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294395000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[983.0],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..a551bac16fc --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:36.679Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..ee682d74b5c --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290796000,"queries":[{"compute":{"aggregation":"count"},"data_source":"ci_pipelines","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294396000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[9.0],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_tests_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_tests_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..8007eb2c357 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_tests_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:37.003Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_tests_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_tests_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..bf18181d0ab --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_ci_tests_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290797000,"queries":[{"compute":{"aggregation":"count"},"data_source":"ci_tests","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294397000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_container_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_container_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..5a73cccc287 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_container_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:31.666Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_container_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_container_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..284de54d996 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_container_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658331000,"queries":[{"aggregator":"avg","data_source":"container","limit":10,"metric":"process.stat.container.cpu.system_pct","name":"a","sort":"desc","tag_filters":[]}],"to":1775661931000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":[{"family":"percentage","name":"percent","plural":"percent","scale_factor":1.0,"short_name":"%","id":17},null]}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_events_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_events_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..caf45740951 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_events_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:37.321Z \ No 
newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_events_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_events_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..976dba949b3 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_events_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290797000,"queries":[{"compute":{"aggregation":"count"},"data_source":"events","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294397000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[322.0],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_logs_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_logs_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..0a9e0e4e5cc --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_logs_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:37.705Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_logs_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_logs_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..51a90e41d83 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_logs_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290797000,"queries":[{"compute":{"aggregation":"count"},"data_source":"logs","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294397000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_network_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_network_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..ac3e7dd054e --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_network_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:38.061Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_network_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_network_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..eaa1f663cce --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_network_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290798000,"queries":[{"compute":{"aggregation":"count"},"data_source":"network","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294398000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_on_call_events_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_on_call_events_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..d8faec80c99 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_on_call_events_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:38.350Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_on_call_events_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_on_call_events_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..de070cabaa2 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_on_call_events_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290798000,"queries":[{"compute":{"aggregation":"count"},"data_source":"on_call_events","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294398000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[5.0],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_process_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_process_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..aeb3bb6c4b5 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_process_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:31.838Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_process_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_process_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..85f15cfd291 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_process_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658331000,"queries":[{"aggregator":"avg","data_source":"process","is_normalized_cpu":false,"limit":10,"metric":"process.stat.cpu.total_pct","name":"a","sort":"desc","tag_filters":[],"text_filter":""}],"to":1775661931000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":[{"family":"percentage","name":"percent","plural":"percent","scale_factor":1.0,"short_name":"%","id":17},null]}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_product_analytics_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_product_analytics_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..f0e6af37019 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_product_analytics_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:38.663Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_product_analytics_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_product_analytics_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..93f859cc7c6 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_product_analytics_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290798000,"queries":[{"compute":{"aggregation":"count"},"data_source":"product_analytics","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294398000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_profiles_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_profiles_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..a65a5a624e7 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_profiles_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:39.011Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_profiles_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_profiles_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..e22bb1ef78a --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_profiles_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290799000,"queries":[{"compute":{"aggregation":"count"},"data_source":"profiles","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294399000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_rum_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_rum_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..b045cd705c6 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_rum_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:39.329Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_rum_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_rum_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..01cf7c3c7d0 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_rum_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290799000,"queries":[{"compute":{"aggregation":"count"},"data_source":"rum","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294399000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[338.0],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_security_signals_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_security_signals_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..13a24d37895 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_security_signals_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:39.804Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_security_signals_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_security_signals_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..118c8aeb414 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_security_signals_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290799000,"queries":[{"compute":{"aggregation":"count"},"data_source":"security_signals","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294399000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_slo_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_slo_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..1271a7e2cd8 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_slo_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:32.018Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_slo_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_slo_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..eb72dfffbfc --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_slo_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658332000,"queries":[{"additional_query_filters":"*","data_source":"slo","group_mode":"overall","measure":"slo_status","name":"a","slo_id":"12345678910","slo_query_type":"metric"}],"to":1775661932000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":[{"family":"percentage","name":"percent","plural":"percent","scale_factor":1.0,"short_name":"%","id":17},null]}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_spans_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_spans_data_source_returns_OK_response.freeze new file mode 100644 index 
00000000000..0c984c3a95d --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_spans_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:40.111Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_spans_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_spans_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..8d0cba817c3 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Scalar_cross_product_query_with_spans_data_source_returns_OK_response.yaml @@ -0,0 +1,22 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290800000,"queries":[{"compute":{"aggregation":"count"},"data_source":"spans","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294400000},"type":"scalar_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/scalar + response: + body: '{"data":{"id":"0","type":"scalar_response","attributes":{"columns":[{"name":"a","values":[],"type":"number","meta":{"unit":null}}]}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..8d85e07f104 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:32.147Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..540c72b9b18 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_dependency_stats_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658332000,"interval":5000,"queries":[{"data_source":"apm_dependency_stats","env":"ci","name":"a","operation_name":"cassandra.query","primary_tag_name":"datacenter","primary_tag_value":"edge-eu1.prod.dog","resource_name":"DELETE FROM monitor_history.monitor_state_change_history WHERE org_id = ? AND monitor_id IN ? 
AND group = ?","service":"cassandra","stat":"avg_duration"}],"to":1775661932000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.freeze new file mode 100644 index 00000000000..c772bf9ce5e --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-09T18:41:05.084Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.yaml new file mode 100644 index 00000000000..a49955bf96f --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_and_span_kind_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + 
{"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775756465000,"interval":5000,"queries":[{"data_source":"apm_metrics","group_by":["resource_name"],"name":"a","query_filter":"env:prod","service":"web-store","span_kind":"server","stat":"hits"}],"to":1775760065000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..316785c3cfc --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:32.363Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..9411b5e88a7 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_metrics_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + 
{"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658332000,"interval":5000,"queries":[{"data_source":"apm_metrics","group_by":["resource_name"],"name":"a","query_filter":"env:prod","service":"web-store","stat":"hits"}],"to":1775661932000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..c4bed5aa5b3 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:32.570Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..517d5d4ed8f --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_apm_resource_stats_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + 
{"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658332000,"interval":5000,"queries":[{"data_source":"apm_resource_stats","env":"staging","group_by":["resource_name"],"name":"a","operation_name":"cassandra.query","primary_tag_name":"datacenter","primary_tag_value":"*","service":"azure-bill-import","stat":"hits"}],"to":1775661932000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_audit_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_audit_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..02b2445fe14 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_audit_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:40.465Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_audit_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_audit_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..8ef09d7af99 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_audit_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + 
body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290800000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"audit","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294400000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[{"group_tags":[],"query_index":0,"unit":null}],"times":[1776290800000,1776290835000,1776290860000,1776290895000,1776290915000,1776290920000,1776290945000,1776290955000,1776290980000,1776291020000,1776291040000,1776291080000,1776291100000,1776291140000,1776291160000,1776291200000,1776291220000,1776291245000,1776291260000,1776291280000,1776291320000,1776291325000,1776291330000,1776291335000,1776291340000,1776291345000,1776291385000,1776291400000,1776291415000,1776291445000,1776291460000,1776291505000,1776291520000,1776291535000,1776291540000,1776291565000,1776291580000,1776291630000,1776291640000,1776291690000,1776291700000,1776291750000,1776291760000,1776291810000,1776291820000,1776291870000,1776291880000,1776291890000,1776291935000,1776291940000,1776291995000,1776292000000,1776292010000,1776292015000,1776292020000,1776292055000,1776292060000,1776292115000,1776292120000,1776292175000,1776292180000,1776292195000,1776292200000,1776292205000,1776292240000,1776292300000,1776292360000,1776292390000,1776292420000,1776292460000,1776292480000,1776292540000,1776292545000,1776292565000,1776292600000,1776292605000,1776292660000,1776292665000,1776292720000,1776292725000,1776292765000,1776292780000,1776292785000,1776292840000,1776292850000,1776292900000,1776292910000,1776292960000,1776292970000,1776293005000,1776293020000,1776293030000,1776293080000,1776293090000,1776293140000,1776293155000,1776293200000,1776293215000,17762
93260000,1776293265000,1776293275000,1776293320000,1776293335000,1776293355000,1776293380000,1776293395000,1776293440000,1776293460000,1776293500000,1776293520000,1776293545000,1776293550000,1776293555000,1776293560000,1776293580000,1776293620000,1776293640000,1776293680000,1776293700000,1776293740000,1776293760000,1776293765000,1776293770000,1776293785000,1776293790000,1776293800000,1776293825000,1776293860000,1776293865000,1776293885000,1776293920000,1776293945000,1776293955000,1776293980000,1776294005000,1776294040000,1776294070000,1776294100000,1776294130000,1776294160000,1776294190000,1776294220000,1776294240000,1776294245000,1776294250000,1776294280000,1776294285000,1776294310000,1776294340000,1776294375000],"values":[[2,2,3,2,13,2,3,2,3,2,1,2,3,2,3,2,1,4,2,3,11,104,140,28,9,2,2,1,3,2,3,2,4,1,1,2,1,2,3,2,1,2,3,2,3,2,3,3,2,1,2,3,7,14,10,2,1,2,3,2,3,8,14,9,3,5,3,8,5,9,3,3,2,4,1,2,3,2,3,2,1,1,2,3,2,1,2,3,2,1,1,2,54,2,1,2,3,2,6,2,2,1,2,4,3,2,1,2,3,2,5,15,7,3,2,1,2,3,2,2,14,29,2,19,9,2,2,2,1,2,3,2,55,1,2,3,2,1,2,3,2,3,3,3,2,38,73,2,3,1]]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..af9e9e774fe --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:40.785Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.yaml 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..b6208fb8d48 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_pipelines_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290800000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"ci_pipelines","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294400000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[{"group_tags":[],"query_index":0,"unit":null}],"times":[1776291235000,1776291290000,1776291295000,1776291370000,1776291380000,1776291385000],"values":[[1,1,3,1,1,2]]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_tests_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_tests_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..c0ff58192c4 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_tests_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:41.185Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_tests_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_tests_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..50af6e772d7 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_ci_tests_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290801000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"ci_tests","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294401000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_container_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_container_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..0b3ec018946 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_container_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:32.715Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_container_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_container_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..73060cf76bc --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_container_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658332000,"interval":5000,"queries":[{"data_source":"container","limit":10,"metric":"process.stat.container.cpu.system_pct","name":"a","sort":"desc","tag_filters":[]}],"to":1775661932000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_events_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_events_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..6f9d18a52f0 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_events_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:41.507Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_events_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_events_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..c4c5492f1eb --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_events_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290801000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"events","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294401000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: 
'{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[{"group_tags":[],"query_index":0,"unit":null}],"times":[1776290845000,1776290905000,1776290910000,1776290970000,1776291035000,1776291100000,1776291160000,1776291165000,1776291225000,1776291290000,1776291355000,1776291415000,1776291420000,1776291480000,1776291545000,1776291605000,1776291610000,1776291670000,1776291735000,1776291800000,1776291870000,1776291940000,1776292005000,1776292065000,1776292070000,1776292130000,1776292195000,1776292255000,1776292260000,1776292285000,1776292320000,1776292345000,1776292385000,1776292450000,1776292460000,1776292465000,1776292510000,1776292525000,1776292575000,1776292640000,1776292700000,1776292705000,1776292765000,1776292830000,1776292895000,1776292965000,1776293035000,1776293100000,1776293165000,1776293230000,1776293300000,1776293365000,1776293370000,1776293430000,1776293495000,1776293560000,1776293630000,1776293710000,1776293760000,1776293765000,1776293780000,1776293845000,1776293910000,1776293970000,1776293975000,1776294040000,1776294110000,1776294175000,1776294235000,1776294240000,1776294285000,1776294305000],"values":[[4,2,2,4,4,4,2,2,4,4,4,2,2,4,4,2,2,4,4,4,4,4,4,2,2,4,4,2,2,1,4,1,4,4,2,1,4,1,4,4,2,2,4,4,4,4,4,4,4,4,4,2,2,4,4,4,4,4,15,75,4,4,4,2,2,4,4,4,2,2,12,2]]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_logs_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_logs_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..5adb58b5394 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_logs_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:41.871Z \ No 
newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_logs_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_logs_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..831f2d81ee0 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_logs_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290801000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"logs","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294401000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_network_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_network_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..79bb64c6213 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_network_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:42.484Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_network_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_network_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..650e2ca25bf --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_network_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290802000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"network","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294402000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_on_call_events_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_on_call_events_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..e7ce3e23b1f --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_on_call_events_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:42.769Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_on_call_events_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_on_call_events_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..a3d27937720 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_on_call_events_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290802000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"on_call_events","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294402000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[{"group_tags":[],"query_index":0,"unit":null}],"times":[1776294280000,1776294285000],"values":[[2,3]]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_process_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_process_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..1ef87ffbe14 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_process_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:32.912Z \ No newline at end of file 
diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_process_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_process_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..18adc41e455 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_process_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658332000,"interval":5000,"queries":[{"data_source":"process","is_normalized_cpu":false,"limit":10,"metric":"process.stat.cpu.total_pct","name":"a","sort":"desc","tag_filters":[],"text_filter":""}],"to":1775661932000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_product_analytics_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_product_analytics_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..681a24f0816 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_product_analytics_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:43.081Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_product_analytics_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_product_analytics_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..a4bdec39e6b --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_product_analytics_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290803000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"product_analytics","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294403000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_profiles_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_profiles_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..5f2adb5d3a4 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_profiles_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:43.385Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_profiles_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_profiles_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..be696afc65d --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_profiles_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290803000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"profiles","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294403000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_rum_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_rum_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..8d9c1fd4b39 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_rum_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:43.715Z \ No newline at end of file diff --git 
a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_rum_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_rum_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..0b41c680aa6 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_rum_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290803000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"rum","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294403000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[{"group_tags":[],"query_index":0,"unit":null}],"times":[1776290870000,1776291170000,1776291175000,1776291470000,1776291770000,1776292070000,1776292370000,1776292670000,1776292970000,1776293270000,1776293570000,1776293870000,1776294170000],"values":[[28,30,2,28,30,27,26,26,27,30,28,26,30]]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_security_signals_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_security_signals_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..2d8a7caaea3 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_security_signals_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:44.027Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_security_signals_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_security_signals_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..97e7aa222f4 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_security_signals_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290804000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"security_signals","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294404000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_slo_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_slo_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..03ed54b79c2 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_slo_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-08T15:25:33.123Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_slo_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_slo_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..fc93d284c13 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_slo_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1775658333000,"interval":5000,"queries":[{"additional_query_filters":"*","data_source":"slo","group_mode":"overall","measure":"slo_status","name":"a","slo_id":"12345678910","slo_query_type":"metric"}],"to":1775661933000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_spans_data_source_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_spans_data_source_returns_OK_response.freeze new file mode 100644 index 00000000000..0b96335e1d0 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_spans_data_source_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-15T23:06:44.323Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_spans_data_source_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_spans_data_source_returns_OK_response.yaml new file mode 100644 index 00000000000..90027fc509e --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Metrics/Scenario_Timeseries_cross_product_query_with_spans_data_source_returns_OK_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"formulas":[{"formula":"a","limit":{"count":10,"order":"desc"}}],"from":1776290804000,"interval":5000,"queries":[{"compute":{"aggregation":"count"},"data_source":"spans","indexes":["*"],"name":"a","search":{"query":"*"}}],"to":1776294404000},"type":"timeseries_request"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/query/timeseries + response: + body: '{"data":{"id":"0","type":"timeseries_response","attributes":{"series":[],"times":[],"values":[]}}} + + ' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/features/v2/metrics.feature b/tests/scenarios/features/v2/metrics.feature index 41e753dfb3f..114814e21e2 100644 --- a/tests/scenarios/features/v2/metrics.feature +++ b/tests/scenarios/features/v2/metrics.feature @@ -344,6 +344,177 @@ Feature: Metrics And the response "data.type" is equal to "scalar_response" And the response "data.attributes.columns[0].name" is equal to "a" + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product 
query with apm_dependency_stats data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "apm_dependency_stats", "name": "a", "env": "ci", "service": "cassandra", "stat": "avg_duration", "operation_name": "cassandra.query", "resource_name": "DELETE FROM monitor_history.monitor_state_change_history WHERE org_id = ? AND monitor_id IN ? AND group = ?", "primary_tag_name": "datacenter", "primary_tag_value": "edge-eu1.prod.dog"}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with apm_metrics data source and span_kind returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "apm_metrics", "name": "a", "stat": "hits", "service": "web-store", "query_filter": "env:prod", "span_kind": "server", "group_by": ["resource_name"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with apm_metrics data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now 
- 1h') }}000, "queries": [{"data_source": "apm_metrics", "name": "a", "stat": "hits", "service": "web-store", "query_filter": "env:prod", "group_by": ["resource_name"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with apm_resource_stats data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "apm_resource_stats", "name": "a", "env": "staging", "service": "azure-bill-import", "stat": "hits", "operation_name": "cassandra.query", "group_by": ["resource_name"], "primary_tag_name": "datacenter", "primary_tag_value": "*"}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with audit data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "audit", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with 
ci_pipelines data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "ci_pipelines", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with ci_tests data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "ci_tests", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with container data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "container", "name": "a", "metric": "process.stat.container.cpu.system_pct", "aggregator": "avg", "tag_filters": [], "limit": 10, "sort": "desc"}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 
200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with events data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "events", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with logs data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "logs", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with network data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "network", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ 
timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with on_call_events data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "on_call_events", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with process data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "process", "name": "a", "metric": "process.stat.cpu.total_pct", "aggregator": "avg", "text_filter": "", "tag_filters": [], "limit": 10, "sort": "desc", "is_normalized_cpu": false}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with product_analytics data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 
10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "product_analytics", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with profiles data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "profiles", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with rum data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "rum", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with security_signals data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new 
"QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "security_signals", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with slo data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "slo", "name": "a", "slo_id": "12345678910", "measure": "slo_status", "slo_query_type": "metric", "group_mode": "overall", "additional_query_filters": "*"}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Scalar cross product query with spans data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryScalarData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "queries": [{"data_source": "spans", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "scalar_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "scalar_response" + @generated @skip 
@team:DataDog/metrics-intake Scenario: Submit metrics returns "Bad Request" response Given new "SubmitMetrics" request @@ -415,6 +586,177 @@ Feature: Metrics Then the response status is 200 OK And the response "data.type" is equal to "timeseries_response" + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with apm_dependency_stats data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "apm_dependency_stats", "name": "a", "env": "ci", "service": "cassandra", "stat": "avg_duration", "operation_name": "cassandra.query", "resource_name": "DELETE FROM monitor_history.monitor_state_change_history WHERE org_id = ? AND monitor_id IN ? AND group = ?", "primary_tag_name": "datacenter", "primary_tag_value": "edge-eu1.prod.dog"}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with apm_metrics data source and span_kind returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "apm_metrics", "name": "a", "stat": "hits", "service": "web-store", "query_filter": "env:prod", "span_kind": "server", "group_by": ["resource_name"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response 
"data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with apm_metrics data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "apm_metrics", "name": "a", "stat": "hits", "service": "web-store", "query_filter": "env:prod", "group_by": ["resource_name"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with apm_resource_stats data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "apm_resource_stats", "name": "a", "env": "staging", "service": "azure-bill-import", "stat": "hits", "operation_name": "cassandra.query", "group_by": ["resource_name"], "primary_tag_name": "datacenter", "primary_tag_value": "*"}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with audit data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": 
{"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "audit", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with ci_pipelines data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "ci_pipelines", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with ci_tests data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "ci_tests", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query 
with container data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "container", "name": "a", "metric": "process.stat.container.cpu.system_pct", "tag_filters": [], "limit": 10, "sort": "desc"}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with events data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "events", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with logs data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "logs", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + 
When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with network data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "network", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with on_call_events data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "on_call_events", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with process data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') 
}}000, "interval": 5000, "queries": [{"data_source": "process", "name": "a", "metric": "process.stat.cpu.total_pct", "text_filter": "", "tag_filters": [], "limit": 10, "sort": "desc", "is_normalized_cpu": false}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with product_analytics data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "product_analytics", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with profiles data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "profiles", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query 
with rum data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "rum", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with security_signals data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "security_signals", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with slo data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "slo", "name": "a", "slo_id": "12345678910", "measure": "slo_status", "slo_query_type": "metric", "group_mode": "overall", "additional_query_filters": "*"}], "to": {{ 
timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + + @skip-validation @team:Datadog/timeseries-query + Scenario: Timeseries cross product query with spans data source returns "OK" response + Given a valid "appKeyAuth" key in the system + And new "QueryTimeseriesData" request + And body with value {"data": {"attributes": {"formulas": [{"formula": "a", "limit": {"count": 10, "order": "desc"}}], "from": {{ timestamp('now - 1h') }}000, "interval": 5000, "queries": [{"data_source": "spans", "name": "a", "compute": {"aggregation": "count"}, "search": {"query": "*"}, "indexes": ["*"]}], "to": {{ timestamp('now') }}000}, "type": "timeseries_request"}} + When the request is sent + Then the response status is 200 OK + And the response "data.type" is equal to "timeseries_response" + @generated @skip @team:DataDog/metrics-experience Scenario: Update a tag configuration returns "Bad Request" response Given a valid "appKeyAuth" key in the system From 5f31934531ad4447046d24134c6265972c437c74 Mon Sep 17 00:00:00 2001 From: "api-clients-generation-pipeline[bot]" <54105614+api-clients-generation-pipeline[bot]@users.noreply.github.com> Date: Fri, 17 Apr 2026 09:08:08 +0000 Subject: [PATCH 2/2] Add API spec for security monitoring terraform export endpoints (#3954) Co-authored-by: ci.datadog-api-spec --- .generator/schemas/v2/openapi.yaml | 277 +++++++++ api/datadog/configuration.go | 547 +++++++++--------- api/datadogV2/api_security_monitoring.go | 267 +++++++++ api/datadogV2/doc.go | 3 + ...toring_terraform_bulk_export_attributes.go | 101 ++++ ...y_monitoring_terraform_bulk_export_data.go | 142 +++++ ...onitoring_terraform_bulk_export_request.go | 110 ++++ ...monitoring_terraform_convert_attributes.go | 101 ++++ ...urity_monitoring_terraform_convert_data.go | 174 ++++++ ...ty_monitoring_terraform_convert_request.go | 110 ++++ 
..._monitoring_terraform_export_attributes.go | 168 ++++++ ...curity_monitoring_terraform_export_data.go | 174 ++++++ ...ty_monitoring_terraform_export_response.go | 111 ++++ ...rity_monitoring_terraform_resource_type.go | 66 +++ ...ortSecurityMonitoringTerraformResources.go | 43 ++ ...vertSecurityMonitoringTerraformResource.go | 44 ++ ...portSecurityMonitoringTerraformResource.go | 33 ++ ...ce_to_Terraform_returns_OK_response.freeze | 1 + ...urce_to_Terraform_returns_OK_response.yaml | 25 + ...ce_to_Terraform_returns_OK_response.freeze | 1 + ...urce_to_Terraform_returns_OK_response.yaml | 60 ++ ...es_to_Terraform_returns_OK_response.freeze | 1 + ...rces_to_Terraform_returns_OK_response.yaml | 58 ++ .../features/v2/security_monitoring.feature | 69 +++ tests/scenarios/features/v2/undo.json | 18 + 25 files changed, 2432 insertions(+), 272 deletions(-) create mode 100644 api/datadogV2/model_security_monitoring_terraform_bulk_export_attributes.go create mode 100644 api/datadogV2/model_security_monitoring_terraform_bulk_export_data.go create mode 100644 api/datadogV2/model_security_monitoring_terraform_bulk_export_request.go create mode 100644 api/datadogV2/model_security_monitoring_terraform_convert_attributes.go create mode 100644 api/datadogV2/model_security_monitoring_terraform_convert_data.go create mode 100644 api/datadogV2/model_security_monitoring_terraform_convert_request.go create mode 100644 api/datadogV2/model_security_monitoring_terraform_export_attributes.go create mode 100644 api/datadogV2/model_security_monitoring_terraform_export_data.go create mode 100644 api/datadogV2/model_security_monitoring_terraform_export_response.go create mode 100644 api/datadogV2/model_security_monitoring_terraform_resource_type.go create mode 100644 examples/v2/security-monitoring/BulkExportSecurityMonitoringTerraformResources.go create mode 100644 examples/v2/security-monitoring/ConvertSecurityMonitoringTerraformResource.go create mode 100644 
examples/v2/security-monitoring/ExportSecurityMonitoringTerraformResource.go create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Convert_security_monitoring_resource_to_Terraform_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Convert_security_monitoring_resource_to_Terraform_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resource_to_Terraform_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resource_to_Terraform_returns_OK_response.yaml create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resources_to_Terraform_returns_OK_response.freeze create mode 100644 tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resources_to_Terraform_returns_OK_response.yaml diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index 67ea97d1d67..3ac7ae5f2b7 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -1388,6 +1388,20 @@ components: required: true schema: type: string + SecurityMonitoringTerraformResourceId: + description: The ID of the security monitoring resource to export. + in: path + name: resource_id + required: true + schema: + type: string + SecurityMonitoringTerraformResourceType: + description: The type of security monitoring resource to export. + in: path + name: resource_type + required: true + schema: + $ref: "#/components/schemas/SecurityMonitoringTerraformResourceType" SensitiveDataScannerGroupID: description: The ID of a group of rules. 
in: path @@ -64963,6 +64977,134 @@ components: $ref: "#/components/schemas/SecurityMonitoringSuppression" type: array type: object + SecurityMonitoringTerraformBulkExportAttributes: + description: Attributes for the bulk export request. + properties: + resource_ids: + description: The list of resource IDs to export. Maximum 1000 items. + example: + - "" + items: + description: The ID of the resource to export. + type: string + maxItems: 1000 + type: array + required: + - resource_ids + type: object + SecurityMonitoringTerraformBulkExportData: + description: The bulk export request data object. + properties: + attributes: + $ref: "#/components/schemas/SecurityMonitoringTerraformBulkExportAttributes" + type: + description: The JSON:API type. Always `bulk_export_resources`. + example: bulk_export_resources + type: string + required: + - type + - attributes + type: object + SecurityMonitoringTerraformBulkExportRequest: + description: Request body for bulk exporting security monitoring resources to Terraform. + properties: + data: + $ref: "#/components/schemas/SecurityMonitoringTerraformBulkExportData" + required: + - data + type: object + SecurityMonitoringTerraformConvertAttributes: + description: Attributes for the convert request. + properties: + resource_json: + additionalProperties: {} + description: The resource attributes as a JSON object, matching the structure returned by the corresponding Datadog API (for example, the attributes of a suppression rule). + example: + enabled: true + name: Custom suppression + rule_query: type:log_detection source:cloudtrail + suppression_query: env:staging status:low + type: object + required: + - resource_json + type: object + SecurityMonitoringTerraformConvertData: + description: The convert request data object. + properties: + attributes: + $ref: "#/components/schemas/SecurityMonitoringTerraformConvertAttributes" + id: + description: The ID of the resource being converted. 
+ example: abc-123 + type: string + type: + description: The JSON:API type. Always `convert_resource`. + example: convert_resource + type: string + required: + - type + - id + - attributes + type: object + SecurityMonitoringTerraformConvertRequest: + description: Request body for converting a security monitoring resource JSON to Terraform. + properties: + data: + $ref: "#/components/schemas/SecurityMonitoringTerraformConvertData" + required: + - data + type: object + SecurityMonitoringTerraformExportAttributes: + description: Attributes of the Terraform export response. + properties: + output: + description: The Terraform configuration for the resource. + type: string + resource_id: + description: The ID of the exported resource. + example: abc-123 + type: string + type_name: + description: The Terraform resource type name. + example: datadog_security_monitoring_suppression + type: string + required: + - type_name + - resource_id + type: object + SecurityMonitoringTerraformExportData: + description: The Terraform export data object. + properties: + attributes: + $ref: "#/components/schemas/SecurityMonitoringTerraformExportAttributes" + id: + description: The resource identifier composed of the Terraform type name and the resource ID separated by `|`. + example: datadog_security_monitoring_suppression|abc-123 + type: string + type: + description: The JSON:API type. Always `format_resource`. + example: format_resource + type: string + required: + - type + - id + - attributes + type: object + SecurityMonitoringTerraformExportResponse: + description: Response containing the Terraform configuration for a security monitoring resource. + properties: + data: + $ref: "#/components/schemas/SecurityMonitoringTerraformExportData" + type: object + SecurityMonitoringTerraformResourceType: + description: The type of security monitoring resource to export to Terraform. 
+ enum: + - suppressions + - critical_assets + type: string + x-enum-varnames: + - SUPPRESSIONS + - CRITICAL_ASSETS SecurityMonitoringThirdPartyRootQuery: description: A query to be combined with the third party case query. properties: @@ -114337,6 +114479,141 @@ paths: permissions: - security_monitoring_rules_read - security_monitoring_signals_read + /api/v2/security_monitoring/terraform/{resource_type}/bulk: + post: + description: |- + Export multiple security monitoring resources to Terraform, packaged as a zip archive. + The `resource_type` path parameter specifies the type of resources to export + and must be one of `suppressions` or `critical_assets`. + A maximum of 1000 resources can be exported in a single request. + operationId: BulkExportSecurityMonitoringTerraformResources + parameters: + - $ref: "#/components/parameters/SecurityMonitoringTerraformResourceType" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SecurityMonitoringTerraformBulkExportRequest" + description: The resource IDs to export. + required: true + responses: + "200": + content: + application/zip: + schema: + format: binary + type: string + description: OK + "400": + $ref: "#/components/responses/BadRequestResponse" + "403": + $ref: "#/components/responses/NotAuthorizedResponse" + "404": + $ref: "#/components/responses/NotFoundResponse" + "429": + $ref: "#/components/responses/TooManyRequestsResponse" + security: + - apiKeyAuth: [] + appKeyAuth: [] + - AuthZ: + - security_monitoring_suppressions_read + - AuthZ: + - security_monitoring_rules_read + summary: Export security monitoring resources to Terraform + tags: + - Security Monitoring + x-codegen-request-body-name: body + "x-permission": + operator: OR + permissions: + - security_monitoring_suppressions_read + - security_monitoring_rules_read + x-unstable: "**Note**: This endpoint is in Preview. If you have any feedback, contact [Datadog support](https://docs.datadoghq.com/help/)." 
+ /api/v2/security_monitoring/terraform/{resource_type}/convert: + post: + description: |- + Convert a security monitoring resource that doesn't (yet) exist from JSON to Terraform. + The `resource_type` path parameter specifies the type of resource to convert + and must be one of `suppressions` or `critical_assets`. + operationId: ConvertSecurityMonitoringTerraformResource + parameters: + - $ref: "#/components/parameters/SecurityMonitoringTerraformResourceType" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SecurityMonitoringTerraformConvertRequest" + description: The resource JSON to convert. + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SecurityMonitoringTerraformExportResponse" + description: OK + "400": + $ref: "#/components/responses/BadRequestResponse" + "403": + $ref: "#/components/responses/NotAuthorizedResponse" + "429": + $ref: "#/components/responses/TooManyRequestsResponse" + security: + - apiKeyAuth: [] + appKeyAuth: [] + - AuthZ: + - security_monitoring_suppressions_read + - AuthZ: + - security_monitoring_rules_read + summary: Convert security monitoring resource to Terraform + tags: + - Security Monitoring + x-codegen-request-body-name: body + "x-permission": + operator: OR + permissions: + - security_monitoring_suppressions_read + - security_monitoring_rules_read + x-unstable: "**Note**: This endpoint is in Preview. If you have any feedback, contact [Datadog support](https://docs.datadoghq.com/help/)." + /api/v2/security_monitoring/terraform/{resource_type}/{resource_id}: + get: + description: |- + Export a security monitoring resource to a Terraform configuration. + The `resource_type` path parameter specifies the type of resource to export + and must be one of `suppressions` or `critical_assets`. 
+ operationId: ExportSecurityMonitoringTerraformResource + parameters: + - $ref: "#/components/parameters/SecurityMonitoringTerraformResourceType" + - $ref: "#/components/parameters/SecurityMonitoringTerraformResourceId" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SecurityMonitoringTerraformExportResponse" + description: OK + "403": + $ref: "#/components/responses/NotAuthorizedResponse" + "404": + $ref: "#/components/responses/NotFoundResponse" + "429": + $ref: "#/components/responses/TooManyRequestsResponse" + security: + - apiKeyAuth: [] + appKeyAuth: [] + - AuthZ: + - security_monitoring_suppressions_read + - AuthZ: + - security_monitoring_rules_read + summary: Export security monitoring resource to Terraform + tags: + - Security Monitoring + "x-permission": + operator: OR + permissions: + - security_monitoring_suppressions_read + - security_monitoring_rules_read + x-unstable: "**Note**: This endpoint is in Preview. If you have any feedback, contact [Datadog support](https://docs.datadoghq.com/help/)." /api/v2/sensitive-data-scanner/config: get: description: List all the Scanning groups in your organization. 
diff --git a/api/datadog/configuration.go b/api/datadog/configuration.go index 1ffb7ae427e..96c08b2d027 100644 --- a/api/datadog/configuration.go +++ b/api/datadog/configuration.go @@ -649,278 +649,281 @@ func NewConfiguration() *Configuration { }, }, unstableOperations: map[string]bool{ - "v2.CancelFleetDeployment": false, - "v2.CreateFleetDeploymentConfigure": false, - "v2.CreateFleetDeploymentUpgrade": false, - "v2.CreateFleetSchedule": false, - "v2.DeleteFleetSchedule": false, - "v2.GetFleetAgentInfo": false, - "v2.GetFleetDeployment": false, - "v2.GetFleetSchedule": false, - "v2.ListFleetAgents": false, - "v2.ListFleetAgentTracers": false, - "v2.ListFleetAgentVersions": false, - "v2.ListFleetClusters": false, - "v2.ListFleetDeployments": false, - "v2.ListFleetInstrumentedPods": false, - "v2.ListFleetSchedules": false, - "v2.ListFleetTracers": false, - "v2.TriggerFleetSchedule": false, - "v2.UpdateFleetSchedule": false, - "v2.CreateOpenAPI": false, - "v2.DeleteOpenAPI": false, - "v2.GetOpenAPI": false, - "v2.ListAPIs": false, - "v2.UpdateOpenAPI": false, - "v2.GetInvestigation": false, - "v2.ListInvestigations": false, - "v2.TriggerInvestigation": false, - "v2.CreateCaseJiraIssue": false, - "v2.CreateCaseNotebook": false, - "v2.CreateCaseServiceNowTicket": false, - "v2.LinkIncident": false, - "v2.LinkJiraIssueToCase": false, - "v2.MoveCaseToProject": false, - "v2.UnlinkJiraIssue": false, - "v2.CreateChangeRequest": false, - "v2.CreateChangeRequestBranch": false, - "v2.DeleteChangeRequestDecision": false, - "v2.GetChangeRequest": false, - "v2.UpdateChangeRequest": false, - "v2.UpdateChangeRequestDecision": false, - "v2.DeleteTestOptimizationServiceSettings": false, - "v2.GetFlakyTestsManagementPolicies": false, - "v2.GetTestOptimizationServiceSettings": false, - "v2.SearchFlakyTests": false, - "v2.UpdateFlakyTests": false, - "v2.UpdateFlakyTestsManagementPolicies": false, - "v2.UpdateTestOptimizationServiceSettings": false, - 
"v2.CreateAWSCloudAuthPersonaMapping": false, - "v2.DeleteAWSCloudAuthPersonaMapping": false, - "v2.GetAWSCloudAuthPersonaMapping": false, - "v2.ListAWSCloudAuthPersonaMappings": false, - "v2.ActivateContentPack": false, - "v2.CancelThreatHuntingJob": false, - "v2.ConvertJobResultToSignal": false, - "v2.DeactivateContentPack": false, - "v2.DeleteThreatHuntingJob": false, - "v2.GetContentPacksStates": false, - "v2.GetFinding": false, - "v2.GetIndicatorOfCompromise": false, - "v2.GetRuleVersionHistory": false, - "v2.GetSecretsRules": false, - "v2.GetSecurityMonitoringHistsignal": false, - "v2.GetSecurityMonitoringHistsignalsByJobId": false, - "v2.GetThreatHuntingJob": false, - "v2.ListFindings": false, - "v2.ListIndicatorsOfCompromise": false, - "v2.ListMultipleRulesets": false, - "v2.ListScannedAssetsMetadata": false, - "v2.ListSecurityMonitoringHistsignals": false, - "v2.ListThreatHuntingJobs": false, - "v2.ListVulnerabilities": false, - "v2.ListVulnerableAssets": false, - "v2.MuteFindings": false, - "v2.RunThreatHuntingJob": false, - "v2.SearchSecurityMonitoringHistsignals": false, - "v2.GetCodeCoverageBranchSummary": false, - "v2.GetCodeCoverageCommitSummary": false, - "v2.CreateDashboardSecureEmbed": false, - "v2.DeleteDashboardSecureEmbed": false, - "v2.GetDashboardSecureEmbed": false, - "v2.UpdateDashboardSecureEmbed": false, - "v2.CreateDataset": false, - "v2.DeleteDataset": false, - "v2.GetAllDatasets": false, - "v2.GetDataset": false, - "v2.UpdateDataset": false, - "v2.CancelDataDeletionRequest": false, - "v2.CreateDataDeletionRequest": false, - "v2.GetDataDeletionRequests": false, - "v2.CreateDeploymentGate": false, - "v2.CreateDeploymentRule": false, - "v2.DeleteDeploymentGate": false, - "v2.DeleteDeploymentRule": false, - "v2.GetDeploymentGate": false, - "v2.GetDeploymentGateRules": false, - "v2.GetDeploymentGatesEvaluationResult": false, - "v2.GetDeploymentRule": false, - "v2.ListDeploymentGates": false, - "v2.TriggerDeploymentGatesEvaluation": false, - 
"v2.UpdateDeploymentGate": false, - "v2.UpdateDeploymentRule": false, - "v2.CreateHamrOrgConnection": false, - "v2.GetHamrOrgConnection": false, - "v2.CreateGlobalIncidentHandle": false, - "v2.CreateIncident": false, - "v2.CreateIncidentAttachment": false, - "v2.CreateIncidentIntegration": false, - "v2.CreateIncidentNotificationRule": false, - "v2.CreateIncidentNotificationTemplate": false, - "v2.CreateIncidentPostmortemAttachment": false, - "v2.CreateIncidentPostmortemTemplate": false, - "v2.CreateIncidentTodo": false, - "v2.CreateIncidentType": false, - "v2.CreateIncidentUserDefinedField": false, - "v2.DeleteGlobalIncidentHandle": false, - "v2.DeleteIncident": false, - "v2.DeleteIncidentAttachment": false, - "v2.DeleteIncidentIntegration": false, - "v2.DeleteIncidentNotificationRule": false, - "v2.DeleteIncidentNotificationTemplate": false, - "v2.DeleteIncidentPostmortemTemplate": false, - "v2.DeleteIncidentTodo": false, - "v2.DeleteIncidentType": false, - "v2.DeleteIncidentUserDefinedField": false, - "v2.GetGlobalIncidentSettings": false, - "v2.GetIncident": false, - "v2.GetIncidentIntegration": false, - "v2.GetIncidentNotificationRule": false, - "v2.GetIncidentNotificationTemplate": false, - "v2.GetIncidentPostmortemTemplate": false, - "v2.GetIncidentTodo": false, - "v2.GetIncidentType": false, - "v2.GetIncidentUserDefinedField": false, - "v2.ImportIncident": false, - "v2.ListGlobalIncidentHandles": false, - "v2.ListIncidentAttachments": false, - "v2.ListIncidentIntegrations": false, - "v2.ListIncidentNotificationRules": false, - "v2.ListIncidentNotificationTemplates": false, - "v2.ListIncidentPostmortemTemplates": false, - "v2.ListIncidents": false, - "v2.ListIncidentTodos": false, - "v2.ListIncidentTypes": false, - "v2.ListIncidentUserDefinedFields": false, - "v2.SearchIncidents": false, - "v2.UpdateGlobalIncidentHandle": false, - "v2.UpdateGlobalIncidentSettings": false, - "v2.UpdateIncident": false, - "v2.UpdateIncidentAttachment": false, - 
"v2.UpdateIncidentIntegration": false, - "v2.UpdateIncidentNotificationRule": false, - "v2.UpdateIncidentNotificationTemplate": false, - "v2.UpdateIncidentPostmortemTemplate": false, - "v2.UpdateIncidentTodo": false, - "v2.UpdateIncidentType": false, - "v2.UpdateIncidentUserDefinedField": false, - "v2.CreateAWSAccountCCMConfig": false, - "v2.DeleteAWSAccountCCMConfig": false, - "v2.GetAWSAccountCCMConfig": false, - "v2.UpdateAWSAccountCCMConfig": false, - "v2.CreateJiraIssueTemplate": false, - "v2.DeleteJiraAccount": false, - "v2.DeleteJiraIssueTemplate": false, - "v2.GetJiraIssueTemplate": false, - "v2.ListJiraAccounts": false, - "v2.ListJiraIssueTemplates": false, - "v2.UpdateJiraIssueTemplate": false, - "v2.CreateTenancyConfig": false, - "v2.GetTenancyConfigs": false, - "v2.CreateLLMObsAnnotationQueue": false, - "v2.CreateLLMObsAnnotationQueueInteractions": false, - "v2.CreateLLMObsDataset": false, - "v2.CreateLLMObsDatasetRecords": false, - "v2.CreateLLMObsExperiment": false, - "v2.CreateLLMObsExperimentEvents": false, - "v2.CreateLLMObsProject": false, - "v2.DeleteLLMObsAnnotationQueue": false, - "v2.DeleteLLMObsAnnotationQueueInteractions": false, - "v2.DeleteLLMObsDatasetRecords": false, - "v2.DeleteLLMObsDatasets": false, - "v2.DeleteLLMObsExperiments": false, - "v2.DeleteLLMObsProjects": false, - "v2.GetLLMObsAnnotatedInteractions": false, - "v2.ListLLMObsAnnotationQueues": false, - "v2.ListLLMObsDatasetRecords": false, - "v2.ListLLMObsDatasets": false, - "v2.ListLLMObsExperiments": false, - "v2.ListLLMObsProjects": false, - "v2.UpdateLLMObsAnnotationQueue": false, - "v2.UpdateLLMObsDataset": false, - "v2.UpdateLLMObsDatasetRecords": false, - "v2.UpdateLLMObsExperiment": false, - "v2.UpdateLLMObsProject": false, - "v2.AddRoleToRestrictionQuery": false, - "v2.CreateRestrictionQuery": false, - "v2.DeleteRestrictionQuery": false, - "v2.GetRestrictionQuery": false, - "v2.GetRoleRestrictionQuery": false, - "v2.ListRestrictionQueries": false, - 
"v2.ListRestrictionQueryRoles": false, - "v2.ListUserRestrictionQueries": false, - "v2.RemoveRoleFromRestrictionQuery": false, - "v2.ReplaceRestrictionQuery": false, - "v2.UpdateRestrictionQuery": false, - "v2.CreateMonitorUserTemplate": false, - "v2.DeleteMonitorUserTemplate": false, - "v2.GetMonitorUserTemplate": false, - "v2.ListMonitorUserTemplates": false, - "v2.UpdateMonitorUserTemplate": false, - "v2.ValidateExistingMonitorUserTemplate": false, - "v2.ValidateMonitorUserTemplate": false, - "v2.BulkUpdateOrgGroupMemberships": false, - "v2.CreateOrgGroup": false, - "v2.CreateOrgGroupPolicy": false, - "v2.CreateOrgGroupPolicyOverride": false, - "v2.DeleteOrgGroup": false, - "v2.DeleteOrgGroupPolicy": false, - "v2.DeleteOrgGroupPolicyOverride": false, - "v2.GetOrgGroup": false, - "v2.GetOrgGroupMembership": false, - "v2.ListOrgGroupMemberships": false, - "v2.ListOrgGroupPolicies": false, - "v2.ListOrgGroupPolicyConfigs": false, - "v2.ListOrgGroupPolicyOverrides": false, - "v2.ListOrgGroups": false, - "v2.UpdateOrgGroup": false, - "v2.UpdateOrgGroupMembership": false, - "v2.UpdateOrgGroupPolicy": false, - "v2.UpdateOrgGroupPolicyOverride": false, - "v2.ListRoleTemplates": false, - "v2.CreateConnection": false, - "v2.DeleteConnection": false, - "v2.GetAccountFacetInfo": false, - "v2.GetMapping": false, - "v2.GetUserFacetInfo": false, - "v2.ListConnections": false, - "v2.QueryAccounts": false, - "v2.QueryEventFilteredUsers": false, - "v2.QueryUsers": false, - "v2.UpdateConnection": false, - "v2.CreateScorecardOutcomesBatch": false, - "v2.ListEntityRiskScores": false, - "v2.CreateIncidentService": false, - "v2.DeleteIncidentService": false, - "v2.GetIncidentService": false, - "v2.ListIncidentServices": false, - "v2.UpdateIncidentService": false, - "v2.CreateSLOReportJob": false, - "v2.GetSLOReport": false, - "v2.GetSLOReportJobStatus": false, - "v2.GetSloStatus": false, - "v2.GetSPARecommendations": false, - "v2.GetSPARecommendationsWithShard": false, - 
"v2.CreateCustomRule": false, - "v2.CreateCustomRuleRevision": false, - "v2.CreateSCAResolveVulnerableSymbols": false, - "v2.CreateSCAResult": false, - "v2.DeleteCustomRule": false, - "v2.DeleteCustomRuleset": false, - "v2.GetCustomRule": false, - "v2.GetCustomRuleRevision": false, - "v2.GetCustomRuleset": false, - "v2.ListCustomRuleRevisions": false, - "v2.RevertCustomRuleRevision": false, - "v2.UpdateCustomRuleset": false, - "v2.AddMemberTeam": false, - "v2.ListMemberTeams": false, - "v2.RemoveMemberTeam": false, - "v2.CreateIncidentTeam": false, - "v2.DeleteIncidentTeam": false, - "v2.GetIncidentTeam": false, - "v2.ListIncidentTeams": false, - "v2.UpdateIncidentTeam": false, + "v2.CancelFleetDeployment": false, + "v2.CreateFleetDeploymentConfigure": false, + "v2.CreateFleetDeploymentUpgrade": false, + "v2.CreateFleetSchedule": false, + "v2.DeleteFleetSchedule": false, + "v2.GetFleetAgentInfo": false, + "v2.GetFleetDeployment": false, + "v2.GetFleetSchedule": false, + "v2.ListFleetAgents": false, + "v2.ListFleetAgentTracers": false, + "v2.ListFleetAgentVersions": false, + "v2.ListFleetClusters": false, + "v2.ListFleetDeployments": false, + "v2.ListFleetInstrumentedPods": false, + "v2.ListFleetSchedules": false, + "v2.ListFleetTracers": false, + "v2.TriggerFleetSchedule": false, + "v2.UpdateFleetSchedule": false, + "v2.CreateOpenAPI": false, + "v2.DeleteOpenAPI": false, + "v2.GetOpenAPI": false, + "v2.ListAPIs": false, + "v2.UpdateOpenAPI": false, + "v2.GetInvestigation": false, + "v2.ListInvestigations": false, + "v2.TriggerInvestigation": false, + "v2.CreateCaseJiraIssue": false, + "v2.CreateCaseNotebook": false, + "v2.CreateCaseServiceNowTicket": false, + "v2.LinkIncident": false, + "v2.LinkJiraIssueToCase": false, + "v2.MoveCaseToProject": false, + "v2.UnlinkJiraIssue": false, + "v2.CreateChangeRequest": false, + "v2.CreateChangeRequestBranch": false, + "v2.DeleteChangeRequestDecision": false, + "v2.GetChangeRequest": false, + "v2.UpdateChangeRequest": false, 
+ "v2.UpdateChangeRequestDecision": false, + "v2.DeleteTestOptimizationServiceSettings": false, + "v2.GetFlakyTestsManagementPolicies": false, + "v2.GetTestOptimizationServiceSettings": false, + "v2.SearchFlakyTests": false, + "v2.UpdateFlakyTests": false, + "v2.UpdateFlakyTestsManagementPolicies": false, + "v2.UpdateTestOptimizationServiceSettings": false, + "v2.CreateAWSCloudAuthPersonaMapping": false, + "v2.DeleteAWSCloudAuthPersonaMapping": false, + "v2.GetAWSCloudAuthPersonaMapping": false, + "v2.ListAWSCloudAuthPersonaMappings": false, + "v2.ActivateContentPack": false, + "v2.BulkExportSecurityMonitoringTerraformResources": false, + "v2.CancelThreatHuntingJob": false, + "v2.ConvertJobResultToSignal": false, + "v2.ConvertSecurityMonitoringTerraformResource": false, + "v2.DeactivateContentPack": false, + "v2.DeleteThreatHuntingJob": false, + "v2.ExportSecurityMonitoringTerraformResource": false, + "v2.GetContentPacksStates": false, + "v2.GetFinding": false, + "v2.GetIndicatorOfCompromise": false, + "v2.GetRuleVersionHistory": false, + "v2.GetSecretsRules": false, + "v2.GetSecurityMonitoringHistsignal": false, + "v2.GetSecurityMonitoringHistsignalsByJobId": false, + "v2.GetThreatHuntingJob": false, + "v2.ListFindings": false, + "v2.ListIndicatorsOfCompromise": false, + "v2.ListMultipleRulesets": false, + "v2.ListScannedAssetsMetadata": false, + "v2.ListSecurityMonitoringHistsignals": false, + "v2.ListThreatHuntingJobs": false, + "v2.ListVulnerabilities": false, + "v2.ListVulnerableAssets": false, + "v2.MuteFindings": false, + "v2.RunThreatHuntingJob": false, + "v2.SearchSecurityMonitoringHistsignals": false, + "v2.GetCodeCoverageBranchSummary": false, + "v2.GetCodeCoverageCommitSummary": false, + "v2.CreateDashboardSecureEmbed": false, + "v2.DeleteDashboardSecureEmbed": false, + "v2.GetDashboardSecureEmbed": false, + "v2.UpdateDashboardSecureEmbed": false, + "v2.CreateDataset": false, + "v2.DeleteDataset": false, + "v2.GetAllDatasets": false, + "v2.GetDataset": 
false, + "v2.UpdateDataset": false, + "v2.CancelDataDeletionRequest": false, + "v2.CreateDataDeletionRequest": false, + "v2.GetDataDeletionRequests": false, + "v2.CreateDeploymentGate": false, + "v2.CreateDeploymentRule": false, + "v2.DeleteDeploymentGate": false, + "v2.DeleteDeploymentRule": false, + "v2.GetDeploymentGate": false, + "v2.GetDeploymentGateRules": false, + "v2.GetDeploymentGatesEvaluationResult": false, + "v2.GetDeploymentRule": false, + "v2.ListDeploymentGates": false, + "v2.TriggerDeploymentGatesEvaluation": false, + "v2.UpdateDeploymentGate": false, + "v2.UpdateDeploymentRule": false, + "v2.CreateHamrOrgConnection": false, + "v2.GetHamrOrgConnection": false, + "v2.CreateGlobalIncidentHandle": false, + "v2.CreateIncident": false, + "v2.CreateIncidentAttachment": false, + "v2.CreateIncidentIntegration": false, + "v2.CreateIncidentNotificationRule": false, + "v2.CreateIncidentNotificationTemplate": false, + "v2.CreateIncidentPostmortemAttachment": false, + "v2.CreateIncidentPostmortemTemplate": false, + "v2.CreateIncidentTodo": false, + "v2.CreateIncidentType": false, + "v2.CreateIncidentUserDefinedField": false, + "v2.DeleteGlobalIncidentHandle": false, + "v2.DeleteIncident": false, + "v2.DeleteIncidentAttachment": false, + "v2.DeleteIncidentIntegration": false, + "v2.DeleteIncidentNotificationRule": false, + "v2.DeleteIncidentNotificationTemplate": false, + "v2.DeleteIncidentPostmortemTemplate": false, + "v2.DeleteIncidentTodo": false, + "v2.DeleteIncidentType": false, + "v2.DeleteIncidentUserDefinedField": false, + "v2.GetGlobalIncidentSettings": false, + "v2.GetIncident": false, + "v2.GetIncidentIntegration": false, + "v2.GetIncidentNotificationRule": false, + "v2.GetIncidentNotificationTemplate": false, + "v2.GetIncidentPostmortemTemplate": false, + "v2.GetIncidentTodo": false, + "v2.GetIncidentType": false, + "v2.GetIncidentUserDefinedField": false, + "v2.ImportIncident": false, + "v2.ListGlobalIncidentHandles": false, + 
"v2.ListIncidentAttachments": false, + "v2.ListIncidentIntegrations": false, + "v2.ListIncidentNotificationRules": false, + "v2.ListIncidentNotificationTemplates": false, + "v2.ListIncidentPostmortemTemplates": false, + "v2.ListIncidents": false, + "v2.ListIncidentTodos": false, + "v2.ListIncidentTypes": false, + "v2.ListIncidentUserDefinedFields": false, + "v2.SearchIncidents": false, + "v2.UpdateGlobalIncidentHandle": false, + "v2.UpdateGlobalIncidentSettings": false, + "v2.UpdateIncident": false, + "v2.UpdateIncidentAttachment": false, + "v2.UpdateIncidentIntegration": false, + "v2.UpdateIncidentNotificationRule": false, + "v2.UpdateIncidentNotificationTemplate": false, + "v2.UpdateIncidentPostmortemTemplate": false, + "v2.UpdateIncidentTodo": false, + "v2.UpdateIncidentType": false, + "v2.UpdateIncidentUserDefinedField": false, + "v2.CreateAWSAccountCCMConfig": false, + "v2.DeleteAWSAccountCCMConfig": false, + "v2.GetAWSAccountCCMConfig": false, + "v2.UpdateAWSAccountCCMConfig": false, + "v2.CreateJiraIssueTemplate": false, + "v2.DeleteJiraAccount": false, + "v2.DeleteJiraIssueTemplate": false, + "v2.GetJiraIssueTemplate": false, + "v2.ListJiraAccounts": false, + "v2.ListJiraIssueTemplates": false, + "v2.UpdateJiraIssueTemplate": false, + "v2.CreateTenancyConfig": false, + "v2.GetTenancyConfigs": false, + "v2.CreateLLMObsAnnotationQueue": false, + "v2.CreateLLMObsAnnotationQueueInteractions": false, + "v2.CreateLLMObsDataset": false, + "v2.CreateLLMObsDatasetRecords": false, + "v2.CreateLLMObsExperiment": false, + "v2.CreateLLMObsExperimentEvents": false, + "v2.CreateLLMObsProject": false, + "v2.DeleteLLMObsAnnotationQueue": false, + "v2.DeleteLLMObsAnnotationQueueInteractions": false, + "v2.DeleteLLMObsDatasetRecords": false, + "v2.DeleteLLMObsDatasets": false, + "v2.DeleteLLMObsExperiments": false, + "v2.DeleteLLMObsProjects": false, + "v2.GetLLMObsAnnotatedInteractions": false, + "v2.ListLLMObsAnnotationQueues": false, + "v2.ListLLMObsDatasetRecords": false, 
+ "v2.ListLLMObsDatasets": false, + "v2.ListLLMObsExperiments": false, + "v2.ListLLMObsProjects": false, + "v2.UpdateLLMObsAnnotationQueue": false, + "v2.UpdateLLMObsDataset": false, + "v2.UpdateLLMObsDatasetRecords": false, + "v2.UpdateLLMObsExperiment": false, + "v2.UpdateLLMObsProject": false, + "v2.AddRoleToRestrictionQuery": false, + "v2.CreateRestrictionQuery": false, + "v2.DeleteRestrictionQuery": false, + "v2.GetRestrictionQuery": false, + "v2.GetRoleRestrictionQuery": false, + "v2.ListRestrictionQueries": false, + "v2.ListRestrictionQueryRoles": false, + "v2.ListUserRestrictionQueries": false, + "v2.RemoveRoleFromRestrictionQuery": false, + "v2.ReplaceRestrictionQuery": false, + "v2.UpdateRestrictionQuery": false, + "v2.CreateMonitorUserTemplate": false, + "v2.DeleteMonitorUserTemplate": false, + "v2.GetMonitorUserTemplate": false, + "v2.ListMonitorUserTemplates": false, + "v2.UpdateMonitorUserTemplate": false, + "v2.ValidateExistingMonitorUserTemplate": false, + "v2.ValidateMonitorUserTemplate": false, + "v2.BulkUpdateOrgGroupMemberships": false, + "v2.CreateOrgGroup": false, + "v2.CreateOrgGroupPolicy": false, + "v2.CreateOrgGroupPolicyOverride": false, + "v2.DeleteOrgGroup": false, + "v2.DeleteOrgGroupPolicy": false, + "v2.DeleteOrgGroupPolicyOverride": false, + "v2.GetOrgGroup": false, + "v2.GetOrgGroupMembership": false, + "v2.ListOrgGroupMemberships": false, + "v2.ListOrgGroupPolicies": false, + "v2.ListOrgGroupPolicyConfigs": false, + "v2.ListOrgGroupPolicyOverrides": false, + "v2.ListOrgGroups": false, + "v2.UpdateOrgGroup": false, + "v2.UpdateOrgGroupMembership": false, + "v2.UpdateOrgGroupPolicy": false, + "v2.UpdateOrgGroupPolicyOverride": false, + "v2.ListRoleTemplates": false, + "v2.CreateConnection": false, + "v2.DeleteConnection": false, + "v2.GetAccountFacetInfo": false, + "v2.GetMapping": false, + "v2.GetUserFacetInfo": false, + "v2.ListConnections": false, + "v2.QueryAccounts": false, + "v2.QueryEventFilteredUsers": false, + 
"v2.QueryUsers": false, + "v2.UpdateConnection": false, + "v2.CreateScorecardOutcomesBatch": false, + "v2.ListEntityRiskScores": false, + "v2.CreateIncidentService": false, + "v2.DeleteIncidentService": false, + "v2.GetIncidentService": false, + "v2.ListIncidentServices": false, + "v2.UpdateIncidentService": false, + "v2.CreateSLOReportJob": false, + "v2.GetSLOReport": false, + "v2.GetSLOReportJobStatus": false, + "v2.GetSloStatus": false, + "v2.GetSPARecommendations": false, + "v2.GetSPARecommendationsWithShard": false, + "v2.CreateCustomRule": false, + "v2.CreateCustomRuleRevision": false, + "v2.CreateSCAResolveVulnerableSymbols": false, + "v2.CreateSCAResult": false, + "v2.DeleteCustomRule": false, + "v2.DeleteCustomRuleset": false, + "v2.GetCustomRule": false, + "v2.GetCustomRuleRevision": false, + "v2.GetCustomRuleset": false, + "v2.ListCustomRuleRevisions": false, + "v2.RevertCustomRuleRevision": false, + "v2.UpdateCustomRuleset": false, + "v2.AddMemberTeam": false, + "v2.ListMemberTeams": false, + "v2.RemoveMemberTeam": false, + "v2.CreateIncidentTeam": false, + "v2.DeleteIncidentTeam": false, + "v2.GetIncidentTeam": false, + "v2.ListIncidentTeams": false, + "v2.UpdateIncidentTeam": false, }, RetryConfiguration: RetryConfiguration{ EnableRetry: false, diff --git a/api/datadogV2/api_security_monitoring.go b/api/datadogV2/api_security_monitoring.go index 84b3d8590b3..bdb15f2eeb8 100644 --- a/api/datadogV2/api_security_monitoring.go +++ b/api/datadogV2/api_security_monitoring.go @@ -525,6 +525,91 @@ func (a *SecurityMonitoringApi) BulkExportSecurityMonitoringRules(ctx _context.C return localVarReturnValue, localVarHTTPResponse, nil } +// BulkExportSecurityMonitoringTerraformResources Export security monitoring resources to Terraform. +// Export multiple security monitoring resources to Terraform, packaged as a zip archive. +// The `resource_type` path parameter specifies the type of resources to export +// and must be one of `suppressions` or `critical_assets`. 
+// A maximum of 1000 resources can be exported in a single request. +func (a *SecurityMonitoringApi) BulkExportSecurityMonitoringTerraformResources(ctx _context.Context, resourceType SecurityMonitoringTerraformResourceType, body SecurityMonitoringTerraformBulkExportRequest) (_io.Reader, *_nethttp.Response, error) { + var ( + localVarHTTPMethod = _nethttp.MethodPost + localVarPostBody interface{} + localVarReturnValue _io.Reader + ) + + operationId := "v2.BulkExportSecurityMonitoringTerraformResources" + isOperationEnabled := a.Client.Cfg.IsUnstableOperationEnabled(operationId) + if !isOperationEnabled { + return localVarReturnValue, nil, datadog.GenericOpenAPIError{ErrorMessage: _fmt.Sprintf("Unstable operation '%s' is disabled", operationId)} + } + if isOperationEnabled && a.Client.Cfg.Debug { + _log.Printf("WARNING: Using unstable operation '%s'", operationId) + } + + localBasePath, err := a.Client.Cfg.ServerURLWithContext(ctx, "v2.SecurityMonitoringApi.BulkExportSecurityMonitoringTerraformResources") + if err != nil { + return localVarReturnValue, nil, datadog.GenericOpenAPIError{ErrorMessage: err.Error()} + } + + localVarPath := localBasePath + "/api/v2/security_monitoring/terraform/{resource_type}/bulk" + localVarPath = datadog.ReplacePathParameter(localVarPath, "{resource_type}", _neturl.PathEscape(datadog.ParameterToString(resourceType, ""))) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := _neturl.Values{} + localVarFormParams := _neturl.Values{} + localVarHeaderParams["Content-Type"] = "application/json" + localVarHeaderParams["Accept"] = "application/json" + + // body params + localVarPostBody = &body + if a.Client.Cfg.DelegatedTokenConfig != nil { + err = datadog.UseDelegatedTokenAuth(ctx, &localVarHeaderParams, a.Client.Cfg.DelegatedTokenConfig) + if err != nil { + return localVarReturnValue, nil, err + } + } else { + datadog.SetAuthKeys( + ctx, + &localVarHeaderParams, + [2]string{"apiKeyAuth", "DD-API-KEY"}, + 
[2]string{"appKeyAuth", "DD-APPLICATION-KEY"}, + ) + } + req, err := a.Client.PrepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, nil) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.Client.CallAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + + localVarBody, err := datadog.ReadBody(localVarHTTPResponse) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + newErr := datadog.GenericOpenAPIError{ + ErrorBody: localVarBody, + ErrorMessage: localVarHTTPResponse.Status, + } + if localVarHTTPResponse.StatusCode == 400 || localVarHTTPResponse.StatusCode == 403 || localVarHTTPResponse.StatusCode == 404 || localVarHTTPResponse.StatusCode == 429 { + var v APIErrorResponse + err = a.Client.Decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.ErrorModel = v + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + localVarReturnValue = localVarHTTPResponse.Body + + return localVarReturnValue, localVarHTTPResponse, nil +} + // CancelThreatHuntingJob Cancel a threat hunting job. // Cancel a threat hunting job. func (a *SecurityMonitoringApi) CancelThreatHuntingJob(ctx _context.Context, jobId string) (*_nethttp.Response, error) { @@ -851,6 +936,98 @@ func (a *SecurityMonitoringApi) ConvertSecurityMonitoringRuleFromJSONToTerraform return localVarReturnValue, localVarHTTPResponse, nil } +// ConvertSecurityMonitoringTerraformResource Convert security monitoring resource to Terraform. +// Convert a security monitoring resource that doesn't (yet) exist from JSON to Terraform. 
+// The `resource_type` path parameter specifies the type of resource to convert +// and must be one of `suppressions` or `critical_assets`. +func (a *SecurityMonitoringApi) ConvertSecurityMonitoringTerraformResource(ctx _context.Context, resourceType SecurityMonitoringTerraformResourceType, body SecurityMonitoringTerraformConvertRequest) (SecurityMonitoringTerraformExportResponse, *_nethttp.Response, error) { + var ( + localVarHTTPMethod = _nethttp.MethodPost + localVarPostBody interface{} + localVarReturnValue SecurityMonitoringTerraformExportResponse + ) + + operationId := "v2.ConvertSecurityMonitoringTerraformResource" + isOperationEnabled := a.Client.Cfg.IsUnstableOperationEnabled(operationId) + if !isOperationEnabled { + return localVarReturnValue, nil, datadog.GenericOpenAPIError{ErrorMessage: _fmt.Sprintf("Unstable operation '%s' is disabled", operationId)} + } + if isOperationEnabled && a.Client.Cfg.Debug { + _log.Printf("WARNING: Using unstable operation '%s'", operationId) + } + + localBasePath, err := a.Client.Cfg.ServerURLWithContext(ctx, "v2.SecurityMonitoringApi.ConvertSecurityMonitoringTerraformResource") + if err != nil { + return localVarReturnValue, nil, datadog.GenericOpenAPIError{ErrorMessage: err.Error()} + } + + localVarPath := localBasePath + "/api/v2/security_monitoring/terraform/{resource_type}/convert" + localVarPath = datadog.ReplacePathParameter(localVarPath, "{resource_type}", _neturl.PathEscape(datadog.ParameterToString(resourceType, ""))) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := _neturl.Values{} + localVarFormParams := _neturl.Values{} + localVarHeaderParams["Content-Type"] = "application/json" + localVarHeaderParams["Accept"] = "application/json" + + // body params + localVarPostBody = &body + if a.Client.Cfg.DelegatedTokenConfig != nil { + err = datadog.UseDelegatedTokenAuth(ctx, &localVarHeaderParams, a.Client.Cfg.DelegatedTokenConfig) + if err != nil { + return localVarReturnValue, nil, err + } 
+ } else { + datadog.SetAuthKeys( + ctx, + &localVarHeaderParams, + [2]string{"apiKeyAuth", "DD-API-KEY"}, + [2]string{"appKeyAuth", "DD-APPLICATION-KEY"}, + ) + } + req, err := a.Client.PrepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, nil) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.Client.CallAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := datadog.ReadBody(localVarHTTPResponse) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := datadog.GenericOpenAPIError{ + ErrorBody: localVarBody, + ErrorMessage: localVarHTTPResponse.Status, + } + if localVarHTTPResponse.StatusCode == 400 || localVarHTTPResponse.StatusCode == 403 || localVarHTTPResponse.StatusCode == 429 { + var v APIErrorResponse + err = a.Client.Decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.ErrorModel = v + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.Client.Decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := datadog.GenericOpenAPIError{ + ErrorBody: localVarBody, + ErrorMessage: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + // CreateCases Create cases for security findings. // Create cases for security findings. // You can create up to 50 cases per request and associate up to 50 security findings per case. Security findings that are already attached to another case will be detached from their previous case and attached to the newly created case. 
@@ -2541,6 +2718,96 @@ func (a *SecurityMonitoringApi) EditSecurityMonitoringSignalState(ctx _context.C return localVarReturnValue, localVarHTTPResponse, nil } +// ExportSecurityMonitoringTerraformResource Export security monitoring resource to Terraform. +// Export a security monitoring resource to a Terraform configuration. +// The `resource_type` path parameter specifies the type of resource to export +// and must be one of `suppressions` or `critical_assets`. +func (a *SecurityMonitoringApi) ExportSecurityMonitoringTerraformResource(ctx _context.Context, resourceType SecurityMonitoringTerraformResourceType, resourceId string) (SecurityMonitoringTerraformExportResponse, *_nethttp.Response, error) { + var ( + localVarHTTPMethod = _nethttp.MethodGet + localVarPostBody interface{} + localVarReturnValue SecurityMonitoringTerraformExportResponse + ) + + operationId := "v2.ExportSecurityMonitoringTerraformResource" + isOperationEnabled := a.Client.Cfg.IsUnstableOperationEnabled(operationId) + if !isOperationEnabled { + return localVarReturnValue, nil, datadog.GenericOpenAPIError{ErrorMessage: _fmt.Sprintf("Unstable operation '%s' is disabled", operationId)} + } + if isOperationEnabled && a.Client.Cfg.Debug { + _log.Printf("WARNING: Using unstable operation '%s'", operationId) + } + + localBasePath, err := a.Client.Cfg.ServerURLWithContext(ctx, "v2.SecurityMonitoringApi.ExportSecurityMonitoringTerraformResource") + if err != nil { + return localVarReturnValue, nil, datadog.GenericOpenAPIError{ErrorMessage: err.Error()} + } + + localVarPath := localBasePath + "/api/v2/security_monitoring/terraform/{resource_type}/{resource_id}" + localVarPath = datadog.ReplacePathParameter(localVarPath, "{resource_type}", _neturl.PathEscape(datadog.ParameterToString(resourceType, ""))) + localVarPath = datadog.ReplacePathParameter(localVarPath, "{resource_id}", _neturl.PathEscape(datadog.ParameterToString(resourceId, ""))) + + localVarHeaderParams := make(map[string]string) + 
localVarQueryParams := _neturl.Values{} + localVarFormParams := _neturl.Values{} + localVarHeaderParams["Accept"] = "application/json" + + if a.Client.Cfg.DelegatedTokenConfig != nil { + err = datadog.UseDelegatedTokenAuth(ctx, &localVarHeaderParams, a.Client.Cfg.DelegatedTokenConfig) + if err != nil { + return localVarReturnValue, nil, err + } + } else { + datadog.SetAuthKeys( + ctx, + &localVarHeaderParams, + [2]string{"apiKeyAuth", "DD-API-KEY"}, + [2]string{"appKeyAuth", "DD-APPLICATION-KEY"}, + ) + } + req, err := a.Client.PrepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, nil) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.Client.CallAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := datadog.ReadBody(localVarHTTPResponse) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := datadog.GenericOpenAPIError{ + ErrorBody: localVarBody, + ErrorMessage: localVarHTTPResponse.Status, + } + if localVarHTTPResponse.StatusCode == 403 || localVarHTTPResponse.StatusCode == 404 || localVarHTTPResponse.StatusCode == 429 { + var v APIErrorResponse + err = a.Client.Decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.ErrorModel = v + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.Client.Decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := datadog.GenericOpenAPIError{ + ErrorBody: localVarBody, + ErrorMessage: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + // 
GetContentPacksStates Get content pack states. // Get the activation and configuration states for all security monitoring content packs. // This endpoint returns status information about each content pack including activation state, diff --git a/api/datadogV2/doc.go b/api/datadogV2/doc.go index 304e7f1c087..58cd917c2b0 100644 --- a/api/datadogV2/doc.go +++ b/api/datadogV2/doc.go @@ -734,10 +734,12 @@ // - [SecurityMonitoringApi.BulkEditSecurityMonitoringSignalsAssignee] // - [SecurityMonitoringApi.BulkEditSecurityMonitoringSignalsState] // - [SecurityMonitoringApi.BulkExportSecurityMonitoringRules] +// - [SecurityMonitoringApi.BulkExportSecurityMonitoringTerraformResources] // - [SecurityMonitoringApi.CancelThreatHuntingJob] // - [SecurityMonitoringApi.ConvertExistingSecurityMonitoringRule] // - [SecurityMonitoringApi.ConvertJobResultToSignal] // - [SecurityMonitoringApi.ConvertSecurityMonitoringRuleFromJSONToTerraform] +// - [SecurityMonitoringApi.ConvertSecurityMonitoringTerraformResource] // - [SecurityMonitoringApi.CreateCases] // - [SecurityMonitoringApi.CreateCustomFramework] // - [SecurityMonitoringApi.CreateJiraIssues] @@ -760,6 +762,7 @@ // - [SecurityMonitoringApi.EditSecurityMonitoringSignalAssignee] // - [SecurityMonitoringApi.EditSecurityMonitoringSignalIncidents] // - [SecurityMonitoringApi.EditSecurityMonitoringSignalState] +// - [SecurityMonitoringApi.ExportSecurityMonitoringTerraformResource] // - [SecurityMonitoringApi.GetContentPacksStates] // - [SecurityMonitoringApi.GetCriticalAssetsAffectingRule] // - [SecurityMonitoringApi.GetCustomFramework] diff --git a/api/datadogV2/model_security_monitoring_terraform_bulk_export_attributes.go b/api/datadogV2/model_security_monitoring_terraform_bulk_export_attributes.go new file mode 100644 index 00000000000..6f806ce42db --- /dev/null +++ b/api/datadogV2/model_security_monitoring_terraform_bulk_export_attributes.go @@ -0,0 +1,101 @@ +// Unless explicitly stated otherwise all files in this repository are 
licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SecurityMonitoringTerraformBulkExportAttributes Attributes for the bulk export request. +type SecurityMonitoringTerraformBulkExportAttributes struct { + // The list of resource IDs to export. Maximum 1000 items. + ResourceIds []string `json:"resource_ids"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewSecurityMonitoringTerraformBulkExportAttributes instantiates a new SecurityMonitoringTerraformBulkExportAttributes object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewSecurityMonitoringTerraformBulkExportAttributes(resourceIds []string) *SecurityMonitoringTerraformBulkExportAttributes { + this := SecurityMonitoringTerraformBulkExportAttributes{} + this.ResourceIds = resourceIds + return &this +} + +// NewSecurityMonitoringTerraformBulkExportAttributesWithDefaults instantiates a new SecurityMonitoringTerraformBulkExportAttributes object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. +func NewSecurityMonitoringTerraformBulkExportAttributesWithDefaults() *SecurityMonitoringTerraformBulkExportAttributes { + this := SecurityMonitoringTerraformBulkExportAttributes{} + return &this +} + +// GetResourceIds returns the ResourceIds field value. 
+func (o *SecurityMonitoringTerraformBulkExportAttributes) GetResourceIds() []string { + if o == nil { + var ret []string + return ret + } + return o.ResourceIds +} + +// GetResourceIdsOk returns a tuple with the ResourceIds field value +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformBulkExportAttributes) GetResourceIdsOk() (*[]string, bool) { + if o == nil { + return nil, false + } + return &o.ResourceIds, true +} + +// SetResourceIds sets field value. +func (o *SecurityMonitoringTerraformBulkExportAttributes) SetResourceIds(v []string) { + o.ResourceIds = v +} + +// MarshalJSON serializes the struct using spec logic. +func (o SecurityMonitoringTerraformBulkExportAttributes) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + toSerialize["resource_ids"] = o.ResourceIds + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. 
+func (o *SecurityMonitoringTerraformBulkExportAttributes) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + ResourceIds *[]string `json:"resource_ids"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.ResourceIds == nil { + return fmt.Errorf("required field resource_ids missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"resource_ids"}) + } else { + return err + } + o.ResourceIds = *all.ResourceIds + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + return nil +} diff --git a/api/datadogV2/model_security_monitoring_terraform_bulk_export_data.go b/api/datadogV2/model_security_monitoring_terraform_bulk_export_data.go new file mode 100644 index 00000000000..12688cd31e6 --- /dev/null +++ b/api/datadogV2/model_security_monitoring_terraform_bulk_export_data.go @@ -0,0 +1,142 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SecurityMonitoringTerraformBulkExportData The bulk export request data object. +type SecurityMonitoringTerraformBulkExportData struct { + // Attributes for the bulk export request. + Attributes SecurityMonitoringTerraformBulkExportAttributes `json:"attributes"` + // The JSON:API type. Always `bulk_export_resources`. 
+ Type string `json:"type"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewSecurityMonitoringTerraformBulkExportData instantiates a new SecurityMonitoringTerraformBulkExportData object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewSecurityMonitoringTerraformBulkExportData(attributes SecurityMonitoringTerraformBulkExportAttributes, typeVar string) *SecurityMonitoringTerraformBulkExportData { + this := SecurityMonitoringTerraformBulkExportData{} + this.Attributes = attributes + this.Type = typeVar + return &this +} + +// NewSecurityMonitoringTerraformBulkExportDataWithDefaults instantiates a new SecurityMonitoringTerraformBulkExportData object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. +func NewSecurityMonitoringTerraformBulkExportDataWithDefaults() *SecurityMonitoringTerraformBulkExportData { + this := SecurityMonitoringTerraformBulkExportData{} + return &this +} + +// GetAttributes returns the Attributes field value. +func (o *SecurityMonitoringTerraformBulkExportData) GetAttributes() SecurityMonitoringTerraformBulkExportAttributes { + if o == nil { + var ret SecurityMonitoringTerraformBulkExportAttributes + return ret + } + return o.Attributes +} + +// GetAttributesOk returns a tuple with the Attributes field value +// and a boolean to check if the value has been set. 
+func (o *SecurityMonitoringTerraformBulkExportData) GetAttributesOk() (*SecurityMonitoringTerraformBulkExportAttributes, bool) { + if o == nil { + return nil, false + } + return &o.Attributes, true +} + +// SetAttributes sets field value. +func (o *SecurityMonitoringTerraformBulkExportData) SetAttributes(v SecurityMonitoringTerraformBulkExportAttributes) { + o.Attributes = v +} + +// GetType returns the Type field value. +func (o *SecurityMonitoringTerraformBulkExportData) GetType() string { + if o == nil { + var ret string + return ret + } + return o.Type +} + +// GetTypeOk returns a tuple with the Type field value +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformBulkExportData) GetTypeOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Type, true +} + +// SetType sets field value. +func (o *SecurityMonitoringTerraformBulkExportData) SetType(v string) { + o.Type = v +} + +// MarshalJSON serializes the struct using spec logic. +func (o SecurityMonitoringTerraformBulkExportData) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + toSerialize["attributes"] = o.Attributes + toSerialize["type"] = o.Type + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. 
+func (o *SecurityMonitoringTerraformBulkExportData) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + Attributes *SecurityMonitoringTerraformBulkExportAttributes `json:"attributes"` + Type *string `json:"type"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.Attributes == nil { + return fmt.Errorf("required field attributes missing") + } + if all.Type == nil { + return fmt.Errorf("required field type missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"attributes", "type"}) + } else { + return err + } + + hasInvalidField := false + if all.Attributes.UnparsedObject != nil && o.UnparsedObject == nil { + hasInvalidField = true + } + o.Attributes = *all.Attributes + o.Type = *all.Type + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_security_monitoring_terraform_bulk_export_request.go b/api/datadogV2/model_security_monitoring_terraform_bulk_export_request.go new file mode 100644 index 00000000000..6ec64915298 --- /dev/null +++ b/api/datadogV2/model_security_monitoring_terraform_bulk_export_request.go @@ -0,0 +1,110 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SecurityMonitoringTerraformBulkExportRequest Request body for bulk exporting security monitoring resources to Terraform. 
+type SecurityMonitoringTerraformBulkExportRequest struct { + // The bulk export request data object. + Data SecurityMonitoringTerraformBulkExportData `json:"data"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewSecurityMonitoringTerraformBulkExportRequest instantiates a new SecurityMonitoringTerraformBulkExportRequest object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewSecurityMonitoringTerraformBulkExportRequest(data SecurityMonitoringTerraformBulkExportData) *SecurityMonitoringTerraformBulkExportRequest { + this := SecurityMonitoringTerraformBulkExportRequest{} + this.Data = data + return &this +} + +// NewSecurityMonitoringTerraformBulkExportRequestWithDefaults instantiates a new SecurityMonitoringTerraformBulkExportRequest object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. +func NewSecurityMonitoringTerraformBulkExportRequestWithDefaults() *SecurityMonitoringTerraformBulkExportRequest { + this := SecurityMonitoringTerraformBulkExportRequest{} + return &this +} + +// GetData returns the Data field value. +func (o *SecurityMonitoringTerraformBulkExportRequest) GetData() SecurityMonitoringTerraformBulkExportData { + if o == nil { + var ret SecurityMonitoringTerraformBulkExportData + return ret + } + return o.Data +} + +// GetDataOk returns a tuple with the Data field value +// and a boolean to check if the value has been set. 
+func (o *SecurityMonitoringTerraformBulkExportRequest) GetDataOk() (*SecurityMonitoringTerraformBulkExportData, bool) { + if o == nil { + return nil, false + } + return &o.Data, true +} + +// SetData sets field value. +func (o *SecurityMonitoringTerraformBulkExportRequest) SetData(v SecurityMonitoringTerraformBulkExportData) { + o.Data = v +} + +// MarshalJSON serializes the struct using spec logic. +func (o SecurityMonitoringTerraformBulkExportRequest) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + toSerialize["data"] = o.Data + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. +func (o *SecurityMonitoringTerraformBulkExportRequest) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + Data *SecurityMonitoringTerraformBulkExportData `json:"data"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.Data == nil { + return fmt.Errorf("required field data missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"data"}) + } else { + return err + } + + hasInvalidField := false + if all.Data.UnparsedObject != nil && o.UnparsedObject == nil { + hasInvalidField = true + } + o.Data = *all.Data + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_security_monitoring_terraform_convert_attributes.go b/api/datadogV2/model_security_monitoring_terraform_convert_attributes.go new file mode 100644 index 00000000000..733fb24f670 --- /dev/null +++ 
b/api/datadogV2/model_security_monitoring_terraform_convert_attributes.go @@ -0,0 +1,101 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SecurityMonitoringTerraformConvertAttributes Attributes for the convert request. +type SecurityMonitoringTerraformConvertAttributes struct { + // The resource attributes as a JSON object, matching the structure returned by the corresponding Datadog API (for example, the attributes of a suppression rule). + ResourceJson map[string]interface{} `json:"resource_json"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewSecurityMonitoringTerraformConvertAttributes instantiates a new SecurityMonitoringTerraformConvertAttributes object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewSecurityMonitoringTerraformConvertAttributes(resourceJson map[string]interface{}) *SecurityMonitoringTerraformConvertAttributes { + this := SecurityMonitoringTerraformConvertAttributes{} + this.ResourceJson = resourceJson + return &this +} + +// NewSecurityMonitoringTerraformConvertAttributesWithDefaults instantiates a new SecurityMonitoringTerraformConvertAttributes object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. 
+func NewSecurityMonitoringTerraformConvertAttributesWithDefaults() *SecurityMonitoringTerraformConvertAttributes { + this := SecurityMonitoringTerraformConvertAttributes{} + return &this +} + +// GetResourceJson returns the ResourceJson field value. +func (o *SecurityMonitoringTerraformConvertAttributes) GetResourceJson() map[string]interface{} { + if o == nil { + var ret map[string]interface{} + return ret + } + return o.ResourceJson +} + +// GetResourceJsonOk returns a tuple with the ResourceJson field value +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformConvertAttributes) GetResourceJsonOk() (*map[string]interface{}, bool) { + if o == nil { + return nil, false + } + return &o.ResourceJson, true +} + +// SetResourceJson sets field value. +func (o *SecurityMonitoringTerraformConvertAttributes) SetResourceJson(v map[string]interface{}) { + o.ResourceJson = v +} + +// MarshalJSON serializes the struct using spec logic. +func (o SecurityMonitoringTerraformConvertAttributes) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + toSerialize["resource_json"] = o.ResourceJson + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. 
+func (o *SecurityMonitoringTerraformConvertAttributes) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + ResourceJson *map[string]interface{} `json:"resource_json"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.ResourceJson == nil { + return fmt.Errorf("required field resource_json missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"resource_json"}) + } else { + return err + } + o.ResourceJson = *all.ResourceJson + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + return nil +} diff --git a/api/datadogV2/model_security_monitoring_terraform_convert_data.go b/api/datadogV2/model_security_monitoring_terraform_convert_data.go new file mode 100644 index 00000000000..88220077ed3 --- /dev/null +++ b/api/datadogV2/model_security_monitoring_terraform_convert_data.go @@ -0,0 +1,174 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SecurityMonitoringTerraformConvertData The convert request data object. +type SecurityMonitoringTerraformConvertData struct { + // Attributes for the convert request. + Attributes SecurityMonitoringTerraformConvertAttributes `json:"attributes"` + // The ID of the resource being converted. + Id string `json:"id"` + // The JSON:API type. Always `convert_resource`. 
+ Type string `json:"type"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewSecurityMonitoringTerraformConvertData instantiates a new SecurityMonitoringTerraformConvertData object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewSecurityMonitoringTerraformConvertData(attributes SecurityMonitoringTerraformConvertAttributes, id string, typeVar string) *SecurityMonitoringTerraformConvertData { + this := SecurityMonitoringTerraformConvertData{} + this.Attributes = attributes + this.Id = id + this.Type = typeVar + return &this +} + +// NewSecurityMonitoringTerraformConvertDataWithDefaults instantiates a new SecurityMonitoringTerraformConvertData object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. +func NewSecurityMonitoringTerraformConvertDataWithDefaults() *SecurityMonitoringTerraformConvertData { + this := SecurityMonitoringTerraformConvertData{} + return &this +} + +// GetAttributes returns the Attributes field value. +func (o *SecurityMonitoringTerraformConvertData) GetAttributes() SecurityMonitoringTerraformConvertAttributes { + if o == nil { + var ret SecurityMonitoringTerraformConvertAttributes + return ret + } + return o.Attributes +} + +// GetAttributesOk returns a tuple with the Attributes field value +// and a boolean to check if the value has been set. 
+func (o *SecurityMonitoringTerraformConvertData) GetAttributesOk() (*SecurityMonitoringTerraformConvertAttributes, bool) { + if o == nil { + return nil, false + } + return &o.Attributes, true +} + +// SetAttributes sets field value. +func (o *SecurityMonitoringTerraformConvertData) SetAttributes(v SecurityMonitoringTerraformConvertAttributes) { + o.Attributes = v +} + +// GetId returns the Id field value. +func (o *SecurityMonitoringTerraformConvertData) GetId() string { + if o == nil { + var ret string + return ret + } + return o.Id +} + +// GetIdOk returns a tuple with the Id field value +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformConvertData) GetIdOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Id, true +} + +// SetId sets field value. +func (o *SecurityMonitoringTerraformConvertData) SetId(v string) { + o.Id = v +} + +// GetType returns the Type field value. +func (o *SecurityMonitoringTerraformConvertData) GetType() string { + if o == nil { + var ret string + return ret + } + return o.Type +} + +// GetTypeOk returns a tuple with the Type field value +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformConvertData) GetTypeOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Type, true +} + +// SetType sets field value. +func (o *SecurityMonitoringTerraformConvertData) SetType(v string) { + o.Type = v +} + +// MarshalJSON serializes the struct using spec logic. 
+func (o SecurityMonitoringTerraformConvertData) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + toSerialize["attributes"] = o.Attributes + toSerialize["id"] = o.Id + toSerialize["type"] = o.Type + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. +func (o *SecurityMonitoringTerraformConvertData) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + Attributes *SecurityMonitoringTerraformConvertAttributes `json:"attributes"` + Id *string `json:"id"` + Type *string `json:"type"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.Attributes == nil { + return fmt.Errorf("required field attributes missing") + } + if all.Id == nil { + return fmt.Errorf("required field id missing") + } + if all.Type == nil { + return fmt.Errorf("required field type missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"attributes", "id", "type"}) + } else { + return err + } + + hasInvalidField := false + if all.Attributes.UnparsedObject != nil && o.UnparsedObject == nil { + hasInvalidField = true + } + o.Attributes = *all.Attributes + o.Id = *all.Id + o.Type = *all.Type + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_security_monitoring_terraform_convert_request.go b/api/datadogV2/model_security_monitoring_terraform_convert_request.go new file mode 100644 index 00000000000..5fb8b0c1c8a --- /dev/null +++ 
b/api/datadogV2/model_security_monitoring_terraform_convert_request.go @@ -0,0 +1,110 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SecurityMonitoringTerraformConvertRequest Request body for converting a security monitoring resource JSON to Terraform. +type SecurityMonitoringTerraformConvertRequest struct { + // The convert request data object. + Data SecurityMonitoringTerraformConvertData `json:"data"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewSecurityMonitoringTerraformConvertRequest instantiates a new SecurityMonitoringTerraformConvertRequest object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewSecurityMonitoringTerraformConvertRequest(data SecurityMonitoringTerraformConvertData) *SecurityMonitoringTerraformConvertRequest { + this := SecurityMonitoringTerraformConvertRequest{} + this.Data = data + return &this +} + +// NewSecurityMonitoringTerraformConvertRequestWithDefaults instantiates a new SecurityMonitoringTerraformConvertRequest object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. 
+func NewSecurityMonitoringTerraformConvertRequestWithDefaults() *SecurityMonitoringTerraformConvertRequest { + this := SecurityMonitoringTerraformConvertRequest{} + return &this +} + +// GetData returns the Data field value. +func (o *SecurityMonitoringTerraformConvertRequest) GetData() SecurityMonitoringTerraformConvertData { + if o == nil { + var ret SecurityMonitoringTerraformConvertData + return ret + } + return o.Data +} + +// GetDataOk returns a tuple with the Data field value +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformConvertRequest) GetDataOk() (*SecurityMonitoringTerraformConvertData, bool) { + if o == nil { + return nil, false + } + return &o.Data, true +} + +// SetData sets field value. +func (o *SecurityMonitoringTerraformConvertRequest) SetData(v SecurityMonitoringTerraformConvertData) { + o.Data = v +} + +// MarshalJSON serializes the struct using spec logic. +func (o SecurityMonitoringTerraformConvertRequest) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + toSerialize["data"] = o.Data + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. 
+func (o *SecurityMonitoringTerraformConvertRequest) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + Data *SecurityMonitoringTerraformConvertData `json:"data"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.Data == nil { + return fmt.Errorf("required field data missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"data"}) + } else { + return err + } + + hasInvalidField := false + if all.Data.UnparsedObject != nil && o.UnparsedObject == nil { + hasInvalidField = true + } + o.Data = *all.Data + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_security_monitoring_terraform_export_attributes.go b/api/datadogV2/model_security_monitoring_terraform_export_attributes.go new file mode 100644 index 00000000000..9be6adea59a --- /dev/null +++ b/api/datadogV2/model_security_monitoring_terraform_export_attributes.go @@ -0,0 +1,168 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SecurityMonitoringTerraformExportAttributes Attributes of the Terraform export response. +type SecurityMonitoringTerraformExportAttributes struct { + // The Terraform configuration for the resource. + Output *string `json:"output,omitempty"` + // The ID of the exported resource. + ResourceId string `json:"resource_id"` + // The Terraform resource type name. 
+ TypeName string `json:"type_name"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewSecurityMonitoringTerraformExportAttributes instantiates a new SecurityMonitoringTerraformExportAttributes object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewSecurityMonitoringTerraformExportAttributes(resourceId string, typeName string) *SecurityMonitoringTerraformExportAttributes { + this := SecurityMonitoringTerraformExportAttributes{} + this.ResourceId = resourceId + this.TypeName = typeName + return &this +} + +// NewSecurityMonitoringTerraformExportAttributesWithDefaults instantiates a new SecurityMonitoringTerraformExportAttributes object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. +func NewSecurityMonitoringTerraformExportAttributesWithDefaults() *SecurityMonitoringTerraformExportAttributes { + this := SecurityMonitoringTerraformExportAttributes{} + return &this +} + +// GetOutput returns the Output field value if set, zero value otherwise. +func (o *SecurityMonitoringTerraformExportAttributes) GetOutput() string { + if o == nil || o.Output == nil { + var ret string + return ret + } + return *o.Output +} + +// GetOutputOk returns a tuple with the Output field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformExportAttributes) GetOutputOk() (*string, bool) { + if o == nil || o.Output == nil { + return nil, false + } + return o.Output, true +} + +// HasOutput returns a boolean if a field has been set. 
+func (o *SecurityMonitoringTerraformExportAttributes) HasOutput() bool { + return o != nil && o.Output != nil +} + +// SetOutput gets a reference to the given string and assigns it to the Output field. +func (o *SecurityMonitoringTerraformExportAttributes) SetOutput(v string) { + o.Output = &v +} + +// GetResourceId returns the ResourceId field value. +func (o *SecurityMonitoringTerraformExportAttributes) GetResourceId() string { + if o == nil { + var ret string + return ret + } + return o.ResourceId +} + +// GetResourceIdOk returns a tuple with the ResourceId field value +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformExportAttributes) GetResourceIdOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.ResourceId, true +} + +// SetResourceId sets field value. +func (o *SecurityMonitoringTerraformExportAttributes) SetResourceId(v string) { + o.ResourceId = v +} + +// GetTypeName returns the TypeName field value. +func (o *SecurityMonitoringTerraformExportAttributes) GetTypeName() string { + if o == nil { + var ret string + return ret + } + return o.TypeName +} + +// GetTypeNameOk returns a tuple with the TypeName field value +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformExportAttributes) GetTypeNameOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.TypeName, true +} + +// SetTypeName sets field value. +func (o *SecurityMonitoringTerraformExportAttributes) SetTypeName(v string) { + o.TypeName = v +} + +// MarshalJSON serializes the struct using spec logic. 
+func (o SecurityMonitoringTerraformExportAttributes) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + if o.Output != nil { + toSerialize["output"] = o.Output + } + toSerialize["resource_id"] = o.ResourceId + toSerialize["type_name"] = o.TypeName + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. +func (o *SecurityMonitoringTerraformExportAttributes) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + Output *string `json:"output,omitempty"` + ResourceId *string `json:"resource_id"` + TypeName *string `json:"type_name"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.ResourceId == nil { + return fmt.Errorf("required field resource_id missing") + } + if all.TypeName == nil { + return fmt.Errorf("required field type_name missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"output", "resource_id", "type_name"}) + } else { + return err + } + o.Output = all.Output + o.ResourceId = *all.ResourceId + o.TypeName = *all.TypeName + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + return nil +} diff --git a/api/datadogV2/model_security_monitoring_terraform_export_data.go b/api/datadogV2/model_security_monitoring_terraform_export_data.go new file mode 100644 index 00000000000..c71be56d8ca --- /dev/null +++ b/api/datadogV2/model_security_monitoring_terraform_export_data.go @@ -0,0 +1,174 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SecurityMonitoringTerraformExportData The Terraform export data object. +type SecurityMonitoringTerraformExportData struct { + // Attributes of the Terraform export response. + Attributes SecurityMonitoringTerraformExportAttributes `json:"attributes"` + // The resource identifier composed of the Terraform type name and the resource ID separated by `|`. + Id string `json:"id"` + // The JSON:API type. Always `format_resource`. + Type string `json:"type"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewSecurityMonitoringTerraformExportData instantiates a new SecurityMonitoringTerraformExportData object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewSecurityMonitoringTerraformExportData(attributes SecurityMonitoringTerraformExportAttributes, id string, typeVar string) *SecurityMonitoringTerraformExportData { + this := SecurityMonitoringTerraformExportData{} + this.Attributes = attributes + this.Id = id + this.Type = typeVar + return &this +} + +// NewSecurityMonitoringTerraformExportDataWithDefaults instantiates a new SecurityMonitoringTerraformExportData object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. 
+func NewSecurityMonitoringTerraformExportDataWithDefaults() *SecurityMonitoringTerraformExportData { + this := SecurityMonitoringTerraformExportData{} + return &this +} + +// GetAttributes returns the Attributes field value. +func (o *SecurityMonitoringTerraformExportData) GetAttributes() SecurityMonitoringTerraformExportAttributes { + if o == nil { + var ret SecurityMonitoringTerraformExportAttributes + return ret + } + return o.Attributes +} + +// GetAttributesOk returns a tuple with the Attributes field value +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformExportData) GetAttributesOk() (*SecurityMonitoringTerraformExportAttributes, bool) { + if o == nil { + return nil, false + } + return &o.Attributes, true +} + +// SetAttributes sets field value. +func (o *SecurityMonitoringTerraformExportData) SetAttributes(v SecurityMonitoringTerraformExportAttributes) { + o.Attributes = v +} + +// GetId returns the Id field value. +func (o *SecurityMonitoringTerraformExportData) GetId() string { + if o == nil { + var ret string + return ret + } + return o.Id +} + +// GetIdOk returns a tuple with the Id field value +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformExportData) GetIdOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Id, true +} + +// SetId sets field value. +func (o *SecurityMonitoringTerraformExportData) SetId(v string) { + o.Id = v +} + +// GetType returns the Type field value. +func (o *SecurityMonitoringTerraformExportData) GetType() string { + if o == nil { + var ret string + return ret + } + return o.Type +} + +// GetTypeOk returns a tuple with the Type field value +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformExportData) GetTypeOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Type, true +} + +// SetType sets field value. 
+func (o *SecurityMonitoringTerraformExportData) SetType(v string) { + o.Type = v +} + +// MarshalJSON serializes the struct using spec logic. +func (o SecurityMonitoringTerraformExportData) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + toSerialize["attributes"] = o.Attributes + toSerialize["id"] = o.Id + toSerialize["type"] = o.Type + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. +func (o *SecurityMonitoringTerraformExportData) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + Attributes *SecurityMonitoringTerraformExportAttributes `json:"attributes"` + Id *string `json:"id"` + Type *string `json:"type"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + if all.Attributes == nil { + return fmt.Errorf("required field attributes missing") + } + if all.Id == nil { + return fmt.Errorf("required field id missing") + } + if all.Type == nil { + return fmt.Errorf("required field type missing") + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"attributes", "id", "type"}) + } else { + return err + } + + hasInvalidField := false + if all.Attributes.UnparsedObject != nil && o.UnparsedObject == nil { + hasInvalidField = true + } + o.Attributes = *all.Attributes + o.Id = *all.Id + o.Type = *all.Type + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_security_monitoring_terraform_export_response.go 
b/api/datadogV2/model_security_monitoring_terraform_export_response.go new file mode 100644 index 00000000000..ab17ef83840 --- /dev/null +++ b/api/datadogV2/model_security_monitoring_terraform_export_response.go @@ -0,0 +1,111 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SecurityMonitoringTerraformExportResponse Response containing the Terraform configuration for a security monitoring resource. +type SecurityMonitoringTerraformExportResponse struct { + // The Terraform export data object. + Data *SecurityMonitoringTerraformExportData `json:"data,omitempty"` + // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct + UnparsedObject map[string]interface{} `json:"-"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// NewSecurityMonitoringTerraformExportResponse instantiates a new SecurityMonitoringTerraformExportResponse object. +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed. +func NewSecurityMonitoringTerraformExportResponse() *SecurityMonitoringTerraformExportResponse { + this := SecurityMonitoringTerraformExportResponse{} + return &this +} + +// NewSecurityMonitoringTerraformExportResponseWithDefaults instantiates a new SecurityMonitoringTerraformExportResponse object. +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set. 
+func NewSecurityMonitoringTerraformExportResponseWithDefaults() *SecurityMonitoringTerraformExportResponse { + this := SecurityMonitoringTerraformExportResponse{} + return &this +} + +// GetData returns the Data field value if set, zero value otherwise. +func (o *SecurityMonitoringTerraformExportResponse) GetData() SecurityMonitoringTerraformExportData { + if o == nil || o.Data == nil { + var ret SecurityMonitoringTerraformExportData + return ret + } + return *o.Data +} + +// GetDataOk returns a tuple with the Data field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *SecurityMonitoringTerraformExportResponse) GetDataOk() (*SecurityMonitoringTerraformExportData, bool) { + if o == nil || o.Data == nil { + return nil, false + } + return o.Data, true +} + +// HasData returns a boolean if a field has been set. +func (o *SecurityMonitoringTerraformExportResponse) HasData() bool { + return o != nil && o.Data != nil +} + +// SetData gets a reference to the given SecurityMonitoringTerraformExportData and assigns it to the Data field. +func (o *SecurityMonitoringTerraformExportResponse) SetData(v SecurityMonitoringTerraformExportData) { + o.Data = &v +} + +// MarshalJSON serializes the struct using spec logic. +func (o SecurityMonitoringTerraformExportResponse) MarshalJSON() ([]byte, error) { + toSerialize := map[string]interface{}{} + if o.UnparsedObject != nil { + return datadog.Marshal(o.UnparsedObject) + } + if o.Data != nil { + toSerialize["data"] = o.Data + } + + for key, value := range o.AdditionalProperties { + toSerialize[key] = value + } + return datadog.Marshal(toSerialize) +} + +// UnmarshalJSON deserializes the given payload. 
+func (o *SecurityMonitoringTerraformExportResponse) UnmarshalJSON(bytes []byte) (err error) { + all := struct { + Data *SecurityMonitoringTerraformExportData `json:"data,omitempty"` + }{} + if err = datadog.Unmarshal(bytes, &all); err != nil { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + additionalProperties := make(map[string]interface{}) + if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil { + datadog.DeleteKeys(additionalProperties, &[]string{"data"}) + } else { + return err + } + + hasInvalidField := false + if all.Data != nil && all.Data.UnparsedObject != nil && o.UnparsedObject == nil { + hasInvalidField = true + } + o.Data = all.Data + + if len(additionalProperties) > 0 { + o.AdditionalProperties = additionalProperties + } + + if hasInvalidField { + return datadog.Unmarshal(bytes, &o.UnparsedObject) + } + + return nil +} diff --git a/api/datadogV2/model_security_monitoring_terraform_resource_type.go b/api/datadogV2/model_security_monitoring_terraform_resource_type.go new file mode 100644 index 00000000000..201ca47d310 --- /dev/null +++ b/api/datadogV2/model_security_monitoring_terraform_resource_type.go @@ -0,0 +1,66 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +package datadogV2 + +import ( + "fmt" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" +) + +// SecurityMonitoringTerraformResourceType The type of security monitoring resource to export to Terraform. +type SecurityMonitoringTerraformResourceType string + +// List of SecurityMonitoringTerraformResourceType. 
+const (
+ SECURITYMONITORINGTERRAFORMRESOURCETYPE_SUPPRESSIONS SecurityMonitoringTerraformResourceType = "suppressions"
+ SECURITYMONITORINGTERRAFORMRESOURCETYPE_CRITICAL_ASSETS SecurityMonitoringTerraformResourceType = "critical_assets"
+)
+
+var allowedSecurityMonitoringTerraformResourceTypeEnumValues = []SecurityMonitoringTerraformResourceType{
+ SECURITYMONITORINGTERRAFORMRESOURCETYPE_SUPPRESSIONS,
+ SECURITYMONITORINGTERRAFORMRESOURCETYPE_CRITICAL_ASSETS,
+}
+
+// GetAllowedValues returns the list of possible values.
+func (v *SecurityMonitoringTerraformResourceType) GetAllowedValues() []SecurityMonitoringTerraformResourceType {
+ return allowedSecurityMonitoringTerraformResourceTypeEnumValues
+}
+
+// UnmarshalJSON deserializes the given payload.
+func (v *SecurityMonitoringTerraformResourceType) UnmarshalJSON(src []byte) error {
+ var value string
+ err := datadog.Unmarshal(src, &value)
+ if err != nil {
+ return err
+ }
+ *v = SecurityMonitoringTerraformResourceType(value)
+ return nil
+}
+
+// NewSecurityMonitoringTerraformResourceTypeFromValue returns a pointer to a valid SecurityMonitoringTerraformResourceType
+// for the value passed as argument, or an error if the value passed is not allowed by the enum.
+func NewSecurityMonitoringTerraformResourceTypeFromValue(v string) (*SecurityMonitoringTerraformResourceType, error) {
+ ev := SecurityMonitoringTerraformResourceType(v)
+ if ev.IsValid() {
+ return &ev, nil
+ }
+ return nil, fmt.Errorf("invalid value '%v' for SecurityMonitoringTerraformResourceType: valid values are %v", v, allowedSecurityMonitoringTerraformResourceTypeEnumValues)
+}
+
+// IsValid returns true if the value is valid for the enum, false otherwise.
+func (v SecurityMonitoringTerraformResourceType) IsValid() bool {
+ for _, existing := range allowedSecurityMonitoringTerraformResourceTypeEnumValues {
+ if existing == v {
+ return true
+ }
+ }
+ return false
+}
+
+// Ptr returns reference to SecurityMonitoringTerraformResourceType value.
+func (v SecurityMonitoringTerraformResourceType) Ptr() *SecurityMonitoringTerraformResourceType { + return &v +} diff --git a/examples/v2/security-monitoring/BulkExportSecurityMonitoringTerraformResources.go b/examples/v2/security-monitoring/BulkExportSecurityMonitoringTerraformResources.go new file mode 100644 index 00000000000..059f8e4d73a --- /dev/null +++ b/examples/v2/security-monitoring/BulkExportSecurityMonitoringTerraformResources.go @@ -0,0 +1,43 @@ +// Export security monitoring resources to Terraform returns "OK" response + +package main + +import ( + "context" + "fmt" + "io/ioutil" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + // there is a valid "suppression" in the system + SuppressionDataID := os.Getenv("SUPPRESSION_DATA_ID") + + body := datadogV2.SecurityMonitoringTerraformBulkExportRequest{ + Data: datadogV2.SecurityMonitoringTerraformBulkExportData{ + Attributes: datadogV2.SecurityMonitoringTerraformBulkExportAttributes{ + ResourceIds: []string{ + SuppressionDataID, + }, + }, + Type: "bulk_export_resources", + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + configuration.SetUnstableOperationEnabled("v2.BulkExportSecurityMonitoringTerraformResources", true) + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewSecurityMonitoringApi(apiClient) + resp, r, err := api.BulkExportSecurityMonitoringTerraformResources(ctx, datadogV2.SECURITYMONITORINGTERRAFORMRESOURCETYPE_SUPPRESSIONS, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `SecurityMonitoringApi.BulkExportSecurityMonitoringTerraformResources`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := ioutil.ReadAll(resp) + fmt.Fprintf(os.Stdout, "Response from `SecurityMonitoringApi.BulkExportSecurityMonitoringTerraformResources`:\n%s\n", 
responseContent) +} diff --git a/examples/v2/security-monitoring/ConvertSecurityMonitoringTerraformResource.go b/examples/v2/security-monitoring/ConvertSecurityMonitoringTerraformResource.go new file mode 100644 index 00000000000..3062e7c8607 --- /dev/null +++ b/examples/v2/security-monitoring/ConvertSecurityMonitoringTerraformResource.go @@ -0,0 +1,44 @@ +// Convert security monitoring resource to Terraform returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + body := datadogV2.SecurityMonitoringTerraformConvertRequest{ + Data: datadogV2.SecurityMonitoringTerraformConvertData{ + Type: "convert_resource", + Id: "abc-123", + Attributes: datadogV2.SecurityMonitoringTerraformConvertAttributes{ + ResourceJson: map[string]interface{}{ + "enabled": true, + "name": "Example-Security-Monitoring", + "rule_query": "source:cloudtrail", + "suppression_query": "env:test", + }, + }, + }, + } + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + configuration.SetUnstableOperationEnabled("v2.ConvertSecurityMonitoringTerraformResource", true) + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewSecurityMonitoringApi(apiClient) + resp, r, err := api.ConvertSecurityMonitoringTerraformResource(ctx, datadogV2.SECURITYMONITORINGTERRAFORMRESOURCETYPE_SUPPRESSIONS, body) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `SecurityMonitoringApi.ConvertSecurityMonitoringTerraformResource`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `SecurityMonitoringApi.ConvertSecurityMonitoringTerraformResource`:\n%s\n", responseContent) +} diff --git 
a/examples/v2/security-monitoring/ExportSecurityMonitoringTerraformResource.go b/examples/v2/security-monitoring/ExportSecurityMonitoringTerraformResource.go new file mode 100644 index 00000000000..0e6f37fb6f8 --- /dev/null +++ b/examples/v2/security-monitoring/ExportSecurityMonitoringTerraformResource.go @@ -0,0 +1,33 @@ +// Export security monitoring resource to Terraform returns "OK" response + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/DataDog/datadog-api-client-go/v2/api/datadog" + "github.com/DataDog/datadog-api-client-go/v2/api/datadogV2" +) + +func main() { + // there is a valid "suppression" in the system + SuppressionDataID := os.Getenv("SUPPRESSION_DATA_ID") + + ctx := datadog.NewDefaultContext(context.Background()) + configuration := datadog.NewConfiguration() + configuration.SetUnstableOperationEnabled("v2.ExportSecurityMonitoringTerraformResource", true) + apiClient := datadog.NewAPIClient(configuration) + api := datadogV2.NewSecurityMonitoringApi(apiClient) + resp, r, err := api.ExportSecurityMonitoringTerraformResource(ctx, datadogV2.SECURITYMONITORINGTERRAFORMRESOURCETYPE_SUPPRESSIONS, SuppressionDataID) + + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `SecurityMonitoringApi.ExportSecurityMonitoringTerraformResource`: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + + responseContent, _ := json.MarshalIndent(resp, "", " ") + fmt.Fprintf(os.Stdout, "Response from `SecurityMonitoringApi.ExportSecurityMonitoringTerraformResource`:\n%s\n", responseContent) +} diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Convert_security_monitoring_resource_to_Terraform_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Convert_security_monitoring_resource_to_Terraform_returns_OK_response.freeze new file mode 100644 index 00000000000..58f519964f9 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Convert_security_monitoring_resource_to_Terraform_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-10T08:55:44.730Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Convert_security_monitoring_resource_to_Terraform_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Convert_security_monitoring_resource_to_Terraform_returns_OK_response.yaml new file mode 100644 index 00000000000..535a39ed254 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Convert_security_monitoring_resource_to_Terraform_returns_OK_response.yaml @@ -0,0 +1,25 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"resource_json":{"enabled":true,"name":"Example-Security-Monitoring","rule_query":"source:cloudtrail","suppression_query":"env:test"}},"id":"abc-123","type":"convert_resource"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/security_monitoring/terraform/suppressions/convert + response: + body: '{"data":{"id":"datadog_security_monitoring_suppression|abc-123","type":"format_resource","attributes":{"output":"resource + \"datadog_security_monitoring_suppression\" \"abc-123\" {\n enabled = + true\n name = \"Example-Security-Monitoring\"\n rule_query = + \"source:cloudtrail\"\n suppression_query = \"env:test\"\n}\n","resource_id":"abc-123","type_name":"datadog_security_monitoring_suppression"}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resource_to_Terraform_returns_OK_response.freeze 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resource_to_Terraform_returns_OK_response.freeze new file mode 100644 index 00000000000..fb2c8719020 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resource_to_Terraform_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-10T08:56:17.310Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resource_to_Terraform_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resource_to_Terraform_returns_OK_response.yaml new file mode 100644 index 00000000000..eacf8763986 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resource_to_Terraform_returns_OK_response.yaml @@ -0,0 +1,60 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"description":"Test-Export_security_monitoring_resource_to_Terraform_returns_OK_response-1775811377","enabled":true,"name":"suppression 934620bff161fb60","rule_query":"source:cloudtrail","suppression_query":"env:test","tags":["technique:T1110-brute-force","source:cloudtrail"]},"type":"suppressions"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/security_monitoring/configuration/suppressions + response: + body: '{"data":{"id":"urh-ldl-f7e","type":"suppressions","attributes":{"creation_date":1775811377666,"creator":{"handle":"frog@datadoghq.com","name":"frog"},"data_exclusion_query":"","description":"Test-Export_security_monitoring_resource_to_Terraform_returns_OK_response-1775811377","editable":true,"enabled":true,"name":"suppression + 
934620bff161fb60","rule_query":"source:cloudtrail","suppression_query":"env:test","tags":["source:cloudtrail","technique:T1110-brute-force"],"update_date":1775811377666,"updater":{"handle":"frog@datadoghq.com","name":"frog"},"version":1}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +- request: + body: '' + form: {} + headers: + Accept: + - application/json + id: 1 + method: GET + url: https://api.datadoghq.com/api/v2/security_monitoring/terraform/suppressions/urh-ldl-f7e + response: + body: '{"data":{"id":"datadog_security_monitoring_suppression|urh-ldl-f7e","type":"format_resource","attributes":{"output":"resource + \"datadog_security_monitoring_suppression\" \"urh-ldl-f7e\" {\n description = + \"Test-Export_security_monitoring_resource_to_Terraform_returns_OK_response-1775811377\"\n enabled = + true\n name = \"suppression 934620bff161fb60\"\n rule_query = + \"source:cloudtrail\"\n suppression_query = \"env:test\"\n tags = + [\"source:cloudtrail\", \"technique:T1110-brute-force\"]\n}\n","resource_id":"urh-ldl-f7e","type_name":"datadog_security_monitoring_suppression"}}}' + code: 200 + duration: 0ms + headers: + Content-Type: + - application/vnd.api+json + status: 200 OK +- request: + body: '' + form: {} + headers: + Accept: + - '*/*' + id: 2 + method: DELETE + url: https://api.datadoghq.com/api/v2/security_monitoring/configuration/suppressions/urh-ldl-f7e + response: + body: '' + code: 204 + duration: 0ms + headers: {} + status: 204 No Content +version: 2 diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resources_to_Terraform_returns_OK_response.freeze b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resources_to_Terraform_returns_OK_response.freeze new file mode 100644 index 00000000000..2515d910118 --- /dev/null +++ 
b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resources_to_Terraform_returns_OK_response.freeze @@ -0,0 +1 @@ +2026-04-10T08:56:18.628Z \ No newline at end of file diff --git a/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resources_to_Terraform_returns_OK_response.yaml b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resources_to_Terraform_returns_OK_response.yaml new file mode 100644 index 00000000000..9b3b8014c68 --- /dev/null +++ b/tests/scenarios/cassettes/TestScenarios/v2/Feature_Security_Monitoring/Scenario_Export_security_monitoring_resources_to_Terraform_returns_OK_response.yaml @@ -0,0 +1,58 @@ +interactions: +- request: + body: | + {"data":{"attributes":{"description":"Test-Export_security_monitoring_resources_to_Terraform_returns_OK_response-1775811378","enabled":true,"name":"suppression 281e64d265076a2a","rule_query":"source:cloudtrail","suppression_query":"env:test","tags":["technique:T1110-brute-force","source:cloudtrail"]},"type":"suppressions"}} + form: {} + headers: + Accept: + - application/json + Content-Type: + - application/json + id: 0 + method: POST + url: https://api.datadoghq.com/api/v2/security_monitoring/configuration/suppressions + response: + body: '{"data":{"id":"vgr-gcw-s7m","type":"suppressions","attributes":{"creation_date":1775811378970,"creator":{"handle":"frog@datadoghq.com","name":"frog"},"data_exclusion_query":"","description":"Test-Export_security_monitoring_resources_to_Terraform_returns_OK_response-1775811378","editable":true,"enabled":true,"name":"suppression + 281e64d265076a2a","rule_query":"source:cloudtrail","suppression_query":"env:test","tags":["source:cloudtrail","technique:T1110-brute-force"],"update_date":1775811378970,"updater":{"handle":"frog@datadoghq.com","name":"frog"},"version":1}}}' + code: 200 + duration: 0ms + headers: + 
Content-Type: + - application/vnd.api+json + status: 200 OK +- request: + body: | + {"data":{"attributes":{"resource_ids":["vgr-gcw-s7m"]},"type":"bulk_export_resources"}} + form: {} + headers: + Accept: + - application/zip, application/json + Content-Type: + - application/json + id: 1 + method: POST + url: https://api.datadoghq.com/api/v2/security_monitoring/terraform/suppressions/bulk + response: + body: UEsDBBQACAAIAAAAAAAAAAAAAAAAAAAAAAAaAAAAc3VwcHJlc3Npb25fdmdyLWdjdy1zN20udGZsjj1PMzEQhPv7FSvXr6U4L8lFka6koqBJh5Dl2JvD0p33srsORIj/jo4PEVCmfuaZYRSqHBFMChoS9V4wVs569iOVrMS59F7qNDGKZCoGzKln28dnK+1o4LUBSCiR86SZCnymA7NDUXv7MhHrVef3snglv0PmcCAePaNWLuLv72ZioiJoXduuNs79bzemAcAS9gMm+EkHyhUbgBJGhF/pwFych+XG4fomLderRbsOyzD7uA7ojxX5fFn6+LaNA9WkHPIwkxemr0IHBstpqyg6Axp6+bv/cMX1D4xifCr5WHG7c84t7J6roj0QRzSPzVvzHgAA//9QSwcIw6lzsPYAAACZAQAAUEsBAhQAFAAIAAgAAAAAAMOpc7D2AAAAmQEAABoAAAAAAAAAAAAAAAAAAAAAAHN1cHByZXNzaW9uX3Znci1nY3ctczdtLnRmUEsFBgAAAAABAAEASAAAAD4BAAAAAA== + code: 200 + duration: 0ms + headers: + Content-Type: + - application/zip + status: 200 OK +- request: + body: '' + form: {} + headers: + Accept: + - '*/*' + id: 2 + method: DELETE + url: https://api.datadoghq.com/api/v2/security_monitoring/configuration/suppressions/vgr-gcw-s7m + response: + body: '' + code: 204 + duration: 0ms + headers: {} + status: 204 No Content +version: 2 diff --git a/tests/scenarios/features/v2/security_monitoring.feature b/tests/scenarios/features/v2/security_monitoring.feature index d2cfdeaea0b..1da592aca20 100644 --- a/tests/scenarios/features/v2/security_monitoring.feature +++ b/tests/scenarios/features/v2/security_monitoring.feature @@ -326,6 +326,26 @@ Feature: Security Monitoring Then the response status is 200 OK And the response "terraformContent" is equal to "resource \"datadog_security_monitoring_rule\" \"_{{ unique_hash }}\" {\n\tname = \"_{{ unique_hash }}\"\n\tenabled = true\n\tquery {\n\t\tquery = \"@test:true\"\n\t\tgroup_by_fields = []\n\t\thas_optional_group_by_fields = 
false\n\t\tdistinct_fields = []\n\t\taggregation = \"count\"\n\t\tname = \"\"\n\t\tdata_source = \"logs\"\n\t}\n\toptions {\n\t\tkeep_alive = 3600\n\t\tmax_signal_duration = 86400\n\t\tdetection_method = \"threshold\"\n\t\tevaluation_window = 900\n\t}\n\tcase {\n\t\tname = \"\"\n\t\tstatus = \"info\"\n\t\tnotifications = []\n\t\tcondition = \"a > 0\"\n\t}\n\tmessage = \"Test rule\"\n\ttags = []\n\thas_extended_title = false\n\ttype = \"log_detection\"\n}\n" + @generated @skip @team:DataDog/k9-cloud-siem + Scenario: Convert security monitoring resource to Terraform returns "Bad Request" response + Given operation "ConvertSecurityMonitoringTerraformResource" enabled + And new "ConvertSecurityMonitoringTerraformResource" request + And request contains "resource_type" parameter from "REPLACE.ME" + And body with value {"data": {"attributes": {"resource_json": {"enabled": true, "name": "Custom suppression", "rule_query": "type:log_detection source:cloudtrail", "suppression_query": "env:staging status:low"}}, "id": "abc-123", "type": "convert_resource"}} + When the request is sent + Then the response status is 400 Bad Request + + @replay-only @team:DataDog/k9-cloud-siem + Scenario: Convert security monitoring resource to Terraform returns "OK" response + Given operation "ConvertSecurityMonitoringTerraformResource" enabled + And new "ConvertSecurityMonitoringTerraformResource" request + And request contains "resource_type" parameter with value "suppressions" + And body with value {"data": {"type": "convert_resource", "id": "abc-123", "attributes": {"resource_json": {"enabled": true, "name": "Example-Security-Monitoring", "rule_query": "source:cloudtrail", "suppression_query": "env:test"}}}} + When the request is sent + Then the response status is 200 OK + And the response "data.attributes.type_name" is equal to "datadog_security_monitoring_suppression" + And the response "data.attributes.resource_id" is equal to "abc-123" + @team:DataDog/k9-investigation Scenario: Create 
Jira issue for security finding returns "Created" response Given new "CreateJiraIssues" request @@ -935,6 +955,55 @@ Feature: Security Monitoring When the request is sent Then the response status is 404 Not Found + @generated @skip @team:DataDog/k9-cloud-siem + Scenario: Export security monitoring resource to Terraform returns "Not Found" response + Given operation "ExportSecurityMonitoringTerraformResource" enabled + And new "ExportSecurityMonitoringTerraformResource" request + And request contains "resource_type" parameter from "REPLACE.ME" + And request contains "resource_id" parameter from "REPLACE.ME" + When the request is sent + Then the response status is 404 Not Found + + @replay-only @team:DataDog/k9-cloud-siem + Scenario: Export security monitoring resource to Terraform returns "OK" response + Given operation "ExportSecurityMonitoringTerraformResource" enabled + And there is a valid "suppression" in the system + And new "ExportSecurityMonitoringTerraformResource" request + And request contains "resource_type" parameter with value "suppressions" + And request contains "resource_id" parameter from "suppression.data.id" + When the request is sent + Then the response status is 200 OK + And the response "data.attributes.type_name" is equal to "datadog_security_monitoring_suppression" + And the response "data.attributes.resource_id" has the same value as "suppression.data.id" + + @generated @skip @team:DataDog/k9-cloud-siem + Scenario: Export security monitoring resources to Terraform returns "Bad Request" response + Given operation "BulkExportSecurityMonitoringTerraformResources" enabled + And new "BulkExportSecurityMonitoringTerraformResources" request + And request contains "resource_type" parameter from "REPLACE.ME" + And body with value {"data": {"attributes": {"resource_ids": [""]}, "type": "bulk_export_resources"}} + When the request is sent + Then the response status is 400 Bad Request + + @generated @skip @team:DataDog/k9-cloud-siem + Scenario: Export 
security monitoring resources to Terraform returns "Not Found" response + Given operation "BulkExportSecurityMonitoringTerraformResources" enabled + And new "BulkExportSecurityMonitoringTerraformResources" request + And request contains "resource_type" parameter from "REPLACE.ME" + And body with value {"data": {"attributes": {"resource_ids": [""]}, "type": "bulk_export_resources"}} + When the request is sent + Then the response status is 404 Not Found + + @replay-only @team:DataDog/k9-cloud-siem + Scenario: Export security monitoring resources to Terraform returns "OK" response + Given operation "BulkExportSecurityMonitoringTerraformResources" enabled + And there is a valid "suppression" in the system + And new "BulkExportSecurityMonitoringTerraformResources" request + And request contains "resource_type" parameter with value "suppressions" + And body with value {"data": {"attributes": {"resource_ids": ["{{ suppression.data.id }}"]}, "type": "bulk_export_resources"}} + When the request is sent + Then the response status is 200 OK + @generated @skip @team:DataDog/k9-cloud-vm Scenario: Get SBOM returns "Bad request: The server cannot process the request due to invalid syntax in the request." response Given new "GetSBOM" request diff --git a/tests/scenarios/features/v2/undo.json b/tests/scenarios/features/v2/undo.json index c7ef9624df9..f78cd823b2c 100644 --- a/tests/scenarios/features/v2/undo.json +++ b/tests/scenarios/features/v2/undo.json @@ -5716,6 +5716,24 @@ "type": "safe" } }, + "BulkExportSecurityMonitoringTerraformResources": { + "tag": "Security Monitoring", + "undo": { + "type": "safe" + } + }, + "ConvertSecurityMonitoringTerraformResource": { + "tag": "Security Monitoring", + "undo": { + "type": "safe" + } + }, + "ExportSecurityMonitoringTerraformResource": { + "tag": "Security Monitoring", + "undo": { + "type": "safe" + } + }, "ListScanningGroups": { "tag": "Sensitive Data Scanner", "undo": {