diff --git a/.apigentools-info b/.apigentools-info index f50525e68cf..964e856f905 100644 --- a/.apigentools-info +++ b/.apigentools-info @@ -4,13 +4,13 @@ "spec_versions": { "v1": { "apigentools_version": "1.6.6", - "regenerated": "2025-04-04 20:19:28.929107", - "spec_repo_commit": "3909ab62" + "regenerated": "2025-04-07 18:49:32.102281", + "spec_repo_commit": "d0287df0" }, "v2": { "apigentools_version": "1.6.6", - "regenerated": "2025-04-04 20:19:28.944541", - "spec_repo_commit": "3909ab62" + "regenerated": "2025-04-07 18:49:32.119706", + "spec_repo_commit": "d0287df0" } } } \ No newline at end of file diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index 87935c5f60b..4070b85967e 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -21478,6 +21478,668 @@ components: - id - type type: object + ObservabilityPipeline: + description: Top-level schema representing a pipeline. + properties: + data: + $ref: '#/components/schemas/ObservabilityPipelineData' + required: + - data + type: object + ObservabilityPipelineAddFieldsProcessor: + description: The `add_fields` processor adds static key-value fields to logs. + properties: + fields: + description: A list of static fields (key-value pairs) that is added to + each log event processed by this component. + items: + $ref: '#/components/schemas/ObservabilityPipelineFieldValue' + type: array + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: add-fields-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. 
+ example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessorType' + required: + - id + - type + - include + - fields + - inputs + type: object + ObservabilityPipelineAddFieldsProcessorType: + default: add_fields + description: The processor type. The value should always be `add_fields`. + enum: + - add_fields + example: add_fields + type: string + x-enum-varnames: + - ADD_FIELDS + ObservabilityPipelineConfig: + description: Specifies the pipeline's configuration, including its sources, + processors, and destinations. + properties: + destinations: + description: A list of destination components where processed logs are sent. + example: + - id: datadog-logs-destination + inputs: + - filter-processor + type: datadog_logs + items: + $ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem' + type: array + processors: + description: A list of processors that transform or enrich log data. + example: + - id: filter-processor + include: service:my-service + inputs: + - datadog-agent-source + type: filter + items: + $ref: '#/components/schemas/ObservabilityPipelineConfigProcessorItem' + type: array + sources: + description: A list of configured data sources for the pipeline. + example: + - id: datadog-agent-source + type: datadog_agent + items: + $ref: '#/components/schemas/ObservabilityPipelineConfigSourceItem' + type: array + required: + - sources + - processors + - destinations + type: object + ObservabilityPipelineConfigDestinationItem: + description: A destination for the pipeline. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + ObservabilityPipelineConfigProcessorItem: + description: A processor for the pipeline. 
+ oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' + ObservabilityPipelineConfigSourceItem: + description: A data source for the pipeline. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource' + ObservabilityPipelineCreateRequest: + description: Top-level schema representing a pipeline. + properties: + data: + $ref: '#/components/schemas/ObservabilityPipelineCreateRequestData' + required: + - data + type: object + ObservabilityPipelineCreateRequestData: + description: "Contains the pipeline\u2019s ID, type, and configuration attributes." + properties: + attributes: + $ref: '#/components/schemas/ObservabilityPipelineDataAttributes' + type: + default: pipelines + description: The resource type identifier. For pipeline resources, this + should always be set to `pipelines`. + example: pipelines + type: string + required: + - type + - attributes + type: object + ObservabilityPipelineData: + description: "Contains the pipeline\u2019s ID, type, and configuration attributes." + properties: + attributes: + $ref: '#/components/schemas/ObservabilityPipelineDataAttributes' + id: + description: Unique identifier for the pipeline. + example: 3fa85f64-5717-4562-b3fc-2c963f66afa6 + type: string + type: + default: pipelines + description: The resource type identifier. For pipeline resources, this + should always be set to `pipelines`. 
+ example: pipelines + type: string + required: + - id + - type + - attributes + type: object + ObservabilityPipelineDataAttributes: + description: "Defines the pipeline\u2019s name and its components (sources, + processors, and destinations)." + properties: + config: + $ref: '#/components/schemas/ObservabilityPipelineConfig' + name: + description: Name of the pipeline. + example: Main Observability Pipeline + type: string + required: + - name + - config + type: object + ObservabilityPipelineDatadogAgentSource: + description: The `datadog_agent` source collects logs from the Datadog Agent. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: datadog-agent-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSourceType' + required: + - id + - type + type: object + ObservabilityPipelineDatadogAgentSourceType: + default: datadog_agent + description: The source type. The value should always be `datadog_agent`. + enum: + - datadog_agent + example: datadog_agent + type: string + x-enum-varnames: + - DATADOG_AGENT + ObservabilityPipelineDatadogLogsDestination: + description: The `datadog_logs` destination forwards logs to Datadog Log Management. + properties: + id: + description: The unique identifier for this component. + example: datadog-logs-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineDatadogLogsDestinationType: + default: datadog_logs + description: The destination type. 
The value should always be `datadog_logs`. + enum: + - datadog_logs + example: datadog_logs + type: string + x-enum-varnames: + - DATADOG_LOGS + ObservabilityPipelineFieldValue: + description: Represents a static key-value pair used in various processors. + properties: + name: + description: The field name. + example: field_name + type: string + value: + description: The field value. + example: field_value + type: string + required: + - name + - value + type: object + ObservabilityPipelineFilterProcessor: + description: The `filter` processor allows conditional processing of logs based + on a Datadog search query. Logs that match the `include` query are passed + through; others are discarded. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: filter-processor + type: string + include: + description: A Datadog search query used to determine which logs should + pass through the filter. Logs that match this query continue to downstream + components; others are dropped. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineFilterProcessorType' + required: + - id + - type + - include + - inputs + type: object + ObservabilityPipelineFilterProcessorType: + default: filter + description: The processor type. The value should always be `filter`. + enum: + - filter + example: filter + type: string + x-enum-varnames: + - FILTER + ObservabilityPipelineKafkaSource: + description: The `kafka` source ingests data from Apache Kafka topics. + properties: + group_id: + description: Consumer group ID used by the Kafka client. 
+ example: consumer-group-0 + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: kafka-source + type: string + librdkafka_options: + description: Optional list of advanced Kafka client configuration options, + defined as key-value pairs. + items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + type: array + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topics: + description: A list of Kafka topic names to subscribe to. The source ingests + messages from each topic specified. + example: + - topic1 + - topic2 + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceType' + required: + - id + - type + - group_id + - topics + type: object + ObservabilityPipelineKafkaSourceLibrdkafkaOption: + description: Represents a key-value pair used to configure low-level `librdkafka` + client options for Kafka sources, such as timeouts, buffer sizes, and security + settings. + properties: + name: + description: The name of the `librdkafka` configuration option to set. + example: fetch.message.max.bytes + type: string + value: + description: The value assigned to the specified `librdkafka` configuration + option. + example: '1048576' + type: string + required: + - name + - value + type: object + ObservabilityPipelineKafkaSourceSasl: + description: Specifies the SASL mechanism for authenticating with a Kafka cluster. + properties: + mechanism: + $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' + type: object + ObservabilityPipelineKafkaSourceType: + default: kafka + description: The source type. The value should always be `kafka`. 
+ enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineParseJSONProcessor: + description: The `parse_json` processor extracts JSON from a specified field + and flattens it into the event. This is useful when logs contain embedded + JSON as a string. + properties: + field: + description: The name of the log field that contains a JSON string. + example: message + type: string + id: + description: A unique identifier for this component. Used to reference this + component in other parts of the pipeline (e.g., as input to downstream + components). + example: parse-json-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessorType' + required: + - id + - type + - include + - field + - inputs + type: object + ObservabilityPipelineParseJSONProcessorType: + default: parse_json + description: The processor type. The value should always be `parse_json`. + enum: + - parse_json + example: parse_json + type: string + x-enum-varnames: + - PARSE_JSON + ObservabilityPipelinePipelineKafkaSourceSaslMechanism: + description: SASL mechanism used for Kafka authentication. + enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 + ObservabilityPipelineQuotaProcessor: + description: The Quota Processor measures logging traffic for logs that match + a specified filter. When the configured daily quota is met, the processor + can drop or alert. 
+ properties: + drop_events: + description: If set to `true`, logs that matched the quota filter and sent + after the quota has been met are dropped; only logs that did not match + the filter query continue through the pipeline. + example: false + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: quota-processor + type: string + ignore_when_missing_partitions: + description: If `true`, the processor skips quota checks when partition + fields are missing from the logs. + type: boolean + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + limit: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit' + name: + description: Name for identifying the processor. + example: MyPipelineQuotaProcessor + type: string + overrides: + description: A list of alternate quota rules that apply to specific sets + of events, identified by matching field values. Each override can define + a custom limit. + items: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverride' + type: array + partition_fields: + description: A list of fields used to segment log traffic for quota enforcement. + Quotas are tracked independently by unique combinations of these field + values. 
+ items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' + required: + - id + - type + - include + - name + - drop_events + - limit + - inputs + type: object + ObservabilityPipelineQuotaProcessorLimit: + description: The maximum amount of data or number of events allowed before the + quota is enforced. Can be specified in bytes or events. + properties: + enforce: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimitEnforceType' + limit: + description: The limit for quota enforcement. + example: 1000 + format: int64 + type: integer + required: + - enforce + - limit + type: object + ObservabilityPipelineQuotaProcessorLimitEnforceType: + description: Unit for quota enforcement in bytes for data size or events for + count. + enum: + - bytes + - events + example: bytes + type: string + x-enum-varnames: + - BYTES + - EVENTS + ObservabilityPipelineQuotaProcessorOverride: + description: Defines a custom quota limit that applies to specific log events + based on matching field values. + properties: + fields: + description: A list of field matchers used to apply a specific override. + If an event matches all listed key-value pairs, the corresponding override + limit is enforced. + items: + $ref: '#/components/schemas/ObservabilityPipelineFieldValue' + type: array + limit: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit' + required: + - fields + - limit + type: object + ObservabilityPipelineQuotaProcessorType: + default: quota + description: The processor type. The value should always be `quota`. + enum: + - quota + example: quota + type: string + x-enum-varnames: + - QUOTA + ObservabilityPipelineRemoveFieldsProcessor: + description: The `remove_fields` processor deletes specified fields from logs. + properties: + fields: + description: A list of field names to be removed from each log event. 
+ example: + - field1 + - field2 + items: + type: string + type: array + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: remove-fields-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: The `PipelineRemoveFieldsProcessor` `inputs`. + example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessorType' + required: + - id + - type + - include + - fields + - inputs + type: object + ObservabilityPipelineRemoveFieldsProcessorType: + default: remove_fields + description: The processor type. The value should always be `remove_fields`. + enum: + - remove_fields + example: remove_fields + type: string + x-enum-varnames: + - REMOVE_FIELDS + ObservabilityPipelineRenameFieldsProcessor: + description: The `rename_fields` processor changes field names. + properties: + fields: + description: A list of rename rules specifying which fields to rename in + the event, what to rename them to, and whether to preserve the original + fields. + items: + $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorField' + type: array + id: + description: A unique identifier for this component. Used to reference this + component in other parts of the pipeline (e.g., as input to downstream + components). + example: rename-fields-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. 
+ example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorType' + required: + - id + - type + - include + - fields + - inputs + type: object + ObservabilityPipelineRenameFieldsProcessorField: + description: Defines how to rename a field in log events. + properties: + destination: + description: The field name to assign the renamed value to. + example: destination_field + type: string + preserve_source: + description: Indicates whether the original field, that is received from + the source, should be kept (`true`) or removed (`false`) after renaming. + example: false + type: boolean + source: + description: The original field name in the log event that should be renamed. + example: source_field + type: string + required: + - source + - destination + - preserve_source + type: object + ObservabilityPipelineRenameFieldsProcessorType: + default: rename_fields + description: The processor type. The value should always be `rename_fields`. + enum: + - rename_fields + example: rename_fields + type: string + x-enum-varnames: + - RENAME_FIELDS + ObservabilityPipelineTls: + description: Configuration for enabling TLS encryption. + properties: + ca_file: + description: "Path to the Certificate Authority (CA) file used to validate + the server\u2019s TLS certificate." + type: string + crt_file: + description: Path to the TLS client certificate file used to authenticate + the pipeline component with upstream or downstream services. + example: /path/to/cert.crt + type: string + key_file: + description: Path to the private key file associated with the TLS client + certificate. Used for mutual TLS authentication. + type: string + required: + - crt_file + type: object OktaAccount: description: Schema for an Okta account. 
properties: @@ -45671,6 +46333,167 @@ paths: summary: Get the latest CSM Threats policy tags: - CSM Threats + /api/v2/remote_config/products/obs_pipelines/pipelines: + post: + description: Create a new pipeline. + operationId: CreatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineCreateRequest' + required: true + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Bad Request + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Forbidden + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Conflict + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Create a new pipeline + tags: + - Observability Pipelines + x-unstable: '**Note**: This endpoint is in Preview.' + /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}: + delete: + description: Delete a pipeline. + operationId: DeletePipeline + parameters: + - description: The ID of the pipeline to delete. + in: path + name: pipeline_id + required: true + schema: + type: string + responses: + '204': + description: OK + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Conflict + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Delete a pipeline + tags: + - Observability Pipelines + x-unstable: '**Note**: This endpoint is in Preview.' 
+ get: + description: Get a specific pipeline by its ID. + operationId: GetPipeline + parameters: + - description: The ID of the pipeline to retrieve. + in: path + name: pipeline_id + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Forbidden + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Get a specific pipeline + tags: + - Observability Pipelines + x-unstable: '**Note**: This endpoint is in Preview.' + put: + description: Update a pipeline. + operationId: UpdatePipeline + parameters: + - description: The ID of the pipeline to update. + in: path + name: pipeline_id + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Bad Request + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Conflict + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Update a pipeline + tags: + - Observability Pipelines + x-unstable: '**Note**: This endpoint is in Preview.' /api/v2/restriction_policy/{resource_id}: delete: description: Deletes the restriction policy associated with a specified resource. 
@@ -54015,6 +54838,12 @@ tags: and their attributes. See the [Network Device Monitoring page](https://docs.datadoghq.com/network_monitoring/) for more information. name: Network Device Monitoring +- description: Observability Pipelines allows you to collect and process logs within + your own infrastructure, and then route them to downstream integrations. + externalDocs: + description: Find out more at + url: https://docs.datadoghq.com/observability_pipelines/ + name: Observability Pipelines - description: Configure your [Datadog Okta integration](https://docs.datadoghq.com/integrations/okta/) directly through the Datadog API. name: Okta Integration diff --git a/examples/v2/observability-pipelines/CreatePipeline.java b/examples/v2/observability-pipelines/CreatePipeline.java new file mode 100644 index 00000000000..0b0aca9369e --- /dev/null +++ b/examples/v2/observability-pipelines/CreatePipeline.java @@ -0,0 +1,81 @@ +// Create a new pipeline returns "OK" response + +import com.datadog.api.client.ApiClient; +import com.datadog.api.client.ApiException; +import com.datadog.api.client.v2.api.ObservabilityPipelinesApi; +import com.datadog.api.client.v2.model.ObservabilityPipeline; +import com.datadog.api.client.v2.model.ObservabilityPipelineConfig; +import com.datadog.api.client.v2.model.ObservabilityPipelineConfigDestinationItem; +import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorItem; +import com.datadog.api.client.v2.model.ObservabilityPipelineConfigSourceItem; +import com.datadog.api.client.v2.model.ObservabilityPipelineCreateRequest; +import com.datadog.api.client.v2.model.ObservabilityPipelineCreateRequestData; +import com.datadog.api.client.v2.model.ObservabilityPipelineDataAttributes; +import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSource; +import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSourceType; +import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestination; +import 
com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestinationType; +import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessor; +import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessorType; +import java.util.Collections; + +public class Example { + public static void main(String[] args) { + ApiClient defaultClient = ApiClient.getDefaultApiClient(); + defaultClient.setUnstableOperationEnabled("v2.createPipeline", true); + ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient); + + ObservabilityPipelineCreateRequest body = + new ObservabilityPipelineCreateRequest() + .data( + new ObservabilityPipelineCreateRequestData() + .attributes( + new ObservabilityPipelineDataAttributes() + .config( + new ObservabilityPipelineConfig() + .destinations( + Collections.singletonList( + new ObservabilityPipelineConfigDestinationItem( + new ObservabilityPipelineDatadogLogsDestination() + .id("datadog-logs-destination") + .inputs( + Collections.singletonList( + "filter-processor")) + .type( + ObservabilityPipelineDatadogLogsDestinationType + .DATADOG_LOGS)))) + .processors( + Collections.singletonList( + new ObservabilityPipelineConfigProcessorItem( + new ObservabilityPipelineFilterProcessor() + .id("filter-processor") + .include("service:my-service") + .inputs( + Collections.singletonList( + "datadog-agent-source")) + .type( + ObservabilityPipelineFilterProcessorType + .FILTER)))) + .sources( + Collections.singletonList( + new ObservabilityPipelineConfigSourceItem( + new ObservabilityPipelineDatadogAgentSource() + .id("datadog-agent-source") + .type( + ObservabilityPipelineDatadogAgentSourceType + .DATADOG_AGENT))))) + .name("Main Observability Pipeline")) + .type("pipelines")); + + try { + ObservabilityPipeline result = apiInstance.createPipeline(body); + System.out.println(result); + } catch (ApiException e) { + System.err.println("Exception when calling ObservabilityPipelinesApi#createPipeline"); + 
System.err.println("Status code: " + e.getCode()); + System.err.println("Reason: " + e.getResponseBody()); + System.err.println("Response headers: " + e.getResponseHeaders()); + e.printStackTrace(); + } + } +} diff --git a/examples/v2/observability-pipelines/DeletePipeline.java b/examples/v2/observability-pipelines/DeletePipeline.java new file mode 100644 index 00000000000..ccbb1723426 --- /dev/null +++ b/examples/v2/observability-pipelines/DeletePipeline.java @@ -0,0 +1,26 @@ +// Delete a pipeline returns "OK" response + +import com.datadog.api.client.ApiClient; +import com.datadog.api.client.ApiException; +import com.datadog.api.client.v2.api.ObservabilityPipelinesApi; + +public class Example { + public static void main(String[] args) { + ApiClient defaultClient = ApiClient.getDefaultApiClient(); + defaultClient.setUnstableOperationEnabled("v2.deletePipeline", true); + ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient); + + // there is a valid "pipeline" in the system + String PIPELINE_DATA_ID = System.getenv("PIPELINE_DATA_ID"); + + try { + apiInstance.deletePipeline(PIPELINE_DATA_ID); + } catch (ApiException e) { + System.err.println("Exception when calling ObservabilityPipelinesApi#deletePipeline"); + System.err.println("Status code: " + e.getCode()); + System.err.println("Reason: " + e.getResponseBody()); + System.err.println("Response headers: " + e.getResponseHeaders()); + e.printStackTrace(); + } + } +} diff --git a/examples/v2/observability-pipelines/GetPipeline.java b/examples/v2/observability-pipelines/GetPipeline.java new file mode 100644 index 00000000000..39c8942b9da --- /dev/null +++ b/examples/v2/observability-pipelines/GetPipeline.java @@ -0,0 +1,28 @@ +// Get a specific pipeline returns "OK" response + +import com.datadog.api.client.ApiClient; +import com.datadog.api.client.ApiException; +import com.datadog.api.client.v2.api.ObservabilityPipelinesApi; +import com.datadog.api.client.v2.model.ObservabilityPipeline; 
+ +public class Example { + public static void main(String[] args) { + ApiClient defaultClient = ApiClient.getDefaultApiClient(); + defaultClient.setUnstableOperationEnabled("v2.getPipeline", true); + ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient); + + // there is a valid "pipeline" in the system + String PIPELINE_DATA_ID = System.getenv("PIPELINE_DATA_ID"); + + try { + ObservabilityPipeline result = apiInstance.getPipeline(PIPELINE_DATA_ID); + System.out.println(result); + } catch (ApiException e) { + System.err.println("Exception when calling ObservabilityPipelinesApi#getPipeline"); + System.err.println("Status code: " + e.getCode()); + System.err.println("Reason: " + e.getResponseBody()); + System.err.println("Response headers: " + e.getResponseHeaders()); + e.printStackTrace(); + } + } +} diff --git a/examples/v2/observability-pipelines/UpdatePipeline.java b/examples/v2/observability-pipelines/UpdatePipeline.java new file mode 100644 index 00000000000..0f8df0509d3 --- /dev/null +++ b/examples/v2/observability-pipelines/UpdatePipeline.java @@ -0,0 +1,84 @@ +// Update a pipeline returns "OK" response + +import com.datadog.api.client.ApiClient; +import com.datadog.api.client.ApiException; +import com.datadog.api.client.v2.api.ObservabilityPipelinesApi; +import com.datadog.api.client.v2.model.ObservabilityPipeline; +import com.datadog.api.client.v2.model.ObservabilityPipelineConfig; +import com.datadog.api.client.v2.model.ObservabilityPipelineConfigDestinationItem; +import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorItem; +import com.datadog.api.client.v2.model.ObservabilityPipelineConfigSourceItem; +import com.datadog.api.client.v2.model.ObservabilityPipelineData; +import com.datadog.api.client.v2.model.ObservabilityPipelineDataAttributes; +import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSource; +import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSourceType; 
+import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestination; +import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestinationType; +import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessor; +import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessorType; +import java.util.Collections; + +public class Example { + public static void main(String[] args) { + ApiClient defaultClient = ApiClient.getDefaultApiClient(); + defaultClient.setUnstableOperationEnabled("v2.updatePipeline", true); + ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient); + + // there is a valid "pipeline" in the system + String PIPELINE_DATA_ID = System.getenv("PIPELINE_DATA_ID"); + + ObservabilityPipeline body = + new ObservabilityPipeline() + .data( + new ObservabilityPipelineData() + .attributes( + new ObservabilityPipelineDataAttributes() + .config( + new ObservabilityPipelineConfig() + .destinations( + Collections.singletonList( + new ObservabilityPipelineConfigDestinationItem( + new ObservabilityPipelineDatadogLogsDestination() + .id("updated-datadog-logs-destination-id") + .inputs( + Collections.singletonList( + "filter-processor")) + .type( + ObservabilityPipelineDatadogLogsDestinationType + .DATADOG_LOGS)))) + .processors( + Collections.singletonList( + new ObservabilityPipelineConfigProcessorItem( + new ObservabilityPipelineFilterProcessor() + .id("filter-processor") + .include("service:my-service") + .inputs( + Collections.singletonList( + "datadog-agent-source")) + .type( + ObservabilityPipelineFilterProcessorType + .FILTER)))) + .sources( + Collections.singletonList( + new ObservabilityPipelineConfigSourceItem( + new ObservabilityPipelineDatadogAgentSource() + .id("datadog-agent-source") + .type( + ObservabilityPipelineDatadogAgentSourceType + .DATADOG_AGENT))))) + .name("Updated Pipeline Name")) + .id(PIPELINE_DATA_ID) + .type("pipelines")); + + try { + ObservabilityPipeline 
result = apiInstance.updatePipeline(PIPELINE_DATA_ID, body); + System.out.println(result); + } catch (ApiException e) { + System.err.println("Exception when calling ObservabilityPipelinesApi#updatePipeline"); + System.err.println("Status code: " + e.getCode()); + System.err.println("Reason: " + e.getResponseBody()); + System.err.println("Response headers: " + e.getResponseHeaders()); + e.printStackTrace(); + } + } +} diff --git a/src/main/java/com/datadog/api/client/ApiClient.java b/src/main/java/com/datadog/api/client/ApiClient.java index 3e0e7436dd7..cd61faea3a8 100644 --- a/src/main/java/com/datadog/api/client/ApiClient.java +++ b/src/main/java/com/datadog/api/client/ApiClient.java @@ -422,6 +422,10 @@ public class ApiClient { put("v2.listVulnerableAssets", false); put("v2.muteFindings", false); put("v2.runHistoricalJob", false); + put("v2.createPipeline", false); + put("v2.deletePipeline", false); + put("v2.getPipeline", false); + put("v2.updatePipeline", false); put("v2.createScorecardOutcomesBatch", false); put("v2.createScorecardRule", false); put("v2.deleteScorecardRule", false); diff --git a/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java b/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java new file mode 100644 index 00000000000..0b67bc579b5 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java @@ -0,0 +1,679 @@ +package com.datadog.api.client.v2.api; + +import com.datadog.api.client.ApiClient; +import com.datadog.api.client.ApiException; +import com.datadog.api.client.ApiResponse; +import com.datadog.api.client.Pair; +import com.datadog.api.client.v2.model.ObservabilityPipeline; +import com.datadog.api.client.v2.model.ObservabilityPipelineCreateRequest; +import jakarta.ws.rs.client.Invocation; +import jakarta.ws.rs.core.GenericType; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + 
+@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelinesApi { + private ApiClient apiClient; + + public ObservabilityPipelinesApi() { + this(ApiClient.getDefaultApiClient()); + } + + public ObservabilityPipelinesApi(ApiClient apiClient) { + this.apiClient = apiClient; + } + + /** + * Get the API client. + * + * @return API client + */ + public ApiClient getApiClient() { + return apiClient; + } + + /** + * Set the API client. + * + * @param apiClient an instance of API client + */ + public void setApiClient(ApiClient apiClient) { + this.apiClient = apiClient; + } + + /** + * Create a new pipeline. + * + *

See {@link #createPipelineWithHttpInfo}. + * + * @param body (required) + * @return ObservabilityPipeline + * @throws ApiException if fails to make API call + */ + public ObservabilityPipeline createPipeline(ObservabilityPipelineCreateRequest body) + throws ApiException { + return createPipelineWithHttpInfo(body).getData(); + } + + /** + * Create a new pipeline. + * + *

See {@link #createPipelineWithHttpInfoAsync}. + * + * @param body (required) + * @return CompletableFuture<ObservabilityPipeline> + */ + public CompletableFuture createPipelineAsync( + ObservabilityPipelineCreateRequest body) { + return createPipelineWithHttpInfoAsync(body) + .thenApply( + response -> { + return response.getData(); + }); + } + + /** + * Create a new pipeline. + * + * @param body (required) + * @return ApiResponse<ObservabilityPipeline> + * @throws ApiException if fails to make API call + * @http.response.details + * + * + * + * + * + * + * + * + *
Response details
Status Code Description Response Headers
201 OK -
400 Bad Request -
403 Forbidden -
409 Conflict -
429 Too many requests -
+ */ + public ApiResponse createPipelineWithHttpInfo( + ObservabilityPipelineCreateRequest body) throws ApiException { + // Check if unstable operation is enabled + String operationId = "createPipeline"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + throw new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId)); + } + Object localVarPostBody = body; + + // verify the required parameter 'body' is set + if (body == null) { + throw new ApiException( + 400, "Missing the required parameter 'body' when calling createPipeline"); + } + // create path and map variables + String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + + Map localVarHeaderParams = new HashMap(); + + Invocation.Builder builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.createPipeline", + localVarPath, + new ArrayList(), + localVarHeaderParams, + new HashMap(), + new String[] {"application/json"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + return apiClient.invokeAPI( + "POST", + builder, + localVarHeaderParams, + new String[] {"application/json"}, + localVarPostBody, + new HashMap(), + false, + new GenericType() {}); + } + + /** + * Create a new pipeline. + * + *

See {@link #createPipelineWithHttpInfo}. + * + * @param body (required) + * @return CompletableFuture<ApiResponse<ObservabilityPipeline>> + */ + public CompletableFuture> createPipelineWithHttpInfoAsync( + ObservabilityPipelineCreateRequest body) { + // Check if unstable operation is enabled + String operationId = "createPipeline"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId))); + return result; + } + Object localVarPostBody = body; + + // verify the required parameter 'body' is set + if (body == null) { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException( + 400, "Missing the required parameter 'body' when calling createPipeline")); + return result; + } + // create path and map variables + String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + + Map localVarHeaderParams = new HashMap(); + + Invocation.Builder builder; + try { + builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.createPipeline", + localVarPath, + new ArrayList(), + localVarHeaderParams, + new HashMap(), + new String[] {"application/json"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + } catch (ApiException ex) { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally(ex); + return result; + } + return apiClient.invokeAPIAsync( + "POST", + builder, + localVarHeaderParams, + new String[] {"application/json"}, + localVarPostBody, + new HashMap(), + false, + new GenericType() {}); + } + + /** + * Delete a pipeline. + * + *

See {@link #deletePipelineWithHttpInfo}. + * + * @param pipelineId The ID of the pipeline to delete. (required) + * @throws ApiException if fails to make API call + */ + public void deletePipeline(String pipelineId) throws ApiException { + deletePipelineWithHttpInfo(pipelineId); + } + + /** + * Delete a pipeline. + * + *

See {@link #deletePipelineWithHttpInfoAsync}. + * + * @param pipelineId The ID of the pipeline to delete. (required) + * @return CompletableFuture + */ + public CompletableFuture deletePipelineAsync(String pipelineId) { + return deletePipelineWithHttpInfoAsync(pipelineId) + .thenApply( + response -> { + return response.getData(); + }); + } + + /** + * Delete a pipeline. + * + * @param pipelineId The ID of the pipeline to delete. (required) + * @return ApiResponse<Void> + * @throws ApiException if fails to make API call + * @http.response.details + * + * + * + * + * + * + * + * + *
Response details
Status Code Description Response Headers
204 OK -
403 Forbidden -
404 Not Found -
409 Conflict -
429 Too many requests -
+ */ + public ApiResponse deletePipelineWithHttpInfo(String pipelineId) throws ApiException { + // Check if unstable operation is enabled + String operationId = "deletePipeline"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + throw new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId)); + } + Object localVarPostBody = null; + + // verify the required parameter 'pipelineId' is set + if (pipelineId == null) { + throw new ApiException( + 400, "Missing the required parameter 'pipelineId' when calling deletePipeline"); + } + // create path and map variables + String localVarPath = + "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + .replaceAll( + "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); + + Map localVarHeaderParams = new HashMap(); + + Invocation.Builder builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.deletePipeline", + localVarPath, + new ArrayList(), + localVarHeaderParams, + new HashMap(), + new String[] {"*/*"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + return apiClient.invokeAPI( + "DELETE", + builder, + localVarHeaderParams, + new String[] {}, + localVarPostBody, + new HashMap(), + false, + null); + } + + /** + * Delete a pipeline. + * + *

See {@link #deletePipelineWithHttpInfo}. + * + * @param pipelineId The ID of the pipeline to delete. (required) + * @return CompletableFuture<ApiResponse<Void>> + */ + public CompletableFuture> deletePipelineWithHttpInfoAsync(String pipelineId) { + // Check if unstable operation is enabled + String operationId = "deletePipeline"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId))); + return result; + } + Object localVarPostBody = null; + + // verify the required parameter 'pipelineId' is set + if (pipelineId == null) { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException( + 400, "Missing the required parameter 'pipelineId' when calling deletePipeline")); + return result; + } + // create path and map variables + String localVarPath = + "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + .replaceAll( + "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); + + Map localVarHeaderParams = new HashMap(); + + Invocation.Builder builder; + try { + builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.deletePipeline", + localVarPath, + new ArrayList(), + localVarHeaderParams, + new HashMap(), + new String[] {"*/*"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + } catch (ApiException ex) { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally(ex); + return result; + } + return apiClient.invokeAPIAsync( + "DELETE", + builder, + localVarHeaderParams, + new String[] {}, + localVarPostBody, + new HashMap(), + false, + null); + } + + /** + * Get a specific pipeline. + * + *

See {@link #getPipelineWithHttpInfo}. + * + * @param pipelineId The ID of the pipeline to retrieve. (required) + * @return ObservabilityPipeline + * @throws ApiException if fails to make API call + */ + public ObservabilityPipeline getPipeline(String pipelineId) throws ApiException { + return getPipelineWithHttpInfo(pipelineId).getData(); + } + + /** + * Get a specific pipeline. + * + *

See {@link #getPipelineWithHttpInfoAsync}. + * + * @param pipelineId The ID of the pipeline to retrieve. (required) + * @return CompletableFuture<ObservabilityPipeline> + */ + public CompletableFuture getPipelineAsync(String pipelineId) { + return getPipelineWithHttpInfoAsync(pipelineId) + .thenApply( + response -> { + return response.getData(); + }); + } + + /** + * Get a specific pipeline by its ID. + * + * @param pipelineId The ID of the pipeline to retrieve. (required) + * @return ApiResponse<ObservabilityPipeline> + * @throws ApiException if fails to make API call + * @http.response.details + * + * + * + * + * + * + *
Response details
Status Code Description Response Headers
200 OK -
403 Forbidden -
429 Too many requests -
+ */ + public ApiResponse getPipelineWithHttpInfo(String pipelineId) + throws ApiException { + // Check if unstable operation is enabled + String operationId = "getPipeline"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + throw new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId)); + } + Object localVarPostBody = null; + + // verify the required parameter 'pipelineId' is set + if (pipelineId == null) { + throw new ApiException( + 400, "Missing the required parameter 'pipelineId' when calling getPipeline"); + } + // create path and map variables + String localVarPath = + "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + .replaceAll( + "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); + + Map localVarHeaderParams = new HashMap(); + + Invocation.Builder builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.getPipeline", + localVarPath, + new ArrayList(), + localVarHeaderParams, + new HashMap(), + new String[] {"application/json"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + return apiClient.invokeAPI( + "GET", + builder, + localVarHeaderParams, + new String[] {}, + localVarPostBody, + new HashMap(), + false, + new GenericType() {}); + } + + /** + * Get a specific pipeline. + * + *

See {@link #getPipelineWithHttpInfo}. + * + * @param pipelineId The ID of the pipeline to retrieve. (required) + * @return CompletableFuture<ApiResponse<ObservabilityPipeline>> + */ + public CompletableFuture> getPipelineWithHttpInfoAsync( + String pipelineId) { + // Check if unstable operation is enabled + String operationId = "getPipeline"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId))); + return result; + } + Object localVarPostBody = null; + + // verify the required parameter 'pipelineId' is set + if (pipelineId == null) { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException( + 400, "Missing the required parameter 'pipelineId' when calling getPipeline")); + return result; + } + // create path and map variables + String localVarPath = + "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + .replaceAll( + "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); + + Map localVarHeaderParams = new HashMap(); + + Invocation.Builder builder; + try { + builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.getPipeline", + localVarPath, + new ArrayList(), + localVarHeaderParams, + new HashMap(), + new String[] {"application/json"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + } catch (ApiException ex) { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally(ex); + return result; + } + return apiClient.invokeAPIAsync( + "GET", + builder, + localVarHeaderParams, + new String[] {}, + localVarPostBody, + new HashMap(), + false, + new GenericType() {}); + } + + /** + * Update a pipeline. + * + *

See {@link #updatePipelineWithHttpInfo}. + * + * @param pipelineId The ID of the pipeline to update. (required) + * @param body (required) + * @return ObservabilityPipeline + * @throws ApiException if fails to make API call + */ + public ObservabilityPipeline updatePipeline(String pipelineId, ObservabilityPipeline body) + throws ApiException { + return updatePipelineWithHttpInfo(pipelineId, body).getData(); + } + + /** + * Update a pipeline. + * + *

See {@link #updatePipelineWithHttpInfoAsync}. + * + * @param pipelineId The ID of the pipeline to update. (required) + * @param body (required) + * @return CompletableFuture<ObservabilityPipeline> + */ + public CompletableFuture updatePipelineAsync( + String pipelineId, ObservabilityPipeline body) { + return updatePipelineWithHttpInfoAsync(pipelineId, body) + .thenApply( + response -> { + return response.getData(); + }); + } + + /** + * Update a pipeline. + * + * @param pipelineId The ID of the pipeline to update. (required) + * @param body (required) + * @return ApiResponse<ObservabilityPipeline> + * @throws ApiException if fails to make API call + * @http.response.details + * + * + * + * + * + * + * + * + * + *
Response details
Status Code Description Response Headers
200 OK -
400 Bad Request -
403 Forbidden -
404 Not Found -
409 Conflict -
429 Too many requests -
+ */ + public ApiResponse updatePipelineWithHttpInfo( + String pipelineId, ObservabilityPipeline body) throws ApiException { + // Check if unstable operation is enabled + String operationId = "updatePipeline"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + throw new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId)); + } + Object localVarPostBody = body; + + // verify the required parameter 'pipelineId' is set + if (pipelineId == null) { + throw new ApiException( + 400, "Missing the required parameter 'pipelineId' when calling updatePipeline"); + } + + // verify the required parameter 'body' is set + if (body == null) { + throw new ApiException( + 400, "Missing the required parameter 'body' when calling updatePipeline"); + } + // create path and map variables + String localVarPath = + "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + .replaceAll( + "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); + + Map localVarHeaderParams = new HashMap(); + + Invocation.Builder builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.updatePipeline", + localVarPath, + new ArrayList(), + localVarHeaderParams, + new HashMap(), + new String[] {"application/json"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + return apiClient.invokeAPI( + "PUT", + builder, + localVarHeaderParams, + new String[] {"application/json"}, + localVarPostBody, + new HashMap(), + false, + new GenericType() {}); + } + + /** + * Update a pipeline. + * + *

See {@link #updatePipelineWithHttpInfo}. + * + * @param pipelineId The ID of the pipeline to update. (required) + * @param body (required) + * @return CompletableFuture<ApiResponse<ObservabilityPipeline>> + */ + public CompletableFuture> updatePipelineWithHttpInfoAsync( + String pipelineId, ObservabilityPipeline body) { + // Check if unstable operation is enabled + String operationId = "updatePipeline"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId))); + return result; + } + Object localVarPostBody = body; + + // verify the required parameter 'pipelineId' is set + if (pipelineId == null) { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException( + 400, "Missing the required parameter 'pipelineId' when calling updatePipeline")); + return result; + } + + // verify the required parameter 'body' is set + if (body == null) { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException( + 400, "Missing the required parameter 'body' when calling updatePipeline")); + return result; + } + // create path and map variables + String localVarPath = + "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}" + .replaceAll( + "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString())); + + Map localVarHeaderParams = new HashMap(); + + Invocation.Builder builder; + try { + builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.updatePipeline", + localVarPath, + new ArrayList(), + localVarHeaderParams, + new HashMap(), + new String[] {"application/json"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + } catch (ApiException ex) { + 
CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally(ex); + return result; + } + return apiClient.invokeAPIAsync( + "PUT", + builder, + localVarHeaderParams, + new String[] {"application/json"}, + localVarPostBody, + new HashMap(), + false, + new GenericType() {}); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipeline.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipeline.java new file mode 100644 index 00000000000..f7f96865b8f --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipeline.java @@ -0,0 +1,145 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Top-level schema representing a pipeline. 
*/ +@JsonPropertyOrder({ObservabilityPipeline.JSON_PROPERTY_DATA}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipeline { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DATA = "data"; + private ObservabilityPipelineData data; + + public ObservabilityPipeline() {} + + @JsonCreator + public ObservabilityPipeline( + @JsonProperty(required = true, value = JSON_PROPERTY_DATA) ObservabilityPipelineData data) { + this.data = data; + this.unparsed |= data.unparsed; + } + + public ObservabilityPipeline data(ObservabilityPipelineData data) { + this.data = data; + this.unparsed |= data.unparsed; + return this; + } + + /** + * Contains the pipeline’s ID, type, and configuration attributes. + * + * @return data + */ + @JsonProperty(JSON_PROPERTY_DATA) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineData getData() { + return data; + } + + public void setData(ObservabilityPipelineData data) { + this.data = data; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipeline + */ + @JsonAnySetter + public ObservabilityPipeline putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. 
+ * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipeline object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipeline observabilityPipeline = (ObservabilityPipeline) o; + return Objects.equals(this.data, observabilityPipeline.data) + && Objects.equals(this.additionalProperties, observabilityPipeline.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(data, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipeline {\n"); + sb.append(" data: ").append(toIndentedString(data)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java new file mode 100644 index 00000000000..942e68603c8 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java @@ -0,0 +1,289 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The add_fields processor adds static key-value fields to logs. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_FIELDS, + ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAddFieldsProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FIELDS = "fields"; + private List fields = new ArrayList<>(); + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineAddFieldsProcessorType type = + ObservabilityPipelineAddFieldsProcessorType.ADD_FIELDS; + + public ObservabilityPipelineAddFieldsProcessor() {} + + @JsonCreator + public ObservabilityPipelineAddFieldsProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_FIELDS) + List fields, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineAddFieldsProcessorType type) { + this.fields = fields; + this.id = id; + this.include = include; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineAddFieldsProcessor fields( + List fields) { + this.fields = fields; + for (ObservabilityPipelineFieldValue item : fields) { + this.unparsed |= 
item.unparsed; + } + return this; + } + + public ObservabilityPipelineAddFieldsProcessor addFieldsItem( + ObservabilityPipelineFieldValue fieldsItem) { + this.fields.add(fieldsItem); + this.unparsed |= fieldsItem.unparsed; + return this; + } + + /** + * A list of static fields (key-value pairs) that is added to each log event processed by this + * component. + * + * @return fields + */ + @JsonProperty(JSON_PROPERTY_FIELDS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getFields() { + return fields; + } + + public void setFields(List fields) { + this.fields = fields; + } + + public ObservabilityPipelineAddFieldsProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineAddFieldsProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineAddFieldsProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineAddFieldsProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineAddFieldsProcessor type( + ObservabilityPipelineAddFieldsProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be add_fields. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAddFieldsProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineAddFieldsProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAddFieldsProcessor + */ + @JsonAnySetter + public ObservabilityPipelineAddFieldsProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineAddFieldsProcessor object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAddFieldsProcessor observabilityPipelineAddFieldsProcessor = + (ObservabilityPipelineAddFieldsProcessor) o; + return Objects.equals(this.fields, observabilityPipelineAddFieldsProcessor.fields) + && Objects.equals(this.id, observabilityPipelineAddFieldsProcessor.id) + && Objects.equals(this.include, observabilityPipelineAddFieldsProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineAddFieldsProcessor.inputs) + && Objects.equals(this.type, observabilityPipelineAddFieldsProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineAddFieldsProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(fields, id, include, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAddFieldsProcessor {\n"); + sb.append(" fields: ").append(toIndentedString(fields)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string 
with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessorType.java new file mode 100644 index 00000000000..22e3da400a6 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessorType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be add_fields. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineAddFieldsProcessorType + .ObservabilityPipelineAddFieldsProcessorTypeSerializer.class) +public class ObservabilityPipelineAddFieldsProcessorType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("add_fields")); + + public static final ObservabilityPipelineAddFieldsProcessorType ADD_FIELDS = + new ObservabilityPipelineAddFieldsProcessorType("add_fields"); + + ObservabilityPipelineAddFieldsProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAddFieldsProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineAddFieldsProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAddFieldsProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineAddFieldsProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAddFieldsProcessorType fromValue(String value) { + return new ObservabilityPipelineAddFieldsProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java new file mode 100644 index 00000000000..0be81362fe3 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java @@ -0,0 +1,239 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** Specifies the pipeline's configuration, including its sources, processors, and destinations. */ +@JsonPropertyOrder({ + ObservabilityPipelineConfig.JSON_PROPERTY_DESTINATIONS, + ObservabilityPipelineConfig.JSON_PROPERTY_PROCESSORS, + ObservabilityPipelineConfig.JSON_PROPERTY_SOURCES +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineConfig { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DESTINATIONS = "destinations"; + private List destinations = new ArrayList<>(); + + public static final String JSON_PROPERTY_PROCESSORS = "processors"; + private List processors = new ArrayList<>(); + + public static final String JSON_PROPERTY_SOURCES = "sources"; + private List sources = new ArrayList<>(); + + public ObservabilityPipelineConfig() {} + + @JsonCreator + public ObservabilityPipelineConfig( + @JsonProperty(required = true, value = JSON_PROPERTY_DESTINATIONS) + List destinations, + @JsonProperty(required = true, value = JSON_PROPERTY_PROCESSORS) + List processors, + @JsonProperty(required = true, value = JSON_PROPERTY_SOURCES) + List sources) { + this.destinations = destinations; + this.processors = processors; + this.sources = sources; + } + + public ObservabilityPipelineConfig destinations( + List destinations) { + this.destinations = destinations; + for 
(ObservabilityPipelineConfigDestinationItem item : destinations) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineConfig addDestinationsItem( + ObservabilityPipelineConfigDestinationItem destinationsItem) { + this.destinations.add(destinationsItem); + this.unparsed |= destinationsItem.unparsed; + return this; + } + + /** + * A list of destination components where processed logs are sent. + * + * @return destinations + */ + @JsonProperty(JSON_PROPERTY_DESTINATIONS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getDestinations() { + return destinations; + } + + public void setDestinations(List destinations) { + this.destinations = destinations; + } + + public ObservabilityPipelineConfig processors( + List processors) { + this.processors = processors; + for (ObservabilityPipelineConfigProcessorItem item : processors) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineConfig addProcessorsItem( + ObservabilityPipelineConfigProcessorItem processorsItem) { + this.processors.add(processorsItem); + this.unparsed |= processorsItem.unparsed; + return this; + } + + /** + * A list of processors that transform or enrich log data. + * + * @return processors + */ + @JsonProperty(JSON_PROPERTY_PROCESSORS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getProcessors() { + return processors; + } + + public void setProcessors(List processors) { + this.processors = processors; + } + + public ObservabilityPipelineConfig sources(List sources) { + this.sources = sources; + for (ObservabilityPipelineConfigSourceItem item : sources) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineConfig addSourcesItem( + ObservabilityPipelineConfigSourceItem sourcesItem) { + this.sources.add(sourcesItem); + this.unparsed |= sourcesItem.unparsed; + return this; + } + + /** + * A list of configured data sources for the pipeline. 
+ * + * @return sources + */ + @JsonProperty(JSON_PROPERTY_SOURCES) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getSources() { + return sources; + } + + public void setSources(List sources) { + this.sources = sources; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineConfig + */ + @JsonAnySetter + public ObservabilityPipelineConfig putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineConfig object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineConfig observabilityPipelineConfig = (ObservabilityPipelineConfig) o; + return Objects.equals(this.destinations, observabilityPipelineConfig.destinations) + && Objects.equals(this.processors, observabilityPipelineConfig.processors) + && Objects.equals(this.sources, observabilityPipelineConfig.sources) + && Objects.equals( + this.additionalProperties, observabilityPipelineConfig.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(destinations, processors, sources, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineConfig {\n"); + sb.append(" destinations: ").append(toIndentedString(destinations)).append("\n"); + sb.append(" processors: ").append(toIndentedString(processors)).append("\n"); + sb.append(" sources: ").append(toIndentedString(sources)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java new file mode 100644 index 00000000000..f6d74a9f38a --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java @@ -0,0 +1,240 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.AbstractOpenApiSchema; +import com.datadog.api.client.JSON; +import com.datadog.api.client.UnparsedObject; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import jakarta.ws.rs.core.GenericType; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +@JsonDeserialize( + using = + ObservabilityPipelineConfigDestinationItem + .ObservabilityPipelineConfigDestinationItemDeserializer.class) +@JsonSerialize( + using = + ObservabilityPipelineConfigDestinationItem + .ObservabilityPipelineConfigDestinationItemSerializer.class) +public class ObservabilityPipelineConfigDestinationItem extends AbstractOpenApiSchema { + private static final Logger log = + 
Logger.getLogger(ObservabilityPipelineConfigDestinationItem.class.getName()); + + @JsonIgnore public boolean unparsed = false; + + public static class ObservabilityPipelineConfigDestinationItemSerializer + extends StdSerializer { + public ObservabilityPipelineConfigDestinationItemSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineConfigDestinationItemSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineConfigDestinationItem value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.getActualInstance()); + } + } + + public static class ObservabilityPipelineConfigDestinationItemDeserializer + extends StdDeserializer { + public ObservabilityPipelineConfigDestinationItemDeserializer() { + this(ObservabilityPipelineConfigDestinationItem.class); + } + + public ObservabilityPipelineConfigDestinationItemDeserializer(Class vc) { + super(vc); + } + + @Override + public ObservabilityPipelineConfigDestinationItem deserialize( + JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { + JsonNode tree = jp.readValueAsTree(); + Object deserialized = null; + Object tmp = null; + boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); + int match = 0; + JsonToken token = tree.traverse(jp.getCodec()).nextToken(); + // deserialize ObservabilityPipelineDatadogLogsDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class) + || 
ObservabilityPipelineDatadogLogsDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class) + || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineDatadogLogsDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineDatadogLogsDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineDatadogLogsDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineDatadogLogsDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineDatadogLogsDestination'", + e); + } + + ObservabilityPipelineConfigDestinationItem ret = + new ObservabilityPipelineConfigDestinationItem(); + if (match == 1) { + ret.setActualInstance(deserialized); + } else { + Map res = + new ObjectMapper() + .readValue( + tree.traverse(jp.getCodec()).readValueAsTree().toString(), + new TypeReference>() {}); + ret.setActualInstance(new UnparsedObject(res)); + } + return ret; + } + + /** Handle deserialization of the 'null' value. */ + @Override + public ObservabilityPipelineConfigDestinationItem getNullValue(DeserializationContext ctxt) + throws JsonMappingException { + throw new JsonMappingException( + ctxt.getParser(), "ObservabilityPipelineConfigDestinationItem cannot be null"); + } + } + + // store a list of schema names defined in oneOf + public static final Map schemas = new HashMap(); + + public ObservabilityPipelineConfigDestinationItem() { + super("oneOf", Boolean.FALSE); + } + + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineDatadogLogsDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + static { + schemas.put( + "ObservabilityPipelineDatadogLogsDestination", + new GenericType() {}); + JSON.registerDescendants( + ObservabilityPipelineConfigDestinationItem.class, Collections.unmodifiableMap(schemas)); + } + + @Override + public Map getSchemas() { + return ObservabilityPipelineConfigDestinationItem.schemas; + } + + /** + * Set the instance that matches the oneOf child schema, check the instance parameter is valid + * against the oneOf child schemas: ObservabilityPipelineDatadogLogsDestination + * + *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a + * composed schema (allOf, anyOf, oneOf). + */ + @Override + public void setActualInstance(Object instance) { + if (JSON.isInstanceOf( + ObservabilityPipelineDatadogLogsDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + + if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + throw new RuntimeException( + "Invalid instance type. Must be ObservabilityPipelineDatadogLogsDestination"); + } + + /** + * Get the actual instance, which can be the following: + * ObservabilityPipelineDatadogLogsDestination + * + * @return The actual instance (ObservabilityPipelineDatadogLogsDestination) + */ + @Override + public Object getActualInstance() { + return super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineDatadogLogsDestination`. If the actual + * instance is not `ObservabilityPipelineDatadogLogsDestination`, the ClassCastException will be + * thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineDatadogLogsDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogLogsDestination` + */ + public ObservabilityPipelineDatadogLogsDestination + getObservabilityPipelineDatadogLogsDestination() throws ClassCastException { + return (ObservabilityPipelineDatadogLogsDestination) super.getActualInstance(); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java new file mode 100644 index 00000000000..aa99b3edb23 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java @@ -0,0 +1,626 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.AbstractOpenApiSchema; +import com.datadog.api.client.JSON; +import com.datadog.api.client.UnparsedObject; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import jakarta.ws.rs.core.GenericType; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +@JsonDeserialize( + using = + ObservabilityPipelineConfigProcessorItem + .ObservabilityPipelineConfigProcessorItemDeserializer.class) +@JsonSerialize( + using = + ObservabilityPipelineConfigProcessorItem.ObservabilityPipelineConfigProcessorItemSerializer + .class) +public class ObservabilityPipelineConfigProcessorItem extends AbstractOpenApiSchema { + private static final Logger log = + Logger.getLogger(ObservabilityPipelineConfigProcessorItem.class.getName()); + + @JsonIgnore public boolean unparsed = false; + + public static 
class ObservabilityPipelineConfigProcessorItemSerializer + extends StdSerializer { + public ObservabilityPipelineConfigProcessorItemSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineConfigProcessorItemSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineConfigProcessorItem value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.getActualInstance()); + } + } + + public static class ObservabilityPipelineConfigProcessorItemDeserializer + extends StdDeserializer { + public ObservabilityPipelineConfigProcessorItemDeserializer() { + this(ObservabilityPipelineConfigProcessorItem.class); + } + + public ObservabilityPipelineConfigProcessorItemDeserializer(Class vc) { + super(vc); + } + + @Override + public ObservabilityPipelineConfigProcessorItem deserialize( + JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { + JsonNode tree = jp.readValueAsTree(); + Object deserialized = null; + Object tmp = null; + boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); + int match = 0; + JsonToken token = tree.traverse(jp.getCodec()).nextToken(); + // deserialize ObservabilityPipelineFilterProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineFilterProcessor.class.equals(Integer.class) + || ObservabilityPipelineFilterProcessor.class.equals(Long.class) + || ObservabilityPipelineFilterProcessor.class.equals(Float.class) + || ObservabilityPipelineFilterProcessor.class.equals(Double.class) + || ObservabilityPipelineFilterProcessor.class.equals(Boolean.class) + || ObservabilityPipelineFilterProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineFilterProcessor.class.equals(Integer.class) + || 
ObservabilityPipelineFilterProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineFilterProcessor.class.equals(Float.class) + || ObservabilityPipelineFilterProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineFilterProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineFilterProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineFilterProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineFilterProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineFilterProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineFilterProcessor'", + e); + } + + // deserialize ObservabilityPipelineParseJSONProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(String.class)) { + attemptParsing = 
typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineParseJSONProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineParseJSONProcessor.class.equals(Float.class) + || ObservabilityPipelineParseJSONProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineParseJSONProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineParseJSONProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineParseJSONProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineParseJSONProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineParseJSONProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineParseJSONProcessor'", + e); + } + + // deserialize ObservabilityPipelineQuotaProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Long.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Float.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Double.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) + || ObservabilityPipelineQuotaProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineQuotaProcessor.class.equals(Integer.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineQuotaProcessor.class.equals(Float.class) + || ObservabilityPipelineQuotaProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineQuotaProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineQuotaProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineQuotaProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not 
perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineQuotaProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineQuotaProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineQuotaProcessor'", + e); + } + + // deserialize ObservabilityPipelineAddFieldsProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineAddFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineAddFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddFieldsProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineAddFieldsProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineAddFieldsProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + 
.readValueAs(ObservabilityPipelineAddFieldsProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineAddFieldsProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineAddFieldsProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineAddFieldsProcessor'", + e); + } + + // deserialize ObservabilityPipelineRemoveFieldsProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineRemoveFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRemoveFieldsProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineRemoveFieldsProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + 
attemptParsing |= + (ObservabilityPipelineRemoveFieldsProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineRemoveFieldsProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineRemoveFieldsProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineRemoveFieldsProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineRemoveFieldsProcessor'", + e); + } + + // deserialize ObservabilityPipelineRenameFieldsProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Integer.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineRenameFieldsProcessor.class.equals(Float.class) + || ObservabilityPipelineRenameFieldsProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + 
|| token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineRenameFieldsProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineRenameFieldsProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineRenameFieldsProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineRenameFieldsProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineRenameFieldsProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineRenameFieldsProcessor'", + e); + } + + ObservabilityPipelineConfigProcessorItem ret = new ObservabilityPipelineConfigProcessorItem(); + if (match == 1) { + ret.setActualInstance(deserialized); + } else { + Map res = + new ObjectMapper() + .readValue( + tree.traverse(jp.getCodec()).readValueAsTree().toString(), + new TypeReference>() {}); + ret.setActualInstance(new UnparsedObject(res)); + } + return ret; + } + + /** Handle deserialization of the 'null' value. 
*/ + @Override + public ObservabilityPipelineConfigProcessorItem getNullValue(DeserializationContext ctxt) + throws JsonMappingException { + throw new JsonMappingException( + ctxt.getParser(), "ObservabilityPipelineConfigProcessorItem cannot be null"); + } + } + + // store a list of schema names defined in oneOf + public static final Map schemas = new HashMap(); + + public ObservabilityPipelineConfigProcessorItem() { + super("oneOf", Boolean.FALSE); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineFilterProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseJSONProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineQuotaProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddFieldsProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRemoveFieldsProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameFieldsProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + static { + schemas.put( + "ObservabilityPipelineFilterProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineParseJSONProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineQuotaProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineAddFieldsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineRemoveFieldsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineRenameFieldsProcessor", + new GenericType() {}); + JSON.registerDescendants( + 
ObservabilityPipelineConfigProcessorItem.class, Collections.unmodifiableMap(schemas)); + } + + @Override + public Map getSchemas() { + return ObservabilityPipelineConfigProcessorItem.schemas; + } + + /** + * Set the instance that matches the oneOf child schema, check the instance parameter is valid + * against the oneOf child schemas: ObservabilityPipelineFilterProcessor, + * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, + * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, + * ObservabilityPipelineRenameFieldsProcessor + * + *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a + * composed schema (allOf, anyOf, oneOf). + */ + @Override + public void setActualInstance(Object instance) { + if (JSON.isInstanceOf( + ObservabilityPipelineFilterProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineParseJSONProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineQuotaProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineAddFieldsProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineRemoveFieldsProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineRenameFieldsProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + + if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + throw new RuntimeException( + "Invalid instance type. 
Must be ObservabilityPipelineFilterProcessor," + + " ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor," + + " ObservabilityPipelineAddFieldsProcessor," + + " ObservabilityPipelineRemoveFieldsProcessor," + + " ObservabilityPipelineRenameFieldsProcessor"); + } + + /** + * Get the actual instance, which can be the following: ObservabilityPipelineFilterProcessor, + * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, + * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, + * ObservabilityPipelineRenameFieldsProcessor + * + * @return The actual instance (ObservabilityPipelineFilterProcessor, + * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, + * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, + * ObservabilityPipelineRenameFieldsProcessor) + */ + @Override + public Object getActualInstance() { + return super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineFilterProcessor`. If the actual instance is + * not `ObservabilityPipelineFilterProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineFilterProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineFilterProcessor` + */ + public ObservabilityPipelineFilterProcessor getObservabilityPipelineFilterProcessor() + throws ClassCastException { + return (ObservabilityPipelineFilterProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineParseJSONProcessor`. If the actual instance is + * not `ObservabilityPipelineParseJSONProcessor`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineParseJSONProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineParseJSONProcessor` + */ + public ObservabilityPipelineParseJSONProcessor getObservabilityPipelineParseJSONProcessor() + throws ClassCastException { + return (ObservabilityPipelineParseJSONProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineQuotaProcessor`. If the actual instance is not + * `ObservabilityPipelineQuotaProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineQuotaProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineQuotaProcessor` + */ + public ObservabilityPipelineQuotaProcessor getObservabilityPipelineQuotaProcessor() + throws ClassCastException { + return (ObservabilityPipelineQuotaProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineAddFieldsProcessor`. If the actual instance is + * not `ObservabilityPipelineAddFieldsProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineAddFieldsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAddFieldsProcessor` + */ + public ObservabilityPipelineAddFieldsProcessor getObservabilityPipelineAddFieldsProcessor() + throws ClassCastException { + return (ObservabilityPipelineAddFieldsProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineRemoveFieldsProcessor`. If the actual instance + * is not `ObservabilityPipelineRemoveFieldsProcessor`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineRemoveFieldsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineRemoveFieldsProcessor` + */ + public ObservabilityPipelineRemoveFieldsProcessor getObservabilityPipelineRemoveFieldsProcessor() + throws ClassCastException { + return (ObservabilityPipelineRemoveFieldsProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineRenameFieldsProcessor`. If the actual instance + * is not `ObservabilityPipelineRenameFieldsProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineRenameFieldsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineRenameFieldsProcessor` + */ + public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRenameFieldsProcessor() + throws ClassCastException { + return (ObservabilityPipelineRenameFieldsProcessor) super.getActualInstance(); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java new file mode 100644 index 00000000000..68261e72ac1 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java @@ -0,0 +1,309 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.AbstractOpenApiSchema; +import com.datadog.api.client.JSON; +import com.datadog.api.client.UnparsedObject; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import jakarta.ws.rs.core.GenericType; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +@JsonDeserialize( + using = + ObservabilityPipelineConfigSourceItem.ObservabilityPipelineConfigSourceItemDeserializer + .class) +@JsonSerialize( + using = + ObservabilityPipelineConfigSourceItem.ObservabilityPipelineConfigSourceItemSerializer.class) +public class ObservabilityPipelineConfigSourceItem extends AbstractOpenApiSchema { + private static final Logger log = + Logger.getLogger(ObservabilityPipelineConfigSourceItem.class.getName()); + + @JsonIgnore public boolean unparsed = false; + + public static class 
ObservabilityPipelineConfigSourceItemSerializer + extends StdSerializer { + public ObservabilityPipelineConfigSourceItemSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineConfigSourceItemSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineConfigSourceItem value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.getActualInstance()); + } + } + + public static class ObservabilityPipelineConfigSourceItemDeserializer + extends StdDeserializer { + public ObservabilityPipelineConfigSourceItemDeserializer() { + this(ObservabilityPipelineConfigSourceItem.class); + } + + public ObservabilityPipelineConfigSourceItemDeserializer(Class vc) { + super(vc); + } + + @Override + public ObservabilityPipelineConfigSourceItem deserialize( + JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { + JsonNode tree = jp.readValueAsTree(); + Object deserialized = null; + Object tmp = null; + boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); + int match = 0; + JsonToken token = tree.traverse(jp.getCodec()).nextToken(); + // deserialize ObservabilityPipelineKafkaSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineKafkaSource.class.equals(Integer.class) + || ObservabilityPipelineKafkaSource.class.equals(Long.class) + || ObservabilityPipelineKafkaSource.class.equals(Float.class) + || ObservabilityPipelineKafkaSource.class.equals(Double.class) + || ObservabilityPipelineKafkaSource.class.equals(Boolean.class) + || ObservabilityPipelineKafkaSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineKafkaSource.class.equals(Integer.class) + || ObservabilityPipelineKafkaSource.class.equals(Long.class)) + && token == 
JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineKafkaSource.class.equals(Float.class) + || ObservabilityPipelineKafkaSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineKafkaSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineKafkaSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineKafkaSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineKafkaSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineKafkaSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, "Input data does not match schema 'ObservabilityPipelineKafkaSource'", e); + } + + // deserialize ObservabilityPipelineDatadogAgentSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineDatadogAgentSource.class.equals(Integer.class) + || ObservabilityPipelineDatadogAgentSource.class.equals(Long.class) + || ObservabilityPipelineDatadogAgentSource.class.equals(Float.class) + || ObservabilityPipelineDatadogAgentSource.class.equals(Double.class) + || ObservabilityPipelineDatadogAgentSource.class.equals(Boolean.class) + || ObservabilityPipelineDatadogAgentSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + 
((ObservabilityPipelineDatadogAgentSource.class.equals(Integer.class) + || ObservabilityPipelineDatadogAgentSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineDatadogAgentSource.class.equals(Float.class) + || ObservabilityPipelineDatadogAgentSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineDatadogAgentSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineDatadogAgentSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineDatadogAgentSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineDatadogAgentSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineDatadogAgentSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineDatadogAgentSource'", + e); + } + + ObservabilityPipelineConfigSourceItem ret = new ObservabilityPipelineConfigSourceItem(); + if (match == 1) { + ret.setActualInstance(deserialized); + } else { + Map res = + new ObjectMapper() + .readValue( + tree.traverse(jp.getCodec()).readValueAsTree().toString(), + new TypeReference>() {}); + ret.setActualInstance(new UnparsedObject(res)); + } + return ret; + } + + /** Handle deserialization of the 'null' value. 
*/ + @Override + public ObservabilityPipelineConfigSourceItem getNullValue(DeserializationContext ctxt) + throws JsonMappingException { + throw new JsonMappingException( + ctxt.getParser(), "ObservabilityPipelineConfigSourceItem cannot be null"); + } + } + + // store a list of schema names defined in oneOf + public static final Map schemas = new HashMap(); + + public ObservabilityPipelineConfigSourceItem() { + super("oneOf", Boolean.FALSE); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineKafkaSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineDatadogAgentSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + static { + schemas.put( + "ObservabilityPipelineKafkaSource", new GenericType() {}); + schemas.put( + "ObservabilityPipelineDatadogAgentSource", + new GenericType() {}); + JSON.registerDescendants( + ObservabilityPipelineConfigSourceItem.class, Collections.unmodifiableMap(schemas)); + } + + @Override + public Map getSchemas() { + return ObservabilityPipelineConfigSourceItem.schemas; + } + + /** + * Set the instance that matches the oneOf child schema, check the instance parameter is valid + * against the oneOf child schemas: ObservabilityPipelineKafkaSource, + * ObservabilityPipelineDatadogAgentSource + * + *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a + * composed schema (allOf, anyOf, oneOf). + */ + @Override + public void setActualInstance(Object instance) { + if (JSON.isInstanceOf( + ObservabilityPipelineKafkaSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineDatadogAgentSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + + if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + throw new RuntimeException( + "Invalid instance type. Must be ObservabilityPipelineKafkaSource," + + " ObservabilityPipelineDatadogAgentSource"); + } + + /** + * Get the actual instance, which can be the following: ObservabilityPipelineKafkaSource, + * ObservabilityPipelineDatadogAgentSource + * + * @return The actual instance (ObservabilityPipelineKafkaSource, + * ObservabilityPipelineDatadogAgentSource) + */ + @Override + public Object getActualInstance() { + return super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineKafkaSource`. If the actual instance is not + * `ObservabilityPipelineKafkaSource`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineKafkaSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineKafkaSource` + */ + public ObservabilityPipelineKafkaSource getObservabilityPipelineKafkaSource() + throws ClassCastException { + return (ObservabilityPipelineKafkaSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineDatadogAgentSource`. If the actual instance is + * not `ObservabilityPipelineDatadogAgentSource`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineDatadogAgentSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineDatadogAgentSource` + */ + public ObservabilityPipelineDatadogAgentSource getObservabilityPipelineDatadogAgentSource() + throws ClassCastException { + return (ObservabilityPipelineDatadogAgentSource) super.getActualInstance(); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequest.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequest.java new file mode 100644 index 00000000000..8f865cc65d3 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequest.java @@ -0,0 +1,148 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Top-level schema representing a pipeline. 
*/ +@JsonPropertyOrder({ObservabilityPipelineCreateRequest.JSON_PROPERTY_DATA}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineCreateRequest { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DATA = "data"; + private ObservabilityPipelineCreateRequestData data; + + public ObservabilityPipelineCreateRequest() {} + + @JsonCreator + public ObservabilityPipelineCreateRequest( + @JsonProperty(required = true, value = JSON_PROPERTY_DATA) + ObservabilityPipelineCreateRequestData data) { + this.data = data; + this.unparsed |= data.unparsed; + } + + public ObservabilityPipelineCreateRequest data(ObservabilityPipelineCreateRequestData data) { + this.data = data; + this.unparsed |= data.unparsed; + return this; + } + + /** + * Contains the pipeline’s ID, type, and configuration attributes. + * + * @return data + */ + @JsonProperty(JSON_PROPERTY_DATA) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineCreateRequestData getData() { + return data; + } + + public void setData(ObservabilityPipelineCreateRequestData data) { + this.data = data; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineCreateRequest + */ + @JsonAnySetter + public ObservabilityPipelineCreateRequest putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineCreateRequest object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineCreateRequest observabilityPipelineCreateRequest = + (ObservabilityPipelineCreateRequest) o; + return Objects.equals(this.data, observabilityPipelineCreateRequest.data) + && Objects.equals( + this.additionalProperties, observabilityPipelineCreateRequest.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(data, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineCreateRequest {\n"); + sb.append(" data: ").append(toIndentedString(data)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequestData.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequestData.java new file mode 100644 index 00000000000..a0145ea17d1 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequestData.java @@ -0,0 +1,180 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Contains the pipeline’s ID, type, and configuration attributes. */ +@JsonPropertyOrder({ + ObservabilityPipelineCreateRequestData.JSON_PROPERTY_ATTRIBUTES, + ObservabilityPipelineCreateRequestData.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineCreateRequestData { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ATTRIBUTES = "attributes"; + private ObservabilityPipelineDataAttributes attributes; + + public static final String JSON_PROPERTY_TYPE = "type"; + private String type = "pipelines"; + + public ObservabilityPipelineCreateRequestData() {} + + @JsonCreator + public ObservabilityPipelineCreateRequestData( + @JsonProperty(required = true, value = JSON_PROPERTY_ATTRIBUTES) + ObservabilityPipelineDataAttributes attributes, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) String type) { + this.attributes = attributes; + this.unparsed |= attributes.unparsed; + this.type = type; + } + + public ObservabilityPipelineCreateRequestData attributes( + ObservabilityPipelineDataAttributes attributes) { + this.attributes = attributes; + this.unparsed |= attributes.unparsed; + return this; + } + + /** + * Defines the pipeline’s name and its components (sources, processors, and destinations). 
+ * + * @return attributes + */ + @JsonProperty(JSON_PROPERTY_ATTRIBUTES) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineDataAttributes getAttributes() { + return attributes; + } + + public void setAttributes(ObservabilityPipelineDataAttributes attributes) { + this.attributes = attributes; + } + + public ObservabilityPipelineCreateRequestData type(String type) { + this.type = type; + return this; + } + + /** + * The resource type identifier. For pipeline resources, this should always be set to + * pipelines. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineCreateRequestData + */ + @JsonAnySetter + public ObservabilityPipelineCreateRequestData putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineCreateRequestData object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineCreateRequestData observabilityPipelineCreateRequestData = + (ObservabilityPipelineCreateRequestData) o; + return Objects.equals(this.attributes, observabilityPipelineCreateRequestData.attributes) + && Objects.equals(this.type, observabilityPipelineCreateRequestData.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineCreateRequestData.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(attributes, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineCreateRequestData {\n"); + sb.append(" attributes: ").append(toIndentedString(attributes)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineData.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineData.java new file mode 100644 index 00000000000..9452e8f062d --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineData.java @@ -0,0 +1,206 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Contains the pipeline’s ID, type, and configuration attributes. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineData.JSON_PROPERTY_ATTRIBUTES, + ObservabilityPipelineData.JSON_PROPERTY_ID, + ObservabilityPipelineData.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineData { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ATTRIBUTES = "attributes"; + private ObservabilityPipelineDataAttributes attributes; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TYPE = "type"; + private String type = "pipelines"; + + public ObservabilityPipelineData() {} + + @JsonCreator + public ObservabilityPipelineData( + @JsonProperty(required = true, value = JSON_PROPERTY_ATTRIBUTES) + ObservabilityPipelineDataAttributes attributes, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) String type) { + this.attributes = attributes; + this.unparsed |= attributes.unparsed; + this.id = id; + this.type = type; + } + + public ObservabilityPipelineData attributes(ObservabilityPipelineDataAttributes attributes) { + this.attributes = attributes; + this.unparsed |= attributes.unparsed; + return this; + } + + /** + * Defines the pipeline’s name and its components (sources, processors, and destinations). + * + * @return attributes + */ + @JsonProperty(JSON_PROPERTY_ATTRIBUTES) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineDataAttributes getAttributes() { + return attributes; + } + + public void setAttributes(ObservabilityPipelineDataAttributes attributes) { + this.attributes = attributes; + } + + public ObservabilityPipelineData id(String id) { + this.id = id; + return this; + } + + /** + * Unique identifier for the pipeline. 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineData type(String type) { + this.type = type; + return this; + } + + /** + * The resource type identifier. For pipeline resources, this should always be set to + * pipelines. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineData + */ + @JsonAnySetter + public ObservabilityPipelineData putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineData object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineData observabilityPipelineData = (ObservabilityPipelineData) o; + return Objects.equals(this.attributes, observabilityPipelineData.attributes) + && Objects.equals(this.id, observabilityPipelineData.id) + && Objects.equals(this.type, observabilityPipelineData.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineData.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(attributes, id, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineData {\n"); + sb.append(" attributes: ").append(toIndentedString(attributes)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDataAttributes.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDataAttributes.java new file mode 100644 index 00000000000..4fd4d0b4ee3 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDataAttributes.java @@ -0,0 +1,178 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Defines the pipeline’s name and its components (sources, processors, and destinations). 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineDataAttributes.JSON_PROPERTY_CONFIG, + ObservabilityPipelineDataAttributes.JSON_PROPERTY_NAME +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineDataAttributes { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_CONFIG = "config"; + private ObservabilityPipelineConfig config; + + public static final String JSON_PROPERTY_NAME = "name"; + private String name; + + public ObservabilityPipelineDataAttributes() {} + + @JsonCreator + public ObservabilityPipelineDataAttributes( + @JsonProperty(required = true, value = JSON_PROPERTY_CONFIG) + ObservabilityPipelineConfig config, + @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name) { + this.config = config; + this.unparsed |= config.unparsed; + this.name = name; + } + + public ObservabilityPipelineDataAttributes config(ObservabilityPipelineConfig config) { + this.config = config; + this.unparsed |= config.unparsed; + return this; + } + + /** + * Specifies the pipeline's configuration, including its sources, processors, and destinations. + * + * @return config + */ + @JsonProperty(JSON_PROPERTY_CONFIG) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineConfig getConfig() { + return config; + } + + public void setConfig(ObservabilityPipelineConfig config) { + this.config = config; + } + + public ObservabilityPipelineDataAttributes name(String name) { + this.name = name; + return this; + } + + /** + * Name of the pipeline. + * + * @return name + */ + @JsonProperty(JSON_PROPERTY_NAME) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + /** + * A container for additional, undeclared properties. 
This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineDataAttributes + */ + @JsonAnySetter + public ObservabilityPipelineDataAttributes putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineDataAttributes object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineDataAttributes observabilityPipelineDataAttributes = + (ObservabilityPipelineDataAttributes) o; + return Objects.equals(this.config, observabilityPipelineDataAttributes.config) + && Objects.equals(this.name, observabilityPipelineDataAttributes.name) + && Objects.equals( + this.additionalProperties, observabilityPipelineDataAttributes.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(config, name, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineDataAttributes {\n"); + sb.append(" config: ").append(toIndentedString(config)).append("\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java new file mode 100644 index 00000000000..ba5b215bf5e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java @@ -0,0 +1,213 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The datadog_agent source collects logs from the Datadog Agent. */ +@JsonPropertyOrder({ + ObservabilityPipelineDatadogAgentSource.JSON_PROPERTY_ID, + ObservabilityPipelineDatadogAgentSource.JSON_PROPERTY_TLS, + ObservabilityPipelineDatadogAgentSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineDatadogAgentSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineDatadogAgentSourceType type = + ObservabilityPipelineDatadogAgentSourceType.DATADOG_AGENT; + + public ObservabilityPipelineDatadogAgentSource() {} + + @JsonCreator + public ObservabilityPipelineDatadogAgentSource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineDatadogAgentSourceType type) { + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineDatadogAgentSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. 
Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineDatadogAgentSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineDatadogAgentSource type( + ObservabilityPipelineDatadogAgentSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be datadog_agent. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineDatadogAgentSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineDatadogAgentSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineDatadogAgentSource + */ + @JsonAnySetter + public ObservabilityPipelineDatadogAgentSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineDatadogAgentSource object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineDatadogAgentSource observabilityPipelineDatadogAgentSource = + (ObservabilityPipelineDatadogAgentSource) o; + return Objects.equals(this.id, observabilityPipelineDatadogAgentSource.id) + && Objects.equals(this.tls, observabilityPipelineDatadogAgentSource.tls) + && Objects.equals(this.type, observabilityPipelineDatadogAgentSource.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineDatadogAgentSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineDatadogAgentSource {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSourceType.java new file mode 100644 index 00000000000..cae83310309 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSourceType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. The value should always be datadog_agent. */ +@JsonSerialize( + using = + ObservabilityPipelineDatadogAgentSourceType + .ObservabilityPipelineDatadogAgentSourceTypeSerializer.class) +public class ObservabilityPipelineDatadogAgentSourceType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("datadog_agent")); + + public static final ObservabilityPipelineDatadogAgentSourceType DATADOG_AGENT = + new ObservabilityPipelineDatadogAgentSourceType("datadog_agent"); + + ObservabilityPipelineDatadogAgentSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineDatadogAgentSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineDatadogAgentSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineDatadogAgentSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineDatadogAgentSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineDatadogAgentSourceType fromValue(String value) { + return new ObservabilityPipelineDatadogAgentSourceType(value); + } +} diff 
--git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestination.java new file mode 100644 index 00000000000..c408c2da16f --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestination.java @@ -0,0 +1,220 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The datadog_logs destination forwards logs to Datadog Log Management. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineDatadogLogsDestination.JSON_PROPERTY_ID, + ObservabilityPipelineDatadogLogsDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineDatadogLogsDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineDatadogLogsDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineDatadogLogsDestinationType type = + ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS; + + public ObservabilityPipelineDatadogLogsDestination() {} + + @JsonCreator + public ObservabilityPipelineDatadogLogsDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineDatadogLogsDestinationType type) { + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineDatadogLogsDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineDatadogLogsDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineDatadogLogsDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineDatadogLogsDestination type( + ObservabilityPipelineDatadogLogsDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be datadog_logs. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineDatadogLogsDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineDatadogLogsDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineDatadogLogsDestination + */ + @JsonAnySetter + public ObservabilityPipelineDatadogLogsDestination putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineDatadogLogsDestination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineDatadogLogsDestination observabilityPipelineDatadogLogsDestination = + (ObservabilityPipelineDatadogLogsDestination) o; + return Objects.equals(this.id, observabilityPipelineDatadogLogsDestination.id) + && Objects.equals(this.inputs, observabilityPipelineDatadogLogsDestination.inputs) + && Objects.equals(this.type, observabilityPipelineDatadogLogsDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineDatadogLogsDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineDatadogLogsDestination {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestinationType.java new file mode 100644 index 00000000000..bdb48877812 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogLogsDestinationType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be datadog_logs. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineDatadogLogsDestinationType + .ObservabilityPipelineDatadogLogsDestinationTypeSerializer.class) +public class ObservabilityPipelineDatadogLogsDestinationType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("datadog_logs")); + + public static final ObservabilityPipelineDatadogLogsDestinationType DATADOG_LOGS = + new ObservabilityPipelineDatadogLogsDestinationType("datadog_logs"); + + ObservabilityPipelineDatadogLogsDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineDatadogLogsDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineDatadogLogsDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineDatadogLogsDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineDatadogLogsDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineDatadogLogsDestinationType fromValue(String value) { + return new ObservabilityPipelineDatadogLogsDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFieldValue.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFieldValue.java new file mode 100644 index 00000000000..b26bf728130 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFieldValue.java @@ -0,0 +1,175 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Represents a static key-value pair used in various processors. */ +@JsonPropertyOrder({ + ObservabilityPipelineFieldValue.JSON_PROPERTY_NAME, + ObservabilityPipelineFieldValue.JSON_PROPERTY_VALUE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineFieldValue { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_NAME = "name"; + private String name; + + public static final String JSON_PROPERTY_VALUE = "value"; + private String value; + + public ObservabilityPipelineFieldValue() {} + + @JsonCreator + public ObservabilityPipelineFieldValue( + @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name, + @JsonProperty(required = true, value = JSON_PROPERTY_VALUE) String value) { + this.name = name; + this.value = value; + } + + public ObservabilityPipelineFieldValue name(String name) { + this.name = name; + return this; + } + + /** + * The field name. + * + * @return name + */ + @JsonProperty(JSON_PROPERTY_NAME) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public ObservabilityPipelineFieldValue value(String value) { + this.value = value; + return this; + } + + /** + * The field value. 
+ * + * @return value + */ + @JsonProperty(JSON_PROPERTY_VALUE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineFieldValue + */ + @JsonAnySetter + public ObservabilityPipelineFieldValue putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineFieldValue object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineFieldValue observabilityPipelineFieldValue = + (ObservabilityPipelineFieldValue) o; + return Objects.equals(this.name, observabilityPipelineFieldValue.name) + && Objects.equals(this.value, observabilityPipelineFieldValue.value) + && Objects.equals( + this.additionalProperties, observabilityPipelineFieldValue.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(name, value, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineFieldValue {\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" value: ").append(toIndentedString(value)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessor.java new file mode 100644 index 00000000000..8eeee9526e2 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessor.java @@ -0,0 +1,250 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The filter processor allows conditional processing of logs based on a Datadog search + * query. Logs that match the include query are passed through; others are discarded. + */ +@JsonPropertyOrder({ + ObservabilityPipelineFilterProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineFilterProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineFilterProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineFilterProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineFilterProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineFilterProcessorType type = + ObservabilityPipelineFilterProcessorType.FILTER; + + public ObservabilityPipelineFilterProcessor() {} + + @JsonCreator + public ObservabilityPipelineFilterProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = 
JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineFilterProcessorType type) { + this.id = id; + this.include = include; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineFilterProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineFilterProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs should pass through the filter. Logs that + * match this query continue to downstream components; others are dropped. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineFilterProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineFilterProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineFilterProcessor type(ObservabilityPipelineFilterProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be filter. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineFilterProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineFilterProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineFilterProcessor + */ + @JsonAnySetter + public ObservabilityPipelineFilterProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineFilterProcessor object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineFilterProcessor observabilityPipelineFilterProcessor = + (ObservabilityPipelineFilterProcessor) o; + return Objects.equals(this.id, observabilityPipelineFilterProcessor.id) + && Objects.equals(this.include, observabilityPipelineFilterProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineFilterProcessor.inputs) + && Objects.equals(this.type, observabilityPipelineFilterProcessor.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineFilterProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, include, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineFilterProcessor {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessorType.java new file mode 100644 index 00000000000..24909c21649 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFilterProcessorType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be filter. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineFilterProcessorType.ObservabilityPipelineFilterProcessorTypeSerializer + .class) +public class ObservabilityPipelineFilterProcessorType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("filter")); + + public static final ObservabilityPipelineFilterProcessorType FILTER = + new ObservabilityPipelineFilterProcessorType("filter"); + + ObservabilityPipelineFilterProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineFilterProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineFilterProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineFilterProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineFilterProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineFilterProcessorType fromValue(String value) { + return new ObservabilityPipelineFilterProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java new file mode 100644 index 00000000000..6cedbd4e48b --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java @@ -0,0 +1,346 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The kafka source ingests data from Apache Kafka topics. */ +@JsonPropertyOrder({ + ObservabilityPipelineKafkaSource.JSON_PROPERTY_GROUP_ID, + ObservabilityPipelineKafkaSource.JSON_PROPERTY_ID, + ObservabilityPipelineKafkaSource.JSON_PROPERTY_LIBRDKAFKA_OPTIONS, + ObservabilityPipelineKafkaSource.JSON_PROPERTY_SASL, + ObservabilityPipelineKafkaSource.JSON_PROPERTY_TLS, + ObservabilityPipelineKafkaSource.JSON_PROPERTY_TOPICS, + ObservabilityPipelineKafkaSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineKafkaSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_GROUP_ID = "group_id"; + private String groupId; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_LIBRDKAFKA_OPTIONS = "librdkafka_options"; + private List librdkafkaOptions = null; + + public static final String JSON_PROPERTY_SASL = "sasl"; + private ObservabilityPipelineKafkaSourceSasl sasl; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TOPICS = "topics"; + private List topics = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineKafkaSourceType 
type = ObservabilityPipelineKafkaSourceType.KAFKA; + + public ObservabilityPipelineKafkaSource() {} + + @JsonCreator + public ObservabilityPipelineKafkaSource( + @JsonProperty(required = true, value = JSON_PROPERTY_GROUP_ID) String groupId, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TOPICS) List topics, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineKafkaSourceType type) { + this.groupId = groupId; + this.id = id; + this.topics = topics; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineKafkaSource groupId(String groupId) { + this.groupId = groupId; + return this; + } + + /** + * Consumer group ID used by the Kafka client. + * + * @return groupId + */ + @JsonProperty(JSON_PROPERTY_GROUP_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getGroupId() { + return groupId; + } + + public void setGroupId(String groupId) { + this.groupId = groupId; + } + + public ObservabilityPipelineKafkaSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineKafkaSource librdkafkaOptions( + List librdkafkaOptions) { + this.librdkafkaOptions = librdkafkaOptions; + for (ObservabilityPipelineKafkaSourceLibrdkafkaOption item : librdkafkaOptions) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineKafkaSource addLibrdkafkaOptionsItem( + ObservabilityPipelineKafkaSourceLibrdkafkaOption librdkafkaOptionsItem) { + if (this.librdkafkaOptions == null) { + this.librdkafkaOptions = new ArrayList<>(); + } + this.librdkafkaOptions.add(librdkafkaOptionsItem); + this.unparsed |= librdkafkaOptionsItem.unparsed; + return this; + } + + /** + * Optional list of advanced Kafka client configuration options, defined as key-value pairs. + * + * @return librdkafkaOptions + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_LIBRDKAFKA_OPTIONS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getLibrdkafkaOptions() { + return librdkafkaOptions; + } + + public void setLibrdkafkaOptions( + List librdkafkaOptions) { + this.librdkafkaOptions = librdkafkaOptions; + } + + public ObservabilityPipelineKafkaSource sasl(ObservabilityPipelineKafkaSourceSasl sasl) { + this.sasl = sasl; + this.unparsed |= sasl.unparsed; + return this; + } + + /** + * Specifies the SASL mechanism for authenticating with a Kafka cluster. 
+ * + * @return sasl + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SASL) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineKafkaSourceSasl getSasl() { + return sasl; + } + + public void setSasl(ObservabilityPipelineKafkaSourceSasl sasl) { + this.sasl = sasl; + } + + public ObservabilityPipelineKafkaSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineKafkaSource topics(List topics) { + this.topics = topics; + return this; + } + + public ObservabilityPipelineKafkaSource addTopicsItem(String topicsItem) { + this.topics.add(topicsItem); + return this; + } + + /** + * A list of Kafka topic names to subscribe to. The source ingests messages from each topic + * specified. + * + * @return topics + */ + @JsonProperty(JSON_PROPERTY_TOPICS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getTopics() { + return topics; + } + + public void setTopics(List topics) { + this.topics = topics; + } + + public ObservabilityPipelineKafkaSource type(ObservabilityPipelineKafkaSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be kafka. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineKafkaSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineKafkaSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineKafkaSource + */ + @JsonAnySetter + public ObservabilityPipelineKafkaSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineKafkaSource object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineKafkaSource observabilityPipelineKafkaSource = + (ObservabilityPipelineKafkaSource) o; + return Objects.equals(this.groupId, observabilityPipelineKafkaSource.groupId) + && Objects.equals(this.id, observabilityPipelineKafkaSource.id) + && Objects.equals( + this.librdkafkaOptions, observabilityPipelineKafkaSource.librdkafkaOptions) + && Objects.equals(this.sasl, observabilityPipelineKafkaSource.sasl) + && Objects.equals(this.tls, observabilityPipelineKafkaSource.tls) + && Objects.equals(this.topics, observabilityPipelineKafkaSource.topics) + && Objects.equals(this.type, observabilityPipelineKafkaSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineKafkaSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + groupId, id, librdkafkaOptions, sasl, tls, topics, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineKafkaSource {\n"); + sb.append(" groupId: ").append(toIndentedString(groupId)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" librdkafkaOptions: ").append(toIndentedString(librdkafkaOptions)).append("\n"); + sb.append(" sasl: ").append(toIndentedString(sasl)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" topics: ").append(toIndentedString(topics)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java new file mode 100644 index 00000000000..e7211139e49 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceLibrdkafkaOption.java @@ -0,0 +1,181 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * Represents a key-value pair used to configure low-level librdkafka client options + * for Kafka sources, such as timeouts, buffer sizes, and security settings. 
 */
@JsonPropertyOrder({
  ObservabilityPipelineKafkaSourceLibrdkafkaOption.JSON_PROPERTY_NAME,
  ObservabilityPipelineKafkaSourceLibrdkafkaOption.JSON_PROPERTY_VALUE
})
@jakarta.annotation.Generated(
    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class ObservabilityPipelineKafkaSourceLibrdkafkaOption {
  // Flipped to true when the wire payload contained values this model could not fully parse.
  @JsonIgnore public boolean unparsed = false;
  public static final String JSON_PROPERTY_NAME = "name";
  private String name;

  public static final String JSON_PROPERTY_VALUE = "value";
  private String value;

  public ObservabilityPipelineKafkaSourceLibrdkafkaOption() {}

  // Canonical constructor used by Jackson; both properties are required by the schema.
  @JsonCreator
  public ObservabilityPipelineKafkaSourceLibrdkafkaOption(
      @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name,
      @JsonProperty(required = true, value = JSON_PROPERTY_VALUE) String value) {
    this.name = name;
    this.value = value;
  }

  public ObservabilityPipelineKafkaSourceLibrdkafkaOption name(String name) {
    this.name = name;
    return this;
  }

  /**
   * The name of the librdkafka configuration option to set.
   *
   * @return name
   */
  @JsonProperty(JSON_PROPERTY_NAME)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  public ObservabilityPipelineKafkaSourceLibrdkafkaOption value(String value) {
    this.value = value;
    return this;
  }

  /**
   * The value assigned to the specified librdkafka configuration option.
   *
   * @return value
   */
  @JsonProperty(JSON_PROPERTY_VALUE)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getValue() {
    return value;
  }

  public void setValue(String value) {
    this.value = value;
  }

  /**
   * A container for additional, undeclared properties. This is a holder for any undeclared
   * properties as specified with the 'additionalProperties' keyword in the OAS document.
   */
  private Map additionalProperties;

  /**
   * Set the additional (undeclared) property with the specified name and value. If the property
   * does not already exist, create it otherwise replace it.
   *
   * @param key The arbitrary key to set
   * @param value The associated value
   * @return ObservabilityPipelineKafkaSourceLibrdkafkaOption
   */
  @JsonAnySetter
  public ObservabilityPipelineKafkaSourceLibrdkafkaOption putAdditionalProperty(
      String key, Object value) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new HashMap();
    }
    this.additionalProperties.put(key, value);
    return this;
  }

  /**
   * Return the additional (undeclared) property.
   *
   * @return The additional properties
   */
  @JsonAnyGetter
  public Map getAdditionalProperties() {
    return additionalProperties;
  }

  /**
   * Return the additional (undeclared) property with the specified name.
   *
   * @param key The arbitrary key to get
   * @return The specific additional property for the given key
   */
  public Object getAdditionalProperty(String key) {
    if (this.additionalProperties == null) {
      return null;
    }
    return this.additionalProperties.get(key);
  }

  /** Return true if this ObservabilityPipelineKafkaSourceLibrdkafkaOption object is equal to o. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ObservabilityPipelineKafkaSourceLibrdkafkaOption
        observabilityPipelineKafkaSourceLibrdkafkaOption =
            (ObservabilityPipelineKafkaSourceLibrdkafkaOption) o;
    return Objects.equals(this.name, observabilityPipelineKafkaSourceLibrdkafkaOption.name)
        && Objects.equals(this.value, observabilityPipelineKafkaSourceLibrdkafkaOption.value)
        && Objects.equals(
            this.additionalProperties,
            observabilityPipelineKafkaSourceLibrdkafkaOption.additionalProperties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name, value, additionalProperties);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ObservabilityPipelineKafkaSourceLibrdkafkaOption {\n");
    sb.append("    name: ").append(toIndentedString(name)).append("\n");
    sb.append("    value: ").append(toIndentedString(value)).append("\n");
    sb.append("    additionalProperties: ")
        .append(toIndentedString(additionalProperties))
        .append("\n");
    sb.append('}');
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first
   * line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java
new file mode 100644
index 00000000000..0475f35416b
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceSasl.java
@@ -0,0 +1,142 @@
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Specifies the SASL mechanism for authenticating with a Kafka cluster. */ +@JsonPropertyOrder({ObservabilityPipelineKafkaSourceSasl.JSON_PROPERTY_MECHANISM}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineKafkaSourceSasl { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_MECHANISM = "mechanism"; + private ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism; + + public ObservabilityPipelineKafkaSourceSasl mechanism( + ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism) { + this.mechanism = mechanism; + this.unparsed |= !mechanism.isValid(); + return this; + } + + /** + * SASL mechanism used for Kafka authentication. + * + * @return mechanism + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_MECHANISM) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelinePipelineKafkaSourceSaslMechanism getMechanism() { + return mechanism; + } + + public void setMechanism(ObservabilityPipelinePipelineKafkaSourceSaslMechanism mechanism) { + if (!mechanism.isValid()) { + this.unparsed = true; + } + this.mechanism = mechanism; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. 
+ */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineKafkaSourceSasl + */ + @JsonAnySetter + public ObservabilityPipelineKafkaSourceSasl putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineKafkaSourceSasl object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineKafkaSourceSasl observabilityPipelineKafkaSourceSasl = + (ObservabilityPipelineKafkaSourceSasl) o; + return Objects.equals(this.mechanism, observabilityPipelineKafkaSourceSasl.mechanism) + && Objects.equals( + this.additionalProperties, observabilityPipelineKafkaSourceSasl.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(mechanism, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineKafkaSourceSasl {\n"); + sb.append(" mechanism: ").append(toIndentedString(mechanism)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceType.java new file mode 100644 index 00000000000..b52ccfb1faf --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSourceType.java @@ -0,0 +1,59 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
 */

package com.datadog.api.client.v2.model;

import com.datadog.api.client.ModelEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/** The source type. The value should always be kafka. */
@JsonSerialize(
    using =
        ObservabilityPipelineKafkaSourceType.ObservabilityPipelineKafkaSourceTypeSerializer.class)
public class ObservabilityPipelineKafkaSourceType extends ModelEnum {

  // The only value the API schema accepts for this type tag.
  private static final Set allowedValues = new HashSet(Arrays.asList("kafka"));

  public static final ObservabilityPipelineKafkaSourceType KAFKA =
      new ObservabilityPipelineKafkaSourceType("kafka");

  ObservabilityPipelineKafkaSourceType(String value) {
    super(value, allowedValues);
  }

  // Serializes the enum wrapper as its raw string value on the wire.
  public static class ObservabilityPipelineKafkaSourceTypeSerializer
      extends StdSerializer {
    public ObservabilityPipelineKafkaSourceTypeSerializer(Class t) {
      super(t);
    }

    public ObservabilityPipelineKafkaSourceTypeSerializer() {
      this(null);
    }

    @Override
    public void serialize(
        ObservabilityPipelineKafkaSourceType value, JsonGenerator jgen, SerializerProvider provider)
        throws IOException, JsonProcessingException {
      jgen.writeObject(value.value);
    }
  }

  // Deserialization entry point. Unknown values are wrapped rather than rejected,
  // so callers detect them via isValid() instead of a parse failure.
  @JsonCreator
  public static ObservabilityPipelineKafkaSourceType fromValue(String value) {
    return new ObservabilityPipelineKafkaSourceType(value);
  }
}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseJSONProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseJSONProcessor.java
new file mode 100644
index 00000000000..b46a50b9ecf
---
/dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseJSONProcessor.java
@@ -0,0 +1,279 @@
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * The parse_json processor extracts JSON from a specified field and flattens it into
 * the event. This is useful when logs contain embedded JSON as a string.
 */
@JsonPropertyOrder({
  ObservabilityPipelineParseJSONProcessor.JSON_PROPERTY_FIELD,
  ObservabilityPipelineParseJSONProcessor.JSON_PROPERTY_ID,
  ObservabilityPipelineParseJSONProcessor.JSON_PROPERTY_INCLUDE,
  ObservabilityPipelineParseJSONProcessor.JSON_PROPERTY_INPUTS,
  ObservabilityPipelineParseJSONProcessor.JSON_PROPERTY_TYPE
})
@jakarta.annotation.Generated(
    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class ObservabilityPipelineParseJSONProcessor {
  // Flipped to true when the wire payload contained values this model could not fully parse.
  @JsonIgnore public boolean unparsed = false;
  public static final String JSON_PROPERTY_FIELD = "field";
  private String field;

  public static final String JSON_PROPERTY_ID = "id";
  private String id;

  public static final String JSON_PROPERTY_INCLUDE = "include";
  private String include;

  public static final String JSON_PROPERTY_INPUTS = "inputs";
  private List inputs = new ArrayList<>();

  public static final String JSON_PROPERTY_TYPE = "type";
  private ObservabilityPipelineParseJSONProcessorType type =
      ObservabilityPipelineParseJSONProcessorType.PARSE_JSON;

  public ObservabilityPipelineParseJSONProcessor() {}

  // Canonical constructor used by Jackson; all five properties are required by the schema.
  @JsonCreator
  public ObservabilityPipelineParseJSONProcessor(
      @JsonProperty(required = true, value = JSON_PROPERTY_FIELD) String field,
      @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id,
      @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include,
      @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs,
      @JsonProperty(required = true, value = JSON_PROPERTY_TYPE)
          ObservabilityPipelineParseJSONProcessorType type) {
    this.field = field;
    this.id = id;
    this.include = include;
    this.inputs = inputs;
    this.type = type;
    this.unparsed |= !type.isValid();
  }

  public ObservabilityPipelineParseJSONProcessor field(String field) {
    this.field = field;
    return this;
  }

  /**
   * The name of the log field that contains a JSON string.
   *
   * @return field
   */
  @JsonProperty(JSON_PROPERTY_FIELD)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getField() {
    return field;
  }

  public void setField(String field) {
    this.field = field;
  }

  public ObservabilityPipelineParseJSONProcessor id(String id) {
    this.id = id;
    return this;
  }

  /**
   * A unique identifier for this component. Used to reference this component in other parts of the
   * pipeline (e.g., as input to downstream components).
   *
   * @return id
   */
  @JsonProperty(JSON_PROPERTY_ID)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getId() {
    return id;
  }

  public void setId(String id) {
    this.id = id;
  }

  public ObservabilityPipelineParseJSONProcessor include(String include) {
    this.include = include;
    return this;
  }

  /**
   * A Datadog search query used to determine which logs this processor targets.
   *
   * @return include
   */
  @JsonProperty(JSON_PROPERTY_INCLUDE)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getInclude() {
    return include;
  }

  public void setInclude(String include) {
    this.include = include;
  }

  public ObservabilityPipelineParseJSONProcessor inputs(List inputs) {
    this.inputs = inputs;
    return this;
  }

  public ObservabilityPipelineParseJSONProcessor addInputsItem(String inputsItem) {
    this.inputs.add(inputsItem);
    return this;
  }

  /**
   * A list of component IDs whose output is used as the input for this component.
   *
   * @return inputs
   */
  @JsonProperty(JSON_PROPERTY_INPUTS)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public List getInputs() {
    return inputs;
  }

  public void setInputs(List inputs) {
    this.inputs = inputs;
  }

  public ObservabilityPipelineParseJSONProcessor type(
      ObservabilityPipelineParseJSONProcessorType type) {
    this.type = type;
    // isValid() flags enum values this client version does not know about.
    this.unparsed |= !type.isValid();
    return this;
  }

  /**
   * The processor type. The value should always be parse_json.
   *
   * @return type
   */
  @JsonProperty(JSON_PROPERTY_TYPE)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public ObservabilityPipelineParseJSONProcessorType getType() {
    return type;
  }

  public void setType(ObservabilityPipelineParseJSONProcessorType type) {
    if (!type.isValid()) {
      this.unparsed = true;
    }
    this.type = type;
  }

  /**
   * A container for additional, undeclared properties. This is a holder for any undeclared
   * properties as specified with the 'additionalProperties' keyword in the OAS document.
   */
  private Map additionalProperties;

  /**
   * Set the additional (undeclared) property with the specified name and value. If the property
   * does not already exist, create it otherwise replace it.
   *
   * @param key The arbitrary key to set
   * @param value The associated value
   * @return ObservabilityPipelineParseJSONProcessor
   */
  @JsonAnySetter
  public ObservabilityPipelineParseJSONProcessor putAdditionalProperty(String key, Object value) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new HashMap();
    }
    this.additionalProperties.put(key, value);
    return this;
  }

  /**
   * Return the additional (undeclared) property.
   *
   * @return The additional properties
   */
  @JsonAnyGetter
  public Map getAdditionalProperties() {
    return additionalProperties;
  }

  /**
   * Return the additional (undeclared) property with the specified name.
   *
   * @param key The arbitrary key to get
   * @return The specific additional property for the given key
   */
  public Object getAdditionalProperty(String key) {
    if (this.additionalProperties == null) {
      return null;
    }
    return this.additionalProperties.get(key);
  }

  /** Return true if this ObservabilityPipelineParseJSONProcessor object is equal to o. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ObservabilityPipelineParseJSONProcessor observabilityPipelineParseJsonProcessor =
        (ObservabilityPipelineParseJSONProcessor) o;
    return Objects.equals(this.field, observabilityPipelineParseJsonProcessor.field)
        && Objects.equals(this.id, observabilityPipelineParseJsonProcessor.id)
        && Objects.equals(this.include, observabilityPipelineParseJsonProcessor.include)
        && Objects.equals(this.inputs, observabilityPipelineParseJsonProcessor.inputs)
        && Objects.equals(this.type, observabilityPipelineParseJsonProcessor.type)
        && Objects.equals(
            this.additionalProperties,
            observabilityPipelineParseJsonProcessor.additionalProperties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(field, id, include, inputs, type, additionalProperties);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ObservabilityPipelineParseJSONProcessor {\n");
    sb.append("    field: ").append(toIndentedString(field)).append("\n");
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    include: ").append(toIndentedString(include)).append("\n");
    sb.append("    inputs: ").append(toIndentedString(inputs)).append("\n");
    sb.append("    type: ").append(toIndentedString(type)).append("\n");
    sb.append("    additionalProperties: ")
        .append(toIndentedString(additionalProperties))
        .append("\n");
    sb.append('}');
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first
   * line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseJSONProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseJSONProcessorType.java
new file mode 100644
index 00000000000..a7a9a31e0a0
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseJSONProcessorType.java
@@ -0,0 +1,62 @@
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.datadog.api.client.ModelEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/** The processor type. The value should always be parse_json.
 */
@JsonSerialize(
    using =
        ObservabilityPipelineParseJSONProcessorType
            .ObservabilityPipelineParseJSONProcessorTypeSerializer.class)
public class ObservabilityPipelineParseJSONProcessorType extends ModelEnum {

  // The only value the API schema accepts for this type tag.
  private static final Set allowedValues = new HashSet(Arrays.asList("parse_json"));

  public static final ObservabilityPipelineParseJSONProcessorType PARSE_JSON =
      new ObservabilityPipelineParseJSONProcessorType("parse_json");

  ObservabilityPipelineParseJSONProcessorType(String value) {
    super(value, allowedValues);
  }

  // Serializes the enum wrapper as its raw string value on the wire.
  public static class ObservabilityPipelineParseJSONProcessorTypeSerializer
      extends StdSerializer {
    public ObservabilityPipelineParseJSONProcessorTypeSerializer(Class t) {
      super(t);
    }

    public ObservabilityPipelineParseJSONProcessorTypeSerializer() {
      this(null);
    }

    @Override
    public void serialize(
        ObservabilityPipelineParseJSONProcessorType value,
        JsonGenerator jgen,
        SerializerProvider provider)
        throws IOException, JsonProcessingException {
      jgen.writeObject(value.value);
    }
  }

  // Deserialization entry point. Unknown values are wrapped rather than rejected,
  // so callers detect them via isValid() instead of a parse failure.
  @JsonCreator
  public static ObservabilityPipelineParseJSONProcessorType fromValue(String value) {
    return new ObservabilityPipelineParseJSONProcessorType(value);
  }
}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java
new file mode 100644
index 00000000000..6ec67b62962
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelinePipelineKafkaSourceSaslMechanism.java
@@ -0,0 +1,67 @@
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** SASL mechanism used for Kafka authentication. */ +@JsonSerialize( + using = + ObservabilityPipelinePipelineKafkaSourceSaslMechanism + .ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer.class) +public class ObservabilityPipelinePipelineKafkaSourceSaslMechanism extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512")); + + public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism PLAIN = + new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("PLAIN"); + public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism SCRAMNOT_SHANOT_256 = + new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-256"); + public static final ObservabilityPipelinePipelineKafkaSourceSaslMechanism SCRAMNOT_SHANOT_512 = + new ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-512"); + + ObservabilityPipelinePipelineKafkaSourceSaslMechanism(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer + extends StdSerializer { + public ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelinePipelineKafkaSourceSaslMechanismSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelinePipelineKafkaSourceSaslMechanism value, 
+ JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelinePipelineKafkaSourceSaslMechanism fromValue(String value) { + return new ObservabilityPipelinePipelineKafkaSourceSaslMechanism(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java new file mode 100644 index 00000000000..d9101e2ccfc --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java @@ -0,0 +1,462 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The Quota Processor measures logging traffic for logs that match a specified filter. When the + * configured daily quota is met, the processor can drop or alert. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_DROP_EVENTS, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_IGNORE_WHEN_MISSING_PARTITIONS, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_LIMIT, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_NAME, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_OVERRIDES, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_PARTITION_FIELDS, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineQuotaProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DROP_EVENTS = "drop_events"; + private Boolean dropEvents; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_IGNORE_WHEN_MISSING_PARTITIONS = + "ignore_when_missing_partitions"; + private Boolean ignoreWhenMissingPartitions; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_LIMIT = "limit"; + private ObservabilityPipelineQuotaProcessorLimit limit; + + public static final String JSON_PROPERTY_NAME = "name"; + private String name; + + public static final String JSON_PROPERTY_OVERRIDES = "overrides"; + private List overrides = null; + + public static final String JSON_PROPERTY_PARTITION_FIELDS = "partition_fields"; + private List partitionFields = null; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineQuotaProcessorType type = + 
ObservabilityPipelineQuotaProcessorType.QUOTA; + + public ObservabilityPipelineQuotaProcessor() {} + + @JsonCreator + public ObservabilityPipelineQuotaProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_DROP_EVENTS) Boolean dropEvents, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_LIMIT) + ObservabilityPipelineQuotaProcessorLimit limit, + @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineQuotaProcessorType type) { + this.dropEvents = dropEvents; + this.id = id; + this.include = include; + this.inputs = inputs; + this.limit = limit; + this.unparsed |= limit.unparsed; + this.name = name; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineQuotaProcessor dropEvents(Boolean dropEvents) { + this.dropEvents = dropEvents; + return this; + } + + /** + * If set to true, logs that matched the quota filter and sent after the quota has + * been met are dropped; only logs that did not match the filter query continue through the + * pipeline. + * + * @return dropEvents + */ + @JsonProperty(JSON_PROPERTY_DROP_EVENTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getDropEvents() { + return dropEvents; + } + + public void setDropEvents(Boolean dropEvents) { + this.dropEvents = dropEvents; + } + + public ObservabilityPipelineQuotaProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineQuotaProcessor ignoreWhenMissingPartitions( + Boolean ignoreWhenMissingPartitions) { + this.ignoreWhenMissingPartitions = ignoreWhenMissingPartitions; + return this; + } + + /** + * If true, the processor skips quota checks when partition fields are missing from + * the logs. + * + * @return ignoreWhenMissingPartitions + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_IGNORE_WHEN_MISSING_PARTITIONS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getIgnoreWhenMissingPartitions() { + return ignoreWhenMissingPartitions; + } + + public void setIgnoreWhenMissingPartitions(Boolean ignoreWhenMissingPartitions) { + this.ignoreWhenMissingPartitions = ignoreWhenMissingPartitions; + } + + public ObservabilityPipelineQuotaProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineQuotaProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineQuotaProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineQuotaProcessor limit(ObservabilityPipelineQuotaProcessorLimit limit) { + this.limit = limit; + this.unparsed |= limit.unparsed; + return this; + } + + /** + * The maximum amount of data or number of events allowed before the quota is enforced. Can be + * specified in bytes or events. + * + * @return limit + */ + @JsonProperty(JSON_PROPERTY_LIMIT) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineQuotaProcessorLimit getLimit() { + return limit; + } + + public void setLimit(ObservabilityPipelineQuotaProcessorLimit limit) { + this.limit = limit; + } + + public ObservabilityPipelineQuotaProcessor name(String name) { + this.name = name; + return this; + } + + /** + * Name for identifying the processor. + * + * @return name + */ + @JsonProperty(JSON_PROPERTY_NAME) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public ObservabilityPipelineQuotaProcessor overrides( + List overrides) { + this.overrides = overrides; + for (ObservabilityPipelineQuotaProcessorOverride item : overrides) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineQuotaProcessor addOverridesItem( + ObservabilityPipelineQuotaProcessorOverride overridesItem) { + if (this.overrides == null) { + this.overrides = new ArrayList<>(); + } + this.overrides.add(overridesItem); + this.unparsed |= overridesItem.unparsed; + return this; + } + + /** + * A list of alternate quota rules that apply to specific sets of events, identified by matching + * field values. Each override can define a custom limit. 
+ * + * @return overrides + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_OVERRIDES) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getOverrides() { + return overrides; + } + + public void setOverrides(List overrides) { + this.overrides = overrides; + } + + public ObservabilityPipelineQuotaProcessor partitionFields(List partitionFields) { + this.partitionFields = partitionFields; + return this; + } + + public ObservabilityPipelineQuotaProcessor addPartitionFieldsItem(String partitionFieldsItem) { + if (this.partitionFields == null) { + this.partitionFields = new ArrayList<>(); + } + this.partitionFields.add(partitionFieldsItem); + return this; + } + + /** + * A list of fields used to segment log traffic for quota enforcement. Quotas are tracked + * independently by unique combinations of these field values. + * + * @return partitionFields + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_PARTITION_FIELDS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getPartitionFields() { + return partitionFields; + } + + public void setPartitionFields(List partitionFields) { + this.partitionFields = partitionFields; + } + + public ObservabilityPipelineQuotaProcessor type(ObservabilityPipelineQuotaProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be quota. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineQuotaProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineQuotaProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. 
+ */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineQuotaProcessor + */ + @JsonAnySetter + public ObservabilityPipelineQuotaProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineQuotaProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineQuotaProcessor observabilityPipelineQuotaProcessor = + (ObservabilityPipelineQuotaProcessor) o; + return Objects.equals(this.dropEvents, observabilityPipelineQuotaProcessor.dropEvents) + && Objects.equals(this.id, observabilityPipelineQuotaProcessor.id) + && Objects.equals( + this.ignoreWhenMissingPartitions, + observabilityPipelineQuotaProcessor.ignoreWhenMissingPartitions) + && Objects.equals(this.include, observabilityPipelineQuotaProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineQuotaProcessor.inputs) + && Objects.equals(this.limit, observabilityPipelineQuotaProcessor.limit) + && Objects.equals(this.name, observabilityPipelineQuotaProcessor.name) + && Objects.equals(this.overrides, observabilityPipelineQuotaProcessor.overrides) + && Objects.equals(this.partitionFields, observabilityPipelineQuotaProcessor.partitionFields) + && Objects.equals(this.type, observabilityPipelineQuotaProcessor.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineQuotaProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + dropEvents, + id, + ignoreWhenMissingPartitions, + include, + inputs, + limit, + name, + overrides, + partitionFields, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineQuotaProcessor {\n"); + sb.append(" dropEvents: ").append(toIndentedString(dropEvents)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" ignoreWhenMissingPartitions: ") + .append(toIndentedString(ignoreWhenMissingPartitions)) + .append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: 
").append(toIndentedString(inputs)).append("\n"); + sb.append(" limit: ").append(toIndentedString(limit)).append("\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" overrides: ").append(toIndentedString(overrides)).append("\n"); + sb.append(" partitionFields: ").append(toIndentedString(partitionFields)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorLimit.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorLimit.java new file mode 100644 index 00000000000..af7a3f324c2 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorLimit.java @@ -0,0 +1,186 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The maximum amount of data or number of events allowed before the quota is enforced. Can be + * specified in bytes or events. + */ +@JsonPropertyOrder({ + ObservabilityPipelineQuotaProcessorLimit.JSON_PROPERTY_ENFORCE, + ObservabilityPipelineQuotaProcessorLimit.JSON_PROPERTY_LIMIT +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineQuotaProcessorLimit { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ENFORCE = "enforce"; + private ObservabilityPipelineQuotaProcessorLimitEnforceType enforce; + + public static final String JSON_PROPERTY_LIMIT = "limit"; + private Long limit; + + public ObservabilityPipelineQuotaProcessorLimit() {} + + @JsonCreator + public ObservabilityPipelineQuotaProcessorLimit( + @JsonProperty(required = true, value = JSON_PROPERTY_ENFORCE) + ObservabilityPipelineQuotaProcessorLimitEnforceType enforce, + @JsonProperty(required = true, value = JSON_PROPERTY_LIMIT) Long limit) { + this.enforce = enforce; + this.unparsed |= !enforce.isValid(); + this.limit = limit; + } + + public ObservabilityPipelineQuotaProcessorLimit enforce( + ObservabilityPipelineQuotaProcessorLimitEnforceType enforce) { + this.enforce = enforce; + this.unparsed |= !enforce.isValid(); + return this; + } + + /** + * Unit for quota enforcement in bytes for data size or events for count. 
+ * + * @return enforce + */ + @JsonProperty(JSON_PROPERTY_ENFORCE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineQuotaProcessorLimitEnforceType getEnforce() { + return enforce; + } + + public void setEnforce(ObservabilityPipelineQuotaProcessorLimitEnforceType enforce) { + if (!enforce.isValid()) { + this.unparsed = true; + } + this.enforce = enforce; + } + + public ObservabilityPipelineQuotaProcessorLimit limit(Long limit) { + this.limit = limit; + return this; + } + + /** + * The limit for quota enforcement. + * + * @return limit + */ + @JsonProperty(JSON_PROPERTY_LIMIT) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Long getLimit() { + return limit; + } + + public void setLimit(Long limit) { + this.limit = limit; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineQuotaProcessorLimit + */ + @JsonAnySetter + public ObservabilityPipelineQuotaProcessorLimit putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineQuotaProcessorLimit object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineQuotaProcessorLimit observabilityPipelineQuotaProcessorLimit = + (ObservabilityPipelineQuotaProcessorLimit) o; + return Objects.equals(this.enforce, observabilityPipelineQuotaProcessorLimit.enforce) + && Objects.equals(this.limit, observabilityPipelineQuotaProcessorLimit.limit) + && Objects.equals( + this.additionalProperties, + observabilityPipelineQuotaProcessorLimit.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(enforce, limit, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineQuotaProcessorLimit {\n"); + sb.append(" enforce: ").append(toIndentedString(enforce)).append("\n"); + sb.append(" limit: ").append(toIndentedString(limit)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorLimitEnforceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorLimitEnforceType.java new file mode 100644 index 00000000000..566470d4fec --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorLimitEnforceType.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Unit for quota enforcement in bytes for data size or events for count. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineQuotaProcessorLimitEnforceType + .ObservabilityPipelineQuotaProcessorLimitEnforceTypeSerializer.class) +public class ObservabilityPipelineQuotaProcessorLimitEnforceType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("bytes", "events")); + + public static final ObservabilityPipelineQuotaProcessorLimitEnforceType BYTES = + new ObservabilityPipelineQuotaProcessorLimitEnforceType("bytes"); + public static final ObservabilityPipelineQuotaProcessorLimitEnforceType EVENTS = + new ObservabilityPipelineQuotaProcessorLimitEnforceType("events"); + + ObservabilityPipelineQuotaProcessorLimitEnforceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineQuotaProcessorLimitEnforceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineQuotaProcessorLimitEnforceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineQuotaProcessorLimitEnforceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineQuotaProcessorLimitEnforceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineQuotaProcessorLimitEnforceType fromValue(String value) { + return new ObservabilityPipelineQuotaProcessorLimitEnforceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverride.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverride.java new file mode 100644 index 00000000000..84912309f99 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverride.java @@ -0,0 +1,199 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Defines a custom quota limit that applies to specific log events based on matching field values. + */ +@JsonPropertyOrder({ + ObservabilityPipelineQuotaProcessorOverride.JSON_PROPERTY_FIELDS, + ObservabilityPipelineQuotaProcessorOverride.JSON_PROPERTY_LIMIT +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineQuotaProcessorOverride { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FIELDS = "fields"; + private List fields = new ArrayList<>(); + + public static final String JSON_PROPERTY_LIMIT = "limit"; + private ObservabilityPipelineQuotaProcessorLimit limit; + + public ObservabilityPipelineQuotaProcessorOverride() {} + + @JsonCreator + public ObservabilityPipelineQuotaProcessorOverride( + @JsonProperty(required = true, value = JSON_PROPERTY_FIELDS) + List fields, + @JsonProperty(required = true, value = JSON_PROPERTY_LIMIT) + ObservabilityPipelineQuotaProcessorLimit limit) { + this.fields = fields; + this.limit = limit; + this.unparsed |= limit.unparsed; + } + + public ObservabilityPipelineQuotaProcessorOverride fields( + List fields) { + this.fields = fields; + for (ObservabilityPipelineFieldValue item : fields) { + 
this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineQuotaProcessorOverride addFieldsItem( + ObservabilityPipelineFieldValue fieldsItem) { + this.fields.add(fieldsItem); + this.unparsed |= fieldsItem.unparsed; + return this; + } + + /** + * A list of field matchers used to apply a specific override. If an event matches all listed + * key-value pairs, the corresponding override limit is enforced. + * + * @return fields + */ + @JsonProperty(JSON_PROPERTY_FIELDS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getFields() { + return fields; + } + + public void setFields(List fields) { + this.fields = fields; + } + + public ObservabilityPipelineQuotaProcessorOverride limit( + ObservabilityPipelineQuotaProcessorLimit limit) { + this.limit = limit; + this.unparsed |= limit.unparsed; + return this; + } + + /** + * The maximum amount of data or number of events allowed before the quota is enforced. Can be + * specified in bytes or events. + * + * @return limit + */ + @JsonProperty(JSON_PROPERTY_LIMIT) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineQuotaProcessorLimit getLimit() { + return limit; + } + + public void setLimit(ObservabilityPipelineQuotaProcessorLimit limit) { + this.limit = limit; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineQuotaProcessorOverride + */ + @JsonAnySetter + public ObservabilityPipelineQuotaProcessorOverride putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineQuotaProcessorOverride object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineQuotaProcessorOverride observabilityPipelineQuotaProcessorOverride = + (ObservabilityPipelineQuotaProcessorOverride) o; + return Objects.equals(this.fields, observabilityPipelineQuotaProcessorOverride.fields) + && Objects.equals(this.limit, observabilityPipelineQuotaProcessorOverride.limit) + && Objects.equals( + this.additionalProperties, + observabilityPipelineQuotaProcessorOverride.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(fields, limit, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineQuotaProcessorOverride {\n"); + sb.append(" fields: ").append(toIndentedString(fields)).append("\n"); + sb.append(" limit: ").append(toIndentedString(limit)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorType.java new file mode 100644 index 00000000000..ff2b938e512 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). 
+ * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be quota. */ +@JsonSerialize( + using = + ObservabilityPipelineQuotaProcessorType.ObservabilityPipelineQuotaProcessorTypeSerializer + .class) +public class ObservabilityPipelineQuotaProcessorType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("quota")); + + public static final ObservabilityPipelineQuotaProcessorType QUOTA = + new ObservabilityPipelineQuotaProcessorType("quota"); + + ObservabilityPipelineQuotaProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineQuotaProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineQuotaProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineQuotaProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineQuotaProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineQuotaProcessorType fromValue(String value) { + return new ObservabilityPipelineQuotaProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRemoveFieldsProcessor.java 
b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRemoveFieldsProcessor.java new file mode 100644 index 00000000000..bd36b694985 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRemoveFieldsProcessor.java @@ -0,0 +1,282 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The remove_fields processor deletes specified fields from logs. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineRemoveFieldsProcessor.JSON_PROPERTY_FIELDS, + ObservabilityPipelineRemoveFieldsProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineRemoveFieldsProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineRemoveFieldsProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineRemoveFieldsProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineRemoveFieldsProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FIELDS = "fields"; + private List fields = new ArrayList<>(); + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineRemoveFieldsProcessorType type = + ObservabilityPipelineRemoveFieldsProcessorType.REMOVE_FIELDS; + + public ObservabilityPipelineRemoveFieldsProcessor() {} + + @JsonCreator + public ObservabilityPipelineRemoveFieldsProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_FIELDS) List fields, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineRemoveFieldsProcessorType type) { + this.fields = fields; + this.id = id; + this.include = include; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineRemoveFieldsProcessor fields(List fields) { + this.fields = fields; + return this; + } + + public 
ObservabilityPipelineRemoveFieldsProcessor addFieldsItem(String fieldsItem) { + this.fields.add(fieldsItem); + return this; + } + + /** + * A list of field names to be removed from each log event. + * + * @return fields + */ + @JsonProperty(JSON_PROPERTY_FIELDS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getFields() { + return fields; + } + + public void setFields(List fields) { + this.fields = fields; + } + + public ObservabilityPipelineRemoveFieldsProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineRemoveFieldsProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineRemoveFieldsProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineRemoveFieldsProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * The PipelineRemoveFieldsProcessor inputs. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineRemoveFieldsProcessor type( + ObservabilityPipelineRemoveFieldsProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be remove_fields. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineRemoveFieldsProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineRemoveFieldsProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineRemoveFieldsProcessor + */ + @JsonAnySetter + public ObservabilityPipelineRemoveFieldsProcessor putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineRemoveFieldsProcessor object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineRemoveFieldsProcessor observabilityPipelineRemoveFieldsProcessor = + (ObservabilityPipelineRemoveFieldsProcessor) o; + return Objects.equals(this.fields, observabilityPipelineRemoveFieldsProcessor.fields) + && Objects.equals(this.id, observabilityPipelineRemoveFieldsProcessor.id) + && Objects.equals(this.include, observabilityPipelineRemoveFieldsProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineRemoveFieldsProcessor.inputs) + && Objects.equals(this.type, observabilityPipelineRemoveFieldsProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineRemoveFieldsProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(fields, id, include, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineRemoveFieldsProcessor {\n"); + sb.append(" fields: ").append(toIndentedString(fields)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * 
Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRemoveFieldsProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRemoveFieldsProcessorType.java new file mode 100644 index 00000000000..8f260d95024 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRemoveFieldsProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be remove_fields. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineRemoveFieldsProcessorType + .ObservabilityPipelineRemoveFieldsProcessorTypeSerializer.class) +public class ObservabilityPipelineRemoveFieldsProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("remove_fields")); + + public static final ObservabilityPipelineRemoveFieldsProcessorType REMOVE_FIELDS = + new ObservabilityPipelineRemoveFieldsProcessorType("remove_fields"); + + ObservabilityPipelineRemoveFieldsProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineRemoveFieldsProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineRemoveFieldsProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineRemoveFieldsProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineRemoveFieldsProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineRemoveFieldsProcessorType fromValue(String value) { + return new ObservabilityPipelineRemoveFieldsProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessor.java new file mode 100644 index 00000000000..232d14b56ab --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessor.java @@ -0,0 +1,290 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The rename_fields processor changes field names. */ +@JsonPropertyOrder({ + ObservabilityPipelineRenameFieldsProcessor.JSON_PROPERTY_FIELDS, + ObservabilityPipelineRenameFieldsProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineRenameFieldsProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineRenameFieldsProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineRenameFieldsProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineRenameFieldsProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FIELDS = "fields"; + private List fields = new ArrayList<>(); + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineRenameFieldsProcessorType type = + ObservabilityPipelineRenameFieldsProcessorType.RENAME_FIELDS; + + public ObservabilityPipelineRenameFieldsProcessor() {} + + @JsonCreator + public ObservabilityPipelineRenameFieldsProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_FIELDS) + List fields, + 
@JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineRenameFieldsProcessorType type) { + this.fields = fields; + this.id = id; + this.include = include; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineRenameFieldsProcessor fields( + List fields) { + this.fields = fields; + for (ObservabilityPipelineRenameFieldsProcessorField item : fields) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineRenameFieldsProcessor addFieldsItem( + ObservabilityPipelineRenameFieldsProcessorField fieldsItem) { + this.fields.add(fieldsItem); + this.unparsed |= fieldsItem.unparsed; + return this; + } + + /** + * A list of rename rules specifying which fields to rename in the event, what to rename them to, + * and whether to preserve the original fields. + * + * @return fields + */ + @JsonProperty(JSON_PROPERTY_FIELDS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getFields() { + return fields; + } + + public void setFields(List fields) { + this.fields = fields; + } + + public ObservabilityPipelineRenameFieldsProcessor id(String id) { + this.id = id; + return this; + } + + /** + * A unique identifier for this component. Used to reference this component in other parts of the + * pipeline (e.g., as input to downstream components). 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineRenameFieldsProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineRenameFieldsProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineRenameFieldsProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineRenameFieldsProcessor type( + ObservabilityPipelineRenameFieldsProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be rename_fields. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineRenameFieldsProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineRenameFieldsProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. 
This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineRenameFieldsProcessor + */ + @JsonAnySetter + public ObservabilityPipelineRenameFieldsProcessor putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineRenameFieldsProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineRenameFieldsProcessor observabilityPipelineRenameFieldsProcessor = + (ObservabilityPipelineRenameFieldsProcessor) o; + return Objects.equals(this.fields, observabilityPipelineRenameFieldsProcessor.fields) + && Objects.equals(this.id, observabilityPipelineRenameFieldsProcessor.id) + && Objects.equals(this.include, observabilityPipelineRenameFieldsProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineRenameFieldsProcessor.inputs) + && Objects.equals(this.type, observabilityPipelineRenameFieldsProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineRenameFieldsProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(fields, id, include, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineRenameFieldsProcessor {\n"); + sb.append(" fields: ").append(toIndentedString(fields)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessorField.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessorField.java new file mode 100644 index 00000000000..8ca41ec0726 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessorField.java @@ -0,0 +1,209 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Defines how to rename a field in log events. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineRenameFieldsProcessorField.JSON_PROPERTY_DESTINATION, + ObservabilityPipelineRenameFieldsProcessorField.JSON_PROPERTY_PRESERVE_SOURCE, + ObservabilityPipelineRenameFieldsProcessorField.JSON_PROPERTY_SOURCE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineRenameFieldsProcessorField { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DESTINATION = "destination"; + private String destination; + + public static final String JSON_PROPERTY_PRESERVE_SOURCE = "preserve_source"; + private Boolean preserveSource; + + public static final String JSON_PROPERTY_SOURCE = "source"; + private String source; + + public ObservabilityPipelineRenameFieldsProcessorField() {} + + @JsonCreator + public ObservabilityPipelineRenameFieldsProcessorField( + @JsonProperty(required = true, value = JSON_PROPERTY_DESTINATION) String destination, + @JsonProperty(required = true, value = JSON_PROPERTY_PRESERVE_SOURCE) Boolean preserveSource, + @JsonProperty(required = true, value = JSON_PROPERTY_SOURCE) String source) { + this.destination = destination; + this.preserveSource = preserveSource; + this.source = source; + } + + public ObservabilityPipelineRenameFieldsProcessorField destination(String destination) { + this.destination = destination; + return this; + } + + /** + * The field name to assign the renamed value to. 
+ * + * @return destination + */ + @JsonProperty(JSON_PROPERTY_DESTINATION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getDestination() { + return destination; + } + + public void setDestination(String destination) { + this.destination = destination; + } + + public ObservabilityPipelineRenameFieldsProcessorField preserveSource(Boolean preserveSource) { + this.preserveSource = preserveSource; + return this; + } + + /** + * Indicates whether the original field, that is received from the source, should be kept ( + * true) or removed (false) after renaming. + * + * @return preserveSource + */ + @JsonProperty(JSON_PROPERTY_PRESERVE_SOURCE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getPreserveSource() { + return preserveSource; + } + + public void setPreserveSource(Boolean preserveSource) { + this.preserveSource = preserveSource; + } + + public ObservabilityPipelineRenameFieldsProcessorField source(String source) { + this.source = source; + return this; + } + + /** + * The original field name in the log event that should be renamed. + * + * @return source + */ + @JsonProperty(JSON_PROPERTY_SOURCE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineRenameFieldsProcessorField + */ + @JsonAnySetter + public ObservabilityPipelineRenameFieldsProcessorField putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineRenameFieldsProcessorField object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineRenameFieldsProcessorField + observabilityPipelineRenameFieldsProcessorField = + (ObservabilityPipelineRenameFieldsProcessorField) o; + return Objects.equals( + this.destination, observabilityPipelineRenameFieldsProcessorField.destination) + && Objects.equals( + this.preserveSource, observabilityPipelineRenameFieldsProcessorField.preserveSource) + && Objects.equals(this.source, observabilityPipelineRenameFieldsProcessorField.source) + && Objects.equals( + this.additionalProperties, + observabilityPipelineRenameFieldsProcessorField.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(destination, preserveSource, source, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineRenameFieldsProcessorField {\n"); + sb.append(" destination: ").append(toIndentedString(destination)).append("\n"); + sb.append(" preserveSource: ").append(toIndentedString(preserveSource)).append("\n"); + sb.append(" source: ").append(toIndentedString(source)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessorType.java new file mode 100644 index 00000000000..81c9eab038b --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRenameFieldsProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be rename_fields. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineRenameFieldsProcessorType + .ObservabilityPipelineRenameFieldsProcessorTypeSerializer.class) +public class ObservabilityPipelineRenameFieldsProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("rename_fields")); + + public static final ObservabilityPipelineRenameFieldsProcessorType RENAME_FIELDS = + new ObservabilityPipelineRenameFieldsProcessorType("rename_fields"); + + ObservabilityPipelineRenameFieldsProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineRenameFieldsProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineRenameFieldsProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineRenameFieldsProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineRenameFieldsProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineRenameFieldsProcessorType fromValue(String value) { + return new ObservabilityPipelineRenameFieldsProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineTls.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineTls.java new file mode 100644 index 00000000000..e737cc86e11 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineTls.java @@ -0,0 +1,201 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Configuration for enabling TLS encryption. */ +@JsonPropertyOrder({ + ObservabilityPipelineTls.JSON_PROPERTY_CA_FILE, + ObservabilityPipelineTls.JSON_PROPERTY_CRT_FILE, + ObservabilityPipelineTls.JSON_PROPERTY_KEY_FILE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineTls { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_CA_FILE = "ca_file"; + private String caFile; + + public static final String JSON_PROPERTY_CRT_FILE = "crt_file"; + private String crtFile; + + public static final String JSON_PROPERTY_KEY_FILE = "key_file"; + private String keyFile; + + public ObservabilityPipelineTls() {} + + @JsonCreator + public ObservabilityPipelineTls( + @JsonProperty(required = true, value = JSON_PROPERTY_CRT_FILE) String crtFile) { + this.crtFile = crtFile; + } + + public ObservabilityPipelineTls caFile(String caFile) { + this.caFile = caFile; + return this; + } + + /** + * Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate. 
+ * + * @return caFile + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_CA_FILE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getCaFile() { + return caFile; + } + + public void setCaFile(String caFile) { + this.caFile = caFile; + } + + public ObservabilityPipelineTls crtFile(String crtFile) { + this.crtFile = crtFile; + return this; + } + + /** + * Path to the TLS client certificate file used to authenticate the pipeline component with + * upstream or downstream services. + * + * @return crtFile + */ + @JsonProperty(JSON_PROPERTY_CRT_FILE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getCrtFile() { + return crtFile; + } + + public void setCrtFile(String crtFile) { + this.crtFile = crtFile; + } + + public ObservabilityPipelineTls keyFile(String keyFile) { + this.keyFile = keyFile; + return this; + } + + /** + * Path to the private key file associated with the TLS client certificate. Used for mutual TLS + * authentication. + * + * @return keyFile + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_KEY_FILE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getKeyFile() { + return keyFile; + } + + public void setKeyFile(String keyFile) { + this.keyFile = keyFile; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineTls + */ + @JsonAnySetter + public ObservabilityPipelineTls putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineTls object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineTls observabilityPipelineTls = (ObservabilityPipelineTls) o; + return Objects.equals(this.caFile, observabilityPipelineTls.caFile) + && Objects.equals(this.crtFile, observabilityPipelineTls.crtFile) + && Objects.equals(this.keyFile, observabilityPipelineTls.keyFile) + && Objects.equals(this.additionalProperties, observabilityPipelineTls.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(caFile, crtFile, keyFile, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineTls {\n"); + sb.append(" caFile: ").append(toIndentedString(caFile)).append("\n"); + sb.append(" crtFile: ").append(toIndentedString(crtFile)).append("\n"); + sb.append(" keyFile: ").append(toIndentedString(keyFile)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.freeze b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.freeze new file mode 100644 index 00000000000..d1b8c94c601 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.freeze @@ -0,0 +1 @@ +2025-04-04T10:36:58.031Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.json b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.json new file mode 100644 index 00000000000..459f7c2f70f --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.json @@ -0,0 +1,32 @@ +[ + { + "httpRequest": { + "body": { + "type": "JSON", + "json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"unknown-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Main Observability Pipeline\"},\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "POST", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"errors\":[{\"title\":\"Component with ID filter-processor is an unknown component\",\"meta\":{\"message\":\"Component with ID filter-processor is an unknown component\"}},{\"title\":\"The following components are unused: [datadog-agent-source unknown-processor]\",\"meta\":{\"message\":\"The following components 
are unused: [datadog-agent-source unknown-processor]\"}}]}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 400, + "reasonPhrase": "Bad Request" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "27cad2cb-136c-f531-08ff-45a0fc536c02" + } +] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.freeze new file mode 100644 index 00000000000..0fc9b398ff7 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.freeze @@ -0,0 +1 @@ +2025-04-04T10:36:58.537Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.json new file mode 100644 index 00000000000..6d1613a1f3d --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.json @@ -0,0 +1,57 @@ +[ + { + "httpRequest": { + "body": { + "type": "JSON", + "json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Main Observability Pipeline\"},\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "POST", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"data\":{\"id\":\"be354bf0-1140-11f0-9a6b-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 201, + "reasonPhrase": "Created" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "ccedcb24-9a42-7cb7-5505-c852f00f4319" + }, + { + "httpRequest": { + "headers": {}, + "method": "DELETE", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/be354bf0-1140-11f0-9a6b-da7ad0900002", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 204, + "reasonPhrase": "No Content" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "212e06b7-2870-882f-a7c6-ff252ecb2958" + } +] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.freeze b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.freeze new file mode 100644 index 00000000000..4f32c245d61 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.freeze @@ -0,0 +1 @@ +2025-04-04T10:36:59.510Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.json b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.json new file mode 100644 index 00000000000..633377ecb4c --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.json @@ -0,0 +1,28 @@ +[ + { + "httpRequest": { + "headers": {}, + 
"method": "DELETE", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"errors\":[{\"title\":\"Resource Not Found\"}]}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 404, + "reasonPhrase": "Not Found" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "d504871e-95a9-3921-d8b6-426d67fce395" + } +] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.freeze new file mode 100644 index 00000000000..5489af2d1a6 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.freeze @@ -0,0 +1 @@ +2025-04-04T10:37:00.184Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.json new file mode 100644 index 00000000000..417ef6931b6 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.json @@ -0,0 +1,83 @@ +[ + { + "httpRequest": { + "body": { + "type": "JSON", + "json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Main Observability Pipeline\"},\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "POST", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": 
"{\"data\":{\"id\":\"bf2c23da-1140-11f0-9a95-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 201, + "reasonPhrase": "Created" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "ccedcb24-9a42-7cb7-5505-c852f00f431b" + }, + { + "httpRequest": { + "headers": {}, + "method": "DELETE", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/bf2c23da-1140-11f0-9a95-da7ad0900002", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 204, + "reasonPhrase": "No Content" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "0445a79a-5afd-bfc6-6aad-bf9e83e08d8f" + }, + { + "httpRequest": { + "headers": {}, + "method": "DELETE", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/bf2c23da-1140-11f0-9a95-da7ad0900002", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"errors\":[{\"title\":\"Resource Not Found\"}]}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 404, + "reasonPhrase": "Not Found" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "0445a79a-5afd-bfc6-6aad-bf9e83e08d90" + } +] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.freeze 
b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.freeze new file mode 100644 index 00000000000..50029945824 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.freeze @@ -0,0 +1 @@ +2025-04-04T10:37:02.118Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.json new file mode 100644 index 00000000000..6f52bd8f609 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.json @@ -0,0 +1,83 @@ +[ + { + "httpRequest": { + "body": { + "type": "JSON", + "json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Main Observability Pipeline\"},\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "POST", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"data\":{\"id\":\"c06831ee-1140-11f0-9fe5-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 201, + "reasonPhrase": "Created" + 
}, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "ccedcb24-9a42-7cb7-5505-c852f00f431d" + }, + { + "httpRequest": { + "headers": {}, + "method": "GET", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c06831ee-1140-11f0-9fe5-da7ad0900002", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"data\":{\"id\":\"c06831ee-1140-11f0-9fe5-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 200, + "reasonPhrase": "OK" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "1b08c3e8-e45d-c983-c7d1-8fa834c00d39" + }, + { + "httpRequest": { + "headers": {}, + "method": "DELETE", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c06831ee-1140-11f0-9fe5-da7ad0900002", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 204, + "reasonPhrase": "No Content" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "157c5e04-ad54-8e72-7615-8bd9095245a7" + } +] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.freeze b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.freeze new file mode 100644 index 00000000000..01305de98f7 --- /dev/null +++ 
b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.freeze @@ -0,0 +1 @@ +2025-04-04T10:37:04.190Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.json b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.json new file mode 100644 index 00000000000..586d4087434 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.json @@ -0,0 +1,87 @@ +[ + { + "httpRequest": { + "body": { + "type": "JSON", + "json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Main Observability Pipeline\"},\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "POST", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"data\":{\"id\":\"c193d7da-1140-11f0-9a6d-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 201, + "reasonPhrase": "Created" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "ccedcb24-9a42-7cb7-5505-c852f00f431a" + }, + { + 
"httpRequest": { + "body": { + "type": "JSON", + "json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"unknown-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Main Observability Pipeline\"},\"id\":\"3fa85f64-5717-4562-b3fc-2c963f66afa6\",\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "PUT", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c193d7da-1140-11f0-9a6d-da7ad0900002", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"errors\":[{\"title\":\"Component with ID filter-processor is an unknown component\",\"meta\":{\"message\":\"Component with ID filter-processor is an unknown component\"}},{\"title\":\"The following components are unused: [datadog-agent-source unknown-processor]\",\"meta\":{\"message\":\"The following components are unused: [datadog-agent-source unknown-processor]\"}}]}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 400, + "reasonPhrase": "Bad Request" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "6afff49a-081f-0b40-67f4-ae1d3de8cfe5" + }, + { + "httpRequest": { + "headers": {}, + "method": "DELETE", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c193d7da-1140-11f0-9a6d-da7ad0900002", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 204, + "reasonPhrase": "No Content" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "6d34d800-bf5f-73da-5d16-d90b90837ea3" + } +] \ No newline at end of file diff --git 
a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.freeze b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.freeze new file mode 100644 index 00000000000..b453d330d37 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.freeze @@ -0,0 +1 @@ +2025-04-04T10:37:06.031Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.json b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.json new file mode 100644 index 00000000000..6873d455a2b --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.json @@ -0,0 +1,32 @@ +[ + { + "httpRequest": { + "body": { + "type": "JSON", + "json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Main Observability Pipeline\"},\"id\":\"3fa85f64-5717-4562-b3fc-2c963f66afa6\",\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "PUT", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"errors\":[{\"title\":\"Not Found\"}]}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 404, + "reasonPhrase": "Not Found" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "ecb64254-1046-4382-57cb-30f1fe25da2a" + } +] \ No newline at end of file diff --git 
a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.freeze new file mode 100644 index 00000000000..4fba4617c1e --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.freeze @@ -0,0 +1 @@ +2025-04-04T10:37:06.485Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.json new file mode 100644 index 00000000000..ea97bcc8c03 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.json @@ -0,0 +1,87 @@ +[ + { + "httpRequest": { + "body": { + "type": "JSON", + "json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Main Observability Pipeline\"},\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "POST", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"data\":{\"id\":\"c2ee25ae-1140-11f0-9fe7-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "headers": { + "Content-Type": [ + "application/json" + 
] + }, + "statusCode": 201, + "reasonPhrase": "Created" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "ccedcb24-9a42-7cb7-5505-c852f00f431c" + }, + { + "httpRequest": { + "body": { + "type": "JSON", + "json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"updated-datadog-logs-destination-id\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Updated Pipeline Name\"},\"id\":\"3fa85f64-5717-4562-b3fc-2c963f66afa6\",\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "PUT", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c2ee25ae-1140-11f0-9fe7-da7ad0900002", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"data\":{\"id\":\"c2ee25ae-1140-11f0-9fe7-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Updated Pipeline Name\",\"config\":{\"destinations\":[{\"id\":\"updated-datadog-logs-destination-id\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 200, + "reasonPhrase": "OK" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "050e84d7-7940-6295-f299-faa6f7c0e81d" + }, + { + "httpRequest": { + "headers": {}, + "method": "DELETE", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c2ee25ae-1140-11f0-9fe7-da7ad0900002", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "headers": { + "Content-Type": [ + 
"application/json" + ] + }, + "statusCode": 204, + "reasonPhrase": "No Content" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "2d68f649-589e-1e0c-78cd-02a141246b4e" + } +] \ No newline at end of file diff --git a/src/test/resources/com/datadog/api/client/v2/api/given.json b/src/test/resources/com/datadog/api/client/v2/api/given.json index 3355eda9ed4..3a41fda169b 100644 --- a/src/test/resources/com/datadog/api/client/v2/api/given.json +++ b/src/test/resources/com/datadog/api/client/v2/api/given.json @@ -503,6 +503,18 @@ "tag": "CSM Threats", "operationId": "CreateCSMThreatsAgentRule" }, + { + "parameters": [ + { + "name": "body", + "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"filter-processor\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n \"id\":\"filter-processor\",\n \"include\":\"service:my-service\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"type\":\"filter\"\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" + } + ], + "step": "there is a valid \"pipeline\" in the system", + "key": "pipeline", + "tag": "Observability Pipelines", + "operationId": "CreatePipeline" + }, { "parameters": [ { diff --git a/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature b/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature new file mode 100644 index 00000000000..f66f9bc6185 --- /dev/null +++ b/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature @@ -0,0 +1,124 @@ +@endpoint(observability-pipelines) @endpoint(observability-pipelines-v2) +Feature: Observability Pipelines + Observability Pipelines allows you to collect and process logs within your + own infrastructure, and then route them 
to downstream integrations. + + Background: + Given a valid "apiKeyAuth" key in the system + And a valid "appKeyAuth" key in the system + And an instance of "ObservabilityPipelines" API + + @team:DataDog/observability-pipelines + Scenario: Create a new pipeline returns "Bad Request" response + Given operation "CreatePipeline" enabled + And new "CreatePipeline" request + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "type": "filter"}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + When the request is sent + Then the response status is 400 Bad Request + + @generated @skip @team:DataDog/observability-pipelines + Scenario: Create a new pipeline returns "Conflict" response + Given operation "CreatePipeline" enabled + And new "CreatePipeline" request + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"id": "filter-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "type": "filter"}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + When the request is sent + Then the response status is 409 Conflict + + @team:DataDog/observability-pipelines + Scenario: Create a new pipeline returns "OK" response + Given operation "CreatePipeline" enabled + And new "CreatePipeline" request + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"id": "filter-processor", "include": "service:my-service", 
"inputs": ["datadog-agent-source"], "type": "filter"}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + When the request is sent + Then the response status is 201 OK + And the response "data" has field "id" + And the response "data.type" is equal to "pipelines" + And the response "data.attributes.name" is equal to "Main Observability Pipeline" + And the response "data.attributes.config.sources" has length 1 + And the response "data.attributes.config.processors" has length 1 + And the response "data.attributes.config.destinations" has length 1 + + @generated @skip @team:DataDog/observability-pipelines + Scenario: Delete a pipeline returns "Conflict" response + Given operation "DeletePipeline" enabled + And new "DeletePipeline" request + And request contains "pipeline_id" parameter from "REPLACE.ME" + When the request is sent + Then the response status is 409 Conflict + + @team:DataDog/observability-pipelines + Scenario: Delete a pipeline returns "Not Found" response + Given operation "DeletePipeline" enabled + And new "DeletePipeline" request + And request contains "pipeline_id" parameter with value "3fa85f64-5717-4562-b3fc-2c963f66afa6" + When the request is sent + Then the response status is 404 Not Found + + @team:DataDog/observability-pipelines + Scenario: Delete a pipeline returns "OK" response + Given operation "DeletePipeline" enabled + And there is a valid "pipeline" in the system + And new "DeletePipeline" request + And request contains "pipeline_id" parameter from "pipeline.data.id" + When the request is sent + Then the response status is 204 OK + + @team:DataDog/observability-pipelines + Scenario: Get a specific pipeline returns "OK" response + Given operation "GetPipeline" enabled + And there is a valid "pipeline" in the system + And new "GetPipeline" request + And request contains "pipeline_id" parameter from "pipeline.data.id" + When the request is sent + Then the 
response status is 200 OK + And the response "data" has field "id" + And the response "data.type" is equal to "pipelines" + And the response "data.attributes.name" is equal to "Main Observability Pipeline" + And the response "data.attributes.config.sources" has length 1 + And the response "data.attributes.config.processors" has length 1 + And the response "data.attributes.config.destinations" has length 1 + + @team:DataDog/observability-pipelines + Scenario: Update a pipeline returns "Bad Request" response + Given operation "UpdatePipeline" enabled + And new "UpdatePipeline" request + And there is a valid "pipeline" in the system + And request contains "pipeline_id" parameter from "pipeline.data.id" + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "type": "filter"}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + When the request is sent + Then the response status is 400 Bad Request + + @generated @skip @team:DataDog/observability-pipelines + Scenario: Update a pipeline returns "Conflict" response + Given operation "UpdatePipeline" enabled + And new "UpdatePipeline" request + And request contains "pipeline_id" parameter from "REPLACE.ME" + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"id": "filter-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "type": "filter"}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} 
+ When the request is sent + Then the response status is 409 Conflict + + @team:DataDog/observability-pipelines + Scenario: Update a pipeline returns "Not Found" response + Given operation "UpdatePipeline" enabled + And new "UpdatePipeline" request + And request contains "pipeline_id" parameter with value "3fa85f64-5717-4562-b3fc-2c963f66afa6" + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"id": "filter-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "type": "filter"}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + When the request is sent + Then the response status is 404 Not Found + + @team:DataDog/observability-pipelines + Scenario: Update a pipeline returns "OK" response + Given operation "UpdatePipeline" enabled + And there is a valid "pipeline" in the system + And new "UpdatePipeline" request + And request contains "pipeline_id" parameter from "pipeline.data.id" + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "updated-datadog-logs-destination-id", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"id": "filter-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "type": "filter"}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Updated Pipeline Name"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + When the request is sent + Then the response status is 200 OK + And the response "data" has field "id" + And the response "data.type" is equal to "pipelines" + And the response "data.attributes.name" is equal to "Updated Pipeline Name" + And the response "data.attributes.config.sources" has length 1 + And the response 
"data.attributes.config.processors" has length 1 + And the response "data.attributes.config.destinations" has length 1 + And the response "data.attributes.config.destinations[0].id" is equal to "updated-datadog-logs-destination-id" diff --git a/src/test/resources/com/datadog/api/client/v2/api/undo.json b/src/test/resources/com/datadog/api/client/v2/api/undo.json index f8e2c4ef1bd..72669c55bab 100644 --- a/src/test/resources/com/datadog/api/client/v2/api/undo.json +++ b/src/test/resources/com/datadog/api/client/v2/api/undo.json @@ -1964,6 +1964,37 @@ "type": "safe" } }, + "CreatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "operationId": "DeletePipeline", + "parameters": [ + { + "name": "pipeline_id", + "source": "data.id" + } + ], + "type": "unsafe" + } + }, + "DeletePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "idempotent" + } + }, + "GetPipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "UpdatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "idempotent" + } + }, "DeleteRestrictionPolicy": { "tag": "Restriction Policies", "undo": {