diff --git a/.apigentools-info b/.apigentools-info
index f50525e68cf..964e856f905 100644
--- a/.apigentools-info
+++ b/.apigentools-info
@@ -4,13 +4,13 @@
"spec_versions": {
"v1": {
"apigentools_version": "1.6.6",
- "regenerated": "2025-04-04 20:19:28.929107",
- "spec_repo_commit": "3909ab62"
+ "regenerated": "2025-04-07 18:49:32.102281",
+ "spec_repo_commit": "d0287df0"
},
"v2": {
"apigentools_version": "1.6.6",
- "regenerated": "2025-04-04 20:19:28.944541",
- "spec_repo_commit": "3909ab62"
+ "regenerated": "2025-04-07 18:49:32.119706",
+ "spec_repo_commit": "d0287df0"
}
}
}
\ No newline at end of file
diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml
index 87935c5f60b..4070b85967e 100644
--- a/.generator/schemas/v2/openapi.yaml
+++ b/.generator/schemas/v2/openapi.yaml
@@ -21478,6 +21478,668 @@ components:
- id
- type
type: object
+ ObservabilityPipeline:
+ description: Top-level schema representing a pipeline.
+ properties:
+ data:
+ $ref: '#/components/schemas/ObservabilityPipelineData'
+ required:
+ - data
+ type: object
+ ObservabilityPipelineAddFieldsProcessor:
+ description: The `add_fields` processor adds static key-value fields to logs.
+ properties:
+ fields:
+ description: A list of static fields (key-value pairs) that is added to
+ each log event processed by this component.
+ items:
+ $ref: '#/components/schemas/ObservabilityPipelineFieldValue'
+ type: array
+ id:
+ description: The unique identifier for this component. Used to reference
+ this component in other parts of the pipeline (for example, as the `input`
+ to downstream components).
+ example: add-fields-processor
+ type: string
+ include:
+ description: A Datadog search query used to determine which logs this processor
+ targets.
+ example: service:my-service
+ type: string
+ inputs:
+ description: A list of component IDs whose output is used as the `input`
+ for this component.
+ example:
+ - datadog-agent-source
+ items:
+ type: string
+ type: array
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessorType'
+ required:
+ - id
+ - type
+ - include
+ - fields
+ - inputs
+ type: object
+ ObservabilityPipelineAddFieldsProcessorType:
+ default: add_fields
+ description: The processor type. The value should always be `add_fields`.
+ enum:
+ - add_fields
+ example: add_fields
+ type: string
+ x-enum-varnames:
+ - ADD_FIELDS
+ ObservabilityPipelineConfig:
+ description: Specifies the pipeline's configuration, including its sources,
+ processors, and destinations.
+ properties:
+ destinations:
+ description: A list of destination components where processed logs are sent.
+ example:
+ - id: datadog-logs-destination
+ inputs:
+ - filter-processor
+ type: datadog_logs
+ items:
+ $ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem'
+ type: array
+ processors:
+ description: A list of processors that transform or enrich log data.
+ example:
+ - id: filter-processor
+ include: service:my-service
+ inputs:
+ - datadog-agent-source
+ type: filter
+ items:
+ $ref: '#/components/schemas/ObservabilityPipelineConfigProcessorItem'
+ type: array
+ sources:
+ description: A list of configured data sources for the pipeline.
+ example:
+ - id: datadog-agent-source
+ type: datadog_agent
+ items:
+ $ref: '#/components/schemas/ObservabilityPipelineConfigSourceItem'
+ type: array
+ required:
+ - sources
+ - processors
+ - destinations
+ type: object
+ ObservabilityPipelineConfigDestinationItem:
+ description: A destination for the pipeline.
+ oneOf:
+ - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination'
+ ObservabilityPipelineConfigProcessorItem:
+ description: A processor for the pipeline.
+ oneOf:
+ - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor'
+ - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor'
+ ObservabilityPipelineConfigSourceItem:
+ description: A data source for the pipeline.
+ oneOf:
+ - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource'
+ - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource'
+ ObservabilityPipelineCreateRequest:
+ description: Top-level schema representing a pipeline.
+ properties:
+ data:
+ $ref: '#/components/schemas/ObservabilityPipelineCreateRequestData'
+ required:
+ - data
+ type: object
+ ObservabilityPipelineCreateRequestData:
+ description: "Contains the pipeline\u2019s ID, type, and configuration attributes."
+ properties:
+ attributes:
+ $ref: '#/components/schemas/ObservabilityPipelineDataAttributes'
+ type:
+ default: pipelines
+ description: The resource type identifier. For pipeline resources, this
+ should always be set to `pipelines`.
+ example: pipelines
+ type: string
+ required:
+ - type
+ - attributes
+ type: object
+ ObservabilityPipelineData:
+ description: "Contains the pipeline\u2019s ID, type, and configuration attributes."
+ properties:
+ attributes:
+ $ref: '#/components/schemas/ObservabilityPipelineDataAttributes'
+ id:
+ description: Unique identifier for the pipeline.
+ example: 3fa85f64-5717-4562-b3fc-2c963f66afa6
+ type: string
+ type:
+ default: pipelines
+ description: The resource type identifier. For pipeline resources, this
+ should always be set to `pipelines`.
+ example: pipelines
+ type: string
+ required:
+ - id
+ - type
+ - attributes
+ type: object
+ ObservabilityPipelineDataAttributes:
+ description: "Defines the pipeline\u2019s name and its components (sources,
+ processors, and destinations)."
+ properties:
+ config:
+ $ref: '#/components/schemas/ObservabilityPipelineConfig'
+ name:
+ description: Name of the pipeline.
+ example: Main Observability Pipeline
+ type: string
+ required:
+ - name
+ - config
+ type: object
+ ObservabilityPipelineDatadogAgentSource:
+ description: The `datadog_agent` source collects logs from the Datadog Agent.
+ properties:
+ id:
+ description: The unique identifier for this component. Used to reference
+ this component in other parts of the pipeline (e.g., as input to downstream
+ components).
+ example: datadog-agent-source
+ type: string
+ tls:
+ $ref: '#/components/schemas/ObservabilityPipelineTls'
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSourceType'
+ required:
+ - id
+ - type
+ type: object
+ ObservabilityPipelineDatadogAgentSourceType:
+ default: datadog_agent
+ description: The source type. The value should always be `datadog_agent`.
+ enum:
+ - datadog_agent
+ example: datadog_agent
+ type: string
+ x-enum-varnames:
+ - DATADOG_AGENT
+ ObservabilityPipelineDatadogLogsDestination:
+ description: The `datadog_logs` destination forwards logs to Datadog Log Management.
+ properties:
+ id:
+ description: The unique identifier for this component.
+ example: datadog-logs-destination
+ type: string
+ inputs:
+ description: A list of component IDs whose output is used as the `input`
+ for this component.
+ example:
+ - filter-processor
+ items:
+ type: string
+ type: array
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestinationType'
+ required:
+ - id
+ - type
+ - inputs
+ type: object
+ ObservabilityPipelineDatadogLogsDestinationType:
+ default: datadog_logs
+ description: The destination type. The value should always be `datadog_logs`.
+ enum:
+ - datadog_logs
+ example: datadog_logs
+ type: string
+ x-enum-varnames:
+ - DATADOG_LOGS
+ ObservabilityPipelineFieldValue:
+ description: Represents a static key-value pair used in various processors.
+ properties:
+ name:
+ description: The field name.
+ example: field_name
+ type: string
+ value:
+ description: The field value.
+ example: field_value
+ type: string
+ required:
+ - name
+ - value
+ type: object
+ ObservabilityPipelineFilterProcessor:
+ description: The `filter` processor allows conditional processing of logs based
+ on a Datadog search query. Logs that match the `include` query are passed
+ through; others are discarded.
+ properties:
+ id:
+ description: The unique identifier for this component. Used to reference
+ this component in other parts of the pipeline (for example, as the `input`
+ to downstream components).
+ example: filter-processor
+ type: string
+ include:
+ description: A Datadog search query used to determine which logs should
+ pass through the filter. Logs that match this query continue to downstream
+ components; others are dropped.
+ example: service:my-service
+ type: string
+ inputs:
+ description: A list of component IDs whose output is used as the `input`
+ for this component.
+ example:
+ - datadog-agent-source
+ items:
+ type: string
+ type: array
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineFilterProcessorType'
+ required:
+ - id
+ - type
+ - include
+ - inputs
+ type: object
+ ObservabilityPipelineFilterProcessorType:
+ default: filter
+ description: The processor type. The value should always be `filter`.
+ enum:
+ - filter
+ example: filter
+ type: string
+ x-enum-varnames:
+ - FILTER
+ ObservabilityPipelineKafkaSource:
+ description: The `kafka` source ingests data from Apache Kafka topics.
+ properties:
+ group_id:
+ description: Consumer group ID used by the Kafka client.
+ example: consumer-group-0
+ type: string
+ id:
+ description: The unique identifier for this component. Used to reference
+ this component in other parts of the pipeline (e.g., as input to downstream
+ components).
+ example: kafka-source
+ type: string
+ librdkafka_options:
+ description: Optional list of advanced Kafka client configuration options,
+ defined as key-value pairs.
+ items:
+ $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption'
+ type: array
+ sasl:
+ $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl'
+ tls:
+ $ref: '#/components/schemas/ObservabilityPipelineTls'
+ topics:
+ description: A list of Kafka topic names to subscribe to. The source ingests
+ messages from each topic specified.
+ example:
+ - topic1
+ - topic2
+ items:
+ type: string
+ type: array
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceType'
+ required:
+ - id
+ - type
+ - group_id
+ - topics
+ type: object
+ ObservabilityPipelineKafkaSourceLibrdkafkaOption:
+ description: Represents a key-value pair used to configure low-level `librdkafka`
+ client options for Kafka sources, such as timeouts, buffer sizes, and security
+ settings.
+ properties:
+ name:
+ description: The name of the `librdkafka` configuration option to set.
+ example: fetch.message.max.bytes
+ type: string
+ value:
+ description: The value assigned to the specified `librdkafka` configuration
+ option.
+ example: '1048576'
+ type: string
+ required:
+ - name
+ - value
+ type: object
+ ObservabilityPipelineKafkaSourceSasl:
+ description: Specifies the SASL mechanism for authenticating with a Kafka cluster.
+ properties:
+ mechanism:
+ $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism'
+ type: object
+ ObservabilityPipelineKafkaSourceType:
+ default: kafka
+ description: The source type. The value should always be `kafka`.
+ enum:
+ - kafka
+ example: kafka
+ type: string
+ x-enum-varnames:
+ - KAFKA
+ ObservabilityPipelineParseJSONProcessor:
+ description: The `parse_json` processor extracts JSON from a specified field
+ and flattens it into the event. This is useful when logs contain embedded
+ JSON as a string.
+ properties:
+ field:
+ description: The name of the log field that contains a JSON string.
+ example: message
+ type: string
+ id:
+ description: A unique identifier for this component. Used to reference this
+ component in other parts of the pipeline (e.g., as input to downstream
+ components).
+ example: parse-json-processor
+ type: string
+ include:
+ description: A Datadog search query used to determine which logs this processor
+ targets.
+ example: service:my-service
+ type: string
+ inputs:
+ description: A list of component IDs whose output is used as the `input`
+ for this component.
+ example:
+ - datadog-agent-source
+ items:
+ type: string
+ type: array
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessorType'
+ required:
+ - id
+ - type
+ - include
+ - field
+ - inputs
+ type: object
+ ObservabilityPipelineParseJSONProcessorType:
+ default: parse_json
+ description: The processor type. The value should always be `parse_json`.
+ enum:
+ - parse_json
+ example: parse_json
+ type: string
+ x-enum-varnames:
+ - PARSE_JSON
+ ObservabilityPipelinePipelineKafkaSourceSaslMechanism:
+ description: SASL mechanism used for Kafka authentication.
+ enum:
+ - PLAIN
+ - SCRAM-SHA-256
+ - SCRAM-SHA-512
+ type: string
+ x-enum-varnames:
+ - PLAIN
+ - SCRAMNOT_SHANOT_256
+ - SCRAMNOT_SHANOT_512
+ ObservabilityPipelineQuotaProcessor:
+ description: The Quota Processor measures logging traffic for logs that match
+ a specified filter. When the configured daily quota is met, the processor
+ can drop or alert.
+ properties:
+ drop_events:
+          description: If set to `true`, logs that matched the quota filter and are
+            sent after the quota has been met are dropped; only logs that did not
+            match the filter query continue through the pipeline.
+ example: false
+ type: boolean
+ id:
+ description: The unique identifier for this component. Used to reference
+ this component in other parts of the pipeline (for example, as the `input`
+ to downstream components).
+ example: quota-processor
+ type: string
+ ignore_when_missing_partitions:
+ description: If `true`, the processor skips quota checks when partition
+ fields are missing from the logs.
+ type: boolean
+ include:
+ description: A Datadog search query used to determine which logs this processor
+ targets.
+ example: service:my-service
+ type: string
+ inputs:
+ description: A list of component IDs whose output is used as the `input`
+ for this component.
+ example:
+ - datadog-agent-source
+ items:
+ type: string
+ type: array
+ limit:
+ $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit'
+ name:
+ description: Name for identifying the processor.
+ example: MyPipelineQuotaProcessor
+ type: string
+ overrides:
+ description: A list of alternate quota rules that apply to specific sets
+ of events, identified by matching field values. Each override can define
+ a custom limit.
+ items:
+ $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverride'
+ type: array
+ partition_fields:
+ description: A list of fields used to segment log traffic for quota enforcement.
+ Quotas are tracked independently by unique combinations of these field
+ values.
+ items:
+ type: string
+ type: array
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType'
+ required:
+ - id
+ - type
+ - include
+ - name
+ - drop_events
+ - limit
+ - inputs
+ type: object
+ ObservabilityPipelineQuotaProcessorLimit:
+ description: The maximum amount of data or number of events allowed before the
+ quota is enforced. Can be specified in bytes or events.
+ properties:
+ enforce:
+ $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimitEnforceType'
+ limit:
+ description: The limit for quota enforcement.
+ example: 1000
+ format: int64
+ type: integer
+ required:
+ - enforce
+ - limit
+ type: object
+ ObservabilityPipelineQuotaProcessorLimitEnforceType:
+ description: Unit for quota enforcement in bytes for data size or events for
+ count.
+ enum:
+ - bytes
+ - events
+ example: bytes
+ type: string
+ x-enum-varnames:
+ - BYTES
+ - EVENTS
+ ObservabilityPipelineQuotaProcessorOverride:
+ description: Defines a custom quota limit that applies to specific log events
+ based on matching field values.
+ properties:
+ fields:
+ description: A list of field matchers used to apply a specific override.
+ If an event matches all listed key-value pairs, the corresponding override
+ limit is enforced.
+ items:
+ $ref: '#/components/schemas/ObservabilityPipelineFieldValue'
+ type: array
+ limit:
+ $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit'
+ required:
+ - fields
+ - limit
+ type: object
+ ObservabilityPipelineQuotaProcessorType:
+ default: quota
+ description: The processor type. The value should always be `quota`.
+ enum:
+ - quota
+ example: quota
+ type: string
+ x-enum-varnames:
+ - QUOTA
+ ObservabilityPipelineRemoveFieldsProcessor:
+ description: The `remove_fields` processor deletes specified fields from logs.
+ properties:
+ fields:
+ description: A list of field names to be removed from each log event.
+ example:
+ - field1
+ - field2
+ items:
+ type: string
+ type: array
+ id:
+ description: The unique identifier for this component. Used to reference
+ this component in other parts of the pipeline (e.g., as input to downstream
+ components).
+ example: remove-fields-processor
+ type: string
+ include:
+ description: A Datadog search query used to determine which logs this processor
+ targets.
+ example: service:my-service
+ type: string
+ inputs:
+          description: A list of component IDs whose output is used as the input.
+ example:
+ - datadog-agent-source
+ items:
+ type: string
+ type: array
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessorType'
+ required:
+ - id
+ - type
+ - include
+ - fields
+ - inputs
+ type: object
+ ObservabilityPipelineRemoveFieldsProcessorType:
+ default: remove_fields
+ description: The processor type. The value should always be `remove_fields`.
+ enum:
+ - remove_fields
+ example: remove_fields
+ type: string
+ x-enum-varnames:
+ - REMOVE_FIELDS
+ ObservabilityPipelineRenameFieldsProcessor:
+ description: The `rename_fields` processor changes field names.
+ properties:
+ fields:
+ description: A list of rename rules specifying which fields to rename in
+ the event, what to rename them to, and whether to preserve the original
+ fields.
+ items:
+ $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorField'
+ type: array
+ id:
+ description: A unique identifier for this component. Used to reference this
+ component in other parts of the pipeline (e.g., as input to downstream
+ components).
+ example: rename-fields-processor
+ type: string
+ include:
+ description: A Datadog search query used to determine which logs this processor
+ targets.
+ example: service:my-service
+ type: string
+ inputs:
+ description: A list of component IDs whose output is used as the `input`
+ for this component.
+ example:
+ - datadog-agent-source
+ items:
+ type: string
+ type: array
+ type:
+ $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorType'
+ required:
+ - id
+ - type
+ - include
+ - fields
+ - inputs
+ type: object
+ ObservabilityPipelineRenameFieldsProcessorField:
+ description: Defines how to rename a field in log events.
+ properties:
+ destination:
+ description: The field name to assign the renamed value to.
+ example: destination_field
+ type: string
+ preserve_source:
+ description: Indicates whether the original field, that is received from
+ the source, should be kept (`true`) or removed (`false`) after renaming.
+ example: false
+ type: boolean
+ source:
+ description: The original field name in the log event that should be renamed.
+ example: source_field
+ type: string
+ required:
+ - source
+ - destination
+ - preserve_source
+ type: object
+ ObservabilityPipelineRenameFieldsProcessorType:
+ default: rename_fields
+ description: The processor type. The value should always be `rename_fields`.
+ enum:
+ - rename_fields
+ example: rename_fields
+ type: string
+ x-enum-varnames:
+ - RENAME_FIELDS
+ ObservabilityPipelineTls:
+ description: Configuration for enabling TLS encryption.
+ properties:
+ ca_file:
+ description: "Path to the Certificate Authority (CA) file used to validate
+ the server\u2019s TLS certificate."
+ type: string
+ crt_file:
+ description: Path to the TLS client certificate file used to authenticate
+ the pipeline component with upstream or downstream services.
+ example: /path/to/cert.crt
+ type: string
+ key_file:
+ description: Path to the private key file associated with the TLS client
+ certificate. Used for mutual TLS authentication.
+ type: string
+ required:
+ - crt_file
+ type: object
OktaAccount:
description: Schema for an Okta account.
properties:
@@ -45671,6 +46333,167 @@ paths:
summary: Get the latest CSM Threats policy
tags:
- CSM Threats
+ /api/v2/remote_config/products/obs_pipelines/pipelines:
+ post:
+ description: Create a new pipeline.
+ operationId: CreatePipeline
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ObservabilityPipelineCreateRequest'
+ required: true
+ responses:
+ '201':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ObservabilityPipeline'
+ description: OK
+ '400':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/APIErrorResponse'
+ description: Bad Request
+ '403':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/APIErrorResponse'
+ description: Forbidden
+ '409':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/APIErrorResponse'
+ description: Conflict
+ '429':
+ $ref: '#/components/responses/TooManyRequestsResponse'
+ summary: Create a new pipeline
+ tags:
+ - Observability Pipelines
+ x-unstable: '**Note**: This endpoint is in Preview.'
+ /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}:
+ delete:
+ description: Delete a pipeline.
+ operationId: DeletePipeline
+ parameters:
+ - description: The ID of the pipeline to delete.
+ in: path
+ name: pipeline_id
+ required: true
+ schema:
+ type: string
+ responses:
+ '204':
+ description: OK
+ '403':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/APIErrorResponse'
+ description: Forbidden
+ '404':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/APIErrorResponse'
+ description: Not Found
+ '409':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/APIErrorResponse'
+ description: Conflict
+ '429':
+ $ref: '#/components/responses/TooManyRequestsResponse'
+ summary: Delete a pipeline
+ tags:
+ - Observability Pipelines
+ x-unstable: '**Note**: This endpoint is in Preview.'
+ get:
+ description: Get a specific pipeline by its ID.
+ operationId: GetPipeline
+ parameters:
+ - description: The ID of the pipeline to retrieve.
+ in: path
+ name: pipeline_id
+ required: true
+ schema:
+ type: string
+ responses:
+ '200':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ObservabilityPipeline'
+ description: OK
+ '403':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/APIErrorResponse'
+ description: Forbidden
+ '429':
+ $ref: '#/components/responses/TooManyRequestsResponse'
+ summary: Get a specific pipeline
+ tags:
+ - Observability Pipelines
+ x-unstable: '**Note**: This endpoint is in Preview.'
+ put:
+ description: Update a pipeline.
+ operationId: UpdatePipeline
+ parameters:
+ - description: The ID of the pipeline to update.
+ in: path
+ name: pipeline_id
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ObservabilityPipeline'
+ required: true
+ responses:
+ '200':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ObservabilityPipeline'
+ description: OK
+ '400':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/APIErrorResponse'
+ description: Bad Request
+ '403':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/APIErrorResponse'
+ description: Forbidden
+ '404':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/APIErrorResponse'
+ description: Not Found
+ '409':
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/APIErrorResponse'
+ description: Conflict
+ '429':
+ $ref: '#/components/responses/TooManyRequestsResponse'
+ summary: Update a pipeline
+ tags:
+ - Observability Pipelines
+ x-unstable: '**Note**: This endpoint is in Preview.'
/api/v2/restriction_policy/{resource_id}:
delete:
description: Deletes the restriction policy associated with a specified resource.
@@ -54015,6 +54838,12 @@ tags:
and their attributes. See the [Network Device Monitoring page](https://docs.datadoghq.com/network_monitoring/)
for more information.
name: Network Device Monitoring
+- description: Observability Pipelines allows you to collect and process logs within
+ your own infrastructure, and then route them to downstream integrations.
+ externalDocs:
+ description: Find out more at
+ url: https://docs.datadoghq.com/observability_pipelines/
+ name: Observability Pipelines
- description: Configure your [Datadog Okta integration](https://docs.datadoghq.com/integrations/okta/)
directly through the Datadog API.
name: Okta Integration
diff --git a/examples/v2/observability-pipelines/CreatePipeline.java b/examples/v2/observability-pipelines/CreatePipeline.java
new file mode 100644
index 00000000000..0b0aca9369e
--- /dev/null
+++ b/examples/v2/observability-pipelines/CreatePipeline.java
@@ -0,0 +1,81 @@
+// Create a new pipeline returns "OK" response
+
+import com.datadog.api.client.ApiClient;
+import com.datadog.api.client.ApiException;
+import com.datadog.api.client.v2.api.ObservabilityPipelinesApi;
+import com.datadog.api.client.v2.model.ObservabilityPipeline;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfig;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfigDestinationItem;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorItem;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfigSourceItem;
+import com.datadog.api.client.v2.model.ObservabilityPipelineCreateRequest;
+import com.datadog.api.client.v2.model.ObservabilityPipelineCreateRequestData;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDataAttributes;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSource;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSourceType;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestination;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestinationType;
+import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessor;
+import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessorType;
+import java.util.Collections;
+
+public class Example {
+ public static void main(String[] args) {
+ ApiClient defaultClient = ApiClient.getDefaultApiClient();
+ defaultClient.setUnstableOperationEnabled("v2.createPipeline", true);
+ ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient);
+
+ ObservabilityPipelineCreateRequest body =
+ new ObservabilityPipelineCreateRequest()
+ .data(
+ new ObservabilityPipelineCreateRequestData()
+ .attributes(
+ new ObservabilityPipelineDataAttributes()
+ .config(
+ new ObservabilityPipelineConfig()
+ .destinations(
+ Collections.singletonList(
+ new ObservabilityPipelineConfigDestinationItem(
+ new ObservabilityPipelineDatadogLogsDestination()
+ .id("datadog-logs-destination")
+ .inputs(
+ Collections.singletonList(
+ "filter-processor"))
+ .type(
+ ObservabilityPipelineDatadogLogsDestinationType
+ .DATADOG_LOGS))))
+ .processors(
+ Collections.singletonList(
+ new ObservabilityPipelineConfigProcessorItem(
+ new ObservabilityPipelineFilterProcessor()
+ .id("filter-processor")
+ .include("service:my-service")
+ .inputs(
+ Collections.singletonList(
+ "datadog-agent-source"))
+ .type(
+ ObservabilityPipelineFilterProcessorType
+ .FILTER))))
+ .sources(
+ Collections.singletonList(
+ new ObservabilityPipelineConfigSourceItem(
+ new ObservabilityPipelineDatadogAgentSource()
+ .id("datadog-agent-source")
+ .type(
+ ObservabilityPipelineDatadogAgentSourceType
+ .DATADOG_AGENT)))))
+ .name("Main Observability Pipeline"))
+ .type("pipelines"));
+
+ try {
+ ObservabilityPipeline result = apiInstance.createPipeline(body);
+ System.out.println(result);
+ } catch (ApiException e) {
+ System.err.println("Exception when calling ObservabilityPipelinesApi#createPipeline");
+ System.err.println("Status code: " + e.getCode());
+ System.err.println("Reason: " + e.getResponseBody());
+ System.err.println("Response headers: " + e.getResponseHeaders());
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/examples/v2/observability-pipelines/DeletePipeline.java b/examples/v2/observability-pipelines/DeletePipeline.java
new file mode 100644
index 00000000000..ccbb1723426
--- /dev/null
+++ b/examples/v2/observability-pipelines/DeletePipeline.java
@@ -0,0 +1,26 @@
+// Delete a pipeline returns "OK" response
+
+import com.datadog.api.client.ApiClient;
+import com.datadog.api.client.ApiException;
+import com.datadog.api.client.v2.api.ObservabilityPipelinesApi;
+
+public class Example {
+ public static void main(String[] args) {
+ ApiClient defaultClient = ApiClient.getDefaultApiClient();
+ defaultClient.setUnstableOperationEnabled("v2.deletePipeline", true);
+ ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient);
+
+ // there is a valid "pipeline" in the system
+ String PIPELINE_DATA_ID = System.getenv("PIPELINE_DATA_ID");
+
+ try {
+ apiInstance.deletePipeline(PIPELINE_DATA_ID);
+ } catch (ApiException e) {
+ System.err.println("Exception when calling ObservabilityPipelinesApi#deletePipeline");
+ System.err.println("Status code: " + e.getCode());
+ System.err.println("Reason: " + e.getResponseBody());
+ System.err.println("Response headers: " + e.getResponseHeaders());
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/examples/v2/observability-pipelines/GetPipeline.java b/examples/v2/observability-pipelines/GetPipeline.java
new file mode 100644
index 00000000000..39c8942b9da
--- /dev/null
+++ b/examples/v2/observability-pipelines/GetPipeline.java
@@ -0,0 +1,28 @@
+// Get a specific pipeline returns "OK" response
+
+import com.datadog.api.client.ApiClient;
+import com.datadog.api.client.ApiException;
+import com.datadog.api.client.v2.api.ObservabilityPipelinesApi;
+import com.datadog.api.client.v2.model.ObservabilityPipeline;
+
+public class Example {
+ public static void main(String[] args) {
+ ApiClient defaultClient = ApiClient.getDefaultApiClient();
+ defaultClient.setUnstableOperationEnabled("v2.getPipeline", true);
+ ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient);
+
+ // there is a valid "pipeline" in the system
+ String PIPELINE_DATA_ID = System.getenv("PIPELINE_DATA_ID");
+
+ try {
+ ObservabilityPipeline result = apiInstance.getPipeline(PIPELINE_DATA_ID);
+ System.out.println(result);
+ } catch (ApiException e) {
+ System.err.println("Exception when calling ObservabilityPipelinesApi#getPipeline");
+ System.err.println("Status code: " + e.getCode());
+ System.err.println("Reason: " + e.getResponseBody());
+ System.err.println("Response headers: " + e.getResponseHeaders());
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/examples/v2/observability-pipelines/UpdatePipeline.java b/examples/v2/observability-pipelines/UpdatePipeline.java
new file mode 100644
index 00000000000..0f8df0509d3
--- /dev/null
+++ b/examples/v2/observability-pipelines/UpdatePipeline.java
@@ -0,0 +1,84 @@
+// Update a pipeline returns "OK" response
+
+import com.datadog.api.client.ApiClient;
+import com.datadog.api.client.ApiException;
+import com.datadog.api.client.v2.api.ObservabilityPipelinesApi;
+import com.datadog.api.client.v2.model.ObservabilityPipeline;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfig;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfigDestinationItem;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorItem;
+import com.datadog.api.client.v2.model.ObservabilityPipelineConfigSourceItem;
+import com.datadog.api.client.v2.model.ObservabilityPipelineData;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDataAttributes;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSource;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSourceType;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestination;
+import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestinationType;
+import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessor;
+import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessorType;
+import java.util.Collections;
+
+public class Example {
+  public static void main(String[] args) {
+    ApiClient defaultClient = ApiClient.getDefaultApiClient();
+    defaultClient.setUnstableOperationEnabled("v2.updatePipeline", true);
+    ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient);
+
+    // there is a valid "pipeline" in the system
+    String PIPELINE_DATA_ID = System.getenv("PIPELINE_DATA_ID");
+
+    // Logs flow: Datadog Agent source -> filter processor -> Datadog Logs destination.
+    ObservabilityPipelineConfigSourceItem source =
+        new ObservabilityPipelineConfigSourceItem(
+            new ObservabilityPipelineDatadogAgentSource()
+                .id("datadog-agent-source")
+                .type(ObservabilityPipelineDatadogAgentSourceType.DATADOG_AGENT));
+
+    // Keep only logs matching the include query.
+    ObservabilityPipelineConfigProcessorItem processor =
+        new ObservabilityPipelineConfigProcessorItem(
+            new ObservabilityPipelineFilterProcessor()
+                .id("filter-processor")
+                .include("service:my-service")
+                .inputs(Collections.singletonList("datadog-agent-source"))
+                .type(ObservabilityPipelineFilterProcessorType.FILTER));
+
+    // Send the filtered logs to Datadog.
+    ObservabilityPipelineConfigDestinationItem destination =
+        new ObservabilityPipelineConfigDestinationItem(
+            new ObservabilityPipelineDatadogLogsDestination()
+                .id("updated-datadog-logs-destination-id")
+                .inputs(Collections.singletonList("filter-processor"))
+                .type(ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS));
+
+    // Assemble the pipeline graph.
+    ObservabilityPipelineConfig config =
+        new ObservabilityPipelineConfig()
+            .destinations(Collections.singletonList(destination))
+            .processors(Collections.singletonList(processor))
+            .sources(Collections.singletonList(source));
+
+    ObservabilityPipelineData data =
+        new ObservabilityPipelineData()
+            .attributes(
+                new ObservabilityPipelineDataAttributes()
+                    .config(config)
+                    .name("Updated Pipeline Name"))
+            .id(PIPELINE_DATA_ID)
+            .type("pipelines");
+
+    // Wrap the data payload in the top-level request object.
+    ObservabilityPipeline body = new ObservabilityPipeline().data(data);
+
+    try {
+      ObservabilityPipeline result = apiInstance.updatePipeline(PIPELINE_DATA_ID, body);
+      System.out.println(result);
+    } catch (ApiException e) {
+      System.err.println("Exception when calling ObservabilityPipelinesApi#updatePipeline");
+      System.err.println("Status code: " + e.getCode());
+      System.err.println("Reason: " + e.getResponseBody());
+      System.err.println("Response headers: " + e.getResponseHeaders());
+      e.printStackTrace();
+    }
+  }
+}
diff --git a/src/main/java/com/datadog/api/client/ApiClient.java b/src/main/java/com/datadog/api/client/ApiClient.java
index 3e0e7436dd7..cd61faea3a8 100644
--- a/src/main/java/com/datadog/api/client/ApiClient.java
+++ b/src/main/java/com/datadog/api/client/ApiClient.java
@@ -422,6 +422,10 @@ public class ApiClient {
put("v2.listVulnerableAssets", false);
put("v2.muteFindings", false);
put("v2.runHistoricalJob", false);
+ put("v2.createPipeline", false);
+ put("v2.deletePipeline", false);
+ put("v2.getPipeline", false);
+ put("v2.updatePipeline", false);
put("v2.createScorecardOutcomesBatch", false);
put("v2.createScorecardRule", false);
put("v2.deleteScorecardRule", false);
diff --git a/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java b/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java
new file mode 100644
index 00000000000..0b67bc579b5
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java
@@ -0,0 +1,679 @@
+package com.datadog.api.client.v2.api;
+
+import com.datadog.api.client.ApiClient;
+import com.datadog.api.client.ApiException;
+import com.datadog.api.client.ApiResponse;
+import com.datadog.api.client.Pair;
+import com.datadog.api.client.v2.model.ObservabilityPipeline;
+import com.datadog.api.client.v2.model.ObservabilityPipelineCreateRequest;
+import jakarta.ws.rs.client.Invocation;
+import jakarta.ws.rs.core.GenericType;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+
+@jakarta.annotation.Generated(
+    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
+public class ObservabilityPipelinesApi {
+  private ApiClient apiClient;
+
+  public ObservabilityPipelinesApi() {
+    this(ApiClient.getDefaultApiClient());
+  }
+
+  public ObservabilityPipelinesApi(ApiClient apiClient) {
+    this.apiClient = apiClient;
+  }
+
+  /**
+   * Get the API client.
+   *
+   * @return API client
+   */
+  public ApiClient getApiClient() {
+    return apiClient;
+  }
+
+  /**
+   * Set the API client.
+   *
+   * @param apiClient an instance of API client
+   */
+  public void setApiClient(ApiClient apiClient) {
+    this.apiClient = apiClient;
+  }
+
+  /**
+   * Create a new pipeline.
+   *
+   * <p>See {@link #createPipelineWithHttpInfo}.
+   *
+   * @param body (required)
+   * @return ObservabilityPipeline
+   * @throws ApiException if fails to make API call
+   */
+  public ObservabilityPipeline createPipeline(ObservabilityPipelineCreateRequest body)
+      throws ApiException {
+    return createPipelineWithHttpInfo(body).getData();
+  }
+
+  /**
+   * Create a new pipeline.
+   *
+   * <p>See {@link #createPipelineWithHttpInfoAsync}.
+   *
+   * @param body (required)
+   * @return CompletableFuture&lt;ObservabilityPipeline&gt;
+   */
+  public CompletableFuture<ObservabilityPipeline> createPipelineAsync(
+      ObservabilityPipelineCreateRequest body) {
+    return createPipelineWithHttpInfoAsync(body)
+        .thenApply(
+            response -> {
+              return response.getData();
+            });
+  }
+
+  /**
+   * Create a new pipeline.
+   *
+   * @param body (required)
+   * @return ApiResponse&lt;ObservabilityPipeline&gt;
+   * @throws ApiException if fails to make API call
+   * @http.response.details
+   *
+   * Response details
+   * | Status Code | Description | Response Headers |
+   * | 201 | OK | - |
+   * | 400 | Bad Request | - |
+   * | 403 | Forbidden | - |
+   * | 409 | Conflict | - |
+   * | 429 | Too many requests | - |
+   *
+   */
+  public ApiResponse<ObservabilityPipeline> createPipelineWithHttpInfo(
+      ObservabilityPipelineCreateRequest body) throws ApiException {
+    // Check if unstable operation is enabled
+    String operationId = "createPipeline";
+    if (apiClient.isUnstableOperationEnabled("v2." + operationId)) {
+      apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId));
+    } else {
+      throw new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId));
+    }
+    Object localVarPostBody = body;
+
+    // verify the required parameter 'body' is set
+    if (body == null) {
+      throw new ApiException(
+          400, "Missing the required parameter 'body' when calling createPipeline");
+    }
+    // create path and map variables
+    String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines";
+
+    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
+
+    Invocation.Builder builder =
+        apiClient.createBuilder(
+            "v2.ObservabilityPipelinesApi.createPipeline",
+            localVarPath,
+            new ArrayList<Pair>(),
+            localVarHeaderParams,
+            new HashMap<String, String>(),
+            new String[] {"application/json"},
+            new String[] {"apiKeyAuth", "appKeyAuth"});
+    return apiClient.invokeAPI(
+        "POST",
+        builder,
+        localVarHeaderParams,
+        new String[] {"application/json"},
+        localVarPostBody,
+        new HashMap<String, Object>(),
+        false,
+        new GenericType<ObservabilityPipeline>() {});
+  }
+
+  /**
+   * Create a new pipeline.
+   *
+   * <p>See {@link #createPipelineWithHttpInfo}.
+   *
+   * @param body (required)
+   * @return CompletableFuture&lt;ApiResponse&lt;ObservabilityPipeline&gt;&gt;
+   */
+  public CompletableFuture<ApiResponse<ObservabilityPipeline>> createPipelineWithHttpInfoAsync(
+      ObservabilityPipelineCreateRequest body) {
+    // Check if unstable operation is enabled
+    String operationId = "createPipeline";
+    if (apiClient.isUnstableOperationEnabled("v2." + operationId)) {
+      apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId));
+    } else {
+      CompletableFuture<ApiResponse<ObservabilityPipeline>> result = new CompletableFuture<>();
+      result.completeExceptionally(
+          new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId)));
+      return result;
+    }
+    Object localVarPostBody = body;
+
+    // verify the required parameter 'body' is set
+    if (body == null) {
+      CompletableFuture<ApiResponse<ObservabilityPipeline>> result = new CompletableFuture<>();
+      result.completeExceptionally(
+          new ApiException(
+              400, "Missing the required parameter 'body' when calling createPipeline"));
+      return result;
+    }
+    // create path and map variables
+    String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines";
+
+    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
+
+    Invocation.Builder builder;
+    try {
+      builder =
+          apiClient.createBuilder(
+              "v2.ObservabilityPipelinesApi.createPipeline",
+              localVarPath,
+              new ArrayList<Pair>(),
+              localVarHeaderParams,
+              new HashMap<String, String>(),
+              new String[] {"application/json"},
+              new String[] {"apiKeyAuth", "appKeyAuth"});
+    } catch (ApiException ex) {
+      CompletableFuture<ApiResponse<ObservabilityPipeline>> result = new CompletableFuture<>();
+      result.completeExceptionally(ex);
+      return result;
+    }
+    return apiClient.invokeAPIAsync(
+        "POST",
+        builder,
+        localVarHeaderParams,
+        new String[] {"application/json"},
+        localVarPostBody,
+        new HashMap<String, Object>(),
+        false,
+        new GenericType<ObservabilityPipeline>() {});
+  }
+
+  /**
+   * Delete a pipeline.
+   *
+   * <p>See {@link #deletePipelineWithHttpInfo}.
+   *
+   * @param pipelineId The ID of the pipeline to delete. (required)
+   * @throws ApiException if fails to make API call
+   */
+  public void deletePipeline(String pipelineId) throws ApiException {
+    deletePipelineWithHttpInfo(pipelineId);
+  }
+
+  /**
+   * Delete a pipeline.
+   *
+   * <p>See {@link #deletePipelineWithHttpInfoAsync}.
+   *
+   * @param pipelineId The ID of the pipeline to delete. (required)
+   * @return CompletableFuture&lt;Void&gt;
+   */
+  public CompletableFuture<Void> deletePipelineAsync(String pipelineId) {
+    return deletePipelineWithHttpInfoAsync(pipelineId)
+        .thenApply(
+            response -> {
+              return response.getData();
+            });
+  }
+
+  /**
+   * Delete a pipeline.
+   *
+   * @param pipelineId The ID of the pipeline to delete. (required)
+   * @return ApiResponse&lt;Void&gt;
+   * @throws ApiException if fails to make API call
+   * @http.response.details
+   *
+   * Response details
+   * | Status Code | Description | Response Headers |
+   * | 204 | OK | - |
+   * | 403 | Forbidden | - |
+   * | 404 | Not Found | - |
+   * | 409 | Conflict | - |
+   * | 429 | Too many requests | - |
+   *
+   */
+  public ApiResponse<Void> deletePipelineWithHttpInfo(String pipelineId) throws ApiException {
+    // Check if unstable operation is enabled
+    String operationId = "deletePipeline";
+    if (apiClient.isUnstableOperationEnabled("v2." + operationId)) {
+      apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId));
+    } else {
+      throw new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId));
+    }
+    Object localVarPostBody = null;
+
+    // verify the required parameter 'pipelineId' is set
+    if (pipelineId == null) {
+      throw new ApiException(
+          400, "Missing the required parameter 'pipelineId' when calling deletePipeline");
+    }
+    // create path and map variables
+    String localVarPath =
+        "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}"
+            .replaceAll(
+                "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString()));
+
+    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
+
+    Invocation.Builder builder =
+        apiClient.createBuilder(
+            "v2.ObservabilityPipelinesApi.deletePipeline",
+            localVarPath,
+            new ArrayList<Pair>(),
+            localVarHeaderParams,
+            new HashMap<String, String>(),
+            new String[] {"*/*"},
+            new String[] {"apiKeyAuth", "appKeyAuth"});
+    return apiClient.invokeAPI(
+        "DELETE",
+        builder,
+        localVarHeaderParams,
+        new String[] {},
+        localVarPostBody,
+        new HashMap<String, Object>(),
+        false,
+        null);
+  }
+
+  /**
+   * Delete a pipeline.
+   *
+   * <p>See {@link #deletePipelineWithHttpInfo}.
+   *
+   * @param pipelineId The ID of the pipeline to delete. (required)
+   * @return CompletableFuture&lt;ApiResponse&lt;Void&gt;&gt;
+   */
+  public CompletableFuture<ApiResponse<Void>> deletePipelineWithHttpInfoAsync(String pipelineId) {
+    // Check if unstable operation is enabled
+    String operationId = "deletePipeline";
+    if (apiClient.isUnstableOperationEnabled("v2." + operationId)) {
+      apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId));
+    } else {
+      CompletableFuture<ApiResponse<Void>> result = new CompletableFuture<>();
+      result.completeExceptionally(
+          new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId)));
+      return result;
+    }
+    Object localVarPostBody = null;
+
+    // verify the required parameter 'pipelineId' is set
+    if (pipelineId == null) {
+      CompletableFuture<ApiResponse<Void>> result = new CompletableFuture<>();
+      result.completeExceptionally(
+          new ApiException(
+              400, "Missing the required parameter 'pipelineId' when calling deletePipeline"));
+      return result;
+    }
+    // create path and map variables
+    String localVarPath =
+        "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}"
+            .replaceAll(
+                "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString()));
+
+    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
+
+    Invocation.Builder builder;
+    try {
+      builder =
+          apiClient.createBuilder(
+              "v2.ObservabilityPipelinesApi.deletePipeline",
+              localVarPath,
+              new ArrayList<Pair>(),
+              localVarHeaderParams,
+              new HashMap<String, String>(),
+              new String[] {"*/*"},
+              new String[] {"apiKeyAuth", "appKeyAuth"});
+    } catch (ApiException ex) {
+      CompletableFuture<ApiResponse<Void>> result = new CompletableFuture<>();
+      result.completeExceptionally(ex);
+      return result;
+    }
+    return apiClient.invokeAPIAsync(
+        "DELETE",
+        builder,
+        localVarHeaderParams,
+        new String[] {},
+        localVarPostBody,
+        new HashMap<String, Object>(),
+        false,
+        null);
+  }
+
+  /**
+   * Get a specific pipeline.
+   *
+   * <p>See {@link #getPipelineWithHttpInfo}.
+   *
+   * @param pipelineId The ID of the pipeline to retrieve. (required)
+   * @return ObservabilityPipeline
+   * @throws ApiException if fails to make API call
+   */
+  public ObservabilityPipeline getPipeline(String pipelineId) throws ApiException {
+    return getPipelineWithHttpInfo(pipelineId).getData();
+  }
+
+  /**
+   * Get a specific pipeline.
+   *
+   * <p>See {@link #getPipelineWithHttpInfoAsync}.
+   *
+   * @param pipelineId The ID of the pipeline to retrieve. (required)
+   * @return CompletableFuture&lt;ObservabilityPipeline&gt;
+   */
+  public CompletableFuture<ObservabilityPipeline> getPipelineAsync(String pipelineId) {
+    return getPipelineWithHttpInfoAsync(pipelineId)
+        .thenApply(
+            response -> {
+              return response.getData();
+            });
+  }
+
+  /**
+   * Get a specific pipeline by its ID.
+   *
+   * @param pipelineId The ID of the pipeline to retrieve. (required)
+   * @return ApiResponse&lt;ObservabilityPipeline&gt;
+   * @throws ApiException if fails to make API call
+   * @http.response.details
+   *
+   * Response details
+   * | Status Code | Description | Response Headers |
+   * | 200 | OK | - |
+   * | 403 | Forbidden | - |
+   * | 429 | Too many requests | - |
+   *
+   */
+  public ApiResponse<ObservabilityPipeline> getPipelineWithHttpInfo(String pipelineId)
+      throws ApiException {
+    // Check if unstable operation is enabled
+    String operationId = "getPipeline";
+    if (apiClient.isUnstableOperationEnabled("v2." + operationId)) {
+      apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId));
+    } else {
+      throw new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId));
+    }
+    Object localVarPostBody = null;
+
+    // verify the required parameter 'pipelineId' is set
+    if (pipelineId == null) {
+      throw new ApiException(
+          400, "Missing the required parameter 'pipelineId' when calling getPipeline");
+    }
+    // create path and map variables
+    String localVarPath =
+        "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}"
+            .replaceAll(
+                "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString()));
+
+    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
+
+    Invocation.Builder builder =
+        apiClient.createBuilder(
+            "v2.ObservabilityPipelinesApi.getPipeline",
+            localVarPath,
+            new ArrayList<Pair>(),
+            localVarHeaderParams,
+            new HashMap<String, String>(),
+            new String[] {"application/json"},
+            new String[] {"apiKeyAuth", "appKeyAuth"});
+    return apiClient.invokeAPI(
+        "GET",
+        builder,
+        localVarHeaderParams,
+        new String[] {},
+        localVarPostBody,
+        new HashMap<String, Object>(),
+        false,
+        new GenericType<ObservabilityPipeline>() {});
+  }
+
+  /**
+   * Get a specific pipeline.
+   *
+   * <p>See {@link #getPipelineWithHttpInfo}.
+   *
+   * @param pipelineId The ID of the pipeline to retrieve. (required)
+   * @return CompletableFuture&lt;ApiResponse&lt;ObservabilityPipeline&gt;&gt;
+   */
+  public CompletableFuture<ApiResponse<ObservabilityPipeline>> getPipelineWithHttpInfoAsync(
+      String pipelineId) {
+    // Check if unstable operation is enabled
+    String operationId = "getPipeline";
+    if (apiClient.isUnstableOperationEnabled("v2." + operationId)) {
+      apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId));
+    } else {
+      CompletableFuture<ApiResponse<ObservabilityPipeline>> result = new CompletableFuture<>();
+      result.completeExceptionally(
+          new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId)));
+      return result;
+    }
+    Object localVarPostBody = null;
+
+    // verify the required parameter 'pipelineId' is set
+    if (pipelineId == null) {
+      CompletableFuture<ApiResponse<ObservabilityPipeline>> result = new CompletableFuture<>();
+      result.completeExceptionally(
+          new ApiException(
+              400, "Missing the required parameter 'pipelineId' when calling getPipeline"));
+      return result;
+    }
+    // create path and map variables
+    String localVarPath =
+        "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}"
+            .replaceAll(
+                "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString()));
+
+    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
+
+    Invocation.Builder builder;
+    try {
+      builder =
+          apiClient.createBuilder(
+              "v2.ObservabilityPipelinesApi.getPipeline",
+              localVarPath,
+              new ArrayList<Pair>(),
+              localVarHeaderParams,
+              new HashMap<String, String>(),
+              new String[] {"application/json"},
+              new String[] {"apiKeyAuth", "appKeyAuth"});
+    } catch (ApiException ex) {
+      CompletableFuture<ApiResponse<ObservabilityPipeline>> result = new CompletableFuture<>();
+      result.completeExceptionally(ex);
+      return result;
+    }
+    return apiClient.invokeAPIAsync(
+        "GET",
+        builder,
+        localVarHeaderParams,
+        new String[] {},
+        localVarPostBody,
+        new HashMap<String, Object>(),
+        false,
+        new GenericType<ObservabilityPipeline>() {});
+  }
+
+  /**
+   * Update a pipeline.
+   *
+   * <p>See {@link #updatePipelineWithHttpInfo}.
+   *
+   * @param pipelineId The ID of the pipeline to update. (required)
+   * @param body (required)
+   * @return ObservabilityPipeline
+   * @throws ApiException if fails to make API call
+   */
+  public ObservabilityPipeline updatePipeline(String pipelineId, ObservabilityPipeline body)
+      throws ApiException {
+    return updatePipelineWithHttpInfo(pipelineId, body).getData();
+  }
+
+  /**
+   * Update a pipeline.
+   *
+   * <p>See {@link #updatePipelineWithHttpInfoAsync}.
+   *
+   * @param pipelineId The ID of the pipeline to update. (required)
+   * @param body (required)
+   * @return CompletableFuture&lt;ObservabilityPipeline&gt;
+   */
+  public CompletableFuture<ObservabilityPipeline> updatePipelineAsync(
+      String pipelineId, ObservabilityPipeline body) {
+    return updatePipelineWithHttpInfoAsync(pipelineId, body)
+        .thenApply(
+            response -> {
+              return response.getData();
+            });
+  }
+
+  /**
+   * Update a pipeline.
+   *
+   * @param pipelineId The ID of the pipeline to update. (required)
+   * @param body (required)
+   * @return ApiResponse&lt;ObservabilityPipeline&gt;
+   * @throws ApiException if fails to make API call
+   * @http.response.details
+   *
+   * Response details
+   * | Status Code | Description | Response Headers |
+   * | 200 | OK | - |
+   * | 400 | Bad Request | - |
+   * | 403 | Forbidden | - |
+   * | 404 | Not Found | - |
+   * | 409 | Conflict | - |
+   * | 429 | Too many requests | - |
+   *
+   */
+  public ApiResponse<ObservabilityPipeline> updatePipelineWithHttpInfo(
+      String pipelineId, ObservabilityPipeline body) throws ApiException {
+    // Check if unstable operation is enabled
+    String operationId = "updatePipeline";
+    if (apiClient.isUnstableOperationEnabled("v2." + operationId)) {
+      apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId));
+    } else {
+      throw new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId));
+    }
+    Object localVarPostBody = body;
+
+    // verify the required parameter 'pipelineId' is set
+    if (pipelineId == null) {
+      throw new ApiException(
+          400, "Missing the required parameter 'pipelineId' when calling updatePipeline");
+    }
+
+    // verify the required parameter 'body' is set
+    if (body == null) {
+      throw new ApiException(
+          400, "Missing the required parameter 'body' when calling updatePipeline");
+    }
+    // create path and map variables
+    String localVarPath =
+        "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}"
+            .replaceAll(
+                "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString()));
+
+    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
+
+    Invocation.Builder builder =
+        apiClient.createBuilder(
+            "v2.ObservabilityPipelinesApi.updatePipeline",
+            localVarPath,
+            new ArrayList<Pair>(),
+            localVarHeaderParams,
+            new HashMap<String, String>(),
+            new String[] {"application/json"},
+            new String[] {"apiKeyAuth", "appKeyAuth"});
+    return apiClient.invokeAPI(
+        "PUT",
+        builder,
+        localVarHeaderParams,
+        new String[] {"application/json"},
+        localVarPostBody,
+        new HashMap<String, Object>(),
+        false,
+        new GenericType<ObservabilityPipeline>() {});
+  }
+
+  /**
+   * Update a pipeline.
+   *
+   * <p>See {@link #updatePipelineWithHttpInfo}.
+   *
+   * @param pipelineId The ID of the pipeline to update. (required)
+   * @param body (required)
+   * @return CompletableFuture&lt;ApiResponse&lt;ObservabilityPipeline&gt;&gt;
+   */
+  public CompletableFuture<ApiResponse<ObservabilityPipeline>> updatePipelineWithHttpInfoAsync(
+      String pipelineId, ObservabilityPipeline body) {
+    // Check if unstable operation is enabled
+    String operationId = "updatePipeline";
+    if (apiClient.isUnstableOperationEnabled("v2." + operationId)) {
+      apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId));
+    } else {
+      CompletableFuture<ApiResponse<ObservabilityPipeline>> result = new CompletableFuture<>();
+      result.completeExceptionally(
+          new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId)));
+      return result;
+    }
+    Object localVarPostBody = body;
+
+    // verify the required parameter 'pipelineId' is set
+    if (pipelineId == null) {
+      CompletableFuture<ApiResponse<ObservabilityPipeline>> result = new CompletableFuture<>();
+      result.completeExceptionally(
+          new ApiException(
+              400, "Missing the required parameter 'pipelineId' when calling updatePipeline"));
+      return result;
+    }
+
+    // verify the required parameter 'body' is set
+    if (body == null) {
+      CompletableFuture<ApiResponse<ObservabilityPipeline>> result = new CompletableFuture<>();
+      result.completeExceptionally(
+          new ApiException(
+              400, "Missing the required parameter 'body' when calling updatePipeline"));
+      return result;
+    }
+    // create path and map variables
+    String localVarPath =
+        "/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}"
+            .replaceAll(
+                "\\{" + "pipeline_id" + "\\}", apiClient.escapeString(pipelineId.toString()));
+
+    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
+
+    Invocation.Builder builder;
+    try {
+      builder =
+          apiClient.createBuilder(
+              "v2.ObservabilityPipelinesApi.updatePipeline",
+              localVarPath,
+              new ArrayList<Pair>(),
+              localVarHeaderParams,
+              new HashMap<String, String>(),
+              new String[] {"application/json"},
+              new String[] {"apiKeyAuth", "appKeyAuth"});
+    } catch (ApiException ex) {
+      CompletableFuture<ApiResponse<ObservabilityPipeline>> result = new CompletableFuture<>();
+      result.completeExceptionally(ex);
+      return result;
+    }
+    return apiClient.invokeAPIAsync(
+        "PUT",
+        builder,
+        localVarHeaderParams,
+        new String[] {"application/json"},
+        localVarPostBody,
+        new HashMap<String, Object>(),
+        false,
+        new GenericType<ObservabilityPipeline>() {});
+  }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipeline.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipeline.java
new file mode 100644
index 00000000000..f7f96865b8f
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipeline.java
@@ -0,0 +1,145 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+/** Top-level schema representing a pipeline. */
+@JsonPropertyOrder({ObservabilityPipeline.JSON_PROPERTY_DATA})
+@jakarta.annotation.Generated(
+    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
+public class ObservabilityPipeline {
+  @JsonIgnore public boolean unparsed = false;
+  public static final String JSON_PROPERTY_DATA = "data";
+  private ObservabilityPipelineData data;
+
+  public ObservabilityPipeline() {}
+
+  @JsonCreator
+  public ObservabilityPipeline(
+      @JsonProperty(required = true, value = JSON_PROPERTY_DATA) ObservabilityPipelineData data) {
+    this.data = data;
+    this.unparsed |= data.unparsed;
+  }
+
+  public ObservabilityPipeline data(ObservabilityPipelineData data) {
+    this.data = data;
+    this.unparsed |= data.unparsed;
+    return this;
+  }
+
+  /**
+   * Contains the pipeline’s ID, type, and configuration attributes.
+   *
+   * @return data
+   */
+  @JsonProperty(JSON_PROPERTY_DATA)
+  @JsonInclude(value = JsonInclude.Include.ALWAYS)
+  public ObservabilityPipelineData getData() {
+    return data;
+  }
+
+  public void setData(ObservabilityPipelineData data) {
+    this.data = data;
+  }
+
+  /**
+   * A container for additional, undeclared properties. This is a holder for any undeclared
+   * properties as specified with the 'additionalProperties' keyword in the OAS document.
+   */
+  private Map<String, Object> additionalProperties;
+
+  /**
+   * Set the additional (undeclared) property with the specified name and value. If the property
+   * does not already exist, create it otherwise replace it.
+   *
+   * @param key The arbitrary key to set
+   * @param value The associated value
+   * @return ObservabilityPipeline
+   */
+  @JsonAnySetter
+  public ObservabilityPipeline putAdditionalProperty(String key, Object value) {
+    if (this.additionalProperties == null) {
+      this.additionalProperties = new HashMap<String, Object>();
+    }
+    this.additionalProperties.put(key, value);
+    return this;
+  }
+
+  /**
+   * Return the additional (undeclared) property.
+   *
+   * @return The additional properties
+   */
+  @JsonAnyGetter
+  public Map<String, Object> getAdditionalProperties() {
+    return additionalProperties;
+  }
+
+  /**
+   * Return the additional (undeclared) property with the specified name.
+   *
+   * @param key The arbitrary key to get
+   * @return The specific additional property for the given key
+   */
+  public Object getAdditionalProperty(String key) {
+    if (this.additionalProperties == null) {
+      return null;
+    }
+    return this.additionalProperties.get(key);
+  }
+
+  /** Return true if this ObservabilityPipeline object is equal to o. */
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    ObservabilityPipeline observabilityPipeline = (ObservabilityPipeline) o;
+    return Objects.equals(this.data, observabilityPipeline.data)
+        && Objects.equals(this.additionalProperties, observabilityPipeline.additionalProperties);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(data, additionalProperties);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("class ObservabilityPipeline {\n");
+    sb.append("    data: ").append(toIndentedString(data)).append("\n");
+    sb.append("    additionalProperties: ")
+        .append(toIndentedString(additionalProperties))
+        .append("\n");
+    sb.append("}");
+    return sb.toString();
+  }
+
+  /**
+   * Convert the given object to string with each line indented by 4 spaces (except the first line).
+   */
+  private String toIndentedString(Object o) {
+    if (o == null) {
+      return "null";
+    }
+    return o.toString().replace("\n", "\n    ");
+  }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java
new file mode 100644
index 00000000000..942e68603c8
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessor.java
@@ -0,0 +1,289 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/** The add_fields processor adds static key-value fields to logs. */
+@JsonPropertyOrder({
+ ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_FIELDS,
+ ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_ID,
+ ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_INCLUDE,
+ ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_INPUTS,
+ ObservabilityPipelineAddFieldsProcessor.JSON_PROPERTY_TYPE
+})
+@jakarta.annotation.Generated(
+ value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
+public class ObservabilityPipelineAddFieldsProcessor {
+ @JsonIgnore public boolean unparsed = false;
+ public static final String JSON_PROPERTY_FIELDS = "fields";
+ private List fields = new ArrayList<>();
+
+ public static final String JSON_PROPERTY_ID = "id";
+ private String id;
+
+ public static final String JSON_PROPERTY_INCLUDE = "include";
+ private String include;
+
+ public static final String JSON_PROPERTY_INPUTS = "inputs";
+ private List inputs = new ArrayList<>();
+
+ public static final String JSON_PROPERTY_TYPE = "type";
+ private ObservabilityPipelineAddFieldsProcessorType type =
+ ObservabilityPipelineAddFieldsProcessorType.ADD_FIELDS;
+
+ public ObservabilityPipelineAddFieldsProcessor() {}
+
+ @JsonCreator
+ public ObservabilityPipelineAddFieldsProcessor(
+ @JsonProperty(required = true, value = JSON_PROPERTY_FIELDS)
+ List fields,
+ @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id,
+ @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include,
+ @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs,
+ @JsonProperty(required = true, value = JSON_PROPERTY_TYPE)
+ ObservabilityPipelineAddFieldsProcessorType type) {
+ this.fields = fields;
+ this.id = id;
+ this.include = include;
+ this.inputs = inputs;
+ this.type = type;
+ this.unparsed |= !type.isValid();
+ }
+
+ public ObservabilityPipelineAddFieldsProcessor fields(
+ List fields) {
+ this.fields = fields;
+ for (ObservabilityPipelineFieldValue item : fields) {
+ this.unparsed |= item.unparsed;
+ }
+ return this;
+ }
+
+ public ObservabilityPipelineAddFieldsProcessor addFieldsItem(
+ ObservabilityPipelineFieldValue fieldsItem) {
+ this.fields.add(fieldsItem);
+ this.unparsed |= fieldsItem.unparsed;
+ return this;
+ }
+
+ /**
+ * A list of static fields (key-value pairs) that is added to each log event processed by this
+ * component.
+ *
+ * @return fields
+ */
+ @JsonProperty(JSON_PROPERTY_FIELDS)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public List getFields() {
+ return fields;
+ }
+
+ public void setFields(List fields) {
+ this.fields = fields;
+ }
+
+ public ObservabilityPipelineAddFieldsProcessor id(String id) {
+ this.id = id;
+ return this;
+ }
+
+ /**
+ * The unique identifier for this component. Used to reference this component in other parts of
+ * the pipeline (for example, as the input to downstream components).
+ *
+ * @return id
+ */
+ @JsonProperty(JSON_PROPERTY_ID)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public ObservabilityPipelineAddFieldsProcessor include(String include) {
+ this.include = include;
+ return this;
+ }
+
+ /**
+ * A Datadog search query used to determine which logs this processor targets.
+ *
+ * @return include
+ */
+ @JsonProperty(JSON_PROPERTY_INCLUDE)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public String getInclude() {
+ return include;
+ }
+
+ public void setInclude(String include) {
+ this.include = include;
+ }
+
+ public ObservabilityPipelineAddFieldsProcessor inputs(List inputs) {
+ this.inputs = inputs;
+ return this;
+ }
+
+ public ObservabilityPipelineAddFieldsProcessor addInputsItem(String inputsItem) {
+ this.inputs.add(inputsItem);
+ return this;
+ }
+
+ /**
+ * A list of component IDs whose output is used as the input for this component.
+ *
+ * @return inputs
+ */
+ @JsonProperty(JSON_PROPERTY_INPUTS)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public List getInputs() {
+ return inputs;
+ }
+
+ public void setInputs(List inputs) {
+ this.inputs = inputs;
+ }
+
+ public ObservabilityPipelineAddFieldsProcessor type(
+ ObservabilityPipelineAddFieldsProcessorType type) {
+ this.type = type;
+ this.unparsed |= !type.isValid();
+ return this;
+ }
+
+ /**
+ * The processor type. The value should always be add_fields.
+ *
+ * @return type
+ */
+ @JsonProperty(JSON_PROPERTY_TYPE)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public ObservabilityPipelineAddFieldsProcessorType getType() {
+ return type;
+ }
+
+ public void setType(ObservabilityPipelineAddFieldsProcessorType type) {
+ if (!type.isValid()) {
+ this.unparsed = true;
+ }
+ this.type = type;
+ }
+
+ /**
+ * A container for additional, undeclared properties. This is a holder for any undeclared
+ * properties as specified with the 'additionalProperties' keyword in the OAS document.
+ */
+ private Map additionalProperties;
+
+ /**
+ * Set the additional (undeclared) property with the specified name and value. If the property
+ * does not already exist, create it otherwise replace it.
+ *
+ * @param key The arbitrary key to set
+ * @param value The associated value
+ * @return ObservabilityPipelineAddFieldsProcessor
+ */
+ @JsonAnySetter
+ public ObservabilityPipelineAddFieldsProcessor putAdditionalProperty(String key, Object value) {
+ if (this.additionalProperties == null) {
+ this.additionalProperties = new HashMap();
+ }
+ this.additionalProperties.put(key, value);
+ return this;
+ }
+
+ /**
+ * Return the additional (undeclared) property.
+ *
+ * @return The additional properties
+ */
+ @JsonAnyGetter
+ public Map getAdditionalProperties() {
+ return additionalProperties;
+ }
+
+ /**
+ * Return the additional (undeclared) property with the specified name.
+ *
+ * @param key The arbitrary key to get
+ * @return The specific additional property for the given key
+ */
+ public Object getAdditionalProperty(String key) {
+ if (this.additionalProperties == null) {
+ return null;
+ }
+ return this.additionalProperties.get(key);
+ }
+
+ /** Return true if this ObservabilityPipelineAddFieldsProcessor object is equal to o. */
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ ObservabilityPipelineAddFieldsProcessor observabilityPipelineAddFieldsProcessor =
+ (ObservabilityPipelineAddFieldsProcessor) o;
+ return Objects.equals(this.fields, observabilityPipelineAddFieldsProcessor.fields)
+ && Objects.equals(this.id, observabilityPipelineAddFieldsProcessor.id)
+ && Objects.equals(this.include, observabilityPipelineAddFieldsProcessor.include)
+ && Objects.equals(this.inputs, observabilityPipelineAddFieldsProcessor.inputs)
+ && Objects.equals(this.type, observabilityPipelineAddFieldsProcessor.type)
+ && Objects.equals(
+ this.additionalProperties,
+ observabilityPipelineAddFieldsProcessor.additionalProperties);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(fields, id, include, inputs, type, additionalProperties);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("class ObservabilityPipelineAddFieldsProcessor {\n");
+ sb.append(" fields: ").append(toIndentedString(fields)).append("\n");
+ sb.append(" id: ").append(toIndentedString(id)).append("\n");
+ sb.append(" include: ").append(toIndentedString(include)).append("\n");
+ sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n");
+ sb.append(" type: ").append(toIndentedString(type)).append("\n");
+ sb.append(" additionalProperties: ")
+ .append(toIndentedString(additionalProperties))
+ .append("\n");
+ sb.append('}');
+ return sb.toString();
+ }
+
+ /**
+ * Convert the given object to string with each line indented by 4 spaces (except the first line).
+ */
+ private String toIndentedString(Object o) {
+ if (o == null) {
+ return "null";
+ }
+ return o.toString().replace("\n", "\n ");
+ }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessorType.java
new file mode 100644
index 00000000000..22e3da400a6
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddFieldsProcessorType.java
@@ -0,0 +1,62 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The processor type. The value should always be add_fields. */
+@JsonSerialize(
+ using =
+ ObservabilityPipelineAddFieldsProcessorType
+ .ObservabilityPipelineAddFieldsProcessorTypeSerializer.class)
+public class ObservabilityPipelineAddFieldsProcessorType extends ModelEnum {
+
+ private static final Set allowedValues = new HashSet(Arrays.asList("add_fields"));
+
+ public static final ObservabilityPipelineAddFieldsProcessorType ADD_FIELDS =
+ new ObservabilityPipelineAddFieldsProcessorType("add_fields");
+
+ ObservabilityPipelineAddFieldsProcessorType(String value) {
+ super(value, allowedValues);
+ }
+
+ public static class ObservabilityPipelineAddFieldsProcessorTypeSerializer
+ extends StdSerializer {
+ public ObservabilityPipelineAddFieldsProcessorTypeSerializer(
+ Class t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineAddFieldsProcessorTypeSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineAddFieldsProcessorType value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.value);
+ }
+ }
+
+ @JsonCreator
+ public static ObservabilityPipelineAddFieldsProcessorType fromValue(String value) {
+ return new ObservabilityPipelineAddFieldsProcessorType(value);
+ }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java
new file mode 100644
index 00000000000..0be81362fe3
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java
@@ -0,0 +1,239 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/** Specifies the pipeline's configuration, including its sources, processors, and destinations. */
+@JsonPropertyOrder({
+ ObservabilityPipelineConfig.JSON_PROPERTY_DESTINATIONS,
+ ObservabilityPipelineConfig.JSON_PROPERTY_PROCESSORS,
+ ObservabilityPipelineConfig.JSON_PROPERTY_SOURCES
+})
+@jakarta.annotation.Generated(
+ value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
+public class ObservabilityPipelineConfig {
+ @JsonIgnore public boolean unparsed = false;
+ public static final String JSON_PROPERTY_DESTINATIONS = "destinations";
+ private List destinations = new ArrayList<>();
+
+ public static final String JSON_PROPERTY_PROCESSORS = "processors";
+ private List processors = new ArrayList<>();
+
+ public static final String JSON_PROPERTY_SOURCES = "sources";
+ private List sources = new ArrayList<>();
+
+ public ObservabilityPipelineConfig() {}
+
+ @JsonCreator
+ public ObservabilityPipelineConfig(
+ @JsonProperty(required = true, value = JSON_PROPERTY_DESTINATIONS)
+ List destinations,
+ @JsonProperty(required = true, value = JSON_PROPERTY_PROCESSORS)
+ List processors,
+ @JsonProperty(required = true, value = JSON_PROPERTY_SOURCES)
+ List sources) {
+ this.destinations = destinations;
+ this.processors = processors;
+ this.sources = sources;
+ }
+
+ public ObservabilityPipelineConfig destinations(
+ List destinations) {
+ this.destinations = destinations;
+ for (ObservabilityPipelineConfigDestinationItem item : destinations) {
+ this.unparsed |= item.unparsed;
+ }
+ return this;
+ }
+
+ public ObservabilityPipelineConfig addDestinationsItem(
+ ObservabilityPipelineConfigDestinationItem destinationsItem) {
+ this.destinations.add(destinationsItem);
+ this.unparsed |= destinationsItem.unparsed;
+ return this;
+ }
+
+ /**
+ * A list of destination components where processed logs are sent.
+ *
+ * @return destinations
+ */
+ @JsonProperty(JSON_PROPERTY_DESTINATIONS)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public List getDestinations() {
+ return destinations;
+ }
+
+ public void setDestinations(List destinations) {
+ this.destinations = destinations;
+ }
+
+ public ObservabilityPipelineConfig processors(
+ List processors) {
+ this.processors = processors;
+ for (ObservabilityPipelineConfigProcessorItem item : processors) {
+ this.unparsed |= item.unparsed;
+ }
+ return this;
+ }
+
+ public ObservabilityPipelineConfig addProcessorsItem(
+ ObservabilityPipelineConfigProcessorItem processorsItem) {
+ this.processors.add(processorsItem);
+ this.unparsed |= processorsItem.unparsed;
+ return this;
+ }
+
+ /**
+ * A list of processors that transform or enrich log data.
+ *
+ * @return processors
+ */
+ @JsonProperty(JSON_PROPERTY_PROCESSORS)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public List getProcessors() {
+ return processors;
+ }
+
+ public void setProcessors(List processors) {
+ this.processors = processors;
+ }
+
+ public ObservabilityPipelineConfig sources(List sources) {
+ this.sources = sources;
+ for (ObservabilityPipelineConfigSourceItem item : sources) {
+ this.unparsed |= item.unparsed;
+ }
+ return this;
+ }
+
+ public ObservabilityPipelineConfig addSourcesItem(
+ ObservabilityPipelineConfigSourceItem sourcesItem) {
+ this.sources.add(sourcesItem);
+ this.unparsed |= sourcesItem.unparsed;
+ return this;
+ }
+
+ /**
+ * A list of configured data sources for the pipeline.
+ *
+ * @return sources
+ */
+ @JsonProperty(JSON_PROPERTY_SOURCES)
+ @JsonInclude(value = JsonInclude.Include.ALWAYS)
+ public List getSources() {
+ return sources;
+ }
+
+ public void setSources(List sources) {
+ this.sources = sources;
+ }
+
+ /**
+ * A container for additional, undeclared properties. This is a holder for any undeclared
+ * properties as specified with the 'additionalProperties' keyword in the OAS document.
+ */
+ private Map additionalProperties;
+
+ /**
+ * Set the additional (undeclared) property with the specified name and value. If the property
+ * does not already exist, create it otherwise replace it.
+ *
+ * @param key The arbitrary key to set
+ * @param value The associated value
+ * @return ObservabilityPipelineConfig
+ */
+ @JsonAnySetter
+ public ObservabilityPipelineConfig putAdditionalProperty(String key, Object value) {
+ if (this.additionalProperties == null) {
+ this.additionalProperties = new HashMap();
+ }
+ this.additionalProperties.put(key, value);
+ return this;
+ }
+
+ /**
+ * Return the additional (undeclared) property.
+ *
+ * @return The additional properties
+ */
+ @JsonAnyGetter
+ public Map getAdditionalProperties() {
+ return additionalProperties;
+ }
+
+ /**
+ * Return the additional (undeclared) property with the specified name.
+ *
+ * @param key The arbitrary key to get
+ * @return The specific additional property for the given key
+ */
+ public Object getAdditionalProperty(String key) {
+ if (this.additionalProperties == null) {
+ return null;
+ }
+ return this.additionalProperties.get(key);
+ }
+
+ /** Return true if this ObservabilityPipelineConfig object is equal to o. */
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ ObservabilityPipelineConfig observabilityPipelineConfig = (ObservabilityPipelineConfig) o;
+ return Objects.equals(this.destinations, observabilityPipelineConfig.destinations)
+ && Objects.equals(this.processors, observabilityPipelineConfig.processors)
+ && Objects.equals(this.sources, observabilityPipelineConfig.sources)
+ && Objects.equals(
+ this.additionalProperties, observabilityPipelineConfig.additionalProperties);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(destinations, processors, sources, additionalProperties);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("class ObservabilityPipelineConfig {\n");
+ sb.append(" destinations: ").append(toIndentedString(destinations)).append("\n");
+ sb.append(" processors: ").append(toIndentedString(processors)).append("\n");
+ sb.append(" sources: ").append(toIndentedString(sources)).append("\n");
+ sb.append(" additionalProperties: ")
+ .append(toIndentedString(additionalProperties))
+ .append("\n");
+ sb.append('}');
+ return sb.toString();
+ }
+
+ /**
+ * Convert the given object to string with each line indented by 4 spaces (except the first line).
+ */
+ private String toIndentedString(Object o) {
+ if (o == null) {
+ return "null";
+ }
+ return o.toString().replace("\n", "\n ");
+ }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java
new file mode 100644
index 00000000000..f6d74a9f38a
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java
@@ -0,0 +1,240 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.AbstractOpenApiSchema;
+import com.datadog.api.client.JSON;
+import com.datadog.api.client.UnparsedObject;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.MapperFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import jakarta.ws.rs.core.GenericType;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+@jakarta.annotation.Generated(
+ value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
+@JsonDeserialize(
+ using =
+ ObservabilityPipelineConfigDestinationItem
+ .ObservabilityPipelineConfigDestinationItemDeserializer.class)
+@JsonSerialize(
+ using =
+ ObservabilityPipelineConfigDestinationItem
+ .ObservabilityPipelineConfigDestinationItemSerializer.class)
+public class ObservabilityPipelineConfigDestinationItem extends AbstractOpenApiSchema {
+ private static final Logger log =
+ Logger.getLogger(ObservabilityPipelineConfigDestinationItem.class.getName());
+
+ @JsonIgnore public boolean unparsed = false;
+
+ public static class ObservabilityPipelineConfigDestinationItemSerializer
+ extends StdSerializer {
+ public ObservabilityPipelineConfigDestinationItemSerializer(
+ Class t) {
+ super(t);
+ }
+
+ public ObservabilityPipelineConfigDestinationItemSerializer() {
+ this(null);
+ }
+
+ @Override
+ public void serialize(
+ ObservabilityPipelineConfigDestinationItem value,
+ JsonGenerator jgen,
+ SerializerProvider provider)
+ throws IOException, JsonProcessingException {
+ jgen.writeObject(value.getActualInstance());
+ }
+ }
+
+ public static class ObservabilityPipelineConfigDestinationItemDeserializer
+ extends StdDeserializer {
+ public ObservabilityPipelineConfigDestinationItemDeserializer() {
+ this(ObservabilityPipelineConfigDestinationItem.class);
+ }
+
+ public ObservabilityPipelineConfigDestinationItemDeserializer(Class> vc) {
+ super(vc);
+ }
+
+ @Override
+ public ObservabilityPipelineConfigDestinationItem deserialize(
+ JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
+ JsonNode tree = jp.readValueAsTree();
+ Object deserialized = null;
+ Object tmp = null;
+ boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS);
+ int match = 0;
+ JsonToken token = tree.traverse(jp.getCodec()).nextToken();
+ // deserialize ObservabilityPipelineDatadogLogsDestination
+ try {
+ boolean attemptParsing = true;
+ // ensure that we respect type coercion as set on the client ObjectMapper
+ if (ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(String.class)) {
+ attemptParsing = typeCoercion;
+ if (!attemptParsing) {
+ attemptParsing |=
+ ((ObservabilityPipelineDatadogLogsDestination.class.equals(Integer.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Long.class))
+ && token == JsonToken.VALUE_NUMBER_INT);
+ attemptParsing |=
+ ((ObservabilityPipelineDatadogLogsDestination.class.equals(Float.class)
+ || ObservabilityPipelineDatadogLogsDestination.class.equals(Double.class))
+ && (token == JsonToken.VALUE_NUMBER_FLOAT
+ || token == JsonToken.VALUE_NUMBER_INT));
+ attemptParsing |=
+ (ObservabilityPipelineDatadogLogsDestination.class.equals(Boolean.class)
+ && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE));
+ attemptParsing |=
+ (ObservabilityPipelineDatadogLogsDestination.class.equals(String.class)
+ && token == JsonToken.VALUE_STRING);
+ }
+ }
+ if (attemptParsing) {
+ tmp =
+ tree.traverse(jp.getCodec())
+ .readValueAs(ObservabilityPipelineDatadogLogsDestination.class);
+ // TODO: there is no validation against JSON schema constraints
+ // (min, max, enum, pattern...), this does not perform a strict JSON
+ // validation, which means the 'match' count may be higher than it should be.
+ if (!((ObservabilityPipelineDatadogLogsDestination) tmp).unparsed) {
+ deserialized = tmp;
+ match++;
+ }
+ log.log(
+ Level.FINER,
+ "Input data matches schema 'ObservabilityPipelineDatadogLogsDestination'");
+ }
+ } catch (Exception e) {
+ // deserialization failed, continue
+ log.log(
+ Level.FINER,
+ "Input data does not match schema 'ObservabilityPipelineDatadogLogsDestination'",
+ e);
+ }
+
+ ObservabilityPipelineConfigDestinationItem ret =
+ new ObservabilityPipelineConfigDestinationItem();
+ if (match == 1) {
+ ret.setActualInstance(deserialized);
+ } else {
+ Map res =
+ new ObjectMapper()
+ .readValue(
+ tree.traverse(jp.getCodec()).readValueAsTree().toString(),
+ new TypeReference