Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 17 additions & 1 deletion .generator/schemas/v2/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -43045,12 +43045,14 @@ components:
ObservabilityPipelineAmazonS3Source:
description: |-
The `amazon_s3` source ingests logs from an Amazon S3 bucket.
It supports AWS authentication and TLS encryption.
It supports AWS authentication, TLS encryption, and configurable compression.

**Supported pipeline types:** logs
properties:
auth:
$ref: "#/components/schemas/ObservabilityPipelineAwsAuth"
compression:
$ref: "#/components/schemas/ObservabilityPipelineAmazonS3SourceCompression"
id:
description: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components).
example: aws-s3-source
Expand All @@ -43073,6 +43075,20 @@ components:
- region
type: object
x-pipeline-types: [logs]
# Enum schema for the S3 source `compression` field. `x-enum-varnames` drives the
# constant names (AUTO/NONE/GZIP/ZSTD) emitted in the generated client SDKs.
ObservabilityPipelineAmazonS3SourceCompression:
  description: Compression format for objects retrieved from the S3 bucket. Use `auto` to detect compression from the object's Content-Encoding header or file extension.
  enum:
    - auto
    - none
    - gzip
    - zstd
  example: gzip
  type: string
  x-enum-varnames:
    - AUTO
    - NONE
    - GZIP
    - ZSTD
ObservabilityPipelineAmazonS3SourceType:
default: amazon_s3
description: The source type. Always `amazon_s3`.
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
// Validate an observability pipeline with amazon S3 source compression returns "OK" response

import com.datadog.api.client.ApiClient;
import com.datadog.api.client.ApiException;
import com.datadog.api.client.v2.api.ObservabilityPipelinesApi;
import com.datadog.api.client.v2.model.ObservabilityPipelineAmazonS3Source;
import com.datadog.api.client.v2.model.ObservabilityPipelineAmazonS3SourceCompression;
import com.datadog.api.client.v2.model.ObservabilityPipelineAmazonS3SourceType;
import com.datadog.api.client.v2.model.ObservabilityPipelineConfig;
import com.datadog.api.client.v2.model.ObservabilityPipelineConfigDestinationItem;
import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorGroup;
import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorItem;
import com.datadog.api.client.v2.model.ObservabilityPipelineConfigSourceItem;
import com.datadog.api.client.v2.model.ObservabilityPipelineDataAttributes;
import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestination;
import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestinationType;
import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessor;
import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessorType;
import com.datadog.api.client.v2.model.ObservabilityPipelineSpec;
import com.datadog.api.client.v2.model.ObservabilityPipelineSpecData;
import com.datadog.api.client.v2.model.ValidationResponse;
import java.util.Collections;

public class Example {
  public static void main(String[] args) {
    ApiClient defaultClient = ApiClient.getDefaultApiClient();
    ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient);

    // Destination: forward logs to Datadog, fed by the processor group below.
    ObservabilityPipelineConfigDestinationItem destination =
        new ObservabilityPipelineConfigDestinationItem(
            new ObservabilityPipelineDatadogLogsDestination()
                .id("datadog-logs-destination")
                .inputs(Collections.singletonList("my-processor-group"))
                .type(ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS));

    // Processor group containing a single filter processor, reading from the S3 source.
    ObservabilityPipelineConfigProcessorGroup processorGroup =
        new ObservabilityPipelineConfigProcessorGroup()
            .enabled(true)
            .id("my-processor-group")
            .include("service:my-service")
            .inputs(Collections.singletonList("amazon-s3-source"))
            .processors(
                Collections.singletonList(
                    new ObservabilityPipelineConfigProcessorItem(
                        new ObservabilityPipelineFilterProcessor()
                            .enabled(true)
                            .id("filter-processor")
                            .include("service:my-service")
                            .type(ObservabilityPipelineFilterProcessorType.FILTER))));

    // Source: an amazon_s3 source configured with gzip compression for retrieved objects.
    ObservabilityPipelineConfigSourceItem source =
        new ObservabilityPipelineConfigSourceItem(
            new ObservabilityPipelineAmazonS3Source()
                .id("amazon-s3-source")
                .type(ObservabilityPipelineAmazonS3SourceType.AMAZON_S3)
                .region("us-east-1")
                .compression(ObservabilityPipelineAmazonS3SourceCompression.GZIP));

    // Assemble the full pipeline spec from the three parts above.
    ObservabilityPipelineSpec body =
        new ObservabilityPipelineSpec()
            .data(
                new ObservabilityPipelineSpecData()
                    .attributes(
                        new ObservabilityPipelineDataAttributes()
                            .config(
                                new ObservabilityPipelineConfig()
                                    .destinations(Collections.singletonList(destination))
                                    .processorGroups(Collections.singletonList(processorGroup))
                                    .sources(Collections.singletonList(source)))
                            .name("Pipeline with S3 Source Compression"))
                    .type("pipelines"));

    try {
      ValidationResponse result = apiInstance.validatePipeline(body);
      System.out.println(result);
    } catch (ApiException e) {
      System.err.println("Exception when calling ObservabilityPipelinesApi#validatePipeline");
      System.err.println("Status code: " + e.getCode());
      System.err.println("Reason: " + e.getResponseBody());
      System.err.println("Response headers: " + e.getResponseHeaders());
      e.printStackTrace();
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,13 @@

/**
* The <code>amazon_s3</code> source ingests logs from an Amazon S3 bucket. It supports AWS
* authentication and TLS encryption.
* authentication, TLS encryption, and configurable compression.
*
* <p><strong>Supported pipeline types:</strong> logs
*/
@JsonPropertyOrder({
ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_AUTH,
ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_COMPRESSION,
ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_ID,
ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_REGION,
ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_TLS,
Expand All @@ -38,6 +39,9 @@ public class ObservabilityPipelineAmazonS3Source {
public static final String JSON_PROPERTY_AUTH = "auth";
private ObservabilityPipelineAwsAuth auth;

public static final String JSON_PROPERTY_COMPRESSION = "compression";
private ObservabilityPipelineAmazonS3SourceCompression compression;

public static final String JSON_PROPERTY_ID = "id";
private String id;

Expand Down Expand Up @@ -91,6 +95,33 @@ public void setAuth(ObservabilityPipelineAwsAuth auth) {
this.auth = auth;
}

/**
 * Sets the compression format for objects retrieved from the S3 bucket and returns this model
 * for chaining.
 *
 * @param compression compression format; may be {@code null} to leave it unset
 * @return this model, for fluent chaining
 */
public ObservabilityPipelineAmazonS3Source compression(
    ObservabilityPipelineAmazonS3SourceCompression compression) {
  this.compression = compression;
  // The field is nullable (getter is annotated @jakarta.annotation.Nullable): guard before
  // validating so that a null argument does not throw a NullPointerException. An unknown
  // enum value marks the model as unparsed rather than failing.
  if (compression != null) {
    this.unparsed |= !compression.isValid();
  }
  return this;
}

/**
 * Compression format for objects retrieved from the S3 bucket. Use <code>auto</code> to detect
 * compression from the object's Content-Encoding header or file extension.
 *
 * <p>Optional: returns {@code null} when no compression has been configured.
 *
 * @return compression
 */
@jakarta.annotation.Nullable
@JsonProperty(JSON_PROPERTY_COMPRESSION)
@JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
public ObservabilityPipelineAmazonS3SourceCompression getCompression() {
  return compression;
}

/**
 * Sets the compression format for objects retrieved from the S3 bucket.
 *
 * @param compression compression format; may be {@code null} to clear the value
 */
public void setCompression(ObservabilityPipelineAmazonS3SourceCompression compression) {
  // The field is nullable (getter is annotated @jakarta.annotation.Nullable): only validate
  // non-null values so callers can clear the setting without a NullPointerException. An
  // unknown enum value marks the model as unparsed rather than failing.
  if (compression != null && !compression.isValid()) {
    this.unparsed = true;
  }
  this.compression = compression;
}

public ObservabilityPipelineAmazonS3Source id(String id) {
this.id = id;
return this;
Expand Down Expand Up @@ -257,6 +288,7 @@ public boolean equals(Object o) {
ObservabilityPipelineAmazonS3Source observabilityPipelineAmazonS3Source =
(ObservabilityPipelineAmazonS3Source) o;
return Objects.equals(this.auth, observabilityPipelineAmazonS3Source.auth)
&& Objects.equals(this.compression, observabilityPipelineAmazonS3Source.compression)
&& Objects.equals(this.id, observabilityPipelineAmazonS3Source.id)
&& Objects.equals(this.region, observabilityPipelineAmazonS3Source.region)
&& Objects.equals(this.tls, observabilityPipelineAmazonS3Source.tls)
Expand All @@ -268,14 +300,15 @@ public boolean equals(Object o) {

@Override
public int hashCode() {
return Objects.hash(auth, id, region, tls, type, urlKey, additionalProperties);
return Objects.hash(auth, compression, id, region, tls, type, urlKey, additionalProperties);
}

@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class ObservabilityPipelineAmazonS3Source {\n");
sb.append(" auth: ").append(toIndentedString(auth)).append("\n");
sb.append(" compression: ").append(toIndentedString(compression)).append("\n");
sb.append(" id: ").append(toIndentedString(id)).append("\n");
sb.append(" region: ").append(toIndentedString(region)).append("\n");
sb.append(" tls: ").append(toIndentedString(tls)).append("\n");
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
/*
* Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
* This product includes software developed at Datadog (https://www.datadoghq.com/).
* Copyright 2019-Present Datadog, Inc.
*/

package com.datadog.api.client.v2.model;

import com.datadog.api.client.ModelEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/**
 * Compression format for objects retrieved from the S3 bucket. Use <code>auto</code> to detect
 * compression from the object's Content-Encoding header or file extension.
 *
 * <p>Modeled as an open enum: values outside the documented set are accepted (see {@link
 * #fromValue(String)}) so newer API values do not break deserialization.
 */
@JsonSerialize(
    using =
        ObservabilityPipelineAmazonS3SourceCompression
            .ObservabilityPipelineAmazonS3SourceCompressionSerializer.class)
public class ObservabilityPipelineAmazonS3SourceCompression extends ModelEnum<String> {

  // Values the API currently documents; ModelEnum uses this set to flag unknown
  // values as invalid without rejecting them outright.
  private static final Set<String> allowedValues =
      new HashSet<String>(Arrays.asList("auto", "none", "gzip", "zstd"));

  public static final ObservabilityPipelineAmazonS3SourceCompression AUTO =
      new ObservabilityPipelineAmazonS3SourceCompression("auto");
  public static final ObservabilityPipelineAmazonS3SourceCompression NONE =
      new ObservabilityPipelineAmazonS3SourceCompression("none");
  public static final ObservabilityPipelineAmazonS3SourceCompression GZIP =
      new ObservabilityPipelineAmazonS3SourceCompression("gzip");
  public static final ObservabilityPipelineAmazonS3SourceCompression ZSTD =
      new ObservabilityPipelineAmazonS3SourceCompression("zstd");

  // Package-private: external code obtains instances via the constants or fromValue.
  ObservabilityPipelineAmazonS3SourceCompression(String value) {
    super(value, allowedValues);
  }

  /** Serializes the enum as its raw string value (e.g. {@code "gzip"}), not as an object. */
  public static class ObservabilityPipelineAmazonS3SourceCompressionSerializer
      extends StdSerializer<ObservabilityPipelineAmazonS3SourceCompression> {
    public ObservabilityPipelineAmazonS3SourceCompressionSerializer(
        Class<ObservabilityPipelineAmazonS3SourceCompression> t) {
      super(t);
    }

    public ObservabilityPipelineAmazonS3SourceCompressionSerializer() {
      this(null);
    }

    @Override
    public void serialize(
        ObservabilityPipelineAmazonS3SourceCompression value,
        JsonGenerator jgen,
        SerializerProvider provider)
        throws IOException, JsonProcessingException {
      jgen.writeObject(value.value);
    }
  }

  /**
   * Deserialization entry point used by Jackson. Intentionally accepts any string, including
   * values not in {@code allowedValues}; such values report {@code isValid() == false}.
   *
   * @param value raw string from the API payload
   * @return a wrapper around the given value
   */
  @JsonCreator
  public static ObservabilityPipelineAmazonS3SourceCompression fromValue(String value) {
    return new ObservabilityPipelineAmazonS3SourceCompression(value);
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2026-04-08T12:44:25.060Z
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
[
{
"httpRequest": {
"body": {
"type": "JSON",
"json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"my-processor-group\"],\"type\":\"datadog_logs\"}],\"processor_groups\":[{\"enabled\":true,\"id\":\"my-processor-group\",\"include\":\"service:my-service\",\"inputs\":[\"amazon-s3-source\"],\"processors\":[{\"enabled\":true,\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"type\":\"filter\"}]}],\"sources\":[{\"compression\":\"gzip\",\"id\":\"amazon-s3-source\",\"region\":\"us-east-1\",\"type\":\"amazon_s3\"}]},\"name\":\"Pipeline with S3 Source Compression\"},\"type\":\"pipelines\"}}"
},
"headers": {},
"method": "POST",
"path": "/api/v2/obs-pipelines/pipelines/validate",
"keepAlive": false,
"secure": true
},
"httpResponse": {
"body": "{\"errors\":[]}\n",
"headers": {
"Content-Type": [
"application/vnd.api+json"
]
},
"statusCode": 200,
"reasonPhrase": "OK"
},
"times": {
"remainingTimes": 1
},
"timeToLive": {
"unlimited": true
},
"id": "558c65f8-ee8c-961f-c37f-fd1c9d896964"
}
]
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,14 @@ Feature: Observability Pipelines
Then the response status is 200 OK
And the response "errors" has length 0

# Exercises the new `compression` field (gzip) on the amazon_s3 source via ValidatePipeline.
@team:DataDog/observability-pipelines
Scenario: Validate an observability pipeline with amazon S3 source compression returns "OK" response
  Given new "ValidatePipeline" request
  And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["amazon-s3-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "service:my-service", "type": "filter"}]}], "sources": [{"id": "amazon-s3-source", "type": "amazon_s3", "region": "us-east-1", "compression": "gzip"}]}, "name": "Pipeline with S3 Source Compression"}, "type": "pipelines"}}
  When the request is sent
  Then the response status is 200 OK
  And the response "errors" has length 0

@team:DataDog/observability-pipelines
Scenario: Validate an observability pipeline with destination secret key returns "OK" response
Given new "ValidatePipeline" request
Expand Down
Loading