Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 17 additions & 1 deletion .generator/schemas/v2/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -43045,12 +43045,14 @@ components:
ObservabilityPipelineAmazonS3Source:
description: |-
The `amazon_s3` source ingests logs from an Amazon S3 bucket.
It supports AWS authentication and TLS encryption.
It supports AWS authentication, TLS encryption, and configurable compression.

**Supported pipeline types:** logs
properties:
auth:
$ref: "#/components/schemas/ObservabilityPipelineAwsAuth"
compression:
$ref: "#/components/schemas/ObservabilityPipelineAmazonS3SourceCompression"
id:
description: The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components).
example: aws-s3-source
Expand All @@ -43073,6 +43075,20 @@ components:
- region
type: object
x-pipeline-types: [logs]
ObservabilityPipelineAmazonS3SourceCompression:
description: Compression format for objects retrieved from the S3 bucket. Use `auto` to detect compression from the object's Content-Encoding header or file extension.
enum:
- auto
- none
- gzip
- zstd
example: gzip
type: string
x-enum-varnames:
- AUTO
- NONE
- GZIP
- ZSTD
ObservabilityPipelineAmazonS3SourceType:
default: amazon_s3
description: The source type. Always `amazon_s3`.
Expand Down
55 changes: 47 additions & 8 deletions api/datadogV2/model_observability_pipeline_amazon_s3_source.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,15 @@ import (
)

// ObservabilityPipelineAmazonS3Source The `amazon_s3` source ingests logs from an Amazon S3 bucket.
// It supports AWS authentication and TLS encryption.
// It supports AWS authentication, TLS encryption, and configurable compression.
//
// **Supported pipeline types:** logs
type ObservabilityPipelineAmazonS3Source struct {
// AWS authentication credentials used for accessing AWS services such as S3.
// If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
Auth *ObservabilityPipelineAwsAuth `json:"auth,omitempty"`
// Compression format for objects retrieved from the S3 bucket. Use `auto` to detect compression from the object's Content-Encoding header or file extension.
Compression *ObservabilityPipelineAmazonS3SourceCompression `json:"compression,omitempty"`
// The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components).
Id string `json:"id"`
// AWS region where the S3 bucket resides.
Expand Down Expand Up @@ -83,6 +85,34 @@ func (o *ObservabilityPipelineAmazonS3Source) SetAuth(v ObservabilityPipelineAws
o.Auth = &v
}

// GetCompression returns the Compression field value if set, zero value otherwise.
func (o *ObservabilityPipelineAmazonS3Source) GetCompression() ObservabilityPipelineAmazonS3SourceCompression {
	// Fall back to the enum's zero value when the receiver is nil or the field is unset.
	var zero ObservabilityPipelineAmazonS3SourceCompression
	if o != nil && o.Compression != nil {
		return *o.Compression
	}
	return zero
}

// GetCompressionOk returns a tuple with the Compression field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ObservabilityPipelineAmazonS3Source) GetCompressionOk() (*ObservabilityPipelineAmazonS3SourceCompression, bool) {
	if o != nil && o.Compression != nil {
		return o.Compression, true
	}
	return nil, false
}

// HasCompression returns a boolean if a field has been set.
func (o *ObservabilityPipelineAmazonS3Source) HasCompression() bool {
	if o == nil {
		return false
	}
	return o.Compression != nil
}

// SetCompression gets a reference to the given ObservabilityPipelineAmazonS3SourceCompression and assigns it to the Compression field.
func (o *ObservabilityPipelineAmazonS3Source) SetCompression(v ObservabilityPipelineAmazonS3SourceCompression) {
	// Take the address of the local copy so the stored pointer is independent of the caller's value.
	compression := v
	o.Compression = &compression
}

// GetId returns the Id field value.
func (o *ObservabilityPipelineAmazonS3Source) GetId() string {
if o == nil {
Expand Down Expand Up @@ -217,6 +247,9 @@ func (o ObservabilityPipelineAmazonS3Source) MarshalJSON() ([]byte, error) {
if o.Auth != nil {
toSerialize["auth"] = o.Auth
}
if o.Compression != nil {
toSerialize["compression"] = o.Compression
}
toSerialize["id"] = o.Id
toSerialize["region"] = o.Region
if o.Tls != nil {
Expand All @@ -236,12 +269,13 @@ func (o ObservabilityPipelineAmazonS3Source) MarshalJSON() ([]byte, error) {
// UnmarshalJSON deserializes the given payload.
func (o *ObservabilityPipelineAmazonS3Source) UnmarshalJSON(bytes []byte) (err error) {
all := struct {
Auth *ObservabilityPipelineAwsAuth `json:"auth,omitempty"`
Id *string `json:"id"`
Region *string `json:"region"`
Tls *ObservabilityPipelineTls `json:"tls,omitempty"`
Type *ObservabilityPipelineAmazonS3SourceType `json:"type"`
UrlKey *string `json:"url_key,omitempty"`
Auth *ObservabilityPipelineAwsAuth `json:"auth,omitempty"`
Compression *ObservabilityPipelineAmazonS3SourceCompression `json:"compression,omitempty"`
Id *string `json:"id"`
Region *string `json:"region"`
Tls *ObservabilityPipelineTls `json:"tls,omitempty"`
Type *ObservabilityPipelineAmazonS3SourceType `json:"type"`
UrlKey *string `json:"url_key,omitempty"`
}{}
if err = datadog.Unmarshal(bytes, &all); err != nil {
return datadog.Unmarshal(bytes, &o.UnparsedObject)
Expand All @@ -257,7 +291,7 @@ func (o *ObservabilityPipelineAmazonS3Source) UnmarshalJSON(bytes []byte) (err e
}
additionalProperties := make(map[string]interface{})
if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil {
datadog.DeleteKeys(additionalProperties, &[]string{"auth", "id", "region", "tls", "type", "url_key"})
datadog.DeleteKeys(additionalProperties, &[]string{"auth", "compression", "id", "region", "tls", "type", "url_key"})
} else {
return err
}
Expand All @@ -267,6 +301,11 @@ func (o *ObservabilityPipelineAmazonS3Source) UnmarshalJSON(bytes []byte) (err e
hasInvalidField = true
}
o.Auth = all.Auth
if all.Compression != nil && !all.Compression.IsValid() {
hasInvalidField = true
} else {
o.Compression = all.Compression
}
o.Id = *all.Id
o.Region = *all.Region
if all.Tls != nil && all.Tls.UnparsedObject != nil && o.UnparsedObject == nil {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2019-Present Datadog, Inc.

package datadogV2

import (
"fmt"

"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
)

// ObservabilityPipelineAmazonS3SourceCompression Compression format for objects retrieved from the S3 bucket. Use `auto` to detect compression from the object's Content-Encoding header or file extension.
type ObservabilityPipelineAmazonS3SourceCompression string

// List of ObservabilityPipelineAmazonS3SourceCompression.
const (
	OBSERVABILITYPIPELINEAMAZONS3SOURCECOMPRESSION_AUTO ObservabilityPipelineAmazonS3SourceCompression = "auto"
	OBSERVABILITYPIPELINEAMAZONS3SOURCECOMPRESSION_NONE ObservabilityPipelineAmazonS3SourceCompression = "none"
	OBSERVABILITYPIPELINEAMAZONS3SOURCECOMPRESSION_GZIP ObservabilityPipelineAmazonS3SourceCompression = "gzip"
	OBSERVABILITYPIPELINEAMAZONS3SOURCECOMPRESSION_ZSTD ObservabilityPipelineAmazonS3SourceCompression = "zstd"
)

// allowedObservabilityPipelineAmazonS3SourceCompressionEnumValues holds every valid enum value;
// consulted by IsValid and returned by GetAllowedValues.
var allowedObservabilityPipelineAmazonS3SourceCompressionEnumValues = []ObservabilityPipelineAmazonS3SourceCompression{
	OBSERVABILITYPIPELINEAMAZONS3SOURCECOMPRESSION_AUTO,
	OBSERVABILITYPIPELINEAMAZONS3SOURCECOMPRESSION_NONE,
	OBSERVABILITYPIPELINEAMAZONS3SOURCECOMPRESSION_GZIP,
	OBSERVABILITYPIPELINEAMAZONS3SOURCECOMPRESSION_ZSTD,
}

// GetAllowedValues returns the list of possible values.
func (v *ObservabilityPipelineAmazonS3SourceCompression) GetAllowedValues() []ObservabilityPipelineAmazonS3SourceCompression {
	return allowedObservabilityPipelineAmazonS3SourceCompressionEnumValues
}

// UnmarshalJSON deserializes the given payload.
func (v *ObservabilityPipelineAmazonS3SourceCompression) UnmarshalJSON(src []byte) error {
	// Decode into a plain string first; validity is checked separately by callers via IsValid.
	var raw string
	if err := datadog.Unmarshal(src, &raw); err != nil {
		return err
	}
	*v = ObservabilityPipelineAmazonS3SourceCompression(raw)
	return nil
}

// NewObservabilityPipelineAmazonS3SourceCompressionFromValue returns a pointer to a valid ObservabilityPipelineAmazonS3SourceCompression
// for the value passed as argument, or an error if the value passed is not allowed by the enum.
func NewObservabilityPipelineAmazonS3SourceCompressionFromValue(v string) (*ObservabilityPipelineAmazonS3SourceCompression, error) {
	candidate := ObservabilityPipelineAmazonS3SourceCompression(v)
	if !candidate.IsValid() {
		return nil, fmt.Errorf("invalid value '%v' for ObservabilityPipelineAmazonS3SourceCompression: valid values are %v", v, allowedObservabilityPipelineAmazonS3SourceCompressionEnumValues)
	}
	return &candidate, nil
}

// IsValid return true if the value is valid for the enum, false otherwise.
func (v ObservabilityPipelineAmazonS3SourceCompression) IsValid() bool {
	// Linear scan is fine: the allowed-values slice has only four entries.
	for _, allowed := range allowedObservabilityPipelineAmazonS3SourceCompressionEnumValues {
		if v == allowed {
			return true
		}
	}
	return false
}

// Ptr returns reference to ObservabilityPipelineAmazonS3SourceCompression value.
func (v ObservabilityPipelineAmazonS3SourceCompression) Ptr() *ObservabilityPipelineAmazonS3SourceCompression {
	// v is already a copy of the caller's value, so its address is safe to hand out.
	p := v
	return &p
}
77 changes: 77 additions & 0 deletions examples/v2/observability-pipelines/ValidatePipeline_99164570.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
// Validate an observability pipeline with amazon S3 source compression returns "OK" response

package main

import (
"context"
"encoding/json"
"fmt"
"os"

"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
"github.com/DataDog/datadog-api-client-go/v2/api/datadogV2"
)

func main() {
	// S3 source using the new `compression` option (gzip-compressed objects).
	s3Source := datadogV2.ObservabilityPipelineAmazonS3Source{
		Id:          "amazon-s3-source",
		Type:        datadogV2.OBSERVABILITYPIPELINEAMAZONS3SOURCETYPE_AMAZON_S3,
		Region:      "us-east-1",
		Compression: datadogV2.OBSERVABILITYPIPELINEAMAZONS3SOURCECOMPRESSION_GZIP.Ptr(),
	}

	// Simple filter processor wired between the source and the destination.
	filterProcessor := datadogV2.ObservabilityPipelineFilterProcessor{
		Enabled: true,
		Id:      "filter-processor",
		Include: "service:my-service",
		Type:    datadogV2.OBSERVABILITYPIPELINEFILTERPROCESSORTYPE_FILTER,
	}

	// Destination that forwards the processor group's output to Datadog Logs.
	logsDestination := datadogV2.ObservabilityPipelineDatadogLogsDestination{
		Id: "datadog-logs-destination",
		Inputs: []string{
			"my-processor-group",
		},
		Type: datadogV2.OBSERVABILITYPIPELINEDATADOGLOGSDESTINATIONTYPE_DATADOG_LOGS,
	}

	config := datadogV2.ObservabilityPipelineConfig{
		Destinations: []datadogV2.ObservabilityPipelineConfigDestinationItem{
			{ObservabilityPipelineDatadogLogsDestination: &logsDestination},
		},
		ProcessorGroups: []datadogV2.ObservabilityPipelineConfigProcessorGroup{
			{
				Enabled: true,
				Id:      "my-processor-group",
				Include: "service:my-service",
				Inputs: []string{
					"amazon-s3-source",
				},
				Processors: []datadogV2.ObservabilityPipelineConfigProcessorItem{
					{ObservabilityPipelineFilterProcessor: &filterProcessor},
				},
			},
		},
		Sources: []datadogV2.ObservabilityPipelineConfigSourceItem{
			{ObservabilityPipelineAmazonS3Source: &s3Source},
		},
	}

	body := datadogV2.ObservabilityPipelineSpec{
		Data: datadogV2.ObservabilityPipelineSpecData{
			Attributes: datadogV2.ObservabilityPipelineDataAttributes{
				Config: config,
				Name:   "Pipeline with S3 Source Compression",
			},
			Type: "pipelines",
		},
	}

	ctx := datadog.NewDefaultContext(context.Background())
	configuration := datadog.NewConfiguration()
	apiClient := datadog.NewAPIClient(configuration)
	api := datadogV2.NewObservabilityPipelinesApi(apiClient)
	resp, r, err := api.ValidatePipeline(ctx, body)

	if err != nil {
		fmt.Fprintf(os.Stderr, "Error when calling `ObservabilityPipelinesApi.ValidatePipeline`: %v\n", err)
		fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
	}

	responseContent, _ := json.MarshalIndent(resp, "", " ")
	fmt.Fprintf(os.Stdout, "Response from `ObservabilityPipelinesApi.ValidatePipeline`:\n%s\n", responseContent)
}
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2026-04-08T12:44:25.060Z
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
interactions:
- request:
body: |
{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["amazon-s3-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"service:my-service","type":"filter"}]}],"sources":[{"compression":"gzip","id":"amazon-s3-source","region":"us-east-1","type":"amazon_s3"}]},"name":"Pipeline with S3 Source Compression"},"type":"pipelines"}}
form: {}
headers:
Accept:
- application/json
Content-Type:
- application/json
id: 0
method: POST
url: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/validate
response:
body: '{"errors":[]}

'
code: 200
duration: 0ms
headers:
Content-Type:
- application/vnd.api+json
status: 200 OK
version: 2
Loading
Loading