Skip to content

Commit c72520d

Browse files
api-clients-generation-pipeline[bot] and ci.datadog-api-spec authored
Add tags and description fields to the logs nested pipeline type LogsPipelineProcessor (#3742)
Co-authored-by: ci.datadog-api-spec <packages@datadoghq.com>
1 parent bd2c336 commit c72520d

File tree

6 files changed

+184
-0
lines changed

6 files changed

+184
-0
lines changed

.generator/schemas/v1/openapi.yaml

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6236,6 +6236,9 @@ components:
62366236

62376237
A pipeline can contain Nested Pipelines and Processors whereas a Nested Pipeline can only contain Processors.
62386238
properties:
6239+
description:
6240+
description: A description of the pipeline.
6241+
type: string
62396242
filter:
62406243
$ref: "#/components/schemas/LogsFilter"
62416244
is_enabled:
@@ -6250,6 +6253,12 @@ components:
62506253
items:
62516254
$ref: "#/components/schemas/LogsProcessor"
62526255
type: array
6256+
tags:
6257+
description: A list of tags associated with the pipeline.
6258+
items:
6259+
description: A single tag using the format `key:value`.
6260+
type: string
6261+
type: array
62536262
type:
62546263
$ref: "#/components/schemas/LogsPipelineProcessorType"
62556264
required:
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
"2026-03-18T17:10:40.108Z"
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,110 @@
1+
{
2+
"log": {
3+
"_recordingName": "Logs Pipelines/Create a pipeline with nested pipeline processor returns \"OK\" response",
4+
"creator": {
5+
"comment": "persister:fs",
6+
"name": "Polly.JS",
7+
"version": "6.0.5"
8+
},
9+
"entries": [
10+
{
11+
"_id": "43e467db3052c3f2f66c542643a0e3a7",
12+
"_order": 0,
13+
"cache": {},
14+
"request": {
15+
"bodySize": 392,
16+
"cookies": [],
17+
"headers": [
18+
{
19+
"_fromType": "array",
20+
"name": "accept",
21+
"value": "application/json"
22+
},
23+
{
24+
"_fromType": "array",
25+
"name": "content-type",
26+
"value": "application/json"
27+
}
28+
],
29+
"headersSize": 574,
30+
"httpVersion": "HTTP/1.1",
31+
"method": "POST",
32+
"postData": {
33+
"mimeType": "application/json",
34+
"params": [],
35+
"text": "{\"description\":\"Pipeline containing nested processor with tags and description\",\"filter\":{\"query\":\"source:python\"},\"name\":\"testPipelineWithNested\",\"processors\":[{\"description\":\"This is a nested pipeline for production logs\",\"filter\":{\"query\":\"env:production\"},\"is_enabled\":true,\"name\":\"nested_pipeline_with_metadata\",\"tags\":[\"env:prod\",\"type:nested\"],\"type\":\"pipeline\"}],\"tags\":[\"team:test\"]}"
36+
},
37+
"queryString": [],
38+
"url": "https://api.datadoghq.com/api/v1/logs/config/pipelines"
39+
},
40+
"response": {
41+
"bodySize": 497,
42+
"content": {
43+
"mimeType": "application/json",
44+
"size": 497,
45+
"text": "{\"id\":\"GyYNpCrVQtOB3KhqJSpOOA\",\"type\":\"pipeline\",\"name\":\"testPipelineWithNested\",\"is_enabled\":false,\"is_read_only\":false,\"filter\":{\"query\":\"source:python\"},\"processors\":[{\"type\":\"pipeline\",\"name\":\"nested_pipeline_with_metadata\",\"is_enabled\":true,\"filter\":{\"query\":\"env:production\"},\"processors\":[],\"tags\":[\"env:prod\",\"type:nested\"],\"description\":\"This is a nested pipeline for production logs\"}],\"tags\":[\"team:test\"],\"description\":\"Pipeline containing nested processor with tags and description\"}\n"
46+
},
47+
"cookies": [],
48+
"headers": [
49+
{
50+
"name": "content-type",
51+
"value": "application/json"
52+
}
53+
],
54+
"headersSize": 703,
55+
"httpVersion": "HTTP/1.1",
56+
"redirectURL": "",
57+
"status": 200,
58+
"statusText": "OK"
59+
},
60+
"startedDateTime": "2026-03-18T17:10:40.116Z",
61+
"time": 270
62+
},
63+
{
64+
"_id": "9d46b7bdff78f8c6c944e3cd37cf4ca4",
65+
"_order": 0,
66+
"cache": {},
67+
"request": {
68+
"bodySize": 0,
69+
"cookies": [],
70+
"headers": [
71+
{
72+
"_fromType": "array",
73+
"name": "accept",
74+
"value": "*/*"
75+
}
76+
],
77+
"headersSize": 533,
78+
"httpVersion": "HTTP/1.1",
79+
"method": "DELETE",
80+
"queryString": [],
81+
"url": "https://api.datadoghq.com/api/v1/logs/config/pipelines/GyYNpCrVQtOB3KhqJSpOOA"
82+
},
83+
"response": {
84+
"bodySize": 3,
85+
"content": {
86+
"mimeType": "application/json",
87+
"size": 3,
88+
"text": "{}\n"
89+
},
90+
"cookies": [],
91+
"headers": [
92+
{
93+
"name": "content-type",
94+
"value": "application/json"
95+
}
96+
],
97+
"headersSize": 678,
98+
"httpVersion": "HTTP/1.1",
99+
"redirectURL": "",
100+
"status": 200,
101+
"statusText": "OK"
102+
},
103+
"startedDateTime": "2026-03-18T17:10:40.392Z",
104+
"time": 482
105+
}
106+
],
107+
"pages": [],
108+
"version": "1.2"
109+
}
110+
}
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
/**
2+
* Create a pipeline with nested pipeline processor returns "OK" response
3+
*/
4+
5+
import { client, v1 } from "@datadog/datadog-api-client";
6+
7+
const configuration = client.createConfiguration();
8+
const apiInstance = new v1.LogsPipelinesApi(configuration);
9+
10+
const params: v1.LogsPipelinesApiCreateLogsPipelineRequest = {
11+
body: {
12+
filter: {
13+
query: "source:python",
14+
},
15+
name: "testPipelineWithNested",
16+
processors: [
17+
{
18+
type: "pipeline",
19+
isEnabled: true,
20+
name: "nested_pipeline_with_metadata",
21+
filter: {
22+
query: "env:production",
23+
},
24+
tags: ["env:prod", "type:nested"],
25+
description: "This is a nested pipeline for production logs",
26+
},
27+
],
28+
tags: ["team:test"],
29+
description:
30+
"Pipeline containing nested processor with tags and description",
31+
},
32+
};
33+
34+
apiInstance
35+
.createLogsPipeline(params)
36+
.then((data: v1.LogsPipeline) => {
37+
console.log(
38+
"API called successfully. Returned data: " + JSON.stringify(data)
39+
);
40+
})
41+
.catch((error: any) => console.error(error));

features/v1/logs_pipelines.feature

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,13 @@ Feature: Logs Pipelines
9898
When the request is sent
9999
Then the response status is 200 OK
100100

101+
@team:DataDog/event-platform-experience
102+
Scenario: Create a pipeline with nested pipeline processor returns "OK" response
103+
Given new "CreateLogsPipeline" request
104+
And body with value {"filter": {"query": "source:python"}, "name": "testPipelineWithNested", "processors": [{"type": "pipeline", "is_enabled": true, "name": "nested_pipeline_with_metadata", "filter": {"query": "env:production"}, "tags": ["env:prod", "type:nested"], "description": "This is a nested pipeline for production logs"}], "tags": ["team:test"], "description": "Pipeline containing nested processor with tags and description"}
105+
When the request is sent
106+
Then the response status is 200 OK
107+
101108
@team:DataDog/event-platform-experience
102109
Scenario: Create a pipeline with schema processor
103110
Given new "CreateLogsPipeline" request

packages/datadog-api-client-v1/models/LogsPipelineProcessor.ts

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,10 @@ import { AttributeTypeMap } from "../../datadog-api-client-common/util";
1717
* A pipeline can contain Nested Pipelines and Processors whereas a Nested Pipeline can only contain Processors.
1818
*/
1919
export class LogsPipelineProcessor {
20+
/**
21+
* A description of the pipeline.
22+
*/
23+
"description"?: string;
2024
/**
2125
* Filter for logs.
2226
*/
@@ -33,6 +37,10 @@ export class LogsPipelineProcessor {
3337
* Ordered list of processors in this pipeline.
3438
*/
3539
"processors"?: Array<LogsProcessor>;
40+
/**
41+
* A list of tags associated with the pipeline.
42+
*/
43+
"tags"?: Array<string>;
3644
/**
3745
* Type of logs pipeline processor.
3846
*/
@@ -54,6 +62,10 @@ export class LogsPipelineProcessor {
5462
* @ignore
5563
*/
5664
static readonly attributeTypeMap: AttributeTypeMap = {
65+
description: {
66+
baseName: "description",
67+
type: "string",
68+
},
5769
filter: {
5870
baseName: "filter",
5971
type: "LogsFilter",
@@ -70,6 +82,10 @@ export class LogsPipelineProcessor {
7082
baseName: "processors",
7183
type: "Array<LogsProcessor>",
7284
},
85+
tags: {
86+
baseName: "tags",
87+
type: "Array<string>",
88+
},
7389
type: {
7490
baseName: "type",
7591
type: "LogsPipelineProcessorType",

0 commit comments

Comments (0)