diff --git a/.apigentools-info b/.apigentools-info index 027f2f1826d..b77514e1155 100644 --- a/.apigentools-info +++ b/.apigentools-info @@ -4,13 +4,13 @@ "spec_versions": { "v1": { "apigentools_version": "1.6.6", - "regenerated": "2025-04-29 16:12:51.964788", - "spec_repo_commit": "22937387" + "regenerated": "2025-04-29 18:57:45.399045", + "spec_repo_commit": "d1252b21" }, "v2": { "apigentools_version": "1.6.6", - "regenerated": "2025-04-29 16:12:51.982170", - "spec_repo_commit": "22937387" + "regenerated": "2025-04-29 18:57:45.414992", + "spec_repo_commit": "d1252b21" } } } \ No newline at end of file diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index a024ef35eeb..dd3346b50b3 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -4357,6 +4357,48 @@ components: required: - data type: object + AzureStorageDestination: + description: The `azure_storage` destination forwards logs to an Azure Blob + Storage container. + properties: + blob_prefix: + description: Optional prefix for blobs written to the container. + example: logs/ + type: string + container_name: + description: The name of the Azure Blob Storage container to store logs + in. + example: my-log-container + type: string + id: + description: The unique identifier for this component. + example: azure-storage-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - processor-id + items: + type: string + type: array + type: + $ref: '#/components/schemas/AzureStorageDestinationType' + required: + - id + - type + - inputs + - container_name + type: object + AzureStorageDestinationType: + default: azure_storage + description: The destination type. The value should always be `azure_storage`. + enum: + - azure_storage + example: azure_storage + type: string + x-enum-varnames: + - AZURE_STORAGE AzureUCConfig: description: Azure config. 
properties: @@ -18998,6 +19040,29 @@ components: meta: $ref: '#/components/schemas/HistoricalJobListMeta' type: object + ListPipelinesResponse: + description: Represents the response payload containing a list of pipelines + and associated metadata. + properties: + data: + description: The `schema` `data`. + items: + $ref: '#/components/schemas/ObservabilityPipelineData' + type: array + meta: + $ref: '#/components/schemas/ListPipelinesResponseMeta' + required: + - data + type: object + ListPipelinesResponseMeta: + description: Metadata about the response. + properties: + totalCount: + description: The total number of pipelines. + example: 42 + format: int64 + type: integer + type: object ListPowerpacksResponse: description: Response object which includes all powerpack configurations. properties: @@ -21653,6 +21718,58 @@ components: - data_source - query type: object + MicrosoftSentinelDestination: + description: The `microsoft_sentinel` destination forwards logs to Microsoft + Sentinel. + properties: + client_id: + description: Azure AD client ID used for authentication. + example: a1b2c3d4-5678-90ab-cdef-1234567890ab + type: string + dcr_immutable_id: + description: The immutable ID of the Data Collection Rule (DCR). + example: dcr-uuid-1234 + type: string + id: + description: The unique identifier for this component. + example: sentinel-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + table: + description: The name of the Log Analytics table where logs are sent. + example: CustomLogsTable + type: string + tenant_id: + description: Azure AD tenant ID. 
+ example: abcdef12-3456-7890-abcd-ef1234567890 + type: string + type: + $ref: '#/components/schemas/MicrosoftSentinelDestinationType' + required: + - id + - type + - inputs + - client_id + - tenant_id + - dcr_immutable_id + - table + type: object + MicrosoftSentinelDestinationType: + default: microsoft_sentinel + description: The destination type. The value should always be `microsoft_sentinel`. + enum: + - microsoft_sentinel + example: microsoft_sentinel + type: string + x-enum-varnames: + - MICROSOFT_SENTINEL MicrosoftTeamsChannelInfoResponseAttributes: description: Channel attributes. properties: @@ -22757,6 +22874,66 @@ components: required: - data type: object + ObservabilityPipelineAddEnvVarsProcessor: + description: The `add_env_vars` processor adds environment variable values to + log events. + properties: + id: + description: The unique identifier for this component. Used to reference + this processor in the pipeline. + example: add-env-vars-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this processor. + example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessorType' + variables: + description: A list of environment variable mappings to apply to log fields. + items: + $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessorVariable' + type: array + required: + - id + - type + - include + - inputs + - variables + type: object + ObservabilityPipelineAddEnvVarsProcessorType: + default: add_env_vars + description: The processor type. The value should always be `add_env_vars`. 
+ enum: + - add_env_vars + example: add_env_vars + type: string + x-enum-varnames: + - ADD_ENV_VARS + ObservabilityPipelineAddEnvVarsProcessorVariable: + description: Defines a mapping between an environment variable and a log field. + properties: + field: + description: The target field in the log event. + example: log.environment.region + type: string + name: + description: The name of the environment variable to read. + example: AWS_REGION + type: string + required: + - field + - name + type: object ObservabilityPipelineAddFieldsProcessor: description: The `add_fields` processor adds static key-value fields to logs. properties: @@ -22803,6 +22980,236 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAmazonDataFirehoseSource: + description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: amazon-firehose-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSourceType' + required: + - id + - type + type: object + ObservabilityPipelineAmazonDataFirehoseSourceType: + default: amazon_data_firehose + description: The source type. The value should always be `amazon_data_firehose`. + enum: + - amazon_data_firehose + example: amazon_data_firehose + type: string + x-enum-varnames: + - AMAZON_DATA_FIREHOSE + ObservabilityPipelineAmazonOpenSearchDestination: + description: The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' + bulk_index: + description: The index to write logs to. 
+ example: logs-index + type: string + id: + description: The unique identifier for this component. + example: elasticsearch-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationType' + required: + - id + - type + - inputs + - auth + type: object + ObservabilityPipelineAmazonOpenSearchDestinationAuth: + description: 'Authentication settings for the Amazon OpenSearch destination. + + The `strategy` field determines whether basic or AWS-based authentication + is used. + + ' + properties: + assume_role: + description: The ARN of the role to assume (used with `aws` strategy). + type: string + aws_region: + description: AWS region + type: string + external_id: + description: External ID for the assumed role (used with `aws` strategy). + type: string + session_name: + description: Session name for the assumed role (used with `aws` strategy). + type: string + strategy: + $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy' + required: + - strategy + type: object + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy: + description: The authentication strategy to use. + enum: + - basic + - aws + example: aws + type: string + x-enum-varnames: + - BASIC + - AWS + ObservabilityPipelineAmazonOpenSearchDestinationType: + default: amazon_opensearch + description: The destination type. The value should always be `amazon_opensearch`. + enum: + - amazon_opensearch + example: amazon_opensearch + type: string + x-enum-varnames: + - AMAZON_OPENSEARCH + ObservabilityPipelineAmazonS3Destination: + description: The `amazon_s3` destination sends your logs in Datadog-rehydratable + format to an Amazon S3 bucket for archiving. 
+ properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' + bucket: + description: S3 bucket name. + example: error-logs + type: string + id: + description: Unique identifier for the destination component. + example: amazon-s3-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + key_prefix: + description: Optional prefix for object keys. + type: string + region: + description: AWS region of the S3 bucket. + example: us-east-1 + type: string + storage_class: + $ref: '#/components/schemas/ObservabilityPipelineAmazonS3DestinationStorageClass' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineAmazonS3DestinationType' + required: + - id + - type + - inputs + - bucket + - region + - storage_class + type: object + ObservabilityPipelineAmazonS3DestinationStorageClass: + description: S3 storage class. + enum: + - STANDARD + - REDUCED_REDUNDANCY + - INTELLIGENT_TIERING + - STANDARD_IA + - EXPRESS_ONEZONE + - ONEZONE_IA + - GLACIER + - GLACIER_IR + - DEEP_ARCHIVE + example: STANDARD + type: string + x-enum-varnames: + - STANDARD + - REDUCED_REDUNDANCY + - INTELLIGENT_TIERING + - STANDARD_IA + - EXPRESS_ONEZONE + - ONEZONE_IA + - GLACIER + - GLACIER_IR + - DEEP_ARCHIVE + ObservabilityPipelineAmazonS3DestinationType: + default: amazon_s3 + description: The destination type. Always `amazon_s3`. + enum: + - amazon_s3 + example: amazon_s3 + type: string + x-enum-varnames: + - AMAZON_S3 + ObservabilityPipelineAmazonS3Source: + description: 'The `amazon_s3` source ingests logs from an Amazon S3 bucket. + + It supports AWS authentication and TLS encryption. + + ' + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' + id: + description: The unique identifier for this component. 
Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: aws-s3-source + type: string + region: + description: AWS region where the S3 bucket resides. + example: us-east-1 + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineAmazonS3SourceType' + required: + - id + - type + - region + type: object + ObservabilityPipelineAmazonS3SourceType: + default: amazon_s3 + description: The source type. Always `amazon_s3`. + enum: + - amazon_s3 + example: amazon_s3 + type: string + x-enum-varnames: + - AMAZON_S3 + ObservabilityPipelineAwsAuth: + description: "AWS authentication credentials used for accessing AWS services + such as S3.\nIf omitted, the system\u2019s default credentials are used (for + example, the IAM role and environment variables).\n" + properties: + assume_role: + description: The Amazon Resource Name (ARN) of the role to assume. + type: string + external_id: + description: A unique identifier for cross-account role assumption. + type: string + session_name: + description: A session identifier used for logging and tracing the assumed + role session. + type: string + type: object ObservabilityPipelineConfig: description: Specifies the pipeline's configuration, including its sources, processors, and destinations. @@ -22838,13 +23245,26 @@ components: type: array required: - sources - - processors - destinations type: object ObservabilityPipelineConfigDestinationItem: description: A destination for the pipeline. 
oneOf: - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' + - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' + - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/AzureStorageDestination' + - $ref: '#/components/schemas/MicrosoftSentinelDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestination' + - $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' + - $ref: '#/components/schemas/ObservabilityPipelineOpenSearchDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' ObservabilityPipelineConfigProcessorItem: description: A processor for the pipeline. 
oneOf: @@ -22854,34 +23274,34 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource' - ObservabilityPipelineCreateRequest: - description: Top-level schema representing a pipeline. - properties: - data: - $ref: '#/components/schemas/ObservabilityPipelineCreateRequestData' - required: - - data - type: object - ObservabilityPipelineCreateRequestData: - description: "Contains the pipeline\u2019s ID, type, and configuration attributes." - properties: - attributes: - $ref: '#/components/schemas/ObservabilityPipelineDataAttributes' - type: - default: pipelines - description: The resource type identifier. For pipeline resources, this - should always be set to `pipelines`. 
- example: pipelines - type: string - required: - - type - - attributes - type: object + - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Source' + - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' + - $ref: '#/components/schemas/ObservabilityPipelineFluentBitSource' + - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' + - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubSource' + - $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource' + - $ref: '#/components/schemas/ObservabilityPipelineLogstashSource' ObservabilityPipelineData: description: "Contains the pipeline\u2019s ID, type, and configuration attributes." properties: @@ -22973,427 +23393,2592 @@ components: type: string x-enum-varnames: - DATADOG_LOGS + ObservabilityPipelineDecoding: + description: The decoding format used to interpret incoming logs. + enum: + - bytes + - gelf + - json + - syslog + example: json + type: string + x-enum-varnames: + - DECODE_BYTES + - DECODE_GELF + - DECODE_JSON + - DECODE_SYSLOG + ObservabilityPipelineDedupeProcessor: + description: The `dedupe` processor removes duplicate fields in log events. + properties: + fields: + description: A list of log field paths to check for duplicates. + example: + - log.message + - log.error + items: + type: string + type: array + id: + description: The unique identifier for this processor. + example: dedupe-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. 
+ example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this processor. + example: + - parse-json-processor + items: + type: string + type: array + mode: + $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessorMode' + type: + $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessorType' + required: + - id + - type + - include + - inputs + - fields + - mode + type: object + ObservabilityPipelineDedupeProcessorMode: + description: The deduplication mode to apply to the fields. + enum: + - match + - ignore + example: match + type: string + x-enum-varnames: + - MATCH + - IGNORE + ObservabilityPipelineDedupeProcessorType: + default: dedupe + description: The processor type. The value should always be `dedupe`. + enum: + - dedupe + example: dedupe + type: string + x-enum-varnames: + - DEDUPE + ObservabilityPipelineElasticsearchDestination: + description: The `elasticsearch` destination writes logs to an Elasticsearch + cluster. + properties: + api_version: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' + bulk_index: + description: The index to write logs to in Elasticsearch. + example: logs-index + type: string + id: + description: The unique identifier for this component. + example: elasticsearch-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineElasticsearchDestinationApiVersion: + description: The Elasticsearch API version to use. Set to `auto` to auto-detect. 
+ enum: + - auto + - v6 + - v7 + - v8 + example: auto + type: string + x-enum-varnames: + - AUTO + - V6 + - V7 + - V8 + ObservabilityPipelineElasticsearchDestinationType: + default: elasticsearch + description: The destination type. The value should always be `elasticsearch`. + enum: + - elasticsearch + example: elasticsearch + type: string + x-enum-varnames: + - ELASTICSEARCH + ObservabilityPipelineEnrichmentTableFile: + description: Defines a static enrichment table loaded from a CSV file. + properties: + encoding: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileEncoding' + key: + description: Key fields used to look up enrichment values. + items: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileKeyItems' + type: array + path: + description: Path to the CSV file. + example: /etc/enrichment/lookup.csv + type: string + schema: + description: Schema defining column names and their types. + items: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileSchemaItems' + type: array + required: + - encoding + - key + - path + - schema + type: object + ObservabilityPipelineEnrichmentTableFileEncoding: + description: File encoding format. + properties: + delimiter: + description: The `encoding` `delimiter`. + example: ',' + type: string + includes_headers: + description: The `encoding` `includes_headers`. + example: true + type: boolean + type: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileEncodingType' + required: + - type + - delimiter + - includes_headers + type: object + ObservabilityPipelineEnrichmentTableFileEncodingType: + description: Specifies the encoding format (e.g., CSV) used for enrichment tables. + enum: + - csv + example: csv + type: string + x-enum-varnames: + - CSV + ObservabilityPipelineEnrichmentTableFileKeyItems: + description: Defines how to map log fields to enrichment table columns during + lookups. + properties: + column: + description: The `items` `column`. 
+ example: user_id + type: string + comparison: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileKeyItemsComparison' + field: + description: The `items` `field`. + example: log.user.id + type: string + required: + - column + - comparison + - field + type: object + ObservabilityPipelineEnrichmentTableFileKeyItemsComparison: + description: Defines how to compare key fields for enrichment table lookups. + enum: + - equals + example: equals + type: string + x-enum-varnames: + - EQUALS + ObservabilityPipelineEnrichmentTableFileSchemaItems: + description: Describes a single column and its type in an enrichment table schema. + properties: + column: + description: The `items` `column`. + example: region + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileSchemaItemsType' + required: + - column + - type + type: object + ObservabilityPipelineEnrichmentTableFileSchemaItemsType: + description: Declares allowed data types for enrichment table columns. + enum: + - string + - boolean + - integer + - float + - date + - timestamp + example: string + type: string + x-enum-varnames: + - STRING + - BOOLEAN + - INTEGER + - FLOAT + - DATE + - TIMESTAMP + ObservabilityPipelineEnrichmentTableGeoIp: + description: Uses a GeoIP database to enrich logs based on an IP field. + properties: + key_field: + description: Path to the IP field in the log. + example: log.source.ip + type: string + locale: + description: Locale used to resolve geographical names. + example: en + type: string + path: + description: Path to the GeoIP database file. + example: /etc/geoip/GeoLite2-City.mmdb + type: string + required: + - key_field + - locale + - path + type: object + ObservabilityPipelineEnrichmentTableProcessor: + description: The `enrichment_table` processor enriches logs using a static CSV + file or GeoIP database. 
+ properties: + file: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFile' + geoip: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableGeoIp' + id: + description: The unique identifier for this processor. + example: enrichment-table-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: source:my-source + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this processor. + example: + - add-fields-processor + items: + type: string + type: array + target: + description: Path where enrichment results should be stored in the log. + example: enriched.geoip + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessorType' + required: + - id + - type + - include + - inputs + - target + type: object + ObservabilityPipelineEnrichmentTableProcessorType: + default: enrichment_table + description: The processor type. The value should always be `enrichment_table`. + enum: + - enrichment_table + example: enrichment_table + type: string + x-enum-varnames: + - ENRICHMENT_TABLE ObservabilityPipelineFieldValue: description: Represents a static key-value pair used in various processors. properties: - name: - description: The field name. - example: field_name - type: string - value: - description: The field value. - example: field_value + name: + description: The field name. + example: field_name + type: string + value: + description: The field value. + example: field_value + type: string + required: + - name + - value + type: object + ObservabilityPipelineFilterProcessor: + description: The `filter` processor allows conditional processing of logs based + on a Datadog search query. Logs that match the `include` query are passed + through; others are discarded. + properties: + id: + description: The unique identifier for this component. 
Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: filter-processor + type: string + include: + description: A Datadog search query used to determine which logs should + pass through the filter. Logs that match this query continue to downstream + components; others are dropped. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineFilterProcessorType' + required: + - id + - type + - include + - inputs + type: object + ObservabilityPipelineFilterProcessorType: + default: filter + description: The processor type. The value should always be `filter`. + enum: + - filter + example: filter + type: string + x-enum-varnames: + - FILTER + ObservabilityPipelineFluentBitSource: + description: The `fluent_bit` source ingests logs from Fluent Bit. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: fluent-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineFluentBitSourceType' + required: + - id + - type + type: object + ObservabilityPipelineFluentBitSourceType: + default: fluent_bit + description: The source type. The value should always be `fluent_bit`. + enum: + - fluent_bit + example: fluent_bit + type: string + x-enum-varnames: + - FLUENT_BIT + ObservabilityPipelineFluentdSource: + description: The `fluentd` source ingests logs from a Fluentd-compatible service. + properties: + id: + description: The unique identifier for this component. 
Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: fluent-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineFluentdSourceType' + required: + - id + - type + type: object + ObservabilityPipelineFluentdSourceType: + default: fluentd + description: The source type. The value should always be `fluentd. + enum: + - fluentd + example: fluentd + type: string + x-enum-varnames: + - FLUENTD + ObservabilityPipelineGcpAuth: + description: 'GCP credentials used to authenticate with Google Cloud Storage. + + ' + properties: + credentials_file: + description: Path to the GCP service account key file. + example: /var/secrets/gcp-credentials.json + type: string + required: + - credentials_file + type: object + ObservabilityPipelineGenerateMetricsProcessor: + description: 'The `generate_datadog_metrics` processor creates custom metrics + from logs and sends them to Datadog. + + Metrics can be counters, gauges, or distributions and optionally grouped by + log fields. + + ' + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline. + example: generate-metrics-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this processor. + example: + - source-id + items: + type: string + type: array + metrics: + description: Configuration for generating individual metrics. 
+ items: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetric' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessorType' + required: + - id + - type + - inputs + - include + - metrics + type: object + ObservabilityPipelineGenerateMetricsProcessorType: + default: generate_datadog_metrics + description: The processor type. Always `generate_datadog_metrics`. + enum: + - generate_datadog_metrics + example: generate_datadog_metrics + type: string + x-enum-varnames: + - GENERATE_DATADOG_METRICS + ObservabilityPipelineGeneratedMetric: + description: 'Defines a log-based custom metric, including its name, type, filter, + value computation strategy, + + and optional grouping fields. + + ' + properties: + group_by: + description: Optional fields used to group the metric series. + example: + - service + - env + items: + type: string + type: array + include: + description: Datadog filter query to match logs for metric generation. + example: service:billing + type: string + metric_type: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricMetricType' + name: + description: Name of the custom metric to be created. + example: logs.processed + type: string + value: + $ref: '#/components/schemas/ObservabilityPipelineMetricValue' + required: + - name + - include + - metric_type + - value + type: object + ObservabilityPipelineGeneratedMetricIncrementByField: + description: Strategy that increments a generated metric based on the value + of a log field. + properties: + field: + description: Name of the log field containing the numeric value to increment + the metric by. + example: errors + type: string + strategy: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy' + required: + - strategy + - field + type: object + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy: + description: Uses a numeric field in the log event as the metric increment. 
+ enum: + - increment_by_field + example: increment_by_field + type: string + x-enum-varnames: + - INCREMENT_BY_FIELD + ObservabilityPipelineGeneratedMetricIncrementByOne: + description: Strategy that increments a generated metric by one for each matching + event. + properties: + strategy: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOneStrategy' + required: + - strategy + type: object + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy: + description: Increments the metric by 1 for each matching event. + enum: + - increment_by_one + example: increment_by_one + type: string + x-enum-varnames: + - INCREMENT_BY_ONE + ObservabilityPipelineGeneratedMetricMetricType: + description: Type of metric to create. + enum: + - count + - gauge + - distribution + example: count + type: string + x-enum-varnames: + - COUNT + - GAUGE + - DISTRIBUTION + ObservabilityPipelineGoogleChronicleDestination: + description: The `google_chronicle` destination sends logs to Google Chronicle. + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + customer_id: + description: The Google Chronicle customer ID. + example: abcdefg123456789 + type: string + encoding: + $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestinationEncoding' + id: + description: The unique identifier for this component. + example: google-chronicle-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - parse-json-processor + items: + type: string + type: array + log_type: + description: The log type metadata associated with the Chronicle destination. 
+ example: nginx_logs + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestinationType' + required: + - id + - type + - inputs + - auth + - customer_id + type: object + ObservabilityPipelineGoogleChronicleDestinationEncoding: + description: The encoding format for the logs sent to Chronicle. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineGoogleChronicleDestinationType: + default: google_chronicle + description: The destination type. The value should always be `google_chronicle`. + enum: + - google_chronicle + example: google_chronicle + type: string + x-enum-varnames: + - GOOGLE_CHRONICLE + ObservabilityPipelineGoogleCloudStorageDestination: + description: 'The `google_cloud_storage` destination stores logs in a Google + Cloud Storage (GCS) bucket. + + It requires a bucket name, GCP authentication, and metadata fields. + + ' + properties: + acl: + $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationAcl' + auth: + $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + bucket: + description: Name of the GCS bucket. + example: error-logs + type: string + id: + description: Unique identifier for the destination component. + example: gcs-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + key_prefix: + description: Optional prefix for object keys within the GCS bucket. + type: string + metadata: + description: Custom metadata key-value pairs added to each object. 
+ items: + $ref: '#/components/schemas/ObservabilityPipelineMetadataEntry' + type: array + storage_class: + $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationStorageClass' + type: + $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationType' + required: + - id + - type + - inputs + - bucket + - auth + - storage_class + - acl + - metadata + type: object + ObservabilityPipelineGoogleCloudStorageDestinationAcl: + description: Access control list setting for objects written to the bucket. + enum: + - private + - project-private + - public-read + - authenticated-read + - bucket-owner-read + - bucket-owner-full-control + example: private + type: string + x-enum-varnames: + - PRIVATE + - PROJECTNOT_PRIVATE + - PUBLICNOT_READ + - AUTHENTICATEDNOT_READ + - BUCKETNOT_OWNERNOT_READ + - BUCKETNOT_OWNERNOT_FULLNOT_CONTROL + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass: + description: Storage class used for objects stored in GCS. + enum: + - STANDARD + - NEARLINE + - COLDLINE + - ARCHIVE + example: STANDARD + type: string + x-enum-varnames: + - STANDARD + - NEARLINE + - COLDLINE + - ARCHIVE + ObservabilityPipelineGoogleCloudStorageDestinationType: + default: google_cloud_storage + description: The destination type. Always `google_cloud_storage`. + enum: + - google_cloud_storage + example: google_cloud_storage + type: string + x-enum-varnames: + - GOOGLE_CLOUD_STORAGE + ObservabilityPipelineGooglePubSubSource: + description: The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub + subscription. + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + decoding: + $ref: '#/components/schemas/ObservabilityPipelineDecoding' + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). 
+ example: google-pubsub-source + type: string + project: + description: The GCP project ID that owns the Pub/Sub subscription. + example: my-gcp-project + type: string + subscription: + description: The Pub/Sub subscription name from which messages are consumed. + example: logs-subscription + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubSourceType' + required: + - id + - type + - auth + - decoding + - project + - subscription + type: object + ObservabilityPipelineGooglePubSubSourceType: + default: google_pubsub + description: The source type. The value should always be `google_pubsub`. + enum: + - google_pubsub + example: google_pubsub + type: string + x-enum-varnames: + - GOOGLE_PUBSUB + ObservabilityPipelineHttpClientSource: + description: The `http_client` source scrapes logs from HTTP endpoints at regular + intervals. + properties: + auth_strategy: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientSourceAuthStrategy' + decoding: + $ref: '#/components/schemas/ObservabilityPipelineDecoding' + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: http-client-source + type: string + scrape_interval_secs: + description: The interval (in seconds) between HTTP scrape requests. + example: 60 + format: int64 + type: integer + scrape_timeout_secs: + description: The timeout (in seconds) for each scrape request. + example: 10 + format: int64 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientSourceType' + required: + - id + - type + - decoding + type: object + ObservabilityPipelineHttpClientSourceAuthStrategy: + description: Optional authentication strategy for HTTP requests. 
+ enum: + - basic + - bearer + example: basic + type: string + x-enum-varnames: + - BASIC + - BEARER + ObservabilityPipelineHttpClientSourceType: + default: http_client + description: The source type. The value should always be `http_client`. + enum: + - http_client + example: http_client + type: string + x-enum-varnames: + - HTTP_CLIENT + ObservabilityPipelineHttpServerSource: + description: The `http_server` source collects logs over HTTP POST from external + services. + properties: + auth_strategy: + $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceAuthStrategy' + decoding: + $ref: '#/components/schemas/ObservabilityPipelineDecoding' + id: + description: Unique ID for the HTTP server source. + example: http-server-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceType' + required: + - id + - type + - auth_strategy + - decoding + type: object + ObservabilityPipelineHttpServerSourceAuthStrategy: + description: HTTP authentication method. + enum: + - none + - plain + example: plain + type: string + x-enum-varnames: + - NONE + - PLAIN + ObservabilityPipelineHttpServerSourceType: + default: http_server + description: The source type. The value should always be `http_server`. + enum: + - http_server + example: http_server + type: string + x-enum-varnames: + - HTTP_SERVER + ObservabilityPipelineKafkaSource: + description: The `kafka` source ingests data from Apache Kafka topics. + properties: + group_id: + description: Consumer group ID used by the Kafka client. + example: consumer-group-0 + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: kafka-source + type: string + librdkafka_options: + description: Optional list of advanced Kafka client configuration options, + defined as key-value pairs. 
+ items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + type: array + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topics: + description: A list of Kafka topic names to subscribe to. The source ingests + messages from each topic specified. + example: + - topic1 + - topic2 + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceType' + required: + - id + - type + - group_id + - topics + type: object + ObservabilityPipelineKafkaSourceLibrdkafkaOption: + description: Represents a key-value pair used to configure low-level `librdkafka` + client options for Kafka sources, such as timeouts, buffer sizes, and security + settings. + properties: + name: + description: The name of the `librdkafka` configuration option to set. + example: fetch.message.max.bytes + type: string + value: + description: The value assigned to the specified `librdkafka` configuration + option. + example: '1048576' + type: string + required: + - name + - value + type: object + ObservabilityPipelineKafkaSourceSasl: + description: Specifies the SASL mechanism for authenticating with a Kafka cluster. + properties: + mechanism: + $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' + type: object + ObservabilityPipelineKafkaSourceType: + default: kafka + description: The source type. The value should always be `kafka`. + enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineLogstashSource: + description: The `logstash` source ingests logs from a Logstash forwarder. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). 
+ example: logstash-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineLogstashSourceType' + required: + - id + - type + type: object + ObservabilityPipelineLogstashSourceType: + default: logstash + description: The source type. The value should always be `logstash`. + enum: + - logstash + example: logstash + type: string + x-enum-varnames: + - LOGSTASH + ObservabilityPipelineMetadataEntry: + description: A custom metadata entry to attach to each object uploaded to the + GCS bucket. + properties: + name: + description: The metadata key. + example: environment + type: string + value: + description: The metadata value. + example: production + type: string + required: + - name + - value + type: object + ObservabilityPipelineMetricValue: + description: Specifies how the value of the generated metric is computed. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOne' + - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByField' + ObservabilityPipelineNewRelicDestination: + description: The `new_relic` destination sends logs to the New Relic platform. + properties: + id: + description: The unique identifier for this component. + example: new-relic-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - parse-json-processor + items: + type: string + type: array + region: + $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestinationRegion' + type: + $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestinationType' + required: + - id + - type + - inputs + - region + type: object + ObservabilityPipelineNewRelicDestinationRegion: + description: The New Relic region. 
+ enum: + - us + - eu + example: us + type: string + x-enum-varnames: + - US + - EU + ObservabilityPipelineNewRelicDestinationType: + default: new_relic + description: The destination type. The value should always be `new_relic`. + enum: + - new_relic + example: new_relic + type: string + x-enum-varnames: + - NEW_RELIC + ObservabilityPipelineOcsfMapperProcessor: + description: The `ocsf_mapper` processor transforms logs into the OCSF schema + using a predefined mapping configuration. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline. + example: ocsf-mapper-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this processor. + example: + - filter-processor + items: + type: string + type: array + mappings: + description: A list of mapping rules to convert events to the OCSF format. + items: + $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessorMapping' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessorType' + required: + - id + - type + - include + - inputs + - mappings + type: object + ObservabilityPipelineOcsfMapperProcessorMapping: + description: Defines how specific events are transformed to OCSF using a mapping + configuration. + properties: + include: + description: A Datadog search query used to select the logs that this mapping + should apply to. + example: service:my-service + type: string + mapping: + $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessorMappingMapping' + required: + - include + - mapping + type: object + ObservabilityPipelineOcsfMapperProcessorMappingMapping: + description: Defines a single mapping rule for transforming logs into the OCSF + schema. 
+ oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMappingLibrary' + ObservabilityPipelineOcsfMapperProcessorType: + default: ocsf_mapper + description: The processor type. The value should always be `ocsf_mapper`. + enum: + - ocsf_mapper + example: ocsf_mapper + type: string + x-enum-varnames: + - OCSF_MAPPER + ObservabilityPipelineOcsfMappingLibrary: + description: Predefined library mappings for common log formats. + enum: + - CloudTrail Account Change + - GCP Cloud Audit CreateBucket + - GCP Cloud Audit CreateSink + - GCP Cloud Audit SetIamPolicy + - GCP Cloud Audit UpdateSink + - Github Audit Log API Activity + - Google Workspace Admin Audit addPrivilege + - Microsoft 365 Defender Incident + - Microsoft 365 Defender UserLoggedIn + - Okta System Log Authentication + - Palo Alto Networks Firewall Traffic + example: CloudTrail Account Change + type: string + x-enum-varnames: + - CLOUDTRAIL_ACCOUNT_CHANGE + - GCP_CLOUD_AUDIT_CREATEBUCKET + - GCP_CLOUD_AUDIT_CREATESINK + - GCP_CLOUD_AUDIT_SETIAMPOLICY + - GCP_CLOUD_AUDIT_UPDATESINK + - GITHUB_AUDIT_LOG_API_ACTIVITY + - GOOGLE_WORKSPACE_ADMIN_AUDIT_ADDPRIVILEGE + - MICROSOFT_365_DEFENDER_INCIDENT + - MICROSOFT_365_DEFENDER_USERLOGGEDIN + - OKTA_SYSTEM_LOG_AUTHENTICATION + - PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC + ObservabilityPipelineOpenSearchDestination: + description: The `opensearch` destination writes logs to an OpenSearch cluster. + properties: + bulk_index: + description: The index to write logs to. + example: logs-index + type: string + id: + description: The unique identifier for this component. + example: opensearch-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. 
+ example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineOpenSearchDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineOpenSearchDestinationType: + default: opensearch + description: The destination type. The value should always be `opensearch`. + enum: + - opensearch + example: opensearch + type: string + x-enum-varnames: + - OPENSEARCH + ObservabilityPipelineParseGrokProcessor: + description: The `parse_grok` processor extracts structured fields from unstructured + log messages using Grok patterns. + properties: + disable_library_rules: + default: false + description: If set to `true`, disables the default Grok rules provided + by Datadog. + example: true + type: boolean + id: + description: A unique identifier for this processor. + example: parse-grok-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + rules: + description: The list of Grok parsing rules. If multiple matching rules + are provided, they are evaluated in order. The first successful match + is applied. + items: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorRule' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorType' + required: + - id + - type + - include + - inputs + - rules + type: object + ObservabilityPipelineParseGrokProcessorRule: + description: 'A Grok parsing rule used in the `parse_grok` processor. Each rule + defines how to extract structured fields + + from a specific log field using Grok patterns. 
+ + ' + properties: + match_rules: + description: 'A list of Grok parsing rules that define how to extract fields + from the source field. + + Each rule must contain a name and a valid Grok pattern. + + ' + example: + - name: MyParsingRule + rule: '%{word:user} connected on %{date("MM/dd/yyyy"):date}' + items: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorRuleMatchRule' + type: array + source: + description: The name of the field in the log event to apply the Grok rules + to. + example: message + type: string + support_rules: + description: 'A list of Grok helper rules that can be referenced by the + parsing rules. + + ' + example: + - name: user + rule: '%{word:user.name}' + items: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorRuleSupportRule' + type: array + required: + - source + - match_rules + - support_rules + type: object + ObservabilityPipelineParseGrokProcessorRuleMatchRule: + description: 'Defines a Grok parsing rule, which extracts structured fields + from log content using named Grok patterns. + + Each rule must have a unique name and a valid Datadog Grok pattern that will + be applied to the source field. + + ' + properties: + name: + description: The name of the rule. + example: MyParsingRule + type: string + rule: + description: The definition of the Grok rule. + example: '%{word:user} connected on %{date("MM/dd/yyyy"):date}' + type: string + required: + - name + - rule + type: object + ObservabilityPipelineParseGrokProcessorRuleSupportRule: + description: The Grok helper rule referenced in the parsing rules. + properties: + name: + description: The name of the Grok helper rule. + example: user + type: string + rule: + description: The definition of the Grok helper rule. + example: ' %{word:user.name}' + type: string + required: + - name + - rule + type: object + ObservabilityPipelineParseGrokProcessorType: + default: parse_grok + description: The processor type. The value should always be `parse_grok`. 
+ enum: + - parse_grok + example: parse_grok + type: string + x-enum-varnames: + - PARSE_GROK + ObservabilityPipelineParseJSONProcessor: + description: The `parse_json` processor extracts JSON from a specified field + and flattens it into the event. This is useful when logs contain embedded + JSON as a string. + properties: + field: + description: The name of the log field that contains a JSON string. + example: message + type: string + id: + description: A unique identifier for this component. Used to reference this + component in other parts of the pipeline (e.g., as input to downstream + components). + example: parse-json-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessorType' + required: + - id + - type + - include + - field + - inputs + type: object + ObservabilityPipelineParseJSONProcessorType: + default: parse_json + description: The processor type. The value should always be `parse_json`. + enum: + - parse_json + example: parse_json + type: string + x-enum-varnames: + - PARSE_JSON + ObservabilityPipelinePipelineKafkaSourceSaslMechanism: + description: SASL mechanism used for Kafka authentication. + enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 + ObservabilityPipelineQuotaProcessor: + description: The Quota Processor measures logging traffic for logs that match + a specified filter. When the configured daily quota is met, the processor + can drop or alert. 
+ properties: + drop_events: + description: If set to `true`, logs that matched the quota filter and sent + after the quota has been met are dropped; only logs that did not match + the filter query continue through the pipeline. + example: false + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: quota-processor + type: string + ignore_when_missing_partitions: + description: If `true`, the processor skips quota checks when partition + fields are missing from the logs. + type: boolean + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + limit: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit' + name: + description: Name of the quota. + example: MyQuota + type: string + overflow_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' + overrides: + description: A list of alternate quota rules that apply to specific sets + of events, identified by matching field values. Each override can define + a custom limit. + items: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverride' + type: array + partition_fields: + description: A list of fields used to segment log traffic for quota enforcement. + Quotas are tracked independently by unique combinations of these field + values. 
+ items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' + required: + - id + - type + - include + - name + - drop_events + - limit + - inputs + type: object + ObservabilityPipelineQuotaProcessorLimit: + description: The maximum amount of data or number of events allowed before the + quota is enforced. Can be specified in bytes or events. + properties: + enforce: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimitEnforceType' + limit: + description: The limit for quota enforcement. + example: 1000 + format: int64 + type: integer + required: + - enforce + - limit + type: object + ObservabilityPipelineQuotaProcessorLimitEnforceType: + description: Unit for quota enforcement in bytes for data size or events for + count. + enum: + - bytes + - events + example: bytes + type: string + x-enum-varnames: + - BYTES + - EVENTS + ObservabilityPipelineQuotaProcessorOverflowAction: + description: 'The action to take when the quota is exceeded. Options: + + - `drop`: Drop the event. + + - `no_action`: Let the event pass through. + + - `overflow_routing`: Route to an overflow destination. + + ' + enum: + - drop + - no_action + - overflow_routing + example: drop + type: string + x-enum-varnames: + - DROP + - NO_ACTION + - OVERFLOW_ROUTING + ObservabilityPipelineQuotaProcessorOverride: + description: Defines a custom quota limit that applies to specific log events + based on matching field values. + properties: + fields: + description: A list of field matchers used to apply a specific override. + If an event matches all listed key-value pairs, the corresponding override + limit is enforced. + items: + $ref: '#/components/schemas/ObservabilityPipelineFieldValue' + type: array + limit: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit' + required: + - fields + - limit + type: object + ObservabilityPipelineQuotaProcessorType: + default: quota + description: The processor type. 
The value should always be `quota`. + enum: + - quota + example: quota + type: string + x-enum-varnames: + - QUOTA + ObservabilityPipelineReduceProcessor: + description: The `reduce` processor aggregates and merges logs based on matching + keys and merge strategies. + properties: + group_by: + description: A list of fields used to group log events for merging. + example: + - log.user.id + - log.device.id + items: + type: string + type: array + id: + description: The unique identifier for this processor. + example: reduce-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: env:prod + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this processor. + example: + - parse-json-processor + items: + type: string + type: array + merge_strategies: + description: List of merge strategies defining how values from grouped events + should be combined. + items: + $ref: '#/components/schemas/ObservabilityPipelineReduceProcessorMergeStrategy' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineReduceProcessorType' + required: + - id + - type + - include + - inputs + - group_by + - merge_strategies + type: object + ObservabilityPipelineReduceProcessorMergeStrategy: + description: Defines how a specific field should be merged across grouped events. + properties: + path: + description: The field path in the log event. + example: log.user.roles + type: string + strategy: + $ref: '#/components/schemas/ObservabilityPipelineReduceProcessorMergeStrategyStrategy' + required: + - path + - strategy + type: object + ObservabilityPipelineReduceProcessorMergeStrategyStrategy: + description: The merge strategy to apply. 
+      enum:
+      - discard
+      - retain
+      - sum
+      - max
+      - min
+      - array
+      - concat
+      - concat_newline
+      - concat_raw
+      - shortest_array
+      - longest_array
+      - flat_unique
+      example: flat_unique
+      type: string
+      x-enum-varnames:
+      - DISCARD
+      - RETAIN
+      - SUM
+      - MAX
+      - MIN
+      - ARRAY
+      - CONCAT
+      - CONCAT_NEWLINE
+      - CONCAT_RAW
+      - SHORTEST_ARRAY
+      - LONGEST_ARRAY
+      - FLAT_UNIQUE
+    ObservabilityPipelineReduceProcessorType:
+      default: reduce
+      description: The processor type. The value should always be `reduce`.
+      enum:
+      - reduce
+      example: reduce
+      type: string
+      x-enum-varnames:
+      - REDUCE
+    ObservabilityPipelineRemoveFieldsProcessor:
+      description: The `remove_fields` processor deletes specified fields from logs.
+      properties:
+        fields:
+          description: A list of field names to be removed from each log event.
+          example:
+          - field1
+          - field2
+          items:
+            type: string
+          type: array
+        id:
+          description: The unique identifier for this component. Used to reference
+            this component in other parts of the pipeline (e.g., as input to downstream
+            components).
+          example: remove-fields-processor
+          type: string
+        include:
+          description: A Datadog search query used to determine which logs this processor
+            targets.
+          example: service:my-service
+          type: string
+        inputs:
+          description: A list of component IDs whose output is used as the `input`
+            for this component.
+          example:
+          - datadog-agent-source
+          items:
+            type: string
+          type: array
+        type:
+          $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessorType'
+      required:
+      - id
+      - type
+      - include
+      - fields
+      - inputs
+      type: object
+    ObservabilityPipelineRemoveFieldsProcessorType:
+      default: remove_fields
+      description: The processor type. The value should always be `remove_fields`.
+      enum:
+      - remove_fields
+      example: remove_fields
+      type: string
+      x-enum-varnames:
+      - REMOVE_FIELDS
+    ObservabilityPipelineRenameFieldsProcessor:
+      description: The `rename_fields` processor changes field names.
+ properties: + fields: + description: A list of rename rules specifying which fields to rename in + the event, what to rename them to, and whether to preserve the original + fields. + items: + $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorField' + type: array + id: + description: A unique identifier for this component. Used to reference this + component in other parts of the pipeline (e.g., as input to downstream + components). + example: rename-fields-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorType' + required: + - id + - type + - include + - fields + - inputs + type: object + ObservabilityPipelineRenameFieldsProcessorField: + description: Defines how to rename a field in log events. + properties: + destination: + description: The field name to assign the renamed value to. + example: destination_field + type: string + preserve_source: + description: Indicates whether the original field, that is received from + the source, should be kept (`true`) or removed (`false`) after renaming. + example: false + type: boolean + source: + description: The original field name in the log event that should be renamed. + example: source_field + type: string + required: + - source + - destination + - preserve_source + type: object + ObservabilityPipelineRenameFieldsProcessorType: + default: rename_fields + description: The processor type. The value should always be `rename_fields`. 
+ enum: + - rename_fields + example: rename_fields + type: string + x-enum-varnames: + - RENAME_FIELDS + ObservabilityPipelineRsyslogDestination: + description: The `rsyslog` destination forwards logs to an external `rsyslog` + server over TCP or UDP using the syslog protocol. + properties: + id: + description: The unique identifier for this component. + example: rsyslog-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + keepalive: + description: Optional socket keepalive duration in milliseconds. + example: 60000 + format: int64 + minimum: 0 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineRsyslogDestinationType: + default: rsyslog + description: The destination type. The value should always be `rsyslog`. + enum: + - rsyslog + example: rsyslog + type: string + x-enum-varnames: + - RSYSLOG + ObservabilityPipelineRsyslogSource: + description: The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog` + server using the syslog protocol. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: rsyslog-source + type: string + mode: + $ref: '#/components/schemas/ObservabilityPipelineSyslogSourceMode' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineRsyslogSourceType' + required: + - id + - type + - mode + type: object + ObservabilityPipelineRsyslogSourceType: + default: rsyslog + description: The source type. The value should always be `rsyslog`. 
+ enum: + - rsyslog + example: rsyslog + type: string + x-enum-varnames: + - RSYSLOG + ObservabilityPipelineSampleProcessor: + description: The `sample` processor allows probabilistic sampling of logs at + a fixed rate. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: sample-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + percentage: + description: The percentage of logs to sample. + example: 10.0 + format: double + type: number + rate: + description: Number of events to sample (1 in N). + example: 10 + format: int64 + minimum: 1 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineSampleProcessorType' + required: + - id + - type + - include + - inputs + type: object + ObservabilityPipelineSampleProcessorType: + default: sample + description: The processor type. The value should always be `sample`. + enum: + - sample + example: sample + type: string + x-enum-varnames: + - SAMPLE + ObservabilityPipelineSensitiveDataScannerProcessor: + description: The `sensitive_data_scanner` processor detects and optionally redacts + sensitive data in log events. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: sensitive-scanner + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. 
+ example: source:prod + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - parse-json-processor + items: + type: string + type: array + rules: + description: A list of rules for identifying and acting on sensitive data + patterns. + items: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorRule' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorType' + required: + - id + - type + - include + - inputs + - rules + type: object + ObservabilityPipelineSensitiveDataScannerProcessorAction: + description: Defines what action to take when sensitive data is matched. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionRedact' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionHash' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact' + ObservabilityPipelineSensitiveDataScannerProcessorActionHash: + description: Configuration for hashing matched sensitive values. + properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction' + options: + description: The `ObservabilityPipelineSensitiveDataScannerProcessorActionHash` + `options`. + type: object + required: + - action + type: object + ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction: + description: Action type that replaces the matched sensitive data with a hashed + representation, preserving structure while securing content. + enum: + - hash + example: hash + type: string + x-enum-varnames: + - HASH + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact: + description: Configuration for partially redacting matched sensitive data. 
+ properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction' + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions' + required: + - action + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction: + description: Action type that redacts part of the sensitive data while preserving + a configurable number of characters, typically used for masking purposes (e.g., + show last 4 digits of a credit card). + enum: + - partial_redact + example: partial_redact + type: string + x-enum-varnames: + - PARTIAL_REDACT + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions: + description: Controls how partial redaction is applied, including character + count and direction. + properties: + characters: + description: The `ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions` + `characters`. + example: 4 + format: int64 + type: integer + direction: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection' + required: + - characters + - direction + type: object + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection: + description: Indicates whether to redact characters from the first or last part + of the matched value. + enum: + - first + - last + example: last + type: string + x-enum-varnames: + - FIRST + - LAST + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact: + description: Configuration for completely redacting matched sensitive data. 
+ properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction' + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions' + required: + - action + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction: + description: Action type that completely replaces the matched sensitive data + with a fixed replacement string to remove all visibility. + enum: + - redact + example: redact + type: string + x-enum-varnames: + - REDACT + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions: + description: Configuration for fully redacting sensitive data. + properties: + replace: + description: The `ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions` + `replace`. + example: '***' + type: string + required: + - replace + type: object + ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern: + description: Defines a custom regex-based pattern for identifying sensitive + data in logs. + properties: + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions' + type: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType' + required: + - type + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions: + description: Options for defining a custom regex pattern. + properties: + rule: + description: A regular expression used to detect sensitive values. Must + be a valid regex. + example: \b\d{16}\b + type: string + required: + - rule + type: object + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType: + description: Indicates a custom regular expression is used for matching. 
+ enum: + - custom + example: custom + type: string + x-enum-varnames: + - CUSTOM + ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions: + description: Configuration for keywords used to reinforce sensitive data pattern + detection. + properties: + keywords: + description: A list of keywords to match near the sensitive pattern. + example: + - ssn + - card + - account + items: + type: string + type: array + proximity: + description: Maximum number of tokens between a keyword and a sensitive + value match. + example: 5 + format: int64 + type: integer + required: + - keywords + - proximity + type: object + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern: + description: "Specifies a pattern from Datadog\u2019s sensitive data detection + library to match known sensitive data types." + properties: + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions' + type: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType' + required: + - type + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions: + description: Options for selecting a predefined library pattern and enabling + keyword support. + properties: + id: + description: Identifier for a predefined pattern from the sensitive data + scanner pattern library. + example: credit_card + type: string + use_recommended_keywords: + description: Whether to augment the pattern with recommended keywords (optional). + type: boolean + required: + - id + type: object + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType: + description: Indicates that a predefined library pattern is used. 
+ enum: + - library + example: library + type: string + x-enum-varnames: + - LIBRARY + ObservabilityPipelineSensitiveDataScannerProcessorPattern: + description: Pattern detection configuration for identifying sensitive data + using either a custom regex or a library reference. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern' + ObservabilityPipelineSensitiveDataScannerProcessorRule: + description: Defines a rule for detecting sensitive data, including matching + pattern, scope, and the action to take. + properties: + keyword_options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions' + name: + description: A name identifying the rule. + example: Redact Credit Card Numbers + type: string + on_match: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorAction' + pattern: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorPattern' + scope: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScope' + tags: + description: Tags assigned to this rule for filtering and classification. + example: + - pii + - ccn + items: + type: string + type: array + required: + - name + - tags + - pattern + - scope + - on_match + type: object + ObservabilityPipelineSensitiveDataScannerProcessorScope: + description: Determines which parts of the log the pattern-matching rule should + be applied to. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeAll' + ObservabilityPipelineSensitiveDataScannerProcessorScopeAll: + description: Applies scanning across all available fields. 
+ properties: + target: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget' + required: + - target + type: object + ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget: + description: Applies the rule to all fields. + enum: + - all + example: all + type: string + x-enum-varnames: + - ALL + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude: + description: Excludes specific fields from sensitive data scanning. + properties: + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions' + target: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget' + required: + - target + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget: + description: Excludes specific fields from processing. + enum: + - exclude + example: exclude + type: string + x-enum-varnames: + - EXCLUDE + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude: + description: Includes only specific fields for sensitive data scanning. + properties: + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions' + target: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget' + required: + - target + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget: + description: Applies the rule only to included fields. + enum: + - include + example: include + type: string + x-enum-varnames: + - INCLUDE + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions: + description: Fields to which the scope rule applies. + properties: + fields: + description: The `ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions` + `fields`. 
+ example: + - '' + items: + type: string + type: array + required: + - fields + type: object + ObservabilityPipelineSensitiveDataScannerProcessorType: + default: sensitive_data_scanner + description: The processor type. The value should always be `sensitive_data_scanner`. + enum: + - sensitive_data_scanner + example: sensitive_data_scanner + type: string + x-enum-varnames: + - SENSITIVE_DATA_SCANNER + ObservabilityPipelineSentinelOneDestination: + description: The `sentinel_one` destination sends logs to SentinelOne. + properties: + id: + description: The unique identifier for this component. + example: sentinelone-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + region: + $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestinationRegion' + type: + $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestinationType' + required: + - id + - type + - inputs + - region + type: object + ObservabilityPipelineSentinelOneDestinationRegion: + description: The SentinelOne region to send logs to. + enum: + - us + - eu + - ca + - data_set_us + example: us + type: string + x-enum-varnames: + - US + - EU + - CA + - DATA_SET_US + ObservabilityPipelineSentinelOneDestinationType: + default: sentinel_one + description: The destination type. The value should always be `sentinel_one`. + enum: + - sentinel_one + example: sentinel_one + type: string + x-enum-varnames: + - SENTINEL_ONE + ObservabilityPipelineSpec: + description: Input schema representing an observability pipeline configuration. + Used in create and validate requests. + properties: + data: + $ref: '#/components/schemas/ObservabilityPipelineSpecData' + required: + - data + type: object + ObservabilityPipelineSpecData: + description: Contains the the pipeline configuration. 
+ properties: + attributes: + $ref: '#/components/schemas/ObservabilityPipelineDataAttributes' + type: + default: pipelines + description: The resource type identifier. For pipeline resources, this + should always be set to `pipelines`. + example: pipelines type: string required: - - name - - value + - type + - attributes type: object - ObservabilityPipelineFilterProcessor: - description: The `filter` processor allows conditional processing of logs based - on a Datadog search query. Logs that match the `include` query are passed - through; others are discarded. + ObservabilityPipelineSplunkHecDestination: + description: 'The `splunk_hec` destination forwards logs to Splunk using the + HTTP Event Collector (HEC). + + ' properties: + auto_extract_timestamp: + description: 'If `true`, Splunk tries to extract timestamps from incoming + log events. + + If `false`, Splunk assigns the time the event was received. + + ' + example: true + type: boolean + encoding: + $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding' id: description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` - to downstream components). - example: filter-processor + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: splunk-hec-destination type: string - include: - description: A Datadog search query used to determine which logs should - pass through the filter. Logs that match this query continue to downstream - components; others are dropped. - example: service:my-service + index: + description: Optional name of the Splunk index where logs are written. + example: main type: string inputs: description: A list of component IDs whose output is used as the `input` for this component. 
example: - - datadog-agent-source + - filter-processor items: type: string type: array + sourcetype: + description: The Splunk sourcetype to assign to log events. + example: custom_sourcetype + type: string type: - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessorType' + $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationType' required: - id - type - - include - inputs type: object - ObservabilityPipelineFilterProcessorType: - default: filter - description: The processor type. The value should always be `filter`. + ObservabilityPipelineSplunkHecDestinationEncoding: + description: Encoding format for log events. enum: - - filter - example: filter + - json + - raw_message + example: json type: string x-enum-varnames: - - FILTER - ObservabilityPipelineKafkaSource: - description: The `kafka` source ingests data from Apache Kafka topics. + - JSON + - RAW_MESSAGE + ObservabilityPipelineSplunkHecDestinationType: + default: splunk_hec + description: The destination type. Always `splunk_hec`. + enum: + - splunk_hec + example: splunk_hec + type: string + x-enum-varnames: + - SPLUNK_HEC + ObservabilityPipelineSplunkHecSource: + description: 'The `splunk_hec` source implements the Splunk HTTP Event Collector + (HEC) API. + + ' properties: - group_id: - description: Consumer group ID used by the Kafka client. - example: consumer-group-0 - type: string id: description: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). - example: kafka-source + example: splunk-hec-source type: string - librdkafka_options: - description: Optional list of advanced Kafka client configuration options, - defined as key-value pairs. 
- items: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' - type: array - sasl: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' tls: $ref: '#/components/schemas/ObservabilityPipelineTls' - topics: - description: A list of Kafka topic names to subscribe to. The source ingests - messages from each topic specified. - example: - - topic1 - - topic2 - items: - type: string - type: array type: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceType' + $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSourceType' required: - id - type - - group_id - - topics type: object - ObservabilityPipelineKafkaSourceLibrdkafkaOption: - description: Represents a key-value pair used to configure low-level `librdkafka` - client options for Kafka sources, such as timeouts, buffer sizes, and security - settings. + ObservabilityPipelineSplunkHecSourceType: + default: splunk_hec + description: The source type. Always `splunk_hec`. + enum: + - splunk_hec + example: splunk_hec + type: string + x-enum-varnames: + - SPLUNK_HEC + ObservabilityPipelineSplunkTcpSource: + description: 'The `splunk_tcp` source receives logs from a Splunk Universal + Forwarder over TCP. + + TLS is supported for secure transmission. + + ' properties: - name: - description: The name of the `librdkafka` configuration option to set. - example: fetch.message.max.bytes - type: string - value: - description: The value assigned to the specified `librdkafka` configuration - option. - example: '1048576' + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). 
+ example: splunk-tcp-source type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSourceType' required: - - name - - value - type: object - ObservabilityPipelineKafkaSourceSasl: - description: Specifies the SASL mechanism for authenticating with a Kafka cluster. - properties: - mechanism: - $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' + - id + - type type: object - ObservabilityPipelineKafkaSourceType: - default: kafka - description: The source type. The value should always be `kafka`. + ObservabilityPipelineSplunkTcpSourceType: + default: splunk_tcp + description: The source type. Always `splunk_tcp`. enum: - - kafka - example: kafka + - splunk_tcp + example: splunk_tcp type: string x-enum-varnames: - - KAFKA - ObservabilityPipelineParseJSONProcessor: - description: The `parse_json` processor extracts JSON from a specified field - and flattens it into the event. This is useful when logs contain embedded - JSON as a string. + - SPLUNK_TCP + ObservabilityPipelineSumoLogicDestination: + description: The `sumo_logic` destination forwards logs to Sumo Logic. properties: - field: - description: The name of the log field that contains a JSON string. - example: message + encoding: + $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' + header_custom_fields: + description: A list of custom headers to include in the request to Sumo + Logic. + items: + $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem' + type: array + header_host_name: + description: Optional override for the host name header. + example: host-123 type: string - id: - description: A unique identifier for this component. Used to reference this - component in other parts of the pipeline (e.g., as input to downstream - components). 
- example: parse-json-processor + header_source_category: + description: Optional override for the source category header. + example: source-category type: string - include: - description: A Datadog search query used to determine which logs this processor - targets. - example: service:my-service + header_source_name: + description: Optional override for the source name header. + example: source-name + type: string + id: + description: The unique identifier for this component. + example: sumo-logic-destination type: string inputs: description: A list of component IDs whose output is used as the `input` for this component. example: - - datadog-agent-source + - filter-processor items: type: string type: array type: - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessorType' + $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationType' required: - id - type - - include - - field - inputs type: object - ObservabilityPipelineParseJSONProcessorType: - default: parse_json - description: The processor type. The value should always be `parse_json`. + ObservabilityPipelineSumoLogicDestinationEncoding: + description: The output encoding format. enum: - - parse_json - example: parse_json + - json + - raw_message + - logfmt + example: json type: string x-enum-varnames: - - PARSE_JSON - ObservabilityPipelinePipelineKafkaSourceSaslMechanism: - description: SASL mechanism used for Kafka authentication. + - JSON + - RAW_MESSAGE + - LOGFMT + ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem: + description: Single key-value pair used as a custom log header for Sumo Logic. + properties: + name: + description: The header field name. + example: X-Sumo-Category + type: string + value: + description: The header field value. + example: my-app-logs + type: string + required: + - name + - value + type: object + ObservabilityPipelineSumoLogicDestinationType: + default: sumo_logic + description: The destination type. 
The value should always be `sumo_logic`. enum: - - PLAIN - - SCRAM-SHA-256 - - SCRAM-SHA-512 + - sumo_logic + example: sumo_logic type: string x-enum-varnames: - - PLAIN - - SCRAMNOT_SHANOT_256 - - SCRAMNOT_SHANOT_512 - ObservabilityPipelineQuotaProcessor: - description: The Quota Processor measures logging traffic for logs that match - a specified filter. When the configured daily quota is met, the processor - can drop or alert. + - SUMO_LOGIC + ObservabilityPipelineSumoLogicSource: + description: The `sumo_logic` source receives logs from Sumo Logic collectors. properties: - drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. - example: false - type: boolean id: description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` - to downstream components). - example: quota-processor + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: sumo-logic-source type: string - ignore_when_missing_partitions: - description: If `true`, the processor skips quota checks when partition - fields are missing from the logs. - type: boolean - include: - description: A Datadog search query used to determine which logs this processor - targets. - example: service:my-service + type: + $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSourceType' + required: + - id + - type + type: object + ObservabilityPipelineSumoLogicSourceType: + default: sumo_logic + description: The source type. The value should always be `sumo_logic`. 
+ enum: + - sumo_logic + example: sumo_logic + type: string + x-enum-varnames: + - SUMO_LOGIC + ObservabilityPipelineSyslogNgDestination: + description: The `syslog_ng` destination forwards logs to an external `syslog-ng` + server over TCP or UDP using the syslog protocol. + properties: + id: + description: The unique identifier for this component. + example: syslog-ng-destination type: string inputs: description: A list of component IDs whose output is used as the `input` for this component. example: - - datadog-agent-source - items: - type: string - type: array - limit: - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit' - name: - description: Name for identifying the processor. - example: MyPipelineQuotaProcessor - type: string - overrides: - description: A list of alternate quota rules that apply to specific sets - of events, identified by matching field values. Each override can define - a custom limit. - items: - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverride' - type: array - partition_fields: - description: A list of fields used to segment log traffic for quota enforcement. - Quotas are tracked independently by unique combinations of these field - values. + - filter-processor items: type: string type: array + keepalive: + description: Optional socket keepalive duration in milliseconds. + example: 60000 + format: int64 + minimum: 0 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' type: - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' + $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestinationType' required: - id - type - - include - - name - - drop_events - - limit - inputs type: object - ObservabilityPipelineQuotaProcessorLimit: - description: The maximum amount of data or number of events allowed before the - quota is enforced. Can be specified in bytes or events. 
- properties: - enforce: - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimitEnforceType' - limit: - description: The limit for quota enforcement. - example: 1000 - format: int64 - type: integer - required: - - enforce - - limit - type: object - ObservabilityPipelineQuotaProcessorLimitEnforceType: - description: Unit for quota enforcement in bytes for data size or events for - count. - enum: - - bytes - - events - example: bytes - type: string - x-enum-varnames: - - BYTES - - EVENTS - ObservabilityPipelineQuotaProcessorOverride: - description: Defines a custom quota limit that applies to specific log events - based on matching field values. - properties: - fields: - description: A list of field matchers used to apply a specific override. - If an event matches all listed key-value pairs, the corresponding override - limit is enforced. - items: - $ref: '#/components/schemas/ObservabilityPipelineFieldValue' - type: array - limit: - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit' - required: - - fields - - limit - type: object - ObservabilityPipelineQuotaProcessorType: - default: quota - description: The processor type. The value should always be `quota`. + ObservabilityPipelineSyslogNgDestinationType: + default: syslog_ng + description: The destination type. The value should always be `syslog_ng`. enum: - - quota - example: quota + - syslog_ng + example: syslog_ng type: string x-enum-varnames: - - QUOTA - ObservabilityPipelineRemoveFieldsProcessor: - description: The `remove_fields` processor deletes specified fields from logs. + - SYSLOG_NG + ObservabilityPipelineSyslogNgSource: + description: The `syslog_ng` source listens for logs over TCP or UDP from a + `syslog-ng` server using the syslog protocol. properties: - fields: - description: A list of field names to be removed from each log event. - example: - - field1 - - field2 - items: - type: string - type: array id: description: The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). - example: remove-fields-processor - type: string - include: - description: A Datadog search query used to determine which logs this processor - targets. - example: service:my-service + example: syslog-ng-source type: string - inputs: - description: The `PipelineRemoveFieldsProcessor` `inputs`. - example: - - datadog-agent-source - items: - type: string - type: array + mode: + $ref: '#/components/schemas/ObservabilityPipelineSyslogSourceMode' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' type: - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessorType' + $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSourceType' required: - id - type - - include - - fields - - inputs + - mode type: object - ObservabilityPipelineRemoveFieldsProcessorType: - default: remove_fields - description: The processor type. The value should always be `remove_fields`. + ObservabilityPipelineSyslogNgSourceType: + default: syslog_ng + description: The source type. The value should always be `syslog_ng`. enum: - - remove_fields - example: remove_fields + - syslog_ng + example: syslog_ng type: string x-enum-varnames: - - REMOVE_FIELDS - ObservabilityPipelineRenameFieldsProcessor: - description: The `rename_fields` processor changes field names. + - SYSLOG_NG + ObservabilityPipelineSyslogSourceMode: + description: Protocol used by the syslog source to receive messages. + enum: + - tcp + - udp + example: tcp + type: string + x-enum-varnames: + - TCP + - UDP + ObservabilityPipelineThrottleProcessor: + description: The `throttle` processor limits the number of events that pass + through over a given time window. properties: - fields: - description: A list of rename rules specifying which fields to rename in - the event, what to rename them to, and whether to preserve the original - fields. 
+ group_by: + description: Optional list of fields used to group events before the threshold + has been reached. + example: + - log.user.id items: - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorField' + type: string type: array id: - description: A unique identifier for this component. Used to reference this - component in other parts of the pipeline (e.g., as input to downstream - components). - example: rename-fields-processor + description: The unique identifier for this processor. + example: throttle-processor type: string include: description: A Datadog search query used to determine which logs this processor targets. - example: service:my-service + example: env:prod type: string inputs: - description: A list of component IDs whose output is used as the `input` - for this component. + description: A list of component IDs whose output is used as the input for + this processor. example: - datadog-agent-source items: type: string type: array + threshold: + description: the number of events allowed in a given time window. Events + sent after the threshold has been reached, are dropped. + example: 1000 + format: int64 + type: integer type: - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorType' + $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessorType' + window: + description: The time window in seconds over which the threshold applies. + example: 60.0 + format: double + type: number required: - id - type - include - - fields - inputs + - threshold + - window type: object - ObservabilityPipelineRenameFieldsProcessorField: - description: Defines how to rename a field in log events. - properties: - destination: - description: The field name to assign the renamed value to. - example: destination_field - type: string - preserve_source: - description: Indicates whether the original field, that is received from - the source, should be kept (`true`) or removed (`false`) after renaming. 
- example: false - type: boolean - source: - description: The original field name in the log event that should be renamed. - example: source_field - type: string - required: - - source - - destination - - preserve_source - type: object - ObservabilityPipelineRenameFieldsProcessorType: - default: rename_fields - description: The processor type. The value should always be `rename_fields`. + ObservabilityPipelineThrottleProcessorType: + default: throttle + description: The processor type. The value should always be `throttle`. enum: - - rename_fields - example: rename_fields + - throttle + example: throttle type: string x-enum-varnames: - - RENAME_FIELDS + - THROTTLE ObservabilityPipelineTls: - description: Configuration for enabling TLS encryption. + description: Configuration for enabling TLS encryption between the pipeline + component and external services. properties: ca_file: description: "Path to the Certificate Authority (CA) file used to validate @@ -36294,6 +38879,55 @@ components: type: string x-enum-varnames: - USERS + ValidationError: + description: Represents a single validation error, including a human-readable + title and metadata. + properties: + meta: + $ref: '#/components/schemas/ValidationErrorMeta' + title: + description: A short, human-readable summary of the error. + example: Field 'region' is required + type: string + required: + - title + - meta + type: object + ValidationErrorMeta: + description: Describes additional metadata for validation errors, including + field names and error messages. + properties: + field: + description: The field name that caused the error. + example: region + type: string + id: + description: The ID of the component in which the error occurred. + example: datadog-agent-source + type: string + message: + description: The detailed error message. + example: Field 'region' is required + type: string + required: + - message + type: object + ValidationResponse: + description: Response containing validation errors. 
+ example: + errors: + - meta: + field: region + id: datadog-agent-source + message: Field 'region' is required + title: Field 'region' is required + properties: + errors: + description: The `ValidationResponse` `errors`. + items: + $ref: '#/components/schemas/ValidationError' + type: array + type: object Version: description: Version of the notification rule. It is updated when the rule is modified. @@ -49365,6 +51999,33 @@ paths: tags: - CSM Threats /api/v2/remote_config/products/obs_pipelines/pipelines: + get: + description: Retrieve a list of pipelines. + operationId: ListPipelines + parameters: + - $ref: '#/components/parameters/PageSize' + - $ref: '#/components/parameters/PageNumber' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ListPipelinesResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: List pipelines + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview.' post: description: Create a new pipeline. 
operationId: CreatePipeline @@ -49372,7 +52033,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ObservabilityPipelineCreateRequest' + $ref: '#/components/schemas/ObservabilityPipelineSpec' required: true responses: '201': @@ -49382,28 +52043,56 @@ paths: $ref: '#/components/schemas/ObservabilityPipeline' description: OK '400': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Bad Request + $ref: '#/components/responses/BadRequestResponse' '403': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Forbidden + $ref: '#/components/responses/NotAuthorizedResponse' '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Create a new pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview.' + /api/v2/remote_config/products/obs_pipelines/pipelines/validate: + post: + description: 'Validates a pipeline configuration without creating or updating + any resources. + + Returns a list of validation errors, if any. 
+ + ' + operationId: ValidatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '200': content: application/json: schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Conflict + $ref: '#/components/schemas/ValidationResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' '429': $ref: '#/components/responses/TooManyRequestsResponse' - summary: Create a new pipeline + summary: Validate an observability pipeline tags: - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read x-unstable: '**Note**: This endpoint is in Preview.' /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}: delete: @@ -49442,6 +52131,10 @@ paths: summary: Delete a pipeline tags: - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_delete x-unstable: '**Note**: This endpoint is in Preview.' get: description: Get a specific pipeline by its ID. @@ -49471,6 +52164,10 @@ paths: summary: Get a specific pipeline tags: - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read x-unstable: '**Note**: This endpoint is in Preview.' put: description: Update a pipeline. 
@@ -49496,34 +52193,22 @@ paths: $ref: '#/components/schemas/ObservabilityPipeline' description: OK '400': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Bad Request + $ref: '#/components/responses/BadRequestResponse' '403': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Forbidden + $ref: '#/components/responses/NotAuthorizedResponse' '404': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Not Found + $ref: '#/components/responses/NotFoundResponse' '409': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Conflict + $ref: '#/components/responses/ConflictResponse' '429': $ref: '#/components/responses/TooManyRequestsResponse' summary: Update a pipeline tags: - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy x-unstable: '**Note**: This endpoint is in Preview.' /api/v2/restriction_policy/{resource_id}: delete: diff --git a/.generator/src/generator/templates/modelOneOf.j2 b/.generator/src/generator/templates/modelOneOf.j2 index ee5e4452a52..1af07158b68 100644 --- a/.generator/src/generator/templates/modelOneOf.j2 +++ b/.generator/src/generator/templates/modelOneOf.j2 @@ -92,7 +92,7 @@ public class {{ name }} extends AbstractOpenApiSchema { // TODO: there is no validation against JSON schema constraints // (min, max, enum, pattern...), this does not perform a strict JSON // validation, which means the 'match' count may be higher than it should be. 
- {%- if not oneOf|is_primitive and not unParameterizedDataType|lower|is_java_base_type %} + {%- if not oneOf|is_primitive and not unParameterizedDataType|lower|is_java_base_type and "enum" not in oneOf %} if (!(({{ unParameterizedDataType }})tmp).unparsed) { deserialized = tmp; match++; diff --git a/examples/v2/observability-pipelines/CreatePipeline.java b/examples/v2/observability-pipelines/CreatePipeline.java index 0b0aca9369e..01a6027125b 100644 --- a/examples/v2/observability-pipelines/CreatePipeline.java +++ b/examples/v2/observability-pipelines/CreatePipeline.java @@ -8,8 +8,6 @@ import com.datadog.api.client.v2.model.ObservabilityPipelineConfigDestinationItem; import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorItem; import com.datadog.api.client.v2.model.ObservabilityPipelineConfigSourceItem; -import com.datadog.api.client.v2.model.ObservabilityPipelineCreateRequest; -import com.datadog.api.client.v2.model.ObservabilityPipelineCreateRequestData; import com.datadog.api.client.v2.model.ObservabilityPipelineDataAttributes; import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSource; import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSourceType; @@ -17,6 +15,8 @@ import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestinationType; import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessor; import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessorType; +import com.datadog.api.client.v2.model.ObservabilityPipelineSpec; +import com.datadog.api.client.v2.model.ObservabilityPipelineSpecData; import java.util.Collections; public class Example { @@ -25,10 +25,10 @@ public static void main(String[] args) { defaultClient.setUnstableOperationEnabled("v2.createPipeline", true); ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient); - ObservabilityPipelineCreateRequest body = - new ObservabilityPipelineCreateRequest() + 
ObservabilityPipelineSpec body = + new ObservabilityPipelineSpec() .data( - new ObservabilityPipelineCreateRequestData() + new ObservabilityPipelineSpecData() .attributes( new ObservabilityPipelineDataAttributes() .config( diff --git a/examples/v2/observability-pipelines/ListPipelines.java b/examples/v2/observability-pipelines/ListPipelines.java new file mode 100644 index 00000000000..7902c3fb5bb --- /dev/null +++ b/examples/v2/observability-pipelines/ListPipelines.java @@ -0,0 +1,25 @@ +// List pipelines returns "OK" response + +import com.datadog.api.client.ApiClient; +import com.datadog.api.client.ApiException; +import com.datadog.api.client.v2.api.ObservabilityPipelinesApi; +import com.datadog.api.client.v2.model.ListPipelinesResponse; + +public class Example { + public static void main(String[] args) { + ApiClient defaultClient = ApiClient.getDefaultApiClient(); + defaultClient.setUnstableOperationEnabled("v2.listPipelines", true); + ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient); + + try { + ListPipelinesResponse result = apiInstance.listPipelines(); + System.out.println(result); + } catch (ApiException e) { + System.err.println("Exception when calling ObservabilityPipelinesApi#listPipelines"); + System.err.println("Status code: " + e.getCode()); + System.err.println("Reason: " + e.getResponseBody()); + System.err.println("Response headers: " + e.getResponseHeaders()); + e.printStackTrace(); + } + } +} diff --git a/examples/v2/observability-pipelines/ValidatePipeline.java b/examples/v2/observability-pipelines/ValidatePipeline.java new file mode 100644 index 00000000000..159cd0640e6 --- /dev/null +++ b/examples/v2/observability-pipelines/ValidatePipeline.java @@ -0,0 +1,81 @@ +// Validate an observability pipeline returns "OK" response + +import com.datadog.api.client.ApiClient; +import com.datadog.api.client.ApiException; +import com.datadog.api.client.v2.api.ObservabilityPipelinesApi; +import 
com.datadog.api.client.v2.model.ObservabilityPipelineConfig; +import com.datadog.api.client.v2.model.ObservabilityPipelineConfigDestinationItem; +import com.datadog.api.client.v2.model.ObservabilityPipelineConfigProcessorItem; +import com.datadog.api.client.v2.model.ObservabilityPipelineConfigSourceItem; +import com.datadog.api.client.v2.model.ObservabilityPipelineDataAttributes; +import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSource; +import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogAgentSourceType; +import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestination; +import com.datadog.api.client.v2.model.ObservabilityPipelineDatadogLogsDestinationType; +import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessor; +import com.datadog.api.client.v2.model.ObservabilityPipelineFilterProcessorType; +import com.datadog.api.client.v2.model.ObservabilityPipelineSpec; +import com.datadog.api.client.v2.model.ObservabilityPipelineSpecData; +import com.datadog.api.client.v2.model.ValidationResponse; +import java.util.Collections; + +public class Example { + public static void main(String[] args) { + ApiClient defaultClient = ApiClient.getDefaultApiClient(); + defaultClient.setUnstableOperationEnabled("v2.validatePipeline", true); + ObservabilityPipelinesApi apiInstance = new ObservabilityPipelinesApi(defaultClient); + + ObservabilityPipelineSpec body = + new ObservabilityPipelineSpec() + .data( + new ObservabilityPipelineSpecData() + .attributes( + new ObservabilityPipelineDataAttributes() + .config( + new ObservabilityPipelineConfig() + .destinations( + Collections.singletonList( + new ObservabilityPipelineConfigDestinationItem( + new ObservabilityPipelineDatadogLogsDestination() + .id("datadog-logs-destination") + .inputs( + Collections.singletonList( + "filter-processor")) + .type( + ObservabilityPipelineDatadogLogsDestinationType + .DATADOG_LOGS)))) + .processors( + Collections.singletonList( + 
new ObservabilityPipelineConfigProcessorItem( + new ObservabilityPipelineFilterProcessor() + .id("filter-processor") + .include("service:my-service") + .inputs( + Collections.singletonList( + "datadog-agent-source")) + .type( + ObservabilityPipelineFilterProcessorType + .FILTER)))) + .sources( + Collections.singletonList( + new ObservabilityPipelineConfigSourceItem( + new ObservabilityPipelineDatadogAgentSource() + .id("datadog-agent-source") + .type( + ObservabilityPipelineDatadogAgentSourceType + .DATADOG_AGENT))))) + .name("Main Observability Pipeline")) + .type("pipelines")); + + try { + ValidationResponse result = apiInstance.validatePipeline(body); + System.out.println(result); + } catch (ApiException e) { + System.err.println("Exception when calling ObservabilityPipelinesApi#validatePipeline"); + System.err.println("Status code: " + e.getCode()); + System.err.println("Reason: " + e.getResponseBody()); + System.err.println("Response headers: " + e.getResponseHeaders()); + e.printStackTrace(); + } + } +} diff --git a/src/main/java/com/datadog/api/client/ApiClient.java b/src/main/java/com/datadog/api/client/ApiClient.java index fe1222a4b15..3b585511eb0 100644 --- a/src/main/java/com/datadog/api/client/ApiClient.java +++ b/src/main/java/com/datadog/api/client/ApiClient.java @@ -430,7 +430,9 @@ public class ApiClient { put("v2.createPipeline", false); put("v2.deletePipeline", false); put("v2.getPipeline", false); + put("v2.listPipelines", false); put("v2.updatePipeline", false); + put("v2.validatePipeline", false); put("v2.createScorecardOutcomesBatch", false); put("v2.createScorecardRule", false); put("v2.deleteScorecardRule", false); diff --git a/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java b/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java index 0b67bc579b5..fc9fc737859 100644 --- a/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java +++ 
b/src/main/java/com/datadog/api/client/v2/api/ObservabilityPipelinesApi.java @@ -4,12 +4,15 @@ import com.datadog.api.client.ApiException; import com.datadog.api.client.ApiResponse; import com.datadog.api.client.Pair; +import com.datadog.api.client.v2.model.ListPipelinesResponse; import com.datadog.api.client.v2.model.ObservabilityPipeline; -import com.datadog.api.client.v2.model.ObservabilityPipelineCreateRequest; +import com.datadog.api.client.v2.model.ObservabilityPipelineSpec; +import com.datadog.api.client.v2.model.ValidationResponse; import jakarta.ws.rs.client.Invocation; import jakarta.ws.rs.core.GenericType; import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; @@ -53,8 +56,7 @@ public void setApiClient(ApiClient apiClient) { * @return ObservabilityPipeline * @throws ApiException if fails to make API call */ - public ObservabilityPipeline createPipeline(ObservabilityPipelineCreateRequest body) - throws ApiException { + public ObservabilityPipeline createPipeline(ObservabilityPipelineSpec body) throws ApiException { return createPipelineWithHttpInfo(body).getData(); } @@ -67,7 +69,7 @@ public ObservabilityPipeline createPipeline(ObservabilityPipelineCreateRequest b * @return CompletableFuture<ObservabilityPipeline> */ public CompletableFuture createPipelineAsync( - ObservabilityPipelineCreateRequest body) { + ObservabilityPipelineSpec body) { return createPipelineWithHttpInfoAsync(body) .thenApply( response -> { @@ -87,13 +89,13 @@ public CompletableFuture createPipelineAsync( * Status Code Description Response Headers * 201 OK - * 400 Bad Request - - * 403 Forbidden - + * 403 Not Authorized - * 409 Conflict - * 429 Too many requests - * */ public ApiResponse createPipelineWithHttpInfo( - ObservabilityPipelineCreateRequest body) throws ApiException { + ObservabilityPipelineSpec body) throws ApiException { // Check if unstable operation is enabled String operationId 
= "createPipeline"; if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { @@ -142,7 +144,7 @@ public ApiResponse createPipelineWithHttpInfo( * @return CompletableFuture<ApiResponse<ObservabilityPipeline>> */ public CompletableFuture> createPipelineWithHttpInfoAsync( - ObservabilityPipelineCreateRequest body) { + ObservabilityPipelineSpec body) { // Check if unstable operation is enabled String operationId = "createPipeline"; if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { @@ -500,6 +502,209 @@ public CompletableFuture> getPipelineWithHttp new GenericType() {}); } + /** Manage optional parameters to listPipelines. */ + public static class ListPipelinesOptionalParameters { + private Long pageSize; + private Long pageNumber; + + /** + * Set pageSize. + * + * @param pageSize Size for a given page. The maximum allowed value is 100. (optional, default + * to 10) + * @return ListPipelinesOptionalParameters + */ + public ListPipelinesOptionalParameters pageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + /** + * Set pageNumber. + * + * @param pageNumber Specific page number to return. (optional, default to 0) + * @return ListPipelinesOptionalParameters + */ + public ListPipelinesOptionalParameters pageNumber(Long pageNumber) { + this.pageNumber = pageNumber; + return this; + } + } + + /** + * List pipelines. + * + *

See {@link #listPipelinesWithHttpInfo}. + * + * @return ListPipelinesResponse + * @throws ApiException if fails to make API call + */ + public ListPipelinesResponse listPipelines() throws ApiException { + return listPipelinesWithHttpInfo(new ListPipelinesOptionalParameters()).getData(); + } + + /** + * List pipelines. + * + *

See {@link #listPipelinesWithHttpInfoAsync}. + * + * @return CompletableFuture<ListPipelinesResponse> + */ + public CompletableFuture listPipelinesAsync() { + return listPipelinesWithHttpInfoAsync(new ListPipelinesOptionalParameters()) + .thenApply( + response -> { + return response.getData(); + }); + } + + /** + * List pipelines. + * + *

See {@link #listPipelinesWithHttpInfo}. + * + * @param parameters Optional parameters for the request. + * @return ListPipelinesResponse + * @throws ApiException if fails to make API call + */ + public ListPipelinesResponse listPipelines(ListPipelinesOptionalParameters parameters) + throws ApiException { + return listPipelinesWithHttpInfo(parameters).getData(); + } + + /** + * List pipelines. + * + *

See {@link #listPipelinesWithHttpInfoAsync}. + * + * @param parameters Optional parameters for the request. + * @return CompletableFuture<ListPipelinesResponse> + */ + public CompletableFuture listPipelinesAsync( + ListPipelinesOptionalParameters parameters) { + return listPipelinesWithHttpInfoAsync(parameters) + .thenApply( + response -> { + return response.getData(); + }); + } + + /** + * Retrieve a list of pipelines. + * + * @param parameters Optional parameters for the request. + * @return ApiResponse<ListPipelinesResponse> + * @throws ApiException if fails to make API call + * @http.response.details + * + * + * + * + * + * + * + *
Response details
Status Code Description Response Headers
200 OK -
400 Bad Request -
403 Not Authorized -
429 Too many requests -
+ */ + public ApiResponse listPipelinesWithHttpInfo( + ListPipelinesOptionalParameters parameters) throws ApiException { + // Check if unstable operation is enabled + String operationId = "listPipelines"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + throw new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId)); + } + Object localVarPostBody = null; + Long pageSize = parameters.pageSize; + Long pageNumber = parameters.pageNumber; + // create path and map variables + String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + + List localVarQueryParams = new ArrayList(); + Map localVarHeaderParams = new HashMap(); + + localVarQueryParams.addAll(apiClient.parameterToPairs("", "page[size]", pageSize)); + localVarQueryParams.addAll(apiClient.parameterToPairs("", "page[number]", pageNumber)); + + Invocation.Builder builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.listPipelines", + localVarPath, + localVarQueryParams, + localVarHeaderParams, + new HashMap(), + new String[] {"application/json"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + return apiClient.invokeAPI( + "GET", + builder, + localVarHeaderParams, + new String[] {}, + localVarPostBody, + new HashMap(), + false, + new GenericType() {}); + } + + /** + * List pipelines. + * + *

See {@link #listPipelinesWithHttpInfo}. + * + * @param parameters Optional parameters for the request. + * @return CompletableFuture<ApiResponse<ListPipelinesResponse>> + */ + public CompletableFuture> listPipelinesWithHttpInfoAsync( + ListPipelinesOptionalParameters parameters) { + // Check if unstable operation is enabled + String operationId = "listPipelines"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId))); + return result; + } + Object localVarPostBody = null; + Long pageSize = parameters.pageSize; + Long pageNumber = parameters.pageNumber; + // create path and map variables + String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines"; + + List localVarQueryParams = new ArrayList(); + Map localVarHeaderParams = new HashMap(); + + localVarQueryParams.addAll(apiClient.parameterToPairs("", "page[size]", pageSize)); + localVarQueryParams.addAll(apiClient.parameterToPairs("", "page[number]", pageNumber)); + + Invocation.Builder builder; + try { + builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.listPipelines", + localVarPath, + localVarQueryParams, + localVarHeaderParams, + new HashMap(), + new String[] {"application/json"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + } catch (ApiException ex) { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally(ex); + return result; + } + return apiClient.invokeAPIAsync( + "GET", + builder, + localVarHeaderParams, + new String[] {}, + localVarPostBody, + new HashMap(), + false, + new GenericType() {}); + } + /** * Update a pipeline. 
* @@ -546,7 +751,7 @@ public CompletableFuture updatePipelineAsync( * Status Code Description Response Headers * 200 OK - * 400 Bad Request - - * 403 Forbidden - + * 403 Not Authorized - * 404 Not Found - * 409 Conflict - * 429 Too many requests - @@ -676,4 +881,154 @@ public CompletableFuture> updatePipelineWithH false, new GenericType() {}); } + + /** + * Validate an observability pipeline. + * + *

See {@link #validatePipelineWithHttpInfo}. + * + * @param body (required) + * @return ValidationResponse + * @throws ApiException if fails to make API call + */ + public ValidationResponse validatePipeline(ObservabilityPipelineSpec body) throws ApiException { + return validatePipelineWithHttpInfo(body).getData(); + } + + /** + * Validate an observability pipeline. + * + *

See {@link #validatePipelineWithHttpInfoAsync}. + * + * @param body (required) + * @return CompletableFuture<ValidationResponse> + */ + public CompletableFuture validatePipelineAsync( + ObservabilityPipelineSpec body) { + return validatePipelineWithHttpInfoAsync(body) + .thenApply( + response -> { + return response.getData(); + }); + } + + /** + * Validates a pipeline configuration without creating or updating any resources. Returns a list + * of validation errors, if any. + * + * @param body (required) + * @return ApiResponse<ValidationResponse> + * @throws ApiException if fails to make API call + * @http.response.details + * + * + * + * + * + * + * + *
Response details
Status Code Description Response Headers
200 OK -
400 Bad Request -
403 Not Authorized -
429 Too many requests -
+ */ + public ApiResponse validatePipelineWithHttpInfo( + ObservabilityPipelineSpec body) throws ApiException { + // Check if unstable operation is enabled + String operationId = "validatePipeline"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + throw new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId)); + } + Object localVarPostBody = body; + + // verify the required parameter 'body' is set + if (body == null) { + throw new ApiException( + 400, "Missing the required parameter 'body' when calling validatePipeline"); + } + // create path and map variables + String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines/validate"; + + Map localVarHeaderParams = new HashMap(); + + Invocation.Builder builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.validatePipeline", + localVarPath, + new ArrayList(), + localVarHeaderParams, + new HashMap(), + new String[] {"application/json"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + return apiClient.invokeAPI( + "POST", + builder, + localVarHeaderParams, + new String[] {"application/json"}, + localVarPostBody, + new HashMap(), + false, + new GenericType() {}); + } + + /** + * Validate an observability pipeline. + * + *

See {@link #validatePipelineWithHttpInfo}. + * + * @param body (required) + * @return CompletableFuture<ApiResponse<ValidationResponse>> + */ + public CompletableFuture> validatePipelineWithHttpInfoAsync( + ObservabilityPipelineSpec body) { + // Check if unstable operation is enabled + String operationId = "validatePipeline"; + if (apiClient.isUnstableOperationEnabled("v2." + operationId)) { + apiClient.getLogger().warning(String.format("Using unstable operation '%s'", operationId)); + } else { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException(0, String.format("Unstable operation '%s' is disabled", operationId))); + return result; + } + Object localVarPostBody = body; + + // verify the required parameter 'body' is set + if (body == null) { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally( + new ApiException( + 400, "Missing the required parameter 'body' when calling validatePipeline")); + return result; + } + // create path and map variables + String localVarPath = "/api/v2/remote_config/products/obs_pipelines/pipelines/validate"; + + Map localVarHeaderParams = new HashMap(); + + Invocation.Builder builder; + try { + builder = + apiClient.createBuilder( + "v2.ObservabilityPipelinesApi.validatePipeline", + localVarPath, + new ArrayList(), + localVarHeaderParams, + new HashMap(), + new String[] {"application/json"}, + new String[] {"apiKeyAuth", "appKeyAuth"}); + } catch (ApiException ex) { + CompletableFuture> result = new CompletableFuture<>(); + result.completeExceptionally(ex); + return result; + } + return apiClient.invokeAPIAsync( + "POST", + builder, + localVarHeaderParams, + new String[] {"application/json"}, + localVarPostBody, + new HashMap(), + false, + new GenericType() {}); + } } diff --git a/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java b/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java new file mode 
100644 index 00000000000..2404a627a2b --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestination.java @@ -0,0 +1,268 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The azure_storage destination forwards logs to an Azure Blob Storage container. 
*/ +@JsonPropertyOrder({ + AzureStorageDestination.JSON_PROPERTY_BLOB_PREFIX, + AzureStorageDestination.JSON_PROPERTY_CONTAINER_NAME, + AzureStorageDestination.JSON_PROPERTY_ID, + AzureStorageDestination.JSON_PROPERTY_INPUTS, + AzureStorageDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class AzureStorageDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_BLOB_PREFIX = "blob_prefix"; + private String blobPrefix; + + public static final String JSON_PROPERTY_CONTAINER_NAME = "container_name"; + private String containerName; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private AzureStorageDestinationType type = AzureStorageDestinationType.AZURE_STORAGE; + + public AzureStorageDestination() {} + + @JsonCreator + public AzureStorageDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_CONTAINER_NAME) String containerName, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) AzureStorageDestinationType type) { + this.containerName = containerName; + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public AzureStorageDestination blobPrefix(String blobPrefix) { + this.blobPrefix = blobPrefix; + return this; + } + + /** + * Optional prefix for blobs written to the container. 
+ * + * @return blobPrefix + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_BLOB_PREFIX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getBlobPrefix() { + return blobPrefix; + } + + public void setBlobPrefix(String blobPrefix) { + this.blobPrefix = blobPrefix; + } + + public AzureStorageDestination containerName(String containerName) { + this.containerName = containerName; + return this; + } + + /** + * The name of the Azure Blob Storage container to store logs in. + * + * @return containerName + */ + @JsonProperty(JSON_PROPERTY_CONTAINER_NAME) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getContainerName() { + return containerName; + } + + public void setContainerName(String containerName) { + this.containerName = containerName; + } + + public AzureStorageDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public AzureStorageDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public AzureStorageDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public AzureStorageDestination type(AzureStorageDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be azure_storage. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public AzureStorageDestinationType getType() { + return type; + } + + public void setType(AzureStorageDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return AzureStorageDestination + */ + @JsonAnySetter + public AzureStorageDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this AzureStorageDestination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + AzureStorageDestination azureStorageDestination = (AzureStorageDestination) o; + return Objects.equals(this.blobPrefix, azureStorageDestination.blobPrefix) + && Objects.equals(this.containerName, azureStorageDestination.containerName) + && Objects.equals(this.id, azureStorageDestination.id) + && Objects.equals(this.inputs, azureStorageDestination.inputs) + && Objects.equals(this.type, azureStorageDestination.type) + && Objects.equals(this.additionalProperties, azureStorageDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(blobPrefix, containerName, id, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class AzureStorageDestination {\n"); + sb.append(" blobPrefix: ").append(toIndentedString(blobPrefix)).append("\n"); + sb.append(" containerName: ").append(toIndentedString(containerName)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestinationType.java new file mode 100644 index 00000000000..23d07869c70 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/AzureStorageDestinationType.java @@ -0,0 +1,57 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be azure_storage. 
*/ +@JsonSerialize(using = AzureStorageDestinationType.AzureStorageDestinationTypeSerializer.class) +public class AzureStorageDestinationType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("azure_storage")); + + public static final AzureStorageDestinationType AZURE_STORAGE = + new AzureStorageDestinationType("azure_storage"); + + AzureStorageDestinationType(String value) { + super(value, allowedValues); + } + + public static class AzureStorageDestinationTypeSerializer + extends StdSerializer { + public AzureStorageDestinationTypeSerializer(Class t) { + super(t); + } + + public AzureStorageDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + AzureStorageDestinationType value, JsonGenerator jgen, SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static AzureStorageDestinationType fromValue(String value) { + return new AzureStorageDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ListPipelinesResponse.java b/src/main/java/com/datadog/api/client/v2/model/ListPipelinesResponse.java new file mode 100644 index 00000000000..413c9e05358 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ListPipelinesResponse.java @@ -0,0 +1,185 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** Represents the response payload containing a list of pipelines and associated metadata. */ +@JsonPropertyOrder({ + ListPipelinesResponse.JSON_PROPERTY_DATA, + ListPipelinesResponse.JSON_PROPERTY_META +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ListPipelinesResponse { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DATA = "data"; + private List data = new ArrayList<>(); + + public static final String JSON_PROPERTY_META = "meta"; + private ListPipelinesResponseMeta meta; + + public ListPipelinesResponse() {} + + @JsonCreator + public ListPipelinesResponse( + @JsonProperty(required = true, value = JSON_PROPERTY_DATA) + List data) { + this.data = data; + } + + public ListPipelinesResponse data(List data) { + this.data = data; + for (ObservabilityPipelineData item : data) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ListPipelinesResponse addDataItem(ObservabilityPipelineData dataItem) { + this.data.add(dataItem); + this.unparsed |= dataItem.unparsed; + return this; + } + + /** + * The schema data. 
+ * + * @return data + */ + @JsonProperty(JSON_PROPERTY_DATA) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getData() { + return data; + } + + public void setData(List data) { + this.data = data; + } + + public ListPipelinesResponse meta(ListPipelinesResponseMeta meta) { + this.meta = meta; + this.unparsed |= meta.unparsed; + return this; + } + + /** + * Metadata about the response. + * + * @return meta + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_META) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ListPipelinesResponseMeta getMeta() { + return meta; + } + + public void setMeta(ListPipelinesResponseMeta meta) { + this.meta = meta; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ListPipelinesResponse + */ + @JsonAnySetter + public ListPipelinesResponse putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ListPipelinesResponse object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ListPipelinesResponse listPipelinesResponse = (ListPipelinesResponse) o; + return Objects.equals(this.data, listPipelinesResponse.data) + && Objects.equals(this.meta, listPipelinesResponse.meta) + && Objects.equals(this.additionalProperties, listPipelinesResponse.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(data, meta, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ListPipelinesResponse {\n"); + sb.append(" data: ").append(toIndentedString(data)).append("\n"); + sb.append(" meta: ").append(toIndentedString(meta)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ListPipelinesResponseMeta.java b/src/main/java/com/datadog/api/client/v2/model/ListPipelinesResponseMeta.java new file mode 100644 index 00000000000..acd082dbc79 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ListPipelinesResponseMeta.java @@ -0,0 +1,136 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Metadata about the response. */ +@JsonPropertyOrder({ListPipelinesResponseMeta.JSON_PROPERTY_TOTAL_COUNT}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ListPipelinesResponseMeta { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_TOTAL_COUNT = "totalCount"; + private Long totalCount; + + public ListPipelinesResponseMeta totalCount(Long totalCount) { + this.totalCount = totalCount; + return this; + } + + /** + * The total number of pipelines. 
+ * + * @return totalCount + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TOTAL_COUNT) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getTotalCount() { + return totalCount; + } + + public void setTotalCount(Long totalCount) { + this.totalCount = totalCount; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ListPipelinesResponseMeta + */ + @JsonAnySetter + public ListPipelinesResponseMeta putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ListPipelinesResponseMeta object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ListPipelinesResponseMeta listPipelinesResponseMeta = (ListPipelinesResponseMeta) o; + return Objects.equals(this.totalCount, listPipelinesResponseMeta.totalCount) + && Objects.equals( + this.additionalProperties, listPipelinesResponseMeta.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(totalCount, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ListPipelinesResponseMeta {\n"); + sb.append(" totalCount: ").append(toIndentedString(totalCount)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java b/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java new file mode 100644 index 00000000000..62d3cfd1cbc --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestination.java @@ -0,0 +1,329 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The microsoft_sentinel destination forwards logs to Microsoft Sentinel. */ +@JsonPropertyOrder({ + MicrosoftSentinelDestination.JSON_PROPERTY_CLIENT_ID, + MicrosoftSentinelDestination.JSON_PROPERTY_DCR_IMMUTABLE_ID, + MicrosoftSentinelDestination.JSON_PROPERTY_ID, + MicrosoftSentinelDestination.JSON_PROPERTY_INPUTS, + MicrosoftSentinelDestination.JSON_PROPERTY_TABLE, + MicrosoftSentinelDestination.JSON_PROPERTY_TENANT_ID, + MicrosoftSentinelDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class MicrosoftSentinelDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_CLIENT_ID = "client_id"; + private String clientId; + + public static final String JSON_PROPERTY_DCR_IMMUTABLE_ID = "dcr_immutable_id"; + private String dcrImmutableId; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TABLE = "table"; + private String table; + + public static final String JSON_PROPERTY_TENANT_ID = "tenant_id"; + private String tenantId; + + public static final String JSON_PROPERTY_TYPE = "type"; + private MicrosoftSentinelDestinationType type = + 
MicrosoftSentinelDestinationType.MICROSOFT_SENTINEL; + + public MicrosoftSentinelDestination() {} + + @JsonCreator + public MicrosoftSentinelDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_CLIENT_ID) String clientId, + @JsonProperty(required = true, value = JSON_PROPERTY_DCR_IMMUTABLE_ID) String dcrImmutableId, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TABLE) String table, + @JsonProperty(required = true, value = JSON_PROPERTY_TENANT_ID) String tenantId, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + MicrosoftSentinelDestinationType type) { + this.clientId = clientId; + this.dcrImmutableId = dcrImmutableId; + this.id = id; + this.inputs = inputs; + this.table = table; + this.tenantId = tenantId; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public MicrosoftSentinelDestination clientId(String clientId) { + this.clientId = clientId; + return this; + } + + /** + * Azure AD client ID used for authentication. + * + * @return clientId + */ + @JsonProperty(JSON_PROPERTY_CLIENT_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getClientId() { + return clientId; + } + + public void setClientId(String clientId) { + this.clientId = clientId; + } + + public MicrosoftSentinelDestination dcrImmutableId(String dcrImmutableId) { + this.dcrImmutableId = dcrImmutableId; + return this; + } + + /** + * The immutable ID of the Data Collection Rule (DCR). 
+ * + * @return dcrImmutableId + */ + @JsonProperty(JSON_PROPERTY_DCR_IMMUTABLE_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getDcrImmutableId() { + return dcrImmutableId; + } + + public void setDcrImmutableId(String dcrImmutableId) { + this.dcrImmutableId = dcrImmutableId; + } + + public MicrosoftSentinelDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public MicrosoftSentinelDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public MicrosoftSentinelDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public MicrosoftSentinelDestination table(String table) { + this.table = table; + return this; + } + + /** + * The name of the Log Analytics table where logs are sent. + * + * @return table + */ + @JsonProperty(JSON_PROPERTY_TABLE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getTable() { + return table; + } + + public void setTable(String table) { + this.table = table; + } + + public MicrosoftSentinelDestination tenantId(String tenantId) { + this.tenantId = tenantId; + return this; + } + + /** + * Azure AD tenant ID. 
+ * + * @return tenantId + */ + @JsonProperty(JSON_PROPERTY_TENANT_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getTenantId() { + return tenantId; + } + + public void setTenantId(String tenantId) { + this.tenantId = tenantId; + } + + public MicrosoftSentinelDestination type(MicrosoftSentinelDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be microsoft_sentinel. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public MicrosoftSentinelDestinationType getType() { + return type; + } + + public void setType(MicrosoftSentinelDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return MicrosoftSentinelDestination + */ + @JsonAnySetter + public MicrosoftSentinelDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this MicrosoftSentinelDestination object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + MicrosoftSentinelDestination microsoftSentinelDestination = (MicrosoftSentinelDestination) o; + return Objects.equals(this.clientId, microsoftSentinelDestination.clientId) + && Objects.equals(this.dcrImmutableId, microsoftSentinelDestination.dcrImmutableId) + && Objects.equals(this.id, microsoftSentinelDestination.id) + && Objects.equals(this.inputs, microsoftSentinelDestination.inputs) + && Objects.equals(this.table, microsoftSentinelDestination.table) + && Objects.equals(this.tenantId, microsoftSentinelDestination.tenantId) + && Objects.equals(this.type, microsoftSentinelDestination.type) + && Objects.equals( + this.additionalProperties, microsoftSentinelDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + clientId, dcrImmutableId, id, inputs, table, tenantId, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class MicrosoftSentinelDestination {\n"); + sb.append(" clientId: ").append(toIndentedString(clientId)).append("\n"); + sb.append(" dcrImmutableId: ").append(toIndentedString(dcrImmutableId)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" table: ").append(toIndentedString(table)).append("\n"); + sb.append(" tenantId: ").append(toIndentedString(tenantId)).append("\n"); + sb.append(" type: 
").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestinationType.java new file mode 100644 index 00000000000..4877ee7d8ea --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/MicrosoftSentinelDestinationType.java @@ -0,0 +1,58 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be microsoft_sentinel. 
*/ +@JsonSerialize( + using = MicrosoftSentinelDestinationType.MicrosoftSentinelDestinationTypeSerializer.class) +public class MicrosoftSentinelDestinationType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("microsoft_sentinel")); + + public static final MicrosoftSentinelDestinationType MICROSOFT_SENTINEL = + new MicrosoftSentinelDestinationType("microsoft_sentinel"); + + MicrosoftSentinelDestinationType(String value) { + super(value, allowedValues); + } + + public static class MicrosoftSentinelDestinationTypeSerializer + extends StdSerializer { + public MicrosoftSentinelDestinationTypeSerializer(Class t) { + super(t); + } + + public MicrosoftSentinelDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + MicrosoftSentinelDestinationType value, JsonGenerator jgen, SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static MicrosoftSentinelDestinationType fromValue(String value) { + return new MicrosoftSentinelDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddEnvVarsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddEnvVarsProcessor.java new file mode 100644 index 00000000000..694f833d9ec --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAddEnvVarsProcessor.java @@ -0,0 +1,287 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/** The <code>add_env_vars</code> processor adds environment variable values to log events. */
@JsonPropertyOrder({
  ObservabilityPipelineAddEnvVarsProcessor.JSON_PROPERTY_ID,
  ObservabilityPipelineAddEnvVarsProcessor.JSON_PROPERTY_INCLUDE,
  ObservabilityPipelineAddEnvVarsProcessor.JSON_PROPERTY_INPUTS,
  ObservabilityPipelineAddEnvVarsProcessor.JSON_PROPERTY_TYPE,
  ObservabilityPipelineAddEnvVarsProcessor.JSON_PROPERTY_VARIABLES
})
@jakarta.annotation.Generated(
    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class ObservabilityPipelineAddEnvVarsProcessor {
  @JsonIgnore public boolean unparsed = false;
  public static final String JSON_PROPERTY_ID = "id";
  private String id;

  public static final String JSON_PROPERTY_INCLUDE = "include";
  private String include;

  public static final String JSON_PROPERTY_INPUTS = "inputs";
  private List<String> inputs = new ArrayList<>();

  public static final String JSON_PROPERTY_TYPE = "type";
  private ObservabilityPipelineAddEnvVarsProcessorType type =
      ObservabilityPipelineAddEnvVarsProcessorType.ADD_ENV_VARS;

  public static final String JSON_PROPERTY_VARIABLES = "variables";
  private List<ObservabilityPipelineAddEnvVarsProcessorVariable> variables = new ArrayList<>();

  public ObservabilityPipelineAddEnvVarsProcessor() {}

  @JsonCreator
  public ObservabilityPipelineAddEnvVarsProcessor(
      @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id,
      @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include,
      @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List<String> inputs,
      @JsonProperty(required = true, value = JSON_PROPERTY_TYPE)
          ObservabilityPipelineAddEnvVarsProcessorType type,
      @JsonProperty(required = true, value = JSON_PROPERTY_VARIABLES)
          List<ObservabilityPipelineAddEnvVarsProcessorVariable> variables) {
    this.id = id;
    this.include = include;
    this.inputs = inputs;
    this.type = type;
    this.unparsed |= !type.isValid();
    this.variables = variables;
  }

  public ObservabilityPipelineAddEnvVarsProcessor id(String id) {
    this.id = id;
    return this;
  }

  /**
   * The unique identifier for this component. Used to reference this processor in the pipeline.
   *
   * @return id
   */
  @JsonProperty(JSON_PROPERTY_ID)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getId() {
    return id;
  }

  public void setId(String id) {
    this.id = id;
  }

  public ObservabilityPipelineAddEnvVarsProcessor include(String include) {
    this.include = include;
    return this;
  }

  /**
   * A Datadog search query used to determine which logs this processor targets.
   *
   * @return include
   */
  @JsonProperty(JSON_PROPERTY_INCLUDE)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getInclude() {
    return include;
  }

  public void setInclude(String include) {
    this.include = include;
  }

  public ObservabilityPipelineAddEnvVarsProcessor inputs(List<String> inputs) {
    this.inputs = inputs;
    return this;
  }

  public ObservabilityPipelineAddEnvVarsProcessor addInputsItem(String inputsItem) {
    this.inputs.add(inputsItem);
    return this;
  }

  /**
   * A list of component IDs whose output is used as the <code>input</code> for this processor.
   *
   * @return inputs
   */
  @JsonProperty(JSON_PROPERTY_INPUTS)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public List<String> getInputs() {
    return inputs;
  }

  public void setInputs(List<String> inputs) {
    this.inputs = inputs;
  }

  public ObservabilityPipelineAddEnvVarsProcessor type(
      ObservabilityPipelineAddEnvVarsProcessorType type) {
    this.type = type;
    this.unparsed |= !type.isValid();
    return this;
  }

  /**
   * The processor type. The value should always be <code>add_env_vars</code>.
   *
   * @return type
   */
  @JsonProperty(JSON_PROPERTY_TYPE)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public ObservabilityPipelineAddEnvVarsProcessorType getType() {
    return type;
  }

  public void setType(ObservabilityPipelineAddEnvVarsProcessorType type) {
    if (!type.isValid()) {
      this.unparsed = true;
    }
    this.type = type;
  }

  public ObservabilityPipelineAddEnvVarsProcessor variables(
      List<ObservabilityPipelineAddEnvVarsProcessorVariable> variables) {
    this.variables = variables;
    for (ObservabilityPipelineAddEnvVarsProcessorVariable item : variables) {
      this.unparsed |= item.unparsed;
    }
    return this;
  }

  public ObservabilityPipelineAddEnvVarsProcessor addVariablesItem(
      ObservabilityPipelineAddEnvVarsProcessorVariable variablesItem) {
    this.variables.add(variablesItem);
    this.unparsed |= variablesItem.unparsed;
    return this;
  }

  /**
   * A list of environment variable mappings to apply to log fields.
   *
   * @return variables
   */
  @JsonProperty(JSON_PROPERTY_VARIABLES)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public List<ObservabilityPipelineAddEnvVarsProcessorVariable> getVariables() {
    return variables;
  }

  public void setVariables(List<ObservabilityPipelineAddEnvVarsProcessorVariable> variables) {
    this.variables = variables;
  }

  /**
   * A container for additional, undeclared properties. This is a holder for any undeclared
   * properties as specified with the 'additionalProperties' keyword in the OAS document.
   */
  private Map<String, Object> additionalProperties;

  /**
   * Set the additional (undeclared) property with the specified name and value. If the property
   * does not already exist, create it otherwise replace it.
   *
   * @param key The arbitrary key to set
   * @param value The associated value
   * @return ObservabilityPipelineAddEnvVarsProcessor
   */
  @JsonAnySetter
  public ObservabilityPipelineAddEnvVarsProcessor putAdditionalProperty(String key, Object value) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new HashMap<String, Object>();
    }
    this.additionalProperties.put(key, value);
    return this;
  }

  /**
   * Return the additional (undeclared) property.
   *
   * @return The additional properties
   */
  @JsonAnyGetter
  public Map<String, Object> getAdditionalProperties() {
    return additionalProperties;
  }

  /**
   * Return the additional (undeclared) property with the specified name.
   *
   * @param key The arbitrary key to get
   * @return The specific additional property for the given key
   */
  public Object getAdditionalProperty(String key) {
    if (this.additionalProperties == null) {
      return null;
    }
    return this.additionalProperties.get(key);
  }

  /** Return true if this ObservabilityPipelineAddEnvVarsProcessor object is equal to o. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ObservabilityPipelineAddEnvVarsProcessor observabilityPipelineAddEnvVarsProcessor =
        (ObservabilityPipelineAddEnvVarsProcessor) o;
    return Objects.equals(this.id, observabilityPipelineAddEnvVarsProcessor.id)
        && Objects.equals(this.include, observabilityPipelineAddEnvVarsProcessor.include)
        && Objects.equals(this.inputs, observabilityPipelineAddEnvVarsProcessor.inputs)
        && Objects.equals(this.type, observabilityPipelineAddEnvVarsProcessor.type)
        && Objects.equals(this.variables, observabilityPipelineAddEnvVarsProcessor.variables)
        && Objects.equals(
            this.additionalProperties,
            observabilityPipelineAddEnvVarsProcessor.additionalProperties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(id, include, inputs, type, variables, additionalProperties);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ObservabilityPipelineAddEnvVarsProcessor {\n");
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    include: ").append(toIndentedString(include)).append("\n");
    sb.append("    inputs: ").append(toIndentedString(inputs)).append("\n");
    sb.append("    type: ").append(toIndentedString(type)).append("\n");
    sb.append("    variables: ").append(toIndentedString(variables)).append("\n");
    sb.append("    additionalProperties: ")
        .append(toIndentedString(additionalProperties))
        .append("\n");
    sb.append('}');
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.datadog.api.client.ModelEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/** The processor type. The value should always be <code>add_env_vars</code>. */
@JsonSerialize(
    using =
        ObservabilityPipelineAddEnvVarsProcessorType
            .ObservabilityPipelineAddEnvVarsProcessorTypeSerializer.class)
public class ObservabilityPipelineAddEnvVarsProcessorType extends ModelEnum<String> {

  // Only value accepted by the API for this discriminator field.
  private static final Set<String> allowedValues =
      new HashSet<String>(Arrays.asList("add_env_vars"));

  public static final ObservabilityPipelineAddEnvVarsProcessorType ADD_ENV_VARS =
      new ObservabilityPipelineAddEnvVarsProcessorType("add_env_vars");

  ObservabilityPipelineAddEnvVarsProcessorType(String value) {
    super(value, allowedValues);
  }

  /** Serializes the enum as its raw string value. */
  public static class ObservabilityPipelineAddEnvVarsProcessorTypeSerializer
      extends StdSerializer<ObservabilityPipelineAddEnvVarsProcessorType> {
    public ObservabilityPipelineAddEnvVarsProcessorTypeSerializer(
        Class<ObservabilityPipelineAddEnvVarsProcessorType> t) {
      super(t);
    }

    public ObservabilityPipelineAddEnvVarsProcessorTypeSerializer() {
      this(null);
    }

    @Override
    public void serialize(
        ObservabilityPipelineAddEnvVarsProcessorType value,
        JsonGenerator jgen,
        SerializerProvider provider)
        throws IOException, JsonProcessingException {
      jgen.writeObject(value.value);
    }
  }

  /** Deserializer entry point; unknown values are kept but flagged invalid by ModelEnum. */
  @JsonCreator
  public static ObservabilityPipelineAddEnvVarsProcessorType fromValue(String value) {
    return new ObservabilityPipelineAddEnvVarsProcessorType(value);
  }
}
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/** Defines a mapping between an environment variable and a log field. */
@JsonPropertyOrder({
  ObservabilityPipelineAddEnvVarsProcessorVariable.JSON_PROPERTY_FIELD,
  ObservabilityPipelineAddEnvVarsProcessorVariable.JSON_PROPERTY_NAME
})
@jakarta.annotation.Generated(
    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class ObservabilityPipelineAddEnvVarsProcessorVariable {
  @JsonIgnore public boolean unparsed = false;
  public static final String JSON_PROPERTY_FIELD = "field";
  private String field;

  public static final String JSON_PROPERTY_NAME = "name";
  private String name;

  public ObservabilityPipelineAddEnvVarsProcessorVariable() {}

  @JsonCreator
  public ObservabilityPipelineAddEnvVarsProcessorVariable(
      @JsonProperty(required = true, value = JSON_PROPERTY_FIELD) String field,
      @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name) {
    this.field = field;
    this.name = name;
  }

  public ObservabilityPipelineAddEnvVarsProcessorVariable field(String field) {
    this.field = field;
    return this;
  }

  /**
   * The target field in the log event.
   *
   * @return field
   */
  @JsonProperty(JSON_PROPERTY_FIELD)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getField() {
    return field;
  }

  public void setField(String field) {
    this.field = field;
  }

  public ObservabilityPipelineAddEnvVarsProcessorVariable name(String name) {
    this.name = name;
    return this;
  }

  /**
   * The name of the environment variable to read.
   *
   * @return name
   */
  @JsonProperty(JSON_PROPERTY_NAME)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  /**
   * A container for additional, undeclared properties. This is a holder for any undeclared
   * properties as specified with the 'additionalProperties' keyword in the OAS document.
   */
  private Map<String, Object> additionalProperties;

  /**
   * Set the additional (undeclared) property with the specified name and value. If the property
   * does not already exist, create it otherwise replace it.
   *
   * @param key The arbitrary key to set
   * @param value The associated value
   * @return ObservabilityPipelineAddEnvVarsProcessorVariable
   */
  @JsonAnySetter
  public ObservabilityPipelineAddEnvVarsProcessorVariable putAdditionalProperty(
      String key, Object value) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new HashMap<String, Object>();
    }
    this.additionalProperties.put(key, value);
    return this;
  }

  /**
   * Return the additional (undeclared) property.
   *
   * @return The additional properties
   */
  @JsonAnyGetter
  public Map<String, Object> getAdditionalProperties() {
    return additionalProperties;
  }

  /**
   * Return the additional (undeclared) property with the specified name.
   *
   * @param key The arbitrary key to get
   * @return The specific additional property for the given key
   */
  public Object getAdditionalProperty(String key) {
    if (this.additionalProperties == null) {
      return null;
    }
    return this.additionalProperties.get(key);
  }

  /** Return true if this ObservabilityPipelineAddEnvVarsProcessorVariable object is equal to o. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ObservabilityPipelineAddEnvVarsProcessorVariable
        observabilityPipelineAddEnvVarsProcessorVariable =
            (ObservabilityPipelineAddEnvVarsProcessorVariable) o;
    return Objects.equals(this.field, observabilityPipelineAddEnvVarsProcessorVariable.field)
        && Objects.equals(this.name, observabilityPipelineAddEnvVarsProcessorVariable.name)
        && Objects.equals(
            this.additionalProperties,
            observabilityPipelineAddEnvVarsProcessorVariable.additionalProperties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(field, name, additionalProperties);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ObservabilityPipelineAddEnvVarsProcessorVariable {\n");
    sb.append("    field: ").append(toIndentedString(field)).append("\n");
    sb.append("    name: ").append(toIndentedString(name)).append("\n");
    sb.append("    additionalProperties: ")
        .append(toIndentedString(additionalProperties))
        .append("\n");
    sb.append('}');
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/** The <code>amazon_data_firehose</code> source ingests logs from AWS Data Firehose. */
@JsonPropertyOrder({
  ObservabilityPipelineAmazonDataFirehoseSource.JSON_PROPERTY_AUTH,
  ObservabilityPipelineAmazonDataFirehoseSource.JSON_PROPERTY_ID,
  ObservabilityPipelineAmazonDataFirehoseSource.JSON_PROPERTY_TLS,
  ObservabilityPipelineAmazonDataFirehoseSource.JSON_PROPERTY_TYPE
})
@jakarta.annotation.Generated(
    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class ObservabilityPipelineAmazonDataFirehoseSource {
  @JsonIgnore public boolean unparsed = false;
  public static final String JSON_PROPERTY_AUTH = "auth";
  private ObservabilityPipelineAwsAuth auth;

  public static final String JSON_PROPERTY_ID = "id";
  private String id;

  public static final String JSON_PROPERTY_TLS = "tls";
  private ObservabilityPipelineTls tls;

  public static final String JSON_PROPERTY_TYPE = "type";
  private ObservabilityPipelineAmazonDataFirehoseSourceType type =
      ObservabilityPipelineAmazonDataFirehoseSourceType.AMAZON_DATA_FIREHOSE;

  public ObservabilityPipelineAmazonDataFirehoseSource() {}

  @JsonCreator
  public ObservabilityPipelineAmazonDataFirehoseSource(
      @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id,
      @JsonProperty(required = true, value = JSON_PROPERTY_TYPE)
          ObservabilityPipelineAmazonDataFirehoseSourceType type) {
    this.id = id;
    this.type = type;
    this.unparsed |= !type.isValid();
  }

  public ObservabilityPipelineAmazonDataFirehoseSource auth(ObservabilityPipelineAwsAuth auth) {
    this.auth = auth;
    this.unparsed |= auth.unparsed;
    return this;
  }

  /**
   * AWS authentication credentials used for accessing AWS services such as S3. If omitted, the
   * system’s default credentials are used (for example, the IAM role and environment variables).
   *
   * @return auth
   */
  @jakarta.annotation.Nullable
  @JsonProperty(JSON_PROPERTY_AUTH)
  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
  public ObservabilityPipelineAwsAuth getAuth() {
    return auth;
  }

  public void setAuth(ObservabilityPipelineAwsAuth auth) {
    this.auth = auth;
  }

  public ObservabilityPipelineAmazonDataFirehoseSource id(String id) {
    this.id = id;
    return this;
  }

  /**
   * The unique identifier for this component. Used to reference this component in other parts of
   * the pipeline (e.g., as input to downstream components).
   *
   * @return id
   */
  @JsonProperty(JSON_PROPERTY_ID)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getId() {
    return id;
  }

  public void setId(String id) {
    this.id = id;
  }

  public ObservabilityPipelineAmazonDataFirehoseSource tls(ObservabilityPipelineTls tls) {
    this.tls = tls;
    this.unparsed |= tls.unparsed;
    return this;
  }

  /**
   * Configuration for enabling TLS encryption between the pipeline component and external services.
   *
   * @return tls
   */
  @jakarta.annotation.Nullable
  @JsonProperty(JSON_PROPERTY_TLS)
  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
  public ObservabilityPipelineTls getTls() {
    return tls;
  }

  public void setTls(ObservabilityPipelineTls tls) {
    this.tls = tls;
  }

  public ObservabilityPipelineAmazonDataFirehoseSource type(
      ObservabilityPipelineAmazonDataFirehoseSourceType type) {
    this.type = type;
    this.unparsed |= !type.isValid();
    return this;
  }

  /**
   * The source type. The value should always be <code>amazon_data_firehose</code>.
   *
   * @return type
   */
  @JsonProperty(JSON_PROPERTY_TYPE)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public ObservabilityPipelineAmazonDataFirehoseSourceType getType() {
    return type;
  }

  public void setType(ObservabilityPipelineAmazonDataFirehoseSourceType type) {
    if (!type.isValid()) {
      this.unparsed = true;
    }
    this.type = type;
  }

  /**
   * A container for additional, undeclared properties. This is a holder for any undeclared
   * properties as specified with the 'additionalProperties' keyword in the OAS document.
   */
  private Map<String, Object> additionalProperties;

  /**
   * Set the additional (undeclared) property with the specified name and value. If the property
   * does not already exist, create it otherwise replace it.
   *
   * @param key The arbitrary key to set
   * @param value The associated value
   * @return ObservabilityPipelineAmazonDataFirehoseSource
   */
  @JsonAnySetter
  public ObservabilityPipelineAmazonDataFirehoseSource putAdditionalProperty(
      String key, Object value) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new HashMap<String, Object>();
    }
    this.additionalProperties.put(key, value);
    return this;
  }

  /**
   * Return the additional (undeclared) property.
   *
   * @return The additional properties
   */
  @JsonAnyGetter
  public Map<String, Object> getAdditionalProperties() {
    return additionalProperties;
  }

  /**
   * Return the additional (undeclared) property with the specified name.
   *
   * @param key The arbitrary key to get
   * @return The specific additional property for the given key
   */
  public Object getAdditionalProperty(String key) {
    if (this.additionalProperties == null) {
      return null;
    }
    return this.additionalProperties.get(key);
  }

  /** Return true if this ObservabilityPipelineAmazonDataFirehoseSource object is equal to o. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ObservabilityPipelineAmazonDataFirehoseSource observabilityPipelineAmazonDataFirehoseSource =
        (ObservabilityPipelineAmazonDataFirehoseSource) o;
    return Objects.equals(this.auth, observabilityPipelineAmazonDataFirehoseSource.auth)
        && Objects.equals(this.id, observabilityPipelineAmazonDataFirehoseSource.id)
        && Objects.equals(this.tls, observabilityPipelineAmazonDataFirehoseSource.tls)
        && Objects.equals(this.type, observabilityPipelineAmazonDataFirehoseSource.type)
        && Objects.equals(
            this.additionalProperties,
            observabilityPipelineAmazonDataFirehoseSource.additionalProperties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(auth, id, tls, type, additionalProperties);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ObservabilityPipelineAmazonDataFirehoseSource {\n");
    sb.append("    auth: ").append(toIndentedString(auth)).append("\n");
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    tls: ").append(toIndentedString(tls)).append("\n");
    sb.append("    type: ").append(toIndentedString(type)).append("\n");
    sb.append("    additionalProperties: ")
        .append(toIndentedString(additionalProperties))
        .append("\n");
    sb.append('}');
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.datadog.api.client.ModelEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/** The source type. The value should always be <code>amazon_data_firehose</code>. */
@JsonSerialize(
    using =
        ObservabilityPipelineAmazonDataFirehoseSourceType
            .ObservabilityPipelineAmazonDataFirehoseSourceTypeSerializer.class)
public class ObservabilityPipelineAmazonDataFirehoseSourceType extends ModelEnum<String> {

  // Only value accepted by the API for this discriminator field.
  private static final Set<String> allowedValues =
      new HashSet<String>(Arrays.asList("amazon_data_firehose"));

  public static final ObservabilityPipelineAmazonDataFirehoseSourceType AMAZON_DATA_FIREHOSE =
      new ObservabilityPipelineAmazonDataFirehoseSourceType("amazon_data_firehose");

  ObservabilityPipelineAmazonDataFirehoseSourceType(String value) {
    super(value, allowedValues);
  }

  /** Serializes the enum as its raw string value. */
  public static class ObservabilityPipelineAmazonDataFirehoseSourceTypeSerializer
      extends StdSerializer<ObservabilityPipelineAmazonDataFirehoseSourceType> {
    public ObservabilityPipelineAmazonDataFirehoseSourceTypeSerializer(
        Class<ObservabilityPipelineAmazonDataFirehoseSourceType> t) {
      super(t);
    }

    public ObservabilityPipelineAmazonDataFirehoseSourceTypeSerializer() {
      this(null);
    }

    @Override
    public void serialize(
        ObservabilityPipelineAmazonDataFirehoseSourceType value,
        JsonGenerator jgen,
        SerializerProvider provider)
        throws IOException, JsonProcessingException {
      jgen.writeObject(value.value);
    }
  }

  /** Deserializer entry point; unknown values are kept but flagged invalid by ModelEnum. */
  @JsonCreator
  public static ObservabilityPipelineAmazonDataFirehoseSourceType fromValue(String value) {
    return new ObservabilityPipelineAmazonDataFirehoseSourceType(value);
  }
}
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The amazon_opensearch destination writes logs to Amazon OpenSearch. */ +@JsonPropertyOrder({ + ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_AUTH, + ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_BULK_INDEX, + ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_ID, + ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineAmazonOpenSearchDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAmazonOpenSearchDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_AUTH = "auth"; + private ObservabilityPipelineAmazonOpenSearchDestinationAuth auth; + + public static final String JSON_PROPERTY_BULK_INDEX = "bulk_index"; + private String bulkIndex; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineAmazonOpenSearchDestinationType type = + ObservabilityPipelineAmazonOpenSearchDestinationType.AMAZON_OPENSEARCH; + + public ObservabilityPipelineAmazonOpenSearchDestination() {} + + @JsonCreator + public 
ObservabilityPipelineAmazonOpenSearchDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_AUTH) + ObservabilityPipelineAmazonOpenSearchDestinationAuth auth, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineAmazonOpenSearchDestinationType type) { + this.auth = auth; + this.unparsed |= auth.unparsed; + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineAmazonOpenSearchDestination auth( + ObservabilityPipelineAmazonOpenSearchDestinationAuth auth) { + this.auth = auth; + this.unparsed |= auth.unparsed; + return this; + } + + /** + * Authentication settings for the Amazon OpenSearch destination. The strategy field + * determines whether basic or AWS-based authentication is used. + * + * @return auth + */ + @JsonProperty(JSON_PROPERTY_AUTH) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAmazonOpenSearchDestinationAuth getAuth() { + return auth; + } + + public void setAuth(ObservabilityPipelineAmazonOpenSearchDestinationAuth auth) { + this.auth = auth; + } + + public ObservabilityPipelineAmazonOpenSearchDestination bulkIndex(String bulkIndex) { + this.bulkIndex = bulkIndex; + return this; + } + + /** + * The index to write logs to. + * + * @return bulkIndex + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_BULK_INDEX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getBulkIndex() { + return bulkIndex; + } + + public void setBulkIndex(String bulkIndex) { + this.bulkIndex = bulkIndex; + } + + public ObservabilityPipelineAmazonOpenSearchDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineAmazonOpenSearchDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineAmazonOpenSearchDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineAmazonOpenSearchDestination type( + ObservabilityPipelineAmazonOpenSearchDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be amazon_opensearch. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAmazonOpenSearchDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineAmazonOpenSearchDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAmazonOpenSearchDestination + */ + @JsonAnySetter + public ObservabilityPipelineAmazonOpenSearchDestination putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineAmazonOpenSearchDestination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAmazonOpenSearchDestination + observabilityPipelineAmazonOpenSearchDestination = + (ObservabilityPipelineAmazonOpenSearchDestination) o; + return Objects.equals(this.auth, observabilityPipelineAmazonOpenSearchDestination.auth) + && Objects.equals( + this.bulkIndex, observabilityPipelineAmazonOpenSearchDestination.bulkIndex) + && Objects.equals(this.id, observabilityPipelineAmazonOpenSearchDestination.id) + && Objects.equals(this.inputs, observabilityPipelineAmazonOpenSearchDestination.inputs) + && Objects.equals(this.type, observabilityPipelineAmazonOpenSearchDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineAmazonOpenSearchDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(auth, bulkIndex, id, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAmazonOpenSearchDestination {\n"); + sb.append(" auth: ").append(toIndentedString(auth)).append("\n"); + sb.append(" bulkIndex: ").append(toIndentedString(bulkIndex)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestinationAuth.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestinationAuth.java new file mode 100644 index 00000000000..7088fd02e3d --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestinationAuth.java @@ -0,0 +1,276 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * Authentication settings for the Amazon OpenSearch destination. The strategy field + * determines whether basic or AWS-based authentication is used. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineAmazonOpenSearchDestinationAuth.JSON_PROPERTY_ASSUME_ROLE, + ObservabilityPipelineAmazonOpenSearchDestinationAuth.JSON_PROPERTY_AWS_REGION, + ObservabilityPipelineAmazonOpenSearchDestinationAuth.JSON_PROPERTY_EXTERNAL_ID, + ObservabilityPipelineAmazonOpenSearchDestinationAuth.JSON_PROPERTY_SESSION_NAME, + ObservabilityPipelineAmazonOpenSearchDestinationAuth.JSON_PROPERTY_STRATEGY +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAmazonOpenSearchDestinationAuth { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ASSUME_ROLE = "assume_role"; + private String assumeRole; + + public static final String JSON_PROPERTY_AWS_REGION = "aws_region"; + private String awsRegion; + + public static final String JSON_PROPERTY_EXTERNAL_ID = "external_id"; + private String externalId; + + public static final String JSON_PROPERTY_SESSION_NAME = "session_name"; + private String sessionName; + + public static final String JSON_PROPERTY_STRATEGY = "strategy"; + private ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy strategy; + + public ObservabilityPipelineAmazonOpenSearchDestinationAuth() {} + + @JsonCreator + public ObservabilityPipelineAmazonOpenSearchDestinationAuth( + @JsonProperty(required = true, value = JSON_PROPERTY_STRATEGY) + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy strategy) { + this.strategy = strategy; + this.unparsed |= !strategy.isValid(); + } + + public ObservabilityPipelineAmazonOpenSearchDestinationAuth assumeRole(String assumeRole) { + this.assumeRole = assumeRole; + return this; + } + + /** + * The ARN of the role to assume (used with aws strategy). 
+ * + * @return assumeRole + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ASSUME_ROLE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getAssumeRole() { + return assumeRole; + } + + public void setAssumeRole(String assumeRole) { + this.assumeRole = assumeRole; + } + + public ObservabilityPipelineAmazonOpenSearchDestinationAuth awsRegion(String awsRegion) { + this.awsRegion = awsRegion; + return this; + } + + /** + * AWS region + * + * @return awsRegion + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_AWS_REGION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getAwsRegion() { + return awsRegion; + } + + public void setAwsRegion(String awsRegion) { + this.awsRegion = awsRegion; + } + + public ObservabilityPipelineAmazonOpenSearchDestinationAuth externalId(String externalId) { + this.externalId = externalId; + return this; + } + + /** + * External ID for the assumed role (used with aws strategy). + * + * @return externalId + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_EXTERNAL_ID) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getExternalId() { + return externalId; + } + + public void setExternalId(String externalId) { + this.externalId = externalId; + } + + public ObservabilityPipelineAmazonOpenSearchDestinationAuth sessionName(String sessionName) { + this.sessionName = sessionName; + return this; + } + + /** + * Session name for the assumed role (used with aws strategy). 
+ * + * @return sessionName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SESSION_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getSessionName() { + return sessionName; + } + + public void setSessionName(String sessionName) { + this.sessionName = sessionName; + } + + public ObservabilityPipelineAmazonOpenSearchDestinationAuth strategy( + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy strategy) { + this.strategy = strategy; + this.unparsed |= !strategy.isValid(); + return this; + } + + /** + * The authentication strategy to use. + * + * @return strategy + */ + @JsonProperty(JSON_PROPERTY_STRATEGY) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy getStrategy() { + return strategy; + } + + public void setStrategy(ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy strategy) { + if (!strategy.isValid()) { + this.unparsed = true; + } + this.strategy = strategy; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAmazonOpenSearchDestinationAuth + */ + @JsonAnySetter + public ObservabilityPipelineAmazonOpenSearchDestinationAuth putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. 
+ * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineAmazonOpenSearchDestinationAuth object is equal to o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAmazonOpenSearchDestinationAuth + observabilityPipelineAmazonOpenSearchDestinationAuth = + (ObservabilityPipelineAmazonOpenSearchDestinationAuth) o; + return Objects.equals( + this.assumeRole, observabilityPipelineAmazonOpenSearchDestinationAuth.assumeRole) + && Objects.equals( + this.awsRegion, observabilityPipelineAmazonOpenSearchDestinationAuth.awsRegion) + && Objects.equals( + this.externalId, observabilityPipelineAmazonOpenSearchDestinationAuth.externalId) + && Objects.equals( + this.sessionName, observabilityPipelineAmazonOpenSearchDestinationAuth.sessionName) + && Objects.equals( + this.strategy, observabilityPipelineAmazonOpenSearchDestinationAuth.strategy) + && Objects.equals( + this.additionalProperties, + observabilityPipelineAmazonOpenSearchDestinationAuth.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + assumeRole, awsRegion, externalId, sessionName, strategy, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAmazonOpenSearchDestinationAuth {\n"); + sb.append(" assumeRole: ").append(toIndentedString(assumeRole)).append("\n"); + sb.append(" 
awsRegion: ").append(toIndentedString(awsRegion)).append("\n"); + sb.append(" externalId: ").append(toIndentedString(externalId)).append("\n"); + sb.append(" sessionName: ").append(toIndentedString(sessionName)).append("\n"); + sb.append(" strategy: ").append(toIndentedString(strategy)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy.java new file mode 100644 index 00000000000..882eade53b9 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy.java @@ -0,0 +1,67 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The authentication strategy to use. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy + .ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategySerializer.class) +public class ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy + extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("basic", "aws")); + + public static final ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy BASIC = + new ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy("basic"); + public static final ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy AWS = + new ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy("aws"); + + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategySerializer + extends StdSerializer { + public ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategySerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategySerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy fromValue( + String value) { + return new ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestinationType.java new file mode 100644 index 00000000000..087edbfe841 --- /dev/null +++ 
b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonOpenSearchDestinationType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be amazon_opensearch. */ +@JsonSerialize( + using = + ObservabilityPipelineAmazonOpenSearchDestinationType + .ObservabilityPipelineAmazonOpenSearchDestinationTypeSerializer.class) +public class ObservabilityPipelineAmazonOpenSearchDestinationType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("amazon_opensearch")); + + public static final ObservabilityPipelineAmazonOpenSearchDestinationType AMAZON_OPENSEARCH = + new ObservabilityPipelineAmazonOpenSearchDestinationType("amazon_opensearch"); + + ObservabilityPipelineAmazonOpenSearchDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAmazonOpenSearchDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineAmazonOpenSearchDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAmazonOpenSearchDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + 
ObservabilityPipelineAmazonOpenSearchDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAmazonOpenSearchDestinationType fromValue(String value) { + return new ObservabilityPipelineAmazonOpenSearchDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java new file mode 100644 index 00000000000..e7b0612ac30 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java @@ -0,0 +1,398 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The amazon_s3 destination sends your logs in Datadog-rehydratable format to an + * Amazon S3 bucket for archiving. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_AUTH, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_BUCKET, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_ID, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_KEY_PREFIX, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_REGION, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_STORAGE_CLASS, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_TLS, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAmazonS3Destination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_AUTH = "auth"; + private ObservabilityPipelineAwsAuth auth; + + public static final String JSON_PROPERTY_BUCKET = "bucket"; + private String bucket; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_KEY_PREFIX = "key_prefix"; + private String keyPrefix; + + public static final String JSON_PROPERTY_REGION = "region"; + private String region; + + public static final String JSON_PROPERTY_STORAGE_CLASS = "storage_class"; + private ObservabilityPipelineAmazonS3DestinationStorageClass storageClass; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineAmazonS3DestinationType type = + ObservabilityPipelineAmazonS3DestinationType.AMAZON_S3; + + public ObservabilityPipelineAmazonS3Destination() {} + + @JsonCreator + public ObservabilityPipelineAmazonS3Destination( + @JsonProperty(required = true, value 
= JSON_PROPERTY_BUCKET) String bucket, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_REGION) String region, + @JsonProperty(required = true, value = JSON_PROPERTY_STORAGE_CLASS) + ObservabilityPipelineAmazonS3DestinationStorageClass storageClass, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineAmazonS3DestinationType type) { + this.bucket = bucket; + this.id = id; + this.inputs = inputs; + this.region = region; + this.storageClass = storageClass; + this.unparsed |= !storageClass.isValid(); + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineAmazonS3Destination auth(ObservabilityPipelineAwsAuth auth) { + this.auth = auth; + this.unparsed |= auth.unparsed; + return this; + } + + /** + * AWS authentication credentials used for accessing AWS services such as S3. If omitted, the + * system’s default credentials are used (for example, the IAM role and environment variables). + * + * @return auth + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_AUTH) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineAwsAuth getAuth() { + return auth; + } + + public void setAuth(ObservabilityPipelineAwsAuth auth) { + this.auth = auth; + } + + public ObservabilityPipelineAmazonS3Destination bucket(String bucket) { + this.bucket = bucket; + return this; + } + + /** + * S3 bucket name. + * + * @return bucket + */ + @JsonProperty(JSON_PROPERTY_BUCKET) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getBucket() { + return bucket; + } + + public void setBucket(String bucket) { + this.bucket = bucket; + } + + public ObservabilityPipelineAmazonS3Destination id(String id) { + this.id = id; + return this; + } + + /** + * Unique identifier for the destination component. 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineAmazonS3Destination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineAmazonS3Destination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineAmazonS3Destination keyPrefix(String keyPrefix) { + this.keyPrefix = keyPrefix; + return this; + } + + /** + * Optional prefix for object keys. + * + * @return keyPrefix + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_KEY_PREFIX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getKeyPrefix() { + return keyPrefix; + } + + public void setKeyPrefix(String keyPrefix) { + this.keyPrefix = keyPrefix; + } + + public ObservabilityPipelineAmazonS3Destination region(String region) { + this.region = region; + return this; + } + + /** + * AWS region of the S3 bucket. + * + * @return region + */ + @JsonProperty(JSON_PROPERTY_REGION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getRegion() { + return region; + } + + public void setRegion(String region) { + this.region = region; + } + + public ObservabilityPipelineAmazonS3Destination storageClass( + ObservabilityPipelineAmazonS3DestinationStorageClass storageClass) { + this.storageClass = storageClass; + this.unparsed |= !storageClass.isValid(); + return this; + } + + /** + * S3 storage class. 
+ * + * @return storageClass + */ + @JsonProperty(JSON_PROPERTY_STORAGE_CLASS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAmazonS3DestinationStorageClass getStorageClass() { + return storageClass; + } + + public void setStorageClass(ObservabilityPipelineAmazonS3DestinationStorageClass storageClass) { + if (!storageClass.isValid()) { + this.unparsed = true; + } + this.storageClass = storageClass; + } + + public ObservabilityPipelineAmazonS3Destination tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineAmazonS3Destination type( + ObservabilityPipelineAmazonS3DestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. Always amazon_s3. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAmazonS3DestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineAmazonS3DestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAmazonS3Destination + */ + @JsonAnySetter + public ObservabilityPipelineAmazonS3Destination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineAmazonS3Destination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAmazonS3Destination observabilityPipelineAmazonS3Destination = + (ObservabilityPipelineAmazonS3Destination) o; + return Objects.equals(this.auth, observabilityPipelineAmazonS3Destination.auth) + && Objects.equals(this.bucket, observabilityPipelineAmazonS3Destination.bucket) + && Objects.equals(this.id, observabilityPipelineAmazonS3Destination.id) + && Objects.equals(this.inputs, observabilityPipelineAmazonS3Destination.inputs) + && Objects.equals(this.keyPrefix, observabilityPipelineAmazonS3Destination.keyPrefix) + && Objects.equals(this.region, observabilityPipelineAmazonS3Destination.region) + && Objects.equals(this.storageClass, observabilityPipelineAmazonS3Destination.storageClass) + && Objects.equals(this.tls, observabilityPipelineAmazonS3Destination.tls) + && Objects.equals(this.type, observabilityPipelineAmazonS3Destination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineAmazonS3Destination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + auth, bucket, id, inputs, keyPrefix, region, storageClass, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAmazonS3Destination {\n"); + sb.append(" auth: ").append(toIndentedString(auth)).append("\n"); + sb.append(" bucket: ").append(toIndentedString(bucket)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" keyPrefix: ").append(toIndentedString(keyPrefix)).append("\n"); + sb.append(" region: ").append(toIndentedString(region)).append("\n"); + sb.append(" storageClass: ").append(toIndentedString(storageClass)).append("\n"); + sb.append(" tls: 
").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationStorageClass.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationStorageClass.java new file mode 100644 index 00000000000..52c457c7840 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationStorageClass.java @@ -0,0 +1,89 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** S3 storage class. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineAmazonS3DestinationStorageClass + .ObservabilityPipelineAmazonS3DestinationStorageClassSerializer.class) +public class ObservabilityPipelineAmazonS3DestinationStorageClass extends ModelEnum { + + private static final Set allowedValues = + new HashSet( + Arrays.asList( + "STANDARD", + "REDUCED_REDUNDANCY", + "INTELLIGENT_TIERING", + "STANDARD_IA", + "EXPRESS_ONEZONE", + "ONEZONE_IA", + "GLACIER", + "GLACIER_IR", + "DEEP_ARCHIVE")); + + public static final ObservabilityPipelineAmazonS3DestinationStorageClass STANDARD = + new ObservabilityPipelineAmazonS3DestinationStorageClass("STANDARD"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass REDUCED_REDUNDANCY = + new ObservabilityPipelineAmazonS3DestinationStorageClass("REDUCED_REDUNDANCY"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass INTELLIGENT_TIERING = + new ObservabilityPipelineAmazonS3DestinationStorageClass("INTELLIGENT_TIERING"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass STANDARD_IA = + new ObservabilityPipelineAmazonS3DestinationStorageClass("STANDARD_IA"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass EXPRESS_ONEZONE = + new ObservabilityPipelineAmazonS3DestinationStorageClass("EXPRESS_ONEZONE"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass ONEZONE_IA = + new ObservabilityPipelineAmazonS3DestinationStorageClass("ONEZONE_IA"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass GLACIER = + new ObservabilityPipelineAmazonS3DestinationStorageClass("GLACIER"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass GLACIER_IR = + new ObservabilityPipelineAmazonS3DestinationStorageClass("GLACIER_IR"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass DEEP_ARCHIVE = + new ObservabilityPipelineAmazonS3DestinationStorageClass("DEEP_ARCHIVE"); + + 
ObservabilityPipelineAmazonS3DestinationStorageClass(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAmazonS3DestinationStorageClassSerializer + extends StdSerializer { + public ObservabilityPipelineAmazonS3DestinationStorageClassSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAmazonS3DestinationStorageClassSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineAmazonS3DestinationStorageClass value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAmazonS3DestinationStorageClass fromValue(String value) { + return new ObservabilityPipelineAmazonS3DestinationStorageClass(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationType.java new file mode 100644 index 00000000000..045b92c06b3 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. Always amazon_s3. */ +@JsonSerialize( + using = + ObservabilityPipelineAmazonS3DestinationType + .ObservabilityPipelineAmazonS3DestinationTypeSerializer.class) +public class ObservabilityPipelineAmazonS3DestinationType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("amazon_s3")); + + public static final ObservabilityPipelineAmazonS3DestinationType AMAZON_S3 = + new ObservabilityPipelineAmazonS3DestinationType("amazon_s3"); + + ObservabilityPipelineAmazonS3DestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAmazonS3DestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineAmazonS3DestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAmazonS3DestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineAmazonS3DestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAmazonS3DestinationType fromValue(String value) { + return new ObservabilityPipelineAmazonS3DestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java 
b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java new file mode 100644 index 00000000000..2961c4fec42 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java @@ -0,0 +1,271 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS + * authentication and TLS encryption. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_AUTH, + ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_ID, + ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_REGION, + ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_TLS, + ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAmazonS3Source { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_AUTH = "auth"; + private ObservabilityPipelineAwsAuth auth; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_REGION = "region"; + private String region; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineAmazonS3SourceType type = + ObservabilityPipelineAmazonS3SourceType.AMAZON_S3; + + public ObservabilityPipelineAmazonS3Source() {} + + @JsonCreator + public ObservabilityPipelineAmazonS3Source( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_REGION) String region, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineAmazonS3SourceType type) { + this.id = id; + this.region = region; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineAmazonS3Source auth(ObservabilityPipelineAwsAuth auth) { + this.auth = auth; + this.unparsed |= auth.unparsed; + return this; + } + + /** + * AWS authentication credentials used for accessing AWS services such as S3. If omitted, the + * system’s default credentials are used (for example, the IAM role and environment variables). 
+ * + * @return auth + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_AUTH) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineAwsAuth getAuth() { + return auth; + } + + public void setAuth(ObservabilityPipelineAwsAuth auth) { + this.auth = auth; + } + + public ObservabilityPipelineAmazonS3Source id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineAmazonS3Source region(String region) { + this.region = region; + return this; + } + + /** + * AWS region where the S3 bucket resides. + * + * @return region + */ + @JsonProperty(JSON_PROPERTY_REGION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getRegion() { + return region; + } + + public void setRegion(String region) { + this.region = region; + } + + public ObservabilityPipelineAmazonS3Source tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineAmazonS3Source type(ObservabilityPipelineAmazonS3SourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. Always amazon_s3. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAmazonS3SourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineAmazonS3SourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAmazonS3Source + */ + @JsonAnySetter + public ObservabilityPipelineAmazonS3Source putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineAmazonS3Source object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAmazonS3Source observabilityPipelineAmazonS3Source = + (ObservabilityPipelineAmazonS3Source) o; + return Objects.equals(this.auth, observabilityPipelineAmazonS3Source.auth) + && Objects.equals(this.id, observabilityPipelineAmazonS3Source.id) + && Objects.equals(this.region, observabilityPipelineAmazonS3Source.region) + && Objects.equals(this.tls, observabilityPipelineAmazonS3Source.tls) + && Objects.equals(this.type, observabilityPipelineAmazonS3Source.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineAmazonS3Source.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(auth, id, region, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAmazonS3Source {\n"); + sb.append(" auth: ").append(toIndentedString(auth)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" region: ").append(toIndentedString(region)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3SourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3SourceType.java new file mode 100644 index 00000000000..8546c2103ba --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3SourceType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. Always amazon_s3. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineAmazonS3SourceType.ObservabilityPipelineAmazonS3SourceTypeSerializer + .class) +public class ObservabilityPipelineAmazonS3SourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("amazon_s3")); + + public static final ObservabilityPipelineAmazonS3SourceType AMAZON_S3 = + new ObservabilityPipelineAmazonS3SourceType("amazon_s3"); + + ObservabilityPipelineAmazonS3SourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAmazonS3SourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineAmazonS3SourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAmazonS3SourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineAmazonS3SourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAmazonS3SourceType fromValue(String value) { + return new ObservabilityPipelineAmazonS3SourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAwsAuth.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAwsAuth.java new file mode 100644 index 00000000000..159b2782bb3 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAwsAuth.java @@ -0,0 +1,195 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * AWS authentication credentials used for accessing AWS services such as S3. If omitted, the + * system’s default credentials are used (for example, the IAM role and environment variables). + */ +@JsonPropertyOrder({ + ObservabilityPipelineAwsAuth.JSON_PROPERTY_ASSUME_ROLE, + ObservabilityPipelineAwsAuth.JSON_PROPERTY_EXTERNAL_ID, + ObservabilityPipelineAwsAuth.JSON_PROPERTY_SESSION_NAME +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAwsAuth { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ASSUME_ROLE = "assume_role"; + private String assumeRole; + + public static final String JSON_PROPERTY_EXTERNAL_ID = "external_id"; + private String externalId; + + public static final String JSON_PROPERTY_SESSION_NAME = "session_name"; + private String sessionName; + + public ObservabilityPipelineAwsAuth assumeRole(String assumeRole) { + this.assumeRole = assumeRole; + return this; + } + + /** + * The Amazon Resource Name (ARN) of the role to assume. 
+ * + * @return assumeRole + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ASSUME_ROLE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getAssumeRole() { + return assumeRole; + } + + public void setAssumeRole(String assumeRole) { + this.assumeRole = assumeRole; + } + + public ObservabilityPipelineAwsAuth externalId(String externalId) { + this.externalId = externalId; + return this; + } + + /** + * A unique identifier for cross-account role assumption. + * + * @return externalId + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_EXTERNAL_ID) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getExternalId() { + return externalId; + } + + public void setExternalId(String externalId) { + this.externalId = externalId; + } + + public ObservabilityPipelineAwsAuth sessionName(String sessionName) { + this.sessionName = sessionName; + return this; + } + + /** + * A session identifier used for logging and tracing the assumed role session. + * + * @return sessionName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SESSION_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getSessionName() { + return sessionName; + } + + public void setSessionName(String sessionName) { + this.sessionName = sessionName; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAwsAuth + */ + @JsonAnySetter + public ObservabilityPipelineAwsAuth putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineAwsAuth object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAwsAuth observabilityPipelineAwsAuth = (ObservabilityPipelineAwsAuth) o; + return Objects.equals(this.assumeRole, observabilityPipelineAwsAuth.assumeRole) + && Objects.equals(this.externalId, observabilityPipelineAwsAuth.externalId) + && Objects.equals(this.sessionName, observabilityPipelineAwsAuth.sessionName) + && Objects.equals( + this.additionalProperties, observabilityPipelineAwsAuth.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(assumeRole, externalId, sessionName, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAwsAuth {\n"); + sb.append(" assumeRole: ").append(toIndentedString(assumeRole)).append("\n"); + sb.append(" externalId: ").append(toIndentedString(externalId)).append("\n"); + sb.append(" sessionName: ").append(toIndentedString(sessionName)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java index 0be81362fe3..cf7863c4c01 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfig.java @@ -33,7 +33,7 @@ public class ObservabilityPipelineConfig { private List destinations = new ArrayList<>(); public static final String JSON_PROPERTY_PROCESSORS = "processors"; - private List processors = new ArrayList<>(); + private List processors = null; public static final String JSON_PROPERTY_SOURCES = "sources"; private List sources = new ArrayList<>(); @@ -44,12 +44,9 @@ public ObservabilityPipelineConfig() {} public ObservabilityPipelineConfig( @JsonProperty(required = true, value = JSON_PROPERTY_DESTINATIONS) List destinations, - @JsonProperty(required = true, value = JSON_PROPERTY_PROCESSORS) - List processors, @JsonProperty(required = true, value = JSON_PROPERTY_SOURCES) List sources) { this.destinations = destinations; - this.processors = processors; this.sources = sources; } @@ -95,6 +92,9 @@ public ObservabilityPipelineConfig processors( public ObservabilityPipelineConfig addProcessorsItem( ObservabilityPipelineConfigProcessorItem processorsItem) { + if (this.processors == null) { + this.processors = new ArrayList<>(); + } this.processors.add(processorsItem); this.unparsed |= processorsItem.unparsed; return this; @@ -105,8 +105,9 @@ public ObservabilityPipelineConfig addProcessorsItem( * * @return processors */ + @jakarta.annotation.Nullable @JsonProperty(JSON_PROPERTY_PROCESSORS) - @JsonInclude(value = JsonInclude.Include.ALWAYS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public List getProcessors() { return processors; } 
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java index f6d74a9f38a..2f3bfec28a0 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java @@ -142,6 +142,719 @@ public ObservabilityPipelineConfigDestinationItem deserialize( e); } + // deserialize ObservabilityPipelineAmazonS3Destination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineAmazonS3Destination.class.equals(Integer.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Long.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Float.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Double.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Boolean.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineAmazonS3Destination.class.equals(Integer.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineAmazonS3Destination.class.equals(Float.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineAmazonS3Destination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineAmazonS3Destination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp 
= + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineAmazonS3Destination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineAmazonS3Destination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineAmazonS3Destination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineAmazonS3Destination'", + e); + } + + // deserialize ObservabilityPipelineGoogleCloudStorageDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Long.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Double.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals( + Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals( + Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + 
(ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineGoogleCloudStorageDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineGoogleCloudStorageDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineGoogleCloudStorageDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineGoogleCloudStorageDestination'", + e); + } + + // deserialize ObservabilityPipelineSplunkHecDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(Long.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(Float.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + 
((ObservabilityPipelineSplunkHecDestination.class.equals(Float.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSplunkHecDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSplunkHecDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSplunkHecDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSplunkHecDestination'", + e); + } + + // deserialize ObservabilityPipelineSumoLogicDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSumoLogicDestination.class.equals(Integer.class) + || ObservabilityPipelineSumoLogicDestination.class.equals(Long.class) + || ObservabilityPipelineSumoLogicDestination.class.equals(Float.class) + || ObservabilityPipelineSumoLogicDestination.class.equals(Double.class) + || ObservabilityPipelineSumoLogicDestination.class.equals(Boolean.class) + || ObservabilityPipelineSumoLogicDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + 
((ObservabilityPipelineSumoLogicDestination.class.equals(Integer.class) + || ObservabilityPipelineSumoLogicDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSumoLogicDestination.class.equals(Float.class) + || ObservabilityPipelineSumoLogicDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSumoLogicDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSumoLogicDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSumoLogicDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSumoLogicDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineSumoLogicDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSumoLogicDestination'", + e); + } + + // deserialize ObservabilityPipelineElasticsearchDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineElasticsearchDestination.class.equals(Integer.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Long.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Float.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Double.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Boolean.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineElasticsearchDestination.class.equals(Integer.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineElasticsearchDestination.class.equals(Float.class) + || ObservabilityPipelineElasticsearchDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineElasticsearchDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineElasticsearchDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + 
.readValueAs(ObservabilityPipelineElasticsearchDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineElasticsearchDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineElasticsearchDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineElasticsearchDestination'", + e); + } + + // deserialize ObservabilityPipelineRsyslogDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineRsyslogDestination.class.equals(Integer.class) + || ObservabilityPipelineRsyslogDestination.class.equals(Long.class) + || ObservabilityPipelineRsyslogDestination.class.equals(Float.class) + || ObservabilityPipelineRsyslogDestination.class.equals(Double.class) + || ObservabilityPipelineRsyslogDestination.class.equals(Boolean.class) + || ObservabilityPipelineRsyslogDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineRsyslogDestination.class.equals(Integer.class) + || ObservabilityPipelineRsyslogDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineRsyslogDestination.class.equals(Float.class) + || ObservabilityPipelineRsyslogDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineRsyslogDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= 
+ (ObservabilityPipelineRsyslogDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineRsyslogDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineRsyslogDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineRsyslogDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineRsyslogDestination'", + e); + } + + // deserialize ObservabilityPipelineSyslogNgDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSyslogNgDestination.class.equals(Integer.class) + || ObservabilityPipelineSyslogNgDestination.class.equals(Long.class) + || ObservabilityPipelineSyslogNgDestination.class.equals(Float.class) + || ObservabilityPipelineSyslogNgDestination.class.equals(Double.class) + || ObservabilityPipelineSyslogNgDestination.class.equals(Boolean.class) + || ObservabilityPipelineSyslogNgDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSyslogNgDestination.class.equals(Integer.class) + || ObservabilityPipelineSyslogNgDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSyslogNgDestination.class.equals(Float.class) + || ObservabilityPipelineSyslogNgDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing 
|= + (ObservabilityPipelineSyslogNgDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSyslogNgDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSyslogNgDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSyslogNgDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineSyslogNgDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSyslogNgDestination'", + e); + } + + // deserialize AzureStorageDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (AzureStorageDestination.class.equals(Integer.class) + || AzureStorageDestination.class.equals(Long.class) + || AzureStorageDestination.class.equals(Float.class) + || AzureStorageDestination.class.equals(Double.class) + || AzureStorageDestination.class.equals(Boolean.class) + || AzureStorageDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((AzureStorageDestination.class.equals(Integer.class) + || AzureStorageDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((AzureStorageDestination.class.equals(Float.class) + || AzureStorageDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + 
(AzureStorageDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (AzureStorageDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(AzureStorageDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((AzureStorageDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'AzureStorageDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log(Level.FINER, "Input data does not match schema 'AzureStorageDestination'", e); + } + + // deserialize MicrosoftSentinelDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (MicrosoftSentinelDestination.class.equals(Integer.class) + || MicrosoftSentinelDestination.class.equals(Long.class) + || MicrosoftSentinelDestination.class.equals(Float.class) + || MicrosoftSentinelDestination.class.equals(Double.class) + || MicrosoftSentinelDestination.class.equals(Boolean.class) + || MicrosoftSentinelDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((MicrosoftSentinelDestination.class.equals(Integer.class) + || MicrosoftSentinelDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((MicrosoftSentinelDestination.class.equals(Float.class) + || MicrosoftSentinelDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (MicrosoftSentinelDestination.class.equals(Boolean.class) + && (token == 
JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (MicrosoftSentinelDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(MicrosoftSentinelDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((MicrosoftSentinelDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'MicrosoftSentinelDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log(Level.FINER, "Input data does not match schema 'MicrosoftSentinelDestination'", e); + } + + // deserialize ObservabilityPipelineGoogleChronicleDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineGoogleChronicleDestination.class.equals(Integer.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(Long.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(Float.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(Double.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(Boolean.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineGoogleChronicleDestination.class.equals(Integer.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineGoogleChronicleDestination.class.equals(Float.class) + || ObservabilityPipelineGoogleChronicleDestination.class.equals( + Double.class)) + && (token == 
JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineGoogleChronicleDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineGoogleChronicleDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineGoogleChronicleDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineGoogleChronicleDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineGoogleChronicleDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineGoogleChronicleDestination'", + e); + } + + // deserialize ObservabilityPipelineNewRelicDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineNewRelicDestination.class.equals(Integer.class) + || ObservabilityPipelineNewRelicDestination.class.equals(Long.class) + || ObservabilityPipelineNewRelicDestination.class.equals(Float.class) + || ObservabilityPipelineNewRelicDestination.class.equals(Double.class) + || ObservabilityPipelineNewRelicDestination.class.equals(Boolean.class) + || ObservabilityPipelineNewRelicDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineNewRelicDestination.class.equals(Integer.class) + || ObservabilityPipelineNewRelicDestination.class.equals(Long.class)) + && 
token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineNewRelicDestination.class.equals(Float.class) + || ObservabilityPipelineNewRelicDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineNewRelicDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineNewRelicDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineNewRelicDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineNewRelicDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineNewRelicDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineNewRelicDestination'", + e); + } + + // deserialize ObservabilityPipelineSentinelOneDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSentinelOneDestination.class.equals(Integer.class) + || ObservabilityPipelineSentinelOneDestination.class.equals(Long.class) + || ObservabilityPipelineSentinelOneDestination.class.equals(Float.class) + || ObservabilityPipelineSentinelOneDestination.class.equals(Double.class) + || ObservabilityPipelineSentinelOneDestination.class.equals(Boolean.class) + || ObservabilityPipelineSentinelOneDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; 
+ if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSentinelOneDestination.class.equals(Integer.class) + || ObservabilityPipelineSentinelOneDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSentinelOneDestination.class.equals(Float.class) + || ObservabilityPipelineSentinelOneDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSentinelOneDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSentinelOneDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSentinelOneDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSentinelOneDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineSentinelOneDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSentinelOneDestination'", + e); + } + + // deserialize ObservabilityPipelineOpenSearchDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineOpenSearchDestination.class.equals(Integer.class) + || ObservabilityPipelineOpenSearchDestination.class.equals(Long.class) + || ObservabilityPipelineOpenSearchDestination.class.equals(Float.class) + || ObservabilityPipelineOpenSearchDestination.class.equals(Double.class) + || ObservabilityPipelineOpenSearchDestination.class.equals(Boolean.class) + || ObservabilityPipelineOpenSearchDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineOpenSearchDestination.class.equals(Integer.class) + || ObservabilityPipelineOpenSearchDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineOpenSearchDestination.class.equals(Float.class) + || ObservabilityPipelineOpenSearchDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineOpenSearchDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineOpenSearchDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineOpenSearchDestination.class); + // 
TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineOpenSearchDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineOpenSearchDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineOpenSearchDestination'", + e); + } + + // deserialize ObservabilityPipelineAmazonOpenSearchDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Integer.class) + || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Long.class) + || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Float.class) + || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Double.class) + || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Boolean.class) + || ObservabilityPipelineAmazonOpenSearchDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Integer.class) + || ObservabilityPipelineAmazonOpenSearchDestination.class.equals( + Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Float.class) + || ObservabilityPipelineAmazonOpenSearchDestination.class.equals( + Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == 
JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineAmazonOpenSearchDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineAmazonOpenSearchDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineAmazonOpenSearchDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineAmazonOpenSearchDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineAmazonOpenSearchDestination'", + e); + } + ObservabilityPipelineConfigDestinationItem ret = new ObservabilityPipelineConfigDestinationItem(); if (match == 1) { @@ -178,10 +891,122 @@ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineDatadogLo setActualInstance(o); } + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineAmazonS3Destination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem( + ObservabilityPipelineGoogleCloudStorageDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSplunkHecDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSumoLogicDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem( + ObservabilityPipelineElasticsearchDestination o) { + super("oneOf", Boolean.FALSE); + 
setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineRsyslogDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSyslogNgDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem(AzureStorageDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem(MicrosoftSentinelDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem( + ObservabilityPipelineGoogleChronicleDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineNewRelicDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSentinelOneDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineOpenSearchDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem( + ObservabilityPipelineAmazonOpenSearchDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + static { schemas.put( "ObservabilityPipelineDatadogLogsDestination", new GenericType() {}); + schemas.put( + "ObservabilityPipelineAmazonS3Destination", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineGoogleCloudStorageDestination", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSplunkHecDestination", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSumoLogicDestination", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineElasticsearchDestination", + new 
GenericType() {}); + schemas.put( + "ObservabilityPipelineRsyslogDestination", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSyslogNgDestination", + new GenericType() {}); + schemas.put("AzureStorageDestination", new GenericType() {}); + schemas.put("MicrosoftSentinelDestination", new GenericType() {}); + schemas.put( + "ObservabilityPipelineGoogleChronicleDestination", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineNewRelicDestination", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSentinelOneDestination", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineOpenSearchDestination", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineAmazonOpenSearchDestination", + new GenericType() {}); JSON.registerDescendants( ObservabilityPipelineConfigDestinationItem.class, Collections.unmodifiableMap(schemas)); } @@ -193,7 +1018,14 @@ public Map getSchemas() { /** * Set the instance that matches the oneOf child schema, check the instance parameter is valid - * against the oneOf child schemas: ObservabilityPipelineDatadogLogsDestination + * against the oneOf child schemas: ObservabilityPipelineDatadogLogsDestination, + * ObservabilityPipelineAmazonS3Destination, ObservabilityPipelineGoogleCloudStorageDestination, + * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination, + * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination, + * ObservabilityPipelineSyslogNgDestination, AzureStorageDestination, + * MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination, + * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination, + * ObservabilityPipelineOpenSearchDestination, ObservabilityPipelineAmazonOpenSearchDestination * *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -205,20 +1037,120 @@ public void setActualInstance(Object instance) { super.setActualInstance(instance); return; } + if (JSON.isInstanceOf( + ObservabilityPipelineAmazonS3Destination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineGoogleCloudStorageDestination.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSplunkHecDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSumoLogicDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineElasticsearchDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineRsyslogDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSyslogNgDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf(AzureStorageDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf(MicrosoftSentinelDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineGoogleChronicleDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineNewRelicDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + 
ObservabilityPipelineSentinelOneDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineOpenSearchDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineAmazonOpenSearchDestination.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } throw new RuntimeException( - "Invalid instance type. Must be ObservabilityPipelineDatadogLogsDestination"); + "Invalid instance type. Must be ObservabilityPipelineDatadogLogsDestination," + + " ObservabilityPipelineAmazonS3Destination," + + " ObservabilityPipelineGoogleCloudStorageDestination," + + " ObservabilityPipelineSplunkHecDestination," + + " ObservabilityPipelineSumoLogicDestination," + + " ObservabilityPipelineElasticsearchDestination," + + " ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination," + + " AzureStorageDestination, MicrosoftSentinelDestination," + + " ObservabilityPipelineGoogleChronicleDestination," + + " ObservabilityPipelineNewRelicDestination," + + " ObservabilityPipelineSentinelOneDestination," + + " ObservabilityPipelineOpenSearchDestination," + + " ObservabilityPipelineAmazonOpenSearchDestination"); } /** * Get the actual instance, which can be the following: - * ObservabilityPipelineDatadogLogsDestination + * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineAmazonS3Destination, + * ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination, + * ObservabilityPipelineSumoLogicDestination, ObservabilityPipelineElasticsearchDestination, + * ObservabilityPipelineRsyslogDestination, ObservabilityPipelineSyslogNgDestination, + * AzureStorageDestination, MicrosoftSentinelDestination, + * 
ObservabilityPipelineGoogleChronicleDestination, ObservabilityPipelineNewRelicDestination, + * ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination, + * ObservabilityPipelineAmazonOpenSearchDestination * - * @return The actual instance (ObservabilityPipelineDatadogLogsDestination) + * @return The actual instance (ObservabilityPipelineDatadogLogsDestination, + * ObservabilityPipelineAmazonS3Destination, + * ObservabilityPipelineGoogleCloudStorageDestination, + * ObservabilityPipelineSplunkHecDestination, ObservabilityPipelineSumoLogicDestination, + * ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineRsyslogDestination, + * ObservabilityPipelineSyslogNgDestination, AzureStorageDestination, + * MicrosoftSentinelDestination, ObservabilityPipelineGoogleChronicleDestination, + * ObservabilityPipelineNewRelicDestination, ObservabilityPipelineSentinelOneDestination, + * ObservabilityPipelineOpenSearchDestination, + * ObservabilityPipelineAmazonOpenSearchDestination) */ @Override public Object getActualInstance() { @@ -237,4 +1169,179 @@ public Object getActualInstance() { getObservabilityPipelineDatadogLogsDestination() throws ClassCastException { return (ObservabilityPipelineDatadogLogsDestination) super.getActualInstance(); } + + /** + * Get the actual instance of `ObservabilityPipelineAmazonS3Destination`. If the actual instance + * is not `ObservabilityPipelineAmazonS3Destination`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineAmazonS3Destination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAmazonS3Destination` + */ + public ObservabilityPipelineAmazonS3Destination getObservabilityPipelineAmazonS3Destination() + throws ClassCastException { + return (ObservabilityPipelineAmazonS3Destination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineGoogleCloudStorageDestination`. 
If the actual + * instance is not `ObservabilityPipelineGoogleCloudStorageDestination`, the ClassCastException + * will be thrown. + * + * @return The actual instance of `ObservabilityPipelineGoogleCloudStorageDestination` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineGoogleCloudStorageDestination` + */ + public ObservabilityPipelineGoogleCloudStorageDestination + getObservabilityPipelineGoogleCloudStorageDestination() throws ClassCastException { + return (ObservabilityPipelineGoogleCloudStorageDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSplunkHecDestination`. If the actual instance + * is not `ObservabilityPipelineSplunkHecDestination`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSplunkHecDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecDestination` + */ + public ObservabilityPipelineSplunkHecDestination getObservabilityPipelineSplunkHecDestination() + throws ClassCastException { + return (ObservabilityPipelineSplunkHecDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSumoLogicDestination`. If the actual instance + * is not `ObservabilityPipelineSumoLogicDestination`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSumoLogicDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSumoLogicDestination` + */ + public ObservabilityPipelineSumoLogicDestination getObservabilityPipelineSumoLogicDestination() + throws ClassCastException { + return (ObservabilityPipelineSumoLogicDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineElasticsearchDestination`. If the actual + * instance is not `ObservabilityPipelineElasticsearchDestination`, the ClassCastException will be + * thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineElasticsearchDestination` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineElasticsearchDestination` + */ + public ObservabilityPipelineElasticsearchDestination + getObservabilityPipelineElasticsearchDestination() throws ClassCastException { + return (ObservabilityPipelineElasticsearchDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineRsyslogDestination`. If the actual instance is + * not `ObservabilityPipelineRsyslogDestination`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineRsyslogDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineRsyslogDestination` + */ + public ObservabilityPipelineRsyslogDestination getObservabilityPipelineRsyslogDestination() + throws ClassCastException { + return (ObservabilityPipelineRsyslogDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSyslogNgDestination`. If the actual instance + * is not `ObservabilityPipelineSyslogNgDestination`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSyslogNgDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSyslogNgDestination` + */ + public ObservabilityPipelineSyslogNgDestination getObservabilityPipelineSyslogNgDestination() + throws ClassCastException { + return (ObservabilityPipelineSyslogNgDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `AzureStorageDestination`. If the actual instance is not + * `AzureStorageDestination`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `AzureStorageDestination` + * @throws ClassCastException if the instance is not `AzureStorageDestination` + */ + public AzureStorageDestination getAzureStorageDestination() throws ClassCastException { + return (AzureStorageDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `MicrosoftSentinelDestination`. If the actual instance is not + * `MicrosoftSentinelDestination`, the ClassCastException will be thrown. + * + * @return The actual instance of `MicrosoftSentinelDestination` + * @throws ClassCastException if the instance is not `MicrosoftSentinelDestination` + */ + public MicrosoftSentinelDestination getMicrosoftSentinelDestination() throws ClassCastException { + return (MicrosoftSentinelDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineGoogleChronicleDestination`. If the actual + * instance is not `ObservabilityPipelineGoogleChronicleDestination`, the ClassCastException will + * be thrown. + * + * @return The actual instance of `ObservabilityPipelineGoogleChronicleDestination` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineGoogleChronicleDestination` + */ + public ObservabilityPipelineGoogleChronicleDestination + getObservabilityPipelineGoogleChronicleDestination() throws ClassCastException { + return (ObservabilityPipelineGoogleChronicleDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineNewRelicDestination`. If the actual instance + * is not `ObservabilityPipelineNewRelicDestination`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineNewRelicDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineNewRelicDestination` + */ + public ObservabilityPipelineNewRelicDestination getObservabilityPipelineNewRelicDestination() + throws ClassCastException { + return (ObservabilityPipelineNewRelicDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSentinelOneDestination`. If the actual + * instance is not `ObservabilityPipelineSentinelOneDestination`, the ClassCastException will be + * thrown. + * + * @return The actual instance of `ObservabilityPipelineSentinelOneDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSentinelOneDestination` + */ + public ObservabilityPipelineSentinelOneDestination + getObservabilityPipelineSentinelOneDestination() throws ClassCastException { + return (ObservabilityPipelineSentinelOneDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineOpenSearchDestination`. If the actual instance + * is not `ObservabilityPipelineOpenSearchDestination`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineOpenSearchDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineOpenSearchDestination` + */ + public ObservabilityPipelineOpenSearchDestination getObservabilityPipelineOpenSearchDestination() + throws ClassCastException { + return (ObservabilityPipelineOpenSearchDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineAmazonOpenSearchDestination`. If the actual + * instance is not `ObservabilityPipelineAmazonOpenSearchDestination`, the ClassCastException will + * be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineAmazonOpenSearchDestination` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineAmazonOpenSearchDestination` + */ + public ObservabilityPipelineAmazonOpenSearchDestination + getObservabilityPipelineAmazonOpenSearchDestination() throws ClassCastException { + return (ObservabilityPipelineAmazonOpenSearchDestination) super.getActualInstance(); + } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java index aa99b3edb23..18000ccfacd 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java @@ -393,6 +393,515 @@ public ObservabilityPipelineConfigProcessorItem deserialize( e); } + // deserialize ObservabilityPipelineGenerateMetricsProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) + || 
ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineGenerateMetricsProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineGenerateMetricsProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineGenerateMetricsProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineGenerateMetricsProcessor'", + e); + } + + // deserialize ObservabilityPipelineSampleProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSampleProcessor.class.equals(Integer.class) + || ObservabilityPipelineSampleProcessor.class.equals(Long.class) + || ObservabilityPipelineSampleProcessor.class.equals(Float.class) + || ObservabilityPipelineSampleProcessor.class.equals(Double.class) + || ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) + || ObservabilityPipelineSampleProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSampleProcessor.class.equals(Integer.class) + || 
ObservabilityPipelineSampleProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSampleProcessor.class.equals(Float.class) + || ObservabilityPipelineSampleProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSampleProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSampleProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSampleProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSampleProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSampleProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSampleProcessor'", + e); + } + + // deserialize ObservabilityPipelineParseGrokProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(String.class)) { + attemptParsing = 
typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineParseGrokProcessor.class.equals(Integer.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineParseGrokProcessor.class.equals(Float.class) + || ObservabilityPipelineParseGrokProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineParseGrokProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineParseGrokProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineParseGrokProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineParseGrokProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineParseGrokProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineParseGrokProcessor'", + e); + } + + // deserialize ObservabilityPipelineSensitiveDataScannerProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( + Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessor.class.equals( + Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + 
tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSensitiveDataScannerProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSensitiveDataScannerProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineSensitiveDataScannerProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSensitiveDataScannerProcessor'", + e); + } + + // deserialize ObservabilityPipelineOcsfMapperProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Integer.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineOcsfMapperProcessor.class.equals(Float.class) + || ObservabilityPipelineOcsfMapperProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineOcsfMapperProcessor.class.equals(Boolean.class) + && (token == 
JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineOcsfMapperProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineOcsfMapperProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineOcsfMapperProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineOcsfMapperProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineOcsfMapperProcessor'", + e); + } + + // deserialize ObservabilityPipelineAddEnvVarsProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Integer.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineAddEnvVarsProcessor.class.equals(Float.class) + || ObservabilityPipelineAddEnvVarsProcessor.class.equals(Double.class)) + && (token == 
JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineAddEnvVarsProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineAddEnvVarsProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineAddEnvVarsProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineAddEnvVarsProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineAddEnvVarsProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineAddEnvVarsProcessor'", + e); + } + + // deserialize ObservabilityPipelineDedupeProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Long.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Float.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Double.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) + || ObservabilityPipelineDedupeProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineDedupeProcessor.class.equals(Integer.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + 
((ObservabilityPipelineDedupeProcessor.class.equals(Float.class) + || ObservabilityPipelineDedupeProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineDedupeProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineDedupeProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineDedupeProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineDedupeProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineDedupeProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineDedupeProcessor'", + e); + } + + // deserialize ObservabilityPipelineEnrichmentTableProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + 
((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Integer.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineEnrichmentTableProcessor.class.equals(Float.class) + || ObservabilityPipelineEnrichmentTableProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineEnrichmentTableProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineEnrichmentTableProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineEnrichmentTableProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineEnrichmentTableProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineEnrichmentTableProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineEnrichmentTableProcessor'", + e); + } + + // deserialize ObservabilityPipelineReduceProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineReduceProcessor.class.equals(Integer.class) + || ObservabilityPipelineReduceProcessor.class.equals(Long.class) + || ObservabilityPipelineReduceProcessor.class.equals(Float.class) + || ObservabilityPipelineReduceProcessor.class.equals(Double.class) + || ObservabilityPipelineReduceProcessor.class.equals(Boolean.class) + || ObservabilityPipelineReduceProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineReduceProcessor.class.equals(Integer.class) + || ObservabilityPipelineReduceProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineReduceProcessor.class.equals(Float.class) + || ObservabilityPipelineReduceProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineReduceProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineReduceProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineReduceProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, 
enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineReduceProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineReduceProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineReduceProcessor'", + e); + } + + // deserialize ObservabilityPipelineThrottleProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Long.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Float.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Double.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) + || ObservabilityPipelineThrottleProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineThrottleProcessor.class.equals(Integer.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineThrottleProcessor.class.equals(Float.class) + || ObservabilityPipelineThrottleProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineThrottleProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineThrottleProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + 
.readValueAs(ObservabilityPipelineThrottleProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineThrottleProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineThrottleProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineThrottleProcessor'", + e); + } + ObservabilityPipelineConfigProcessorItem ret = new ObservabilityPipelineConfigProcessorItem(); if (match == 1) { ret.setActualInstance(deserialized); @@ -453,6 +962,57 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameField setActualInstance(o); } + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineGenerateMetricsProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineSampleProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineParseGrokProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem( + ObservabilityPipelineSensitiveDataScannerProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineOcsfMapperProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineAddEnvVarsProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineDedupeProcessor o) { + 
super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineEnrichmentTableProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineReduceProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineThrottleProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + static { schemas.put( "ObservabilityPipelineFilterProcessor", @@ -472,6 +1032,36 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameField schemas.put( "ObservabilityPipelineRenameFieldsProcessor", new GenericType() {}); + schemas.put( + "ObservabilityPipelineGenerateMetricsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSampleProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineParseGrokProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSensitiveDataScannerProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineOcsfMapperProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineAddEnvVarsProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineDedupeProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineEnrichmentTableProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineReduceProcessor", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineThrottleProcessor", + new GenericType() {}); JSON.registerDescendants( ObservabilityPipelineConfigProcessorItem.class, Collections.unmodifiableMap(schemas)); } @@ -486,7 +1076,12 @@ public Map getSchemas() { * against the oneOf child schemas: ObservabilityPipelineFilterProcessor, * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, * 
ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor + * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor, + * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineSensitiveDataScannerProcessor, ObservabilityPipelineOcsfMapperProcessor, + * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineDedupeProcessor, + * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineReduceProcessor, + * ObservabilityPipelineThrottleProcessor * *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -523,6 +1118,58 @@ public void setActualInstance(Object instance) { super.setActualInstance(instance); return; } + if (JSON.isInstanceOf( + ObservabilityPipelineGenerateMetricsProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSampleProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineParseGrokProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSensitiveDataScannerProcessor.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineOcsfMapperProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineAddEnvVarsProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineDedupeProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineEnrichmentTableProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineReduceProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineThrottleProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { super.setActualInstance(instance); @@ -533,19 +1180,36 @@ public void setActualInstance(Object instance) { + " 
ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor," + " ObservabilityPipelineAddFieldsProcessor," + " ObservabilityPipelineRemoveFieldsProcessor," - + " ObservabilityPipelineRenameFieldsProcessor"); + + " ObservabilityPipelineRenameFieldsProcessor," + + " ObservabilityPipelineGenerateMetricsProcessor," + + " ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor," + + " ObservabilityPipelineSensitiveDataScannerProcessor," + + " ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineAddEnvVarsProcessor," + + " ObservabilityPipelineDedupeProcessor," + + " ObservabilityPipelineEnrichmentTableProcessor," + + " ObservabilityPipelineReduceProcessor, ObservabilityPipelineThrottleProcessor"); } /** * Get the actual instance, which can be the following: ObservabilityPipelineFilterProcessor, * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor + * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor, + * ObservabilityPipelineSampleProcessor, ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineSensitiveDataScannerProcessor, ObservabilityPipelineOcsfMapperProcessor, + * ObservabilityPipelineAddEnvVarsProcessor, ObservabilityPipelineDedupeProcessor, + * ObservabilityPipelineEnrichmentTableProcessor, ObservabilityPipelineReduceProcessor, + * ObservabilityPipelineThrottleProcessor * * @return The actual instance (ObservabilityPipelineFilterProcessor, * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor) + * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor, + * ObservabilityPipelineSampleProcessor, 
ObservabilityPipelineParseGrokProcessor, + * ObservabilityPipelineSensitiveDataScannerProcessor, + * ObservabilityPipelineOcsfMapperProcessor, ObservabilityPipelineAddEnvVarsProcessor, + * ObservabilityPipelineDedupeProcessor, ObservabilityPipelineEnrichmentTableProcessor, + * ObservabilityPipelineReduceProcessor, ObservabilityPipelineThrottleProcessor) */ @Override public Object getActualInstance() { @@ -623,4 +1287,130 @@ public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRename throws ClassCastException { return (ObservabilityPipelineRenameFieldsProcessor) super.getActualInstance(); } + + /** + * Get the actual instance of `ObservabilityPipelineGenerateMetricsProcessor`. If the actual + * instance is not `ObservabilityPipelineGenerateMetricsProcessor`, the ClassCastException will be + * thrown. + * + * @return The actual instance of `ObservabilityPipelineGenerateMetricsProcessor` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineGenerateMetricsProcessor` + */ + public ObservabilityPipelineGenerateMetricsProcessor + getObservabilityPipelineGenerateMetricsProcessor() throws ClassCastException { + return (ObservabilityPipelineGenerateMetricsProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSampleProcessor`. If the actual instance is + * not `ObservabilityPipelineSampleProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSampleProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSampleProcessor` + */ + public ObservabilityPipelineSampleProcessor getObservabilityPipelineSampleProcessor() + throws ClassCastException { + return (ObservabilityPipelineSampleProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineParseGrokProcessor`. 
If the actual instance is + * not `ObservabilityPipelineParseGrokProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineParseGrokProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineParseGrokProcessor` + */ + public ObservabilityPipelineParseGrokProcessor getObservabilityPipelineParseGrokProcessor() + throws ClassCastException { + return (ObservabilityPipelineParseGrokProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor`. If the actual + * instance is not `ObservabilityPipelineSensitiveDataScannerProcessor`, the ClassCastException + * will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessor` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessor` + */ + public ObservabilityPipelineSensitiveDataScannerProcessor + getObservabilityPipelineSensitiveDataScannerProcessor() throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineOcsfMapperProcessor`. If the actual instance + * is not `ObservabilityPipelineOcsfMapperProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineOcsfMapperProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineOcsfMapperProcessor` + */ + public ObservabilityPipelineOcsfMapperProcessor getObservabilityPipelineOcsfMapperProcessor() + throws ClassCastException { + return (ObservabilityPipelineOcsfMapperProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineAddEnvVarsProcessor`. If the actual instance + * is not `ObservabilityPipelineAddEnvVarsProcessor`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineAddEnvVarsProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAddEnvVarsProcessor` + */ + public ObservabilityPipelineAddEnvVarsProcessor getObservabilityPipelineAddEnvVarsProcessor() + throws ClassCastException { + return (ObservabilityPipelineAddEnvVarsProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineDedupeProcessor`. If the actual instance is + * not `ObservabilityPipelineDedupeProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineDedupeProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineDedupeProcessor` + */ + public ObservabilityPipelineDedupeProcessor getObservabilityPipelineDedupeProcessor() + throws ClassCastException { + return (ObservabilityPipelineDedupeProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineEnrichmentTableProcessor`. If the actual + * instance is not `ObservabilityPipelineEnrichmentTableProcessor`, the ClassCastException will be + * thrown. + * + * @return The actual instance of `ObservabilityPipelineEnrichmentTableProcessor` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineEnrichmentTableProcessor` + */ + public ObservabilityPipelineEnrichmentTableProcessor + getObservabilityPipelineEnrichmentTableProcessor() throws ClassCastException { + return (ObservabilityPipelineEnrichmentTableProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineReduceProcessor`. If the actual instance is + * not `ObservabilityPipelineReduceProcessor`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineReduceProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineReduceProcessor` + */ + public ObservabilityPipelineReduceProcessor getObservabilityPipelineReduceProcessor() + throws ClassCastException { + return (ObservabilityPipelineReduceProcessor) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineThrottleProcessor`. If the actual instance is + * not `ObservabilityPipelineThrottleProcessor`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineThrottleProcessor` + * @throws ClassCastException if the instance is not `ObservabilityPipelineThrottleProcessor` + */ + public ObservabilityPipelineThrottleProcessor getObservabilityPipelineThrottleProcessor() + throws ClassCastException { + return (ObservabilityPipelineThrottleProcessor) super.getActualInstance(); + } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java index 68261e72ac1..0ed0198745a 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java @@ -186,6 +186,643 @@ public ObservabilityPipelineConfigSourceItem deserialize( e); } + // deserialize ObservabilityPipelineSplunkTcpSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Float.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class) + || 
ObservabilityPipelineSplunkTcpSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSplunkTcpSource.class.equals(Float.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSplunkTcpSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkTcpSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSplunkTcpSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkTcpSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSplunkTcpSource'", + e); + } + + // deserialize ObservabilityPipelineSplunkHecSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSplunkHecSource.class.equals(Integer.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Long.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Float.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Double.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class) + || ObservabilityPipelineSplunkHecSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSplunkHecSource.class.equals(Integer.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSplunkHecSource.class.equals(Float.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSplunkHecSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkHecSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not 
perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSplunkHecSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSplunkHecSource'", + e); + } + + // deserialize ObservabilityPipelineAmazonS3Source + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineAmazonS3Source.class.equals(Integer.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Long.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Float.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Double.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class) + || ObservabilityPipelineAmazonS3Source.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineAmazonS3Source.class.equals(Integer.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineAmazonS3Source.class.equals(Float.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineAmazonS3Source.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineAmazonS3Source.class); + // TODO: there is no 
validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineAmazonS3Source) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineAmazonS3Source'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineAmazonS3Source'", + e); + } + + // deserialize ObservabilityPipelineFluentdSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineFluentdSource.class.equals(Integer.class) + || ObservabilityPipelineFluentdSource.class.equals(Long.class) + || ObservabilityPipelineFluentdSource.class.equals(Float.class) + || ObservabilityPipelineFluentdSource.class.equals(Double.class) + || ObservabilityPipelineFluentdSource.class.equals(Boolean.class) + || ObservabilityPipelineFluentdSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineFluentdSource.class.equals(Integer.class) + || ObservabilityPipelineFluentdSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineFluentdSource.class.equals(Float.class) + || ObservabilityPipelineFluentdSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineFluentdSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineFluentdSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = 
tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineFluentdSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineFluentdSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineFluentdSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineFluentdSource'", + e); + } + + // deserialize ObservabilityPipelineFluentBitSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineFluentBitSource.class.equals(Integer.class) + || ObservabilityPipelineFluentBitSource.class.equals(Long.class) + || ObservabilityPipelineFluentBitSource.class.equals(Float.class) + || ObservabilityPipelineFluentBitSource.class.equals(Double.class) + || ObservabilityPipelineFluentBitSource.class.equals(Boolean.class) + || ObservabilityPipelineFluentBitSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineFluentBitSource.class.equals(Integer.class) + || ObservabilityPipelineFluentBitSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineFluentBitSource.class.equals(Float.class) + || ObservabilityPipelineFluentBitSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineFluentBitSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + 
(ObservabilityPipelineFluentBitSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineFluentBitSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineFluentBitSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineFluentBitSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineFluentBitSource'", + e); + } + + // deserialize ObservabilityPipelineHttpServerSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineHttpServerSource.class.equals(Integer.class) + || ObservabilityPipelineHttpServerSource.class.equals(Long.class) + || ObservabilityPipelineHttpServerSource.class.equals(Float.class) + || ObservabilityPipelineHttpServerSource.class.equals(Double.class) + || ObservabilityPipelineHttpServerSource.class.equals(Boolean.class) + || ObservabilityPipelineHttpServerSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineHttpServerSource.class.equals(Integer.class) + || ObservabilityPipelineHttpServerSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineHttpServerSource.class.equals(Float.class) + || ObservabilityPipelineHttpServerSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + 
(ObservabilityPipelineHttpServerSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineHttpServerSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineHttpServerSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineHttpServerSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineHttpServerSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineHttpServerSource'", + e); + } + + // deserialize ObservabilityPipelineSumoLogicSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSumoLogicSource.class.equals(Integer.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Long.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Float.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Double.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Boolean.class) + || ObservabilityPipelineSumoLogicSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSumoLogicSource.class.equals(Integer.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSumoLogicSource.class.equals(Float.class) + || ObservabilityPipelineSumoLogicSource.class.equals(Double.class)) + && 
(token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSumoLogicSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSumoLogicSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSumoLogicSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSumoLogicSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSumoLogicSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSumoLogicSource'", + e); + } + + // deserialize ObservabilityPipelineRsyslogSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineRsyslogSource.class.equals(Integer.class) + || ObservabilityPipelineRsyslogSource.class.equals(Long.class) + || ObservabilityPipelineRsyslogSource.class.equals(Float.class) + || ObservabilityPipelineRsyslogSource.class.equals(Double.class) + || ObservabilityPipelineRsyslogSource.class.equals(Boolean.class) + || ObservabilityPipelineRsyslogSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineRsyslogSource.class.equals(Integer.class) + || ObservabilityPipelineRsyslogSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + 
((ObservabilityPipelineRsyslogSource.class.equals(Float.class) + || ObservabilityPipelineRsyslogSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineRsyslogSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineRsyslogSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineRsyslogSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineRsyslogSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineRsyslogSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineRsyslogSource'", + e); + } + + // deserialize ObservabilityPipelineSyslogNgSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSyslogNgSource.class.equals(Integer.class) + || ObservabilityPipelineSyslogNgSource.class.equals(Long.class) + || ObservabilityPipelineSyslogNgSource.class.equals(Float.class) + || ObservabilityPipelineSyslogNgSource.class.equals(Double.class) + || ObservabilityPipelineSyslogNgSource.class.equals(Boolean.class) + || ObservabilityPipelineSyslogNgSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSyslogNgSource.class.equals(Integer.class) + || 
ObservabilityPipelineSyslogNgSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSyslogNgSource.class.equals(Float.class) + || ObservabilityPipelineSyslogNgSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSyslogNgSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSyslogNgSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSyslogNgSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSyslogNgSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSyslogNgSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSyslogNgSource'", + e); + } + + // deserialize ObservabilityPipelineAmazonDataFirehoseSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Integer.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Long.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Float.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Double.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Boolean.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(String.class)) { 
+ attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Integer.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Float.class) + || ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineAmazonDataFirehoseSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineAmazonDataFirehoseSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineAmazonDataFirehoseSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineAmazonDataFirehoseSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineAmazonDataFirehoseSource'", + e); + } + + // deserialize ObservabilityPipelineGooglePubSubSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineGooglePubSubSource.class.equals(Integer.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Long.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Float.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Double.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Boolean.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineGooglePubSubSource.class.equals(Integer.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineGooglePubSubSource.class.equals(Float.class) + || ObservabilityPipelineGooglePubSubSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineGooglePubSubSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineGooglePubSubSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineGooglePubSubSource.class); + // TODO: there is no validation 
against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineGooglePubSubSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineGooglePubSubSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineGooglePubSubSource'", + e); + } + + // deserialize ObservabilityPipelineHttpClientSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineHttpClientSource.class.equals(Integer.class) + || ObservabilityPipelineHttpClientSource.class.equals(Long.class) + || ObservabilityPipelineHttpClientSource.class.equals(Float.class) + || ObservabilityPipelineHttpClientSource.class.equals(Double.class) + || ObservabilityPipelineHttpClientSource.class.equals(Boolean.class) + || ObservabilityPipelineHttpClientSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineHttpClientSource.class.equals(Integer.class) + || ObservabilityPipelineHttpClientSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineHttpClientSource.class.equals(Float.class) + || ObservabilityPipelineHttpClientSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineHttpClientSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineHttpClientSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp 
= + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineHttpClientSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineHttpClientSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineHttpClientSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineHttpClientSource'", + e); + } + + // deserialize ObservabilityPipelineLogstashSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineLogstashSource.class.equals(Integer.class) + || ObservabilityPipelineLogstashSource.class.equals(Long.class) + || ObservabilityPipelineLogstashSource.class.equals(Float.class) + || ObservabilityPipelineLogstashSource.class.equals(Double.class) + || ObservabilityPipelineLogstashSource.class.equals(Boolean.class) + || ObservabilityPipelineLogstashSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineLogstashSource.class.equals(Integer.class) + || ObservabilityPipelineLogstashSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineLogstashSource.class.equals(Float.class) + || ObservabilityPipelineLogstashSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineLogstashSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + 
(ObservabilityPipelineLogstashSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineLogstashSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineLogstashSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineLogstashSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineLogstashSource'", + e); + } + ObservabilityPipelineConfigSourceItem ret = new ObservabilityPipelineConfigSourceItem(); if (match == 1) { ret.setActualInstance(deserialized); @@ -226,12 +863,116 @@ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineDatadogAgentSo setActualInstance(o); } + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkTcpSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkHecSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineAmazonS3Source o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineFluentdSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineFluentBitSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineHttpServerSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + 
+ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSumoLogicSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineRsyslogSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSyslogNgSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineAmazonDataFirehoseSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineGooglePubSubSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineHttpClientSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineLogstashSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + static { schemas.put( "ObservabilityPipelineKafkaSource", new GenericType() {}); schemas.put( "ObservabilityPipelineDatadogAgentSource", new GenericType() {}); + schemas.put( + "ObservabilityPipelineSplunkTcpSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSplunkHecSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineAmazonS3Source", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineFluentdSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineFluentBitSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineHttpServerSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSumoLogicSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineRsyslogSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSyslogNgSource", + new GenericType() {}); + schemas.put( + 
"ObservabilityPipelineAmazonDataFirehoseSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineGooglePubSubSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineHttpClientSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineLogstashSource", + new GenericType() {}); JSON.registerDescendants( ObservabilityPipelineConfigSourceItem.class, Collections.unmodifiableMap(schemas)); } @@ -244,7 +985,13 @@ public Map getSchemas() { /** * Set the instance that matches the oneOf child schema, check the instance parameter is valid * against the oneOf child schemas: ObservabilityPipelineKafkaSource, - * ObservabilityPipelineDatadogAgentSource + * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource, + * ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source, + * ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource, + * ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource, + * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource, + * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource, + * ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource * *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -261,6 +1008,71 @@ public void setActualInstance(Object instance) { super.setActualInstance(instance); return; } + if (JSON.isInstanceOf( + ObservabilityPipelineSplunkTcpSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSplunkHecSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineAmazonS3Source.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineFluentdSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineFluentBitSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineHttpServerSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSumoLogicSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineRsyslogSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSyslogNgSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineAmazonDataFirehoseSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineGooglePubSubSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineHttpClientSource.class, instance, 
new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineLogstashSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { super.setActualInstance(instance); @@ -268,15 +1080,34 @@ public void setActualInstance(Object instance) { } throw new RuntimeException( "Invalid instance type. Must be ObservabilityPipelineKafkaSource," - + " ObservabilityPipelineDatadogAgentSource"); + + " ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource," + + " ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source," + + " ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource," + + " ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource," + + " ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource," + + " ObservabilityPipelineAmazonDataFirehoseSource," + + " ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource," + + " ObservabilityPipelineLogstashSource"); } /** * Get the actual instance, which can be the following: ObservabilityPipelineKafkaSource, - * ObservabilityPipelineDatadogAgentSource + * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource, + * ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source, + * ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource, + * ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource, + * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource, + * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource, + * ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource * * @return The actual instance (ObservabilityPipelineKafkaSource, - * ObservabilityPipelineDatadogAgentSource) + * 
ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource, + * ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source, + * ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource, + * ObservabilityPipelineHttpServerSource, ObservabilityPipelineSumoLogicSource, + * ObservabilityPipelineRsyslogSource, ObservabilityPipelineSyslogNgSource, + * ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineGooglePubSubSource, + * ObservabilityPipelineHttpClientSource, ObservabilityPipelineLogstashSource) */ @Override public Object getActualInstance() { @@ -306,4 +1137,162 @@ public ObservabilityPipelineDatadogAgentSource getObservabilityPipelineDatadogAg throws ClassCastException { return (ObservabilityPipelineDatadogAgentSource) super.getActualInstance(); } + + /** + * Get the actual instance of `ObservabilityPipelineSplunkTcpSource`. If the actual instance is + * not `ObservabilityPipelineSplunkTcpSource`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSplunkTcpSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkTcpSource` + */ + public ObservabilityPipelineSplunkTcpSource getObservabilityPipelineSplunkTcpSource() + throws ClassCastException { + return (ObservabilityPipelineSplunkTcpSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSplunkHecSource`. If the actual instance is + * not `ObservabilityPipelineSplunkHecSource`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineSplunkHecSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecSource` + */ + public ObservabilityPipelineSplunkHecSource getObservabilityPipelineSplunkHecSource() + throws ClassCastException { + return (ObservabilityPipelineSplunkHecSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineAmazonS3Source`. If the actual instance is not + * `ObservabilityPipelineAmazonS3Source`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineAmazonS3Source` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAmazonS3Source` + */ + public ObservabilityPipelineAmazonS3Source getObservabilityPipelineAmazonS3Source() + throws ClassCastException { + return (ObservabilityPipelineAmazonS3Source) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineFluentdSource`. If the actual instance is not + * `ObservabilityPipelineFluentdSource`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineFluentdSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineFluentdSource` + */ + public ObservabilityPipelineFluentdSource getObservabilityPipelineFluentdSource() + throws ClassCastException { + return (ObservabilityPipelineFluentdSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineFluentBitSource`. If the actual instance is + * not `ObservabilityPipelineFluentBitSource`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineFluentBitSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineFluentBitSource` + */ + public ObservabilityPipelineFluentBitSource getObservabilityPipelineFluentBitSource() + throws ClassCastException { + return (ObservabilityPipelineFluentBitSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineHttpServerSource`. If the actual instance is + * not `ObservabilityPipelineHttpServerSource`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineHttpServerSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineHttpServerSource` + */ + public ObservabilityPipelineHttpServerSource getObservabilityPipelineHttpServerSource() + throws ClassCastException { + return (ObservabilityPipelineHttpServerSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSumoLogicSource`. If the actual instance is + * not `ObservabilityPipelineSumoLogicSource`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSumoLogicSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSumoLogicSource` + */ + public ObservabilityPipelineSumoLogicSource getObservabilityPipelineSumoLogicSource() + throws ClassCastException { + return (ObservabilityPipelineSumoLogicSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineRsyslogSource`. If the actual instance is not + * `ObservabilityPipelineRsyslogSource`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineRsyslogSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineRsyslogSource` + */ + public ObservabilityPipelineRsyslogSource getObservabilityPipelineRsyslogSource() + throws ClassCastException { + return (ObservabilityPipelineRsyslogSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSyslogNgSource`. If the actual instance is not + * `ObservabilityPipelineSyslogNgSource`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSyslogNgSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSyslogNgSource` + */ + public ObservabilityPipelineSyslogNgSource getObservabilityPipelineSyslogNgSource() + throws ClassCastException { + return (ObservabilityPipelineSyslogNgSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineAmazonDataFirehoseSource`. If the actual + * instance is not `ObservabilityPipelineAmazonDataFirehoseSource`, the ClassCastException will be + * thrown. + * + * @return The actual instance of `ObservabilityPipelineAmazonDataFirehoseSource` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineAmazonDataFirehoseSource` + */ + public ObservabilityPipelineAmazonDataFirehoseSource + getObservabilityPipelineAmazonDataFirehoseSource() throws ClassCastException { + return (ObservabilityPipelineAmazonDataFirehoseSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineGooglePubSubSource`. If the actual instance is + * not `ObservabilityPipelineGooglePubSubSource`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineGooglePubSubSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineGooglePubSubSource` + */ + public ObservabilityPipelineGooglePubSubSource getObservabilityPipelineGooglePubSubSource() + throws ClassCastException { + return (ObservabilityPipelineGooglePubSubSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineHttpClientSource`. If the actual instance is + * not `ObservabilityPipelineHttpClientSource`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineHttpClientSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineHttpClientSource` + */ + public ObservabilityPipelineHttpClientSource getObservabilityPipelineHttpClientSource() + throws ClassCastException { + return (ObservabilityPipelineHttpClientSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineLogstashSource`. If the actual instance is not + * `ObservabilityPipelineLogstashSource`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineLogstashSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineLogstashSource` + */ + public ObservabilityPipelineLogstashSource getObservabilityPipelineLogstashSource() + throws ClassCastException { + return (ObservabilityPipelineLogstashSource) super.getActualInstance(); + } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java index ba5b215bf5e..f7a44ea620b 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDatadogAgentSource.java @@ -77,7 +77,7 @@ public ObservabilityPipelineDatadogAgentSource tls(ObservabilityPipelineTls tls) } /** - * Configuration for enabling TLS encryption. + * Configuration for enabling TLS encryption between the pipeline component and external services. * * @return tls */ diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDecoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDecoding.java new file mode 100644 index 00000000000..6798d02035c --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDecoding.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The decoding format used to interpret incoming logs. */ +@JsonSerialize(using = ObservabilityPipelineDecoding.ObservabilityPipelineDecodingSerializer.class) +public class ObservabilityPipelineDecoding extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("bytes", "gelf", "json", "syslog")); + + public static final ObservabilityPipelineDecoding DECODE_BYTES = + new ObservabilityPipelineDecoding("bytes"); + public static final ObservabilityPipelineDecoding DECODE_GELF = + new ObservabilityPipelineDecoding("gelf"); + public static final ObservabilityPipelineDecoding DECODE_JSON = + new ObservabilityPipelineDecoding("json"); + public static final ObservabilityPipelineDecoding DECODE_SYSLOG = + new ObservabilityPipelineDecoding("syslog"); + + ObservabilityPipelineDecoding(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineDecodingSerializer + extends StdSerializer { + public ObservabilityPipelineDecodingSerializer(Class t) { + super(t); + } + + public ObservabilityPipelineDecodingSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineDecoding value, JsonGenerator jgen, SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineDecoding fromValue(String value) { + return new 
ObservabilityPipelineDecoding(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessor.java new file mode 100644 index 00000000000..68d0ca28d10 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessor.java @@ -0,0 +1,312 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The dedupe processor removes duplicate fields in log events. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineDedupeProcessor.JSON_PROPERTY_FIELDS, + ObservabilityPipelineDedupeProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineDedupeProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineDedupeProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineDedupeProcessor.JSON_PROPERTY_MODE, + ObservabilityPipelineDedupeProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineDedupeProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FIELDS = "fields"; + private List fields = new ArrayList<>(); + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_MODE = "mode"; + private ObservabilityPipelineDedupeProcessorMode mode; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineDedupeProcessorType type = + ObservabilityPipelineDedupeProcessorType.DEDUPE; + + public ObservabilityPipelineDedupeProcessor() {} + + @JsonCreator + public ObservabilityPipelineDedupeProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_FIELDS) List fields, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_MODE) + ObservabilityPipelineDedupeProcessorMode mode, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineDedupeProcessorType type) { + this.fields = fields; + this.id = id; + this.include = include; + this.inputs = 
inputs; + this.mode = mode; + this.unparsed |= !mode.isValid(); + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineDedupeProcessor fields(List fields) { + this.fields = fields; + return this; + } + + public ObservabilityPipelineDedupeProcessor addFieldsItem(String fieldsItem) { + this.fields.add(fieldsItem); + return this; + } + + /** + * A list of log field paths to check for duplicates. + * + * @return fields + */ + @JsonProperty(JSON_PROPERTY_FIELDS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getFields() { + return fields; + } + + public void setFields(List fields) { + this.fields = fields; + } + + public ObservabilityPipelineDedupeProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this processor. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineDedupeProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineDedupeProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineDedupeProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this processor. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineDedupeProcessor mode(ObservabilityPipelineDedupeProcessorMode mode) { + this.mode = mode; + this.unparsed |= !mode.isValid(); + return this; + } + + /** + * The deduplication mode to apply to the fields. + * + * @return mode + */ + @JsonProperty(JSON_PROPERTY_MODE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineDedupeProcessorMode getMode() { + return mode; + } + + public void setMode(ObservabilityPipelineDedupeProcessorMode mode) { + if (!mode.isValid()) { + this.unparsed = true; + } + this.mode = mode; + } + + public ObservabilityPipelineDedupeProcessor type(ObservabilityPipelineDedupeProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be dedupe. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineDedupeProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineDedupeProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineDedupeProcessor + */ + @JsonAnySetter + public ObservabilityPipelineDedupeProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineDedupeProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineDedupeProcessor observabilityPipelineDedupeProcessor = + (ObservabilityPipelineDedupeProcessor) o; + return Objects.equals(this.fields, observabilityPipelineDedupeProcessor.fields) + && Objects.equals(this.id, observabilityPipelineDedupeProcessor.id) + && Objects.equals(this.include, observabilityPipelineDedupeProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineDedupeProcessor.inputs) + && Objects.equals(this.mode, observabilityPipelineDedupeProcessor.mode) + && Objects.equals(this.type, observabilityPipelineDedupeProcessor.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineDedupeProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(fields, id, include, inputs, mode, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineDedupeProcessor {\n"); + sb.append(" fields: ").append(toIndentedString(fields)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" mode: ").append(toIndentedString(mode)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessorMode.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessorMode.java new file mode 100644 index 00000000000..143a7b50096 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessorMode.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The deduplication mode to apply to the fields. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineDedupeProcessorMode.ObservabilityPipelineDedupeProcessorModeSerializer + .class) +public class ObservabilityPipelineDedupeProcessorMode extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("match", "ignore")); + + public static final ObservabilityPipelineDedupeProcessorMode MATCH = + new ObservabilityPipelineDedupeProcessorMode("match"); + public static final ObservabilityPipelineDedupeProcessorMode IGNORE = + new ObservabilityPipelineDedupeProcessorMode("ignore"); + + ObservabilityPipelineDedupeProcessorMode(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineDedupeProcessorModeSerializer + extends StdSerializer { + public ObservabilityPipelineDedupeProcessorModeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineDedupeProcessorModeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineDedupeProcessorMode value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineDedupeProcessorMode fromValue(String value) { + return new ObservabilityPipelineDedupeProcessorMode(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessorType.java new file mode 100644 index 00000000000..8b21579bdbc --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineDedupeProcessorType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be dedupe. */ +@JsonSerialize( + using = + ObservabilityPipelineDedupeProcessorType.ObservabilityPipelineDedupeProcessorTypeSerializer + .class) +public class ObservabilityPipelineDedupeProcessorType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("dedupe")); + + public static final ObservabilityPipelineDedupeProcessorType DEDUPE = + new ObservabilityPipelineDedupeProcessorType("dedupe"); + + ObservabilityPipelineDedupeProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineDedupeProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineDedupeProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineDedupeProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineDedupeProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineDedupeProcessorType fromValue(String value) { + return new ObservabilityPipelineDedupeProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java 
b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java new file mode 100644 index 00000000000..c414b55e50e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestination.java @@ -0,0 +1,279 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The elasticsearch destination writes logs to an Elasticsearch cluster. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_API_VERSION, + ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_BULK_INDEX, + ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_ID, + ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineElasticsearchDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineElasticsearchDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_API_VERSION = "api_version"; + private ObservabilityPipelineElasticsearchDestinationApiVersion apiVersion; + + public static final String JSON_PROPERTY_BULK_INDEX = "bulk_index"; + private String bulkIndex; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineElasticsearchDestinationType type = + ObservabilityPipelineElasticsearchDestinationType.ELASTICSEARCH; + + public ObservabilityPipelineElasticsearchDestination() {} + + @JsonCreator + public ObservabilityPipelineElasticsearchDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineElasticsearchDestinationType type) { + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineElasticsearchDestination apiVersion( + ObservabilityPipelineElasticsearchDestinationApiVersion apiVersion) { + this.apiVersion = apiVersion; + this.unparsed |= !apiVersion.isValid(); + return this; + } + + /** + * The 
Elasticsearch API version to use. Set to auto to auto-detect. + * + * @return apiVersion + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_API_VERSION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineElasticsearchDestinationApiVersion getApiVersion() { + return apiVersion; + } + + public void setApiVersion(ObservabilityPipelineElasticsearchDestinationApiVersion apiVersion) { + if (!apiVersion.isValid()) { + this.unparsed = true; + } + this.apiVersion = apiVersion; + } + + public ObservabilityPipelineElasticsearchDestination bulkIndex(String bulkIndex) { + this.bulkIndex = bulkIndex; + return this; + } + + /** + * The index to write logs to in Elasticsearch. + * + * @return bulkIndex + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_BULK_INDEX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getBulkIndex() { + return bulkIndex; + } + + public void setBulkIndex(String bulkIndex) { + this.bulkIndex = bulkIndex; + } + + public ObservabilityPipelineElasticsearchDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineElasticsearchDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineElasticsearchDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineElasticsearchDestination type( + ObservabilityPipelineElasticsearchDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be elasticsearch. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineElasticsearchDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineElasticsearchDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineElasticsearchDestination + */ + @JsonAnySetter + public ObservabilityPipelineElasticsearchDestination putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineElasticsearchDestination object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineElasticsearchDestination observabilityPipelineElasticsearchDestination = + (ObservabilityPipelineElasticsearchDestination) o; + return Objects.equals(this.apiVersion, observabilityPipelineElasticsearchDestination.apiVersion) + && Objects.equals(this.bulkIndex, observabilityPipelineElasticsearchDestination.bulkIndex) + && Objects.equals(this.id, observabilityPipelineElasticsearchDestination.id) + && Objects.equals(this.inputs, observabilityPipelineElasticsearchDestination.inputs) + && Objects.equals(this.type, observabilityPipelineElasticsearchDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineElasticsearchDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(apiVersion, bulkIndex, id, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineElasticsearchDestination {\n"); + sb.append(" apiVersion: ").append(toIndentedString(apiVersion)).append("\n"); + sb.append(" bulkIndex: ").append(toIndentedString(bulkIndex)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); 
+ sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationApiVersion.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationApiVersion.java new file mode 100644 index 00000000000..fd5e2cdec87 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationApiVersion.java @@ -0,0 +1,69 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The Elasticsearch API version to use. Set to auto to auto-detect. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineElasticsearchDestinationApiVersion + .ObservabilityPipelineElasticsearchDestinationApiVersionSerializer.class) +public class ObservabilityPipelineElasticsearchDestinationApiVersion extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("auto", "v6", "v7", "v8")); + + public static final ObservabilityPipelineElasticsearchDestinationApiVersion AUTO = + new ObservabilityPipelineElasticsearchDestinationApiVersion("auto"); + public static final ObservabilityPipelineElasticsearchDestinationApiVersion V6 = + new ObservabilityPipelineElasticsearchDestinationApiVersion("v6"); + public static final ObservabilityPipelineElasticsearchDestinationApiVersion V7 = + new ObservabilityPipelineElasticsearchDestinationApiVersion("v7"); + public static final ObservabilityPipelineElasticsearchDestinationApiVersion V8 = + new ObservabilityPipelineElasticsearchDestinationApiVersion("v8"); + + ObservabilityPipelineElasticsearchDestinationApiVersion(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineElasticsearchDestinationApiVersionSerializer + extends StdSerializer { + public ObservabilityPipelineElasticsearchDestinationApiVersionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineElasticsearchDestinationApiVersionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineElasticsearchDestinationApiVersion value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineElasticsearchDestinationApiVersion fromValue(String value) { + return new ObservabilityPipelineElasticsearchDestinationApiVersion(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationType.java 
b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationType.java new file mode 100644 index 00000000000..7d9436e73e2 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineElasticsearchDestinationType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be elasticsearch. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineElasticsearchDestinationType + .ObservabilityPipelineElasticsearchDestinationTypeSerializer.class) +public class ObservabilityPipelineElasticsearchDestinationType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("elasticsearch")); + + public static final ObservabilityPipelineElasticsearchDestinationType ELASTICSEARCH = + new ObservabilityPipelineElasticsearchDestinationType("elasticsearch"); + + ObservabilityPipelineElasticsearchDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineElasticsearchDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineElasticsearchDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineElasticsearchDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineElasticsearchDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineElasticsearchDestinationType fromValue(String value) { + return new ObservabilityPipelineElasticsearchDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFile.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFile.java new file mode 100644 index 00000000000..a4fce322939 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFile.java @@ -0,0 +1,262 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** Defines a static enrichment table loaded from a CSV file. */ +@JsonPropertyOrder({ + ObservabilityPipelineEnrichmentTableFile.JSON_PROPERTY_ENCODING, + ObservabilityPipelineEnrichmentTableFile.JSON_PROPERTY_KEY, + ObservabilityPipelineEnrichmentTableFile.JSON_PROPERTY_PATH, + ObservabilityPipelineEnrichmentTableFile.JSON_PROPERTY_SCHEMA +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineEnrichmentTableFile { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ENCODING = "encoding"; + private ObservabilityPipelineEnrichmentTableFileEncoding encoding; + + public static final String JSON_PROPERTY_KEY = "key"; + private List key = new ArrayList<>(); + + public static final String JSON_PROPERTY_PATH = "path"; + private String path; + + public static final String JSON_PROPERTY_SCHEMA = "schema"; + private List schema = new ArrayList<>(); + + public ObservabilityPipelineEnrichmentTableFile() {} + + @JsonCreator + public ObservabilityPipelineEnrichmentTableFile( + @JsonProperty(required = true, value = JSON_PROPERTY_ENCODING) + ObservabilityPipelineEnrichmentTableFileEncoding encoding, + @JsonProperty(required = true, value = JSON_PROPERTY_KEY) + List key, + @JsonProperty(required = true, value = JSON_PROPERTY_PATH) String path, + @JsonProperty(required = true, 
value = JSON_PROPERTY_SCHEMA) + List schema) { + this.encoding = encoding; + this.unparsed |= encoding.unparsed; + this.key = key; + this.path = path; + this.schema = schema; + } + + public ObservabilityPipelineEnrichmentTableFile encoding( + ObservabilityPipelineEnrichmentTableFileEncoding encoding) { + this.encoding = encoding; + this.unparsed |= encoding.unparsed; + return this; + } + + /** + * File encoding format. + * + * @return encoding + */ + @JsonProperty(JSON_PROPERTY_ENCODING) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineEnrichmentTableFileEncoding getEncoding() { + return encoding; + } + + public void setEncoding(ObservabilityPipelineEnrichmentTableFileEncoding encoding) { + this.encoding = encoding; + } + + public ObservabilityPipelineEnrichmentTableFile key( + List key) { + this.key = key; + for (ObservabilityPipelineEnrichmentTableFileKeyItems item : key) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineEnrichmentTableFile addKeyItem( + ObservabilityPipelineEnrichmentTableFileKeyItems keyItem) { + this.key.add(keyItem); + this.unparsed |= keyItem.unparsed; + return this; + } + + /** + * Key fields used to look up enrichment values. + * + * @return key + */ + @JsonProperty(JSON_PROPERTY_KEY) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getKey() { + return key; + } + + public void setKey(List key) { + this.key = key; + } + + public ObservabilityPipelineEnrichmentTableFile path(String path) { + this.path = path; + return this; + } + + /** + * Path to the CSV file. 
+ * + * @return path + */ + @JsonProperty(JSON_PROPERTY_PATH) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + public ObservabilityPipelineEnrichmentTableFile schema( + List schema) { + this.schema = schema; + for (ObservabilityPipelineEnrichmentTableFileSchemaItems item : schema) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineEnrichmentTableFile addSchemaItem( + ObservabilityPipelineEnrichmentTableFileSchemaItems schemaItem) { + this.schema.add(schemaItem); + this.unparsed |= schemaItem.unparsed; + return this; + } + + /** + * Schema defining column names and their types. + * + * @return schema + */ + @JsonProperty(JSON_PROPERTY_SCHEMA) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getSchema() { + return schema; + } + + public void setSchema(List schema) { + this.schema = schema; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineEnrichmentTableFile + */ + @JsonAnySetter + public ObservabilityPipelineEnrichmentTableFile putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. 
+ * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineEnrichmentTableFile object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineEnrichmentTableFile observabilityPipelineEnrichmentTableFile = + (ObservabilityPipelineEnrichmentTableFile) o; + return Objects.equals(this.encoding, observabilityPipelineEnrichmentTableFile.encoding) + && Objects.equals(this.key, observabilityPipelineEnrichmentTableFile.key) + && Objects.equals(this.path, observabilityPipelineEnrichmentTableFile.path) + && Objects.equals(this.schema, observabilityPipelineEnrichmentTableFile.schema) + && Objects.equals( + this.additionalProperties, + observabilityPipelineEnrichmentTableFile.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(encoding, key, path, schema, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineEnrichmentTableFile {\n"); + sb.append(" encoding: ").append(toIndentedString(encoding)).append("\n"); + sb.append(" key: ").append(toIndentedString(key)).append("\n"); + sb.append(" path: ").append(toIndentedString(path)).append("\n"); + sb.append(" schema: ").append(toIndentedString(schema)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + 
sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileEncoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileEncoding.java new file mode 100644 index 00000000000..35bac18fca1 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileEncoding.java @@ -0,0 +1,216 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** File encoding format. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineEnrichmentTableFileEncoding.JSON_PROPERTY_DELIMITER, + ObservabilityPipelineEnrichmentTableFileEncoding.JSON_PROPERTY_INCLUDES_HEADERS, + ObservabilityPipelineEnrichmentTableFileEncoding.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineEnrichmentTableFileEncoding { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DELIMITER = "delimiter"; + private String delimiter; + + public static final String JSON_PROPERTY_INCLUDES_HEADERS = "includes_headers"; + private Boolean includesHeaders; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineEnrichmentTableFileEncodingType type; + + public ObservabilityPipelineEnrichmentTableFileEncoding() {} + + @JsonCreator + public ObservabilityPipelineEnrichmentTableFileEncoding( + @JsonProperty(required = true, value = JSON_PROPERTY_DELIMITER) String delimiter, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDES_HEADERS) + Boolean includesHeaders, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineEnrichmentTableFileEncodingType type) { + this.delimiter = delimiter; + this.includesHeaders = includesHeaders; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineEnrichmentTableFileEncoding delimiter(String delimiter) { + this.delimiter = delimiter; + return this; + } + + /** + * The encoding delimiter. 
+ * + * @return delimiter + */ + @JsonProperty(JSON_PROPERTY_DELIMITER) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getDelimiter() { + return delimiter; + } + + public void setDelimiter(String delimiter) { + this.delimiter = delimiter; + } + + public ObservabilityPipelineEnrichmentTableFileEncoding includesHeaders(Boolean includesHeaders) { + this.includesHeaders = includesHeaders; + return this; + } + + /** + * The encoding includes_headers. + * + * @return includesHeaders + */ + @JsonProperty(JSON_PROPERTY_INCLUDES_HEADERS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Boolean getIncludesHeaders() { + return includesHeaders; + } + + public void setIncludesHeaders(Boolean includesHeaders) { + this.includesHeaders = includesHeaders; + } + + public ObservabilityPipelineEnrichmentTableFileEncoding type( + ObservabilityPipelineEnrichmentTableFileEncodingType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * Specifies the encoding format (e.g., CSV) used for enrichment tables. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineEnrichmentTableFileEncodingType getType() { + return type; + } + + public void setType(ObservabilityPipelineEnrichmentTableFileEncodingType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineEnrichmentTableFileEncoding + */ + @JsonAnySetter + public ObservabilityPipelineEnrichmentTableFileEncoding putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineEnrichmentTableFileEncoding object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineEnrichmentTableFileEncoding + observabilityPipelineEnrichmentTableFileEncoding = + (ObservabilityPipelineEnrichmentTableFileEncoding) o; + return Objects.equals( + this.delimiter, observabilityPipelineEnrichmentTableFileEncoding.delimiter) + && Objects.equals( + this.includesHeaders, observabilityPipelineEnrichmentTableFileEncoding.includesHeaders) + && Objects.equals(this.type, observabilityPipelineEnrichmentTableFileEncoding.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineEnrichmentTableFileEncoding.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(delimiter, includesHeaders, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineEnrichmentTableFileEncoding {\n"); + sb.append(" delimiter: ").append(toIndentedString(delimiter)).append("\n"); + sb.append(" includesHeaders: ").append(toIndentedString(includesHeaders)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileEncodingType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileEncodingType.java new file mode 100644 index 00000000000..25170a0b642 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileEncodingType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Specifies the encoding format (e.g., CSV) used for enrichment tables. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineEnrichmentTableFileEncodingType + .ObservabilityPipelineEnrichmentTableFileEncodingTypeSerializer.class) +public class ObservabilityPipelineEnrichmentTableFileEncodingType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("csv")); + + public static final ObservabilityPipelineEnrichmentTableFileEncodingType CSV = + new ObservabilityPipelineEnrichmentTableFileEncodingType("csv"); + + ObservabilityPipelineEnrichmentTableFileEncodingType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineEnrichmentTableFileEncodingTypeSerializer + extends StdSerializer { + public ObservabilityPipelineEnrichmentTableFileEncodingTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineEnrichmentTableFileEncodingTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineEnrichmentTableFileEncodingType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineEnrichmentTableFileEncodingType fromValue(String value) { + return new ObservabilityPipelineEnrichmentTableFileEncodingType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileKeyItems.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileKeyItems.java new file mode 100644 index 00000000000..78879706b49 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileKeyItems.java @@ -0,0 +1,214 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Defines how to map log fields to enrichment table columns during lookups. */ +@JsonPropertyOrder({ + ObservabilityPipelineEnrichmentTableFileKeyItems.JSON_PROPERTY_COLUMN, + ObservabilityPipelineEnrichmentTableFileKeyItems.JSON_PROPERTY_COMPARISON, + ObservabilityPipelineEnrichmentTableFileKeyItems.JSON_PROPERTY_FIELD +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineEnrichmentTableFileKeyItems { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_COLUMN = "column"; + private String column; + + public static final String JSON_PROPERTY_COMPARISON = "comparison"; + private ObservabilityPipelineEnrichmentTableFileKeyItemsComparison comparison; + + public static final String JSON_PROPERTY_FIELD = "field"; + private String field; + + public ObservabilityPipelineEnrichmentTableFileKeyItems() {} + + @JsonCreator + public ObservabilityPipelineEnrichmentTableFileKeyItems( + @JsonProperty(required = true, value = JSON_PROPERTY_COLUMN) String column, + @JsonProperty(required = true, value = JSON_PROPERTY_COMPARISON) + ObservabilityPipelineEnrichmentTableFileKeyItemsComparison comparison, + @JsonProperty(required = true, value = JSON_PROPERTY_FIELD) String field) { + this.column = column; + this.comparison = comparison; + this.unparsed |= !comparison.isValid(); + this.field = field; + } + + public 
ObservabilityPipelineEnrichmentTableFileKeyItems column(String column) { + this.column = column; + return this; + } + + /** + * The items column. + * + * @return column + */ + @JsonProperty(JSON_PROPERTY_COLUMN) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getColumn() { + return column; + } + + public void setColumn(String column) { + this.column = column; + } + + public ObservabilityPipelineEnrichmentTableFileKeyItems comparison( + ObservabilityPipelineEnrichmentTableFileKeyItemsComparison comparison) { + this.comparison = comparison; + this.unparsed |= !comparison.isValid(); + return this; + } + + /** + * Defines how to compare key fields for enrichment table lookups. + * + * @return comparison + */ + @JsonProperty(JSON_PROPERTY_COMPARISON) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineEnrichmentTableFileKeyItemsComparison getComparison() { + return comparison; + } + + public void setComparison(ObservabilityPipelineEnrichmentTableFileKeyItemsComparison comparison) { + if (!comparison.isValid()) { + this.unparsed = true; + } + this.comparison = comparison; + } + + public ObservabilityPipelineEnrichmentTableFileKeyItems field(String field) { + this.field = field; + return this; + } + + /** + * The items field. + * + * @return field + */ + @JsonProperty(JSON_PROPERTY_FIELD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineEnrichmentTableFileKeyItems + */ + @JsonAnySetter + public ObservabilityPipelineEnrichmentTableFileKeyItems putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineEnrichmentTableFileKeyItems object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineEnrichmentTableFileKeyItems + observabilityPipelineEnrichmentTableFileKeyItems = + (ObservabilityPipelineEnrichmentTableFileKeyItems) o; + return Objects.equals(this.column, observabilityPipelineEnrichmentTableFileKeyItems.column) + && Objects.equals( + this.comparison, observabilityPipelineEnrichmentTableFileKeyItems.comparison) + && Objects.equals(this.field, observabilityPipelineEnrichmentTableFileKeyItems.field) + && Objects.equals( + this.additionalProperties, + observabilityPipelineEnrichmentTableFileKeyItems.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(column, comparison, field, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineEnrichmentTableFileKeyItems {\n"); + sb.append(" column: ").append(toIndentedString(column)).append("\n"); + sb.append(" comparison: ").append(toIndentedString(comparison)).append("\n"); + sb.append(" field: ").append(toIndentedString(field)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileKeyItemsComparison.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileKeyItemsComparison.java new file mode 100644 index 00000000000..98ce3ece16e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileKeyItemsComparison.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Defines how to compare key fields for enrichment table lookups. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineEnrichmentTableFileKeyItemsComparison + .ObservabilityPipelineEnrichmentTableFileKeyItemsComparisonSerializer.class) +public class ObservabilityPipelineEnrichmentTableFileKeyItemsComparison extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("equals")); + + public static final ObservabilityPipelineEnrichmentTableFileKeyItemsComparison EQUALS = + new ObservabilityPipelineEnrichmentTableFileKeyItemsComparison("equals"); + + ObservabilityPipelineEnrichmentTableFileKeyItemsComparison(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineEnrichmentTableFileKeyItemsComparisonSerializer + extends StdSerializer { + public ObservabilityPipelineEnrichmentTableFileKeyItemsComparisonSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineEnrichmentTableFileKeyItemsComparisonSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineEnrichmentTableFileKeyItemsComparison value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineEnrichmentTableFileKeyItemsComparison fromValue(String value) { + return new ObservabilityPipelineEnrichmentTableFileKeyItemsComparison(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileSchemaItems.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileSchemaItems.java new file mode 100644 index 00000000000..6fcc3b02938 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileSchemaItems.java @@ -0,0 +1,187 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Describes a single column and its type in an enrichment table schema. */ +@JsonPropertyOrder({ + ObservabilityPipelineEnrichmentTableFileSchemaItems.JSON_PROPERTY_COLUMN, + ObservabilityPipelineEnrichmentTableFileSchemaItems.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineEnrichmentTableFileSchemaItems { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_COLUMN = "column"; + private String column; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineEnrichmentTableFileSchemaItemsType type; + + public ObservabilityPipelineEnrichmentTableFileSchemaItems() {} + + @JsonCreator + public ObservabilityPipelineEnrichmentTableFileSchemaItems( + @JsonProperty(required = true, value = JSON_PROPERTY_COLUMN) String column, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineEnrichmentTableFileSchemaItemsType type) { + this.column = column; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineEnrichmentTableFileSchemaItems column(String column) { + this.column = column; + return this; + } + + /** + * The items column. 
+ * + * @return column + */ + @JsonProperty(JSON_PROPERTY_COLUMN) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getColumn() { + return column; + } + + public void setColumn(String column) { + this.column = column; + } + + public ObservabilityPipelineEnrichmentTableFileSchemaItems type( + ObservabilityPipelineEnrichmentTableFileSchemaItemsType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * Declares allowed data types for enrichment table columns. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineEnrichmentTableFileSchemaItemsType getType() { + return type; + } + + public void setType(ObservabilityPipelineEnrichmentTableFileSchemaItemsType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineEnrichmentTableFileSchemaItems + */ + @JsonAnySetter + public ObservabilityPipelineEnrichmentTableFileSchemaItems putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. 
+ * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineEnrichmentTableFileSchemaItems object is equal to o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineEnrichmentTableFileSchemaItems + observabilityPipelineEnrichmentTableFileSchemaItems = + (ObservabilityPipelineEnrichmentTableFileSchemaItems) o; + return Objects.equals(this.column, observabilityPipelineEnrichmentTableFileSchemaItems.column) + && Objects.equals(this.type, observabilityPipelineEnrichmentTableFileSchemaItems.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineEnrichmentTableFileSchemaItems.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(column, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineEnrichmentTableFileSchemaItems {\n"); + sb.append(" column: ").append(toIndentedString(column)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileSchemaItemsType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileSchemaItemsType.java new file mode 100644 index 00000000000..f91779011f1 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableFileSchemaItemsType.java @@ -0,0 +1,74 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Declares allowed data types for enrichment table columns. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineEnrichmentTableFileSchemaItemsType + .ObservabilityPipelineEnrichmentTableFileSchemaItemsTypeSerializer.class) +public class ObservabilityPipelineEnrichmentTableFileSchemaItemsType extends ModelEnum { + + private static final Set allowedValues = + new HashSet( + Arrays.asList("string", "boolean", "integer", "float", "date", "timestamp")); + + public static final ObservabilityPipelineEnrichmentTableFileSchemaItemsType STRING = + new ObservabilityPipelineEnrichmentTableFileSchemaItemsType("string"); + public static final ObservabilityPipelineEnrichmentTableFileSchemaItemsType BOOLEAN = + new ObservabilityPipelineEnrichmentTableFileSchemaItemsType("boolean"); + public static final ObservabilityPipelineEnrichmentTableFileSchemaItemsType INTEGER = + new ObservabilityPipelineEnrichmentTableFileSchemaItemsType("integer"); + public static final ObservabilityPipelineEnrichmentTableFileSchemaItemsType FLOAT = + new ObservabilityPipelineEnrichmentTableFileSchemaItemsType("float"); + public static final ObservabilityPipelineEnrichmentTableFileSchemaItemsType DATE = + new ObservabilityPipelineEnrichmentTableFileSchemaItemsType("date"); + public static final ObservabilityPipelineEnrichmentTableFileSchemaItemsType TIMESTAMP = + new ObservabilityPipelineEnrichmentTableFileSchemaItemsType("timestamp"); + + ObservabilityPipelineEnrichmentTableFileSchemaItemsType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineEnrichmentTableFileSchemaItemsTypeSerializer + extends StdSerializer { + public ObservabilityPipelineEnrichmentTableFileSchemaItemsTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineEnrichmentTableFileSchemaItemsTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineEnrichmentTableFileSchemaItemsType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + 
jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineEnrichmentTableFileSchemaItemsType fromValue(String value) { + return new ObservabilityPipelineEnrichmentTableFileSchemaItemsType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableGeoIp.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableGeoIp.java new file mode 100644 index 00000000000..4be638275f1 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableGeoIp.java @@ -0,0 +1,204 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Uses a GeoIP database to enrich logs based on an IP field. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineEnrichmentTableGeoIp.JSON_PROPERTY_KEY_FIELD, + ObservabilityPipelineEnrichmentTableGeoIp.JSON_PROPERTY_LOCALE, + ObservabilityPipelineEnrichmentTableGeoIp.JSON_PROPERTY_PATH +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineEnrichmentTableGeoIp { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_KEY_FIELD = "key_field"; + private String keyField; + + public static final String JSON_PROPERTY_LOCALE = "locale"; + private String locale; + + public static final String JSON_PROPERTY_PATH = "path"; + private String path; + + public ObservabilityPipelineEnrichmentTableGeoIp() {} + + @JsonCreator + public ObservabilityPipelineEnrichmentTableGeoIp( + @JsonProperty(required = true, value = JSON_PROPERTY_KEY_FIELD) String keyField, + @JsonProperty(required = true, value = JSON_PROPERTY_LOCALE) String locale, + @JsonProperty(required = true, value = JSON_PROPERTY_PATH) String path) { + this.keyField = keyField; + this.locale = locale; + this.path = path; + } + + public ObservabilityPipelineEnrichmentTableGeoIp keyField(String keyField) { + this.keyField = keyField; + return this; + } + + /** + * Path to the IP field in the log. + * + * @return keyField + */ + @JsonProperty(JSON_PROPERTY_KEY_FIELD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getKeyField() { + return keyField; + } + + public void setKeyField(String keyField) { + this.keyField = keyField; + } + + public ObservabilityPipelineEnrichmentTableGeoIp locale(String locale) { + this.locale = locale; + return this; + } + + /** + * Locale used to resolve geographical names. 
+ * + * @return locale + */ + @JsonProperty(JSON_PROPERTY_LOCALE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getLocale() { + return locale; + } + + public void setLocale(String locale) { + this.locale = locale; + } + + public ObservabilityPipelineEnrichmentTableGeoIp path(String path) { + this.path = path; + return this; + } + + /** + * Path to the GeoIP database file. + * + * @return path + */ + @JsonProperty(JSON_PROPERTY_PATH) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineEnrichmentTableGeoIp + */ + @JsonAnySetter + public ObservabilityPipelineEnrichmentTableGeoIp putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineEnrichmentTableGeoIp object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineEnrichmentTableGeoIp observabilityPipelineEnrichmentTableGeoIp = + (ObservabilityPipelineEnrichmentTableGeoIp) o; + return Objects.equals(this.keyField, observabilityPipelineEnrichmentTableGeoIp.keyField) + && Objects.equals(this.locale, observabilityPipelineEnrichmentTableGeoIp.locale) + && Objects.equals(this.path, observabilityPipelineEnrichmentTableGeoIp.path) + && Objects.equals( + this.additionalProperties, + observabilityPipelineEnrichmentTableGeoIp.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(keyField, locale, path, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineEnrichmentTableGeoIp {\n"); + sb.append(" keyField: ").append(toIndentedString(keyField)).append("\n"); + sb.append(" locale: ").append(toIndentedString(locale)).append("\n"); + sb.append(" path: ").append(toIndentedString(path)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java new file mode 100644 index 00000000000..e4ad7c5a190 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessor.java @@ -0,0 +1,337 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The enrichment_table processor enriches logs using a static CSV file or GeoIP + * database. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_FILE, + ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_GEOIP, + ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_TARGET, + ObservabilityPipelineEnrichmentTableProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineEnrichmentTableProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FILE = "file"; + private ObservabilityPipelineEnrichmentTableFile file; + + public static final String JSON_PROPERTY_GEOIP = "geoip"; + private ObservabilityPipelineEnrichmentTableGeoIp geoip; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TARGET = "target"; + private String target; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineEnrichmentTableProcessorType type = + ObservabilityPipelineEnrichmentTableProcessorType.ENRICHMENT_TABLE; + + public ObservabilityPipelineEnrichmentTableProcessor() {} + + @JsonCreator + public ObservabilityPipelineEnrichmentTableProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TARGET) String target, + 
@JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineEnrichmentTableProcessorType type) { + this.id = id; + this.include = include; + this.inputs = inputs; + this.target = target; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineEnrichmentTableProcessor file( + ObservabilityPipelineEnrichmentTableFile file) { + this.file = file; + this.unparsed |= file.unparsed; + return this; + } + + /** + * Defines a static enrichment table loaded from a CSV file. + * + * @return file + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_FILE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineEnrichmentTableFile getFile() { + return file; + } + + public void setFile(ObservabilityPipelineEnrichmentTableFile file) { + this.file = file; + } + + public ObservabilityPipelineEnrichmentTableProcessor geoip( + ObservabilityPipelineEnrichmentTableGeoIp geoip) { + this.geoip = geoip; + this.unparsed |= geoip.unparsed; + return this; + } + + /** + * Uses a GeoIP database to enrich logs based on an IP field. + * + * @return geoip + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_GEOIP) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineEnrichmentTableGeoIp getGeoip() { + return geoip; + } + + public void setGeoip(ObservabilityPipelineEnrichmentTableGeoIp geoip) { + this.geoip = geoip; + } + + public ObservabilityPipelineEnrichmentTableProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this processor. 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineEnrichmentTableProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineEnrichmentTableProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineEnrichmentTableProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this processor. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineEnrichmentTableProcessor target(String target) { + this.target = target; + return this; + } + + /** + * Path where enrichment results should be stored in the log. + * + * @return target + */ + @JsonProperty(JSON_PROPERTY_TARGET) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getTarget() { + return target; + } + + public void setTarget(String target) { + this.target = target; + } + + public ObservabilityPipelineEnrichmentTableProcessor type( + ObservabilityPipelineEnrichmentTableProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be enrichment_table. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineEnrichmentTableProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineEnrichmentTableProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineEnrichmentTableProcessor + */ + @JsonAnySetter + public ObservabilityPipelineEnrichmentTableProcessor putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineEnrichmentTableProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineEnrichmentTableProcessor observabilityPipelineEnrichmentTableProcessor = + (ObservabilityPipelineEnrichmentTableProcessor) o; + return Objects.equals(this.file, observabilityPipelineEnrichmentTableProcessor.file) + && Objects.equals(this.geoip, observabilityPipelineEnrichmentTableProcessor.geoip) + && Objects.equals(this.id, observabilityPipelineEnrichmentTableProcessor.id) + && Objects.equals(this.include, observabilityPipelineEnrichmentTableProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineEnrichmentTableProcessor.inputs) + && Objects.equals(this.target, observabilityPipelineEnrichmentTableProcessor.target) + && Objects.equals(this.type, observabilityPipelineEnrichmentTableProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineEnrichmentTableProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(file, geoip, id, include, inputs, target, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineEnrichmentTableProcessor {\n"); + sb.append(" file: ").append(toIndentedString(file)).append("\n"); + sb.append(" geoip: ").append(toIndentedString(geoip)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" target: ").append(toIndentedString(target)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object 
to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessorType.java new file mode 100644 index 00000000000..6951d54f610 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineEnrichmentTableProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be enrichment_table. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineEnrichmentTableProcessorType + .ObservabilityPipelineEnrichmentTableProcessorTypeSerializer.class) +public class ObservabilityPipelineEnrichmentTableProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("enrichment_table")); + + public static final ObservabilityPipelineEnrichmentTableProcessorType ENRICHMENT_TABLE = + new ObservabilityPipelineEnrichmentTableProcessorType("enrichment_table"); + + ObservabilityPipelineEnrichmentTableProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineEnrichmentTableProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineEnrichmentTableProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineEnrichmentTableProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineEnrichmentTableProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineEnrichmentTableProcessorType fromValue(String value) { + return new ObservabilityPipelineEnrichmentTableProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSource.java new file mode 100644 index 00000000000..5ec1382b4be --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSource.java @@ -0,0 +1,211 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The fluent_bit source ingests logs from Fluent Bit. */ +@JsonPropertyOrder({ + ObservabilityPipelineFluentBitSource.JSON_PROPERTY_ID, + ObservabilityPipelineFluentBitSource.JSON_PROPERTY_TLS, + ObservabilityPipelineFluentBitSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineFluentBitSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineFluentBitSourceType type = + ObservabilityPipelineFluentBitSourceType.FLUENT_BIT; + + public ObservabilityPipelineFluentBitSource() {} + + @JsonCreator + public ObservabilityPipelineFluentBitSource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineFluentBitSourceType type) { + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineFluentBitSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineFluentBitSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineFluentBitSource type(ObservabilityPipelineFluentBitSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be fluent_bit. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineFluentBitSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineFluentBitSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineFluentBitSource + */ + @JsonAnySetter + public ObservabilityPipelineFluentBitSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineFluentBitSource object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineFluentBitSource observabilityPipelineFluentBitSource = + (ObservabilityPipelineFluentBitSource) o; + return Objects.equals(this.id, observabilityPipelineFluentBitSource.id) + && Objects.equals(this.tls, observabilityPipelineFluentBitSource.tls) + && Objects.equals(this.type, observabilityPipelineFluentBitSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineFluentBitSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineFluentBitSource {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSourceType.java new file mode 100644 index 00000000000..0c3b5d53154 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentBitSourceType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. The value should always be fluent_bit. */ +@JsonSerialize( + using = + ObservabilityPipelineFluentBitSourceType.ObservabilityPipelineFluentBitSourceTypeSerializer + .class) +public class ObservabilityPipelineFluentBitSourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("fluent_bit")); + + public static final ObservabilityPipelineFluentBitSourceType FLUENT_BIT = + new ObservabilityPipelineFluentBitSourceType("fluent_bit"); + + ObservabilityPipelineFluentBitSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineFluentBitSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineFluentBitSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineFluentBitSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineFluentBitSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineFluentBitSourceType fromValue(String value) { + return new ObservabilityPipelineFluentBitSourceType(value); + } +} diff --git 
a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSource.java new file mode 100644 index 00000000000..16111fa18ac --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSource.java @@ -0,0 +1,211 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The fluentd source ingests logs from a Fluentd-compatible service. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineFluentdSource.JSON_PROPERTY_ID, + ObservabilityPipelineFluentdSource.JSON_PROPERTY_TLS, + ObservabilityPipelineFluentdSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineFluentdSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineFluentdSourceType type = + ObservabilityPipelineFluentdSourceType.FLUENTD; + + public ObservabilityPipelineFluentdSource() {} + + @JsonCreator + public ObservabilityPipelineFluentdSource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineFluentdSourceType type) { + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineFluentdSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineFluentdSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. 
+   *
+   * @return tls
+   */
+  @jakarta.annotation.Nullable
+  @JsonProperty(JSON_PROPERTY_TLS)
+  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
+  public ObservabilityPipelineTls getTls() {
+    return tls;
+  }
+
+  public void setTls(ObservabilityPipelineTls tls) {
+    this.tls = tls;
+  }
+
+  public ObservabilityPipelineFluentdSource type(ObservabilityPipelineFluentdSourceType type) {
+    this.type = type;
+    this.unparsed |= !type.isValid();
+    return this;
+  }
+
+  /**
+   * The source type. The value should always be <code>fluentd</code>.
+   *
+   * @return type
+   */
+  @JsonProperty(JSON_PROPERTY_TYPE)
+  @JsonInclude(value = JsonInclude.Include.ALWAYS)
+  public ObservabilityPipelineFluentdSourceType getType() {
+    return type;
+  }
+
+  public void setType(ObservabilityPipelineFluentdSourceType type) {
+    if (!type.isValid()) {
+      this.unparsed = true;
+    }
+    this.type = type;
+  }
+
+  /**
+   * A container for additional, undeclared properties. This is a holder for any undeclared
+   * properties as specified with the 'additionalProperties' keyword in the OAS document.
+   */
+  private Map<String, Object> additionalProperties;
+
+  /**
+   * Set the additional (undeclared) property with the specified name and value. If the property
+   * does not already exist, create it otherwise replace it.
+   *
+   * @param key The arbitrary key to set
+   * @param value The associated value
+   * @return ObservabilityPipelineFluentdSource
+   */
+  @JsonAnySetter
+  public ObservabilityPipelineFluentdSource putAdditionalProperty(String key, Object value) {
+    if (this.additionalProperties == null) {
+      this.additionalProperties = new HashMap<String, Object>();
+    }
+    this.additionalProperties.put(key, value);
+    return this;
+  }
+
+  /**
+   * Return the additional (undeclared) property.
+   *
+   * @return The additional properties
+   */
+  @JsonAnyGetter
+  public Map<String, Object> getAdditionalProperties() {
+    return additionalProperties;
+  }
+
+  /**
+   * Return the additional (undeclared) property with the specified name.
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineFluentdSource object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineFluentdSource observabilityPipelineFluentdSource = + (ObservabilityPipelineFluentdSource) o; + return Objects.equals(this.id, observabilityPipelineFluentdSource.id) + && Objects.equals(this.tls, observabilityPipelineFluentdSource.tls) + && Objects.equals(this.type, observabilityPipelineFluentdSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineFluentdSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineFluentdSource {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+   */
+  private String toIndentedString(Object o) {
+    if (o == null) {
+      return "null";
+    }
+    return o.toString().replace("\n", "\n    ");
+  }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSourceType.java
new file mode 100644
index 00000000000..c2f70b47e55
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineFluentdSourceType.java
@@ -0,0 +1,62 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The source type. The value should always be <code>fluentd</code>.
*/ +@JsonSerialize( + using = + ObservabilityPipelineFluentdSourceType.ObservabilityPipelineFluentdSourceTypeSerializer + .class) +public class ObservabilityPipelineFluentdSourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("fluentd")); + + public static final ObservabilityPipelineFluentdSourceType FLUENTD = + new ObservabilityPipelineFluentdSourceType("fluentd"); + + ObservabilityPipelineFluentdSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineFluentdSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineFluentdSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineFluentdSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineFluentdSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineFluentdSourceType fromValue(String value) { + return new ObservabilityPipelineFluentdSourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGcpAuth.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGcpAuth.java new file mode 100644 index 00000000000..6e65889c195 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGcpAuth.java @@ -0,0 +1,145 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** GCP credentials used to authenticate with Google Cloud Storage. */ +@JsonPropertyOrder({ObservabilityPipelineGcpAuth.JSON_PROPERTY_CREDENTIALS_FILE}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineGcpAuth { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_CREDENTIALS_FILE = "credentials_file"; + private String credentialsFile; + + public ObservabilityPipelineGcpAuth() {} + + @JsonCreator + public ObservabilityPipelineGcpAuth( + @JsonProperty(required = true, value = JSON_PROPERTY_CREDENTIALS_FILE) + String credentialsFile) { + this.credentialsFile = credentialsFile; + } + + public ObservabilityPipelineGcpAuth credentialsFile(String credentialsFile) { + this.credentialsFile = credentialsFile; + return this; + } + + /** + * Path to the GCP service account key file. + * + * @return credentialsFile + */ + @JsonProperty(JSON_PROPERTY_CREDENTIALS_FILE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getCredentialsFile() { + return credentialsFile; + } + + public void setCredentialsFile(String credentialsFile) { + this.credentialsFile = credentialsFile; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. 
+ */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineGcpAuth + */ + @JsonAnySetter + public ObservabilityPipelineGcpAuth putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineGcpAuth object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineGcpAuth observabilityPipelineGcpAuth = (ObservabilityPipelineGcpAuth) o; + return Objects.equals(this.credentialsFile, observabilityPipelineGcpAuth.credentialsFile) + && Objects.equals( + this.additionalProperties, observabilityPipelineGcpAuth.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(credentialsFile, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineGcpAuth {\n"); + sb.append(" credentialsFile: ").append(toIndentedString(credentialsFile)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java new file mode 100644 index 00000000000..9a17ba776eb --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java @@ -0,0 +1,293 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The generate_datadog_metrics processor creates custom metrics from logs and sends + * them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log + * fields. + */ +@JsonPropertyOrder({ + ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_METRICS, + ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineGenerateMetricsProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_METRICS = "metrics"; + private List metrics = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineGenerateMetricsProcessorType type = + ObservabilityPipelineGenerateMetricsProcessorType.GENERATE_DATADOG_METRICS; + + public 
ObservabilityPipelineGenerateMetricsProcessor() {} + + @JsonCreator + public ObservabilityPipelineGenerateMetricsProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_METRICS) + List metrics, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineGenerateMetricsProcessorType type) { + this.id = id; + this.include = include; + this.inputs = inputs; + this.metrics = metrics; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineGenerateMetricsProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineGenerateMetricsProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineGenerateMetricsProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineGenerateMetricsProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this processor. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineGenerateMetricsProcessor metrics( + List metrics) { + this.metrics = metrics; + for (ObservabilityPipelineGeneratedMetric item : metrics) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineGenerateMetricsProcessor addMetricsItem( + ObservabilityPipelineGeneratedMetric metricsItem) { + this.metrics.add(metricsItem); + this.unparsed |= metricsItem.unparsed; + return this; + } + + /** + * Configuration for generating individual metrics. + * + * @return metrics + */ + @JsonProperty(JSON_PROPERTY_METRICS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getMetrics() { + return metrics; + } + + public void setMetrics(List metrics) { + this.metrics = metrics; + } + + public ObservabilityPipelineGenerateMetricsProcessor type( + ObservabilityPipelineGenerateMetricsProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. Always generate_datadog_metrics. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGenerateMetricsProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineGenerateMetricsProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. 
If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineGenerateMetricsProcessor + */ + @JsonAnySetter + public ObservabilityPipelineGenerateMetricsProcessor putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineGenerateMetricsProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineGenerateMetricsProcessor observabilityPipelineGenerateMetricsProcessor = + (ObservabilityPipelineGenerateMetricsProcessor) o; + return Objects.equals(this.id, observabilityPipelineGenerateMetricsProcessor.id) + && Objects.equals(this.include, observabilityPipelineGenerateMetricsProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineGenerateMetricsProcessor.inputs) + && Objects.equals(this.metrics, observabilityPipelineGenerateMetricsProcessor.metrics) + && Objects.equals(this.type, observabilityPipelineGenerateMetricsProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineGenerateMetricsProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, include, inputs, metrics, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineGenerateMetricsProcessor {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" metrics: ").append(toIndentedString(metrics)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessorType.java new file mode 100644 index 00000000000..63639261867 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. Always generate_datadog_metrics. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineGenerateMetricsProcessorType + .ObservabilityPipelineGenerateMetricsProcessorTypeSerializer.class) +public class ObservabilityPipelineGenerateMetricsProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("generate_datadog_metrics")); + + public static final ObservabilityPipelineGenerateMetricsProcessorType GENERATE_DATADOG_METRICS = + new ObservabilityPipelineGenerateMetricsProcessorType("generate_datadog_metrics"); + + ObservabilityPipelineGenerateMetricsProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGenerateMetricsProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineGenerateMetricsProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGenerateMetricsProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGenerateMetricsProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGenerateMetricsProcessorType fromValue(String value) { + return new ObservabilityPipelineGenerateMetricsProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetric.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetric.java new file mode 100644 index 00000000000..ccd6b71a7b7 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetric.java @@ -0,0 +1,281 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Defines a log-based custom metric, including its name, type, filter, value computation strategy, + * and optional grouping fields. + */ +@JsonPropertyOrder({ + ObservabilityPipelineGeneratedMetric.JSON_PROPERTY_GROUP_BY, + ObservabilityPipelineGeneratedMetric.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineGeneratedMetric.JSON_PROPERTY_METRIC_TYPE, + ObservabilityPipelineGeneratedMetric.JSON_PROPERTY_NAME, + ObservabilityPipelineGeneratedMetric.JSON_PROPERTY_VALUE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineGeneratedMetric { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_GROUP_BY = "group_by"; + private List groupBy = null; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_METRIC_TYPE = "metric_type"; + private ObservabilityPipelineGeneratedMetricMetricType metricType; + + public static final String JSON_PROPERTY_NAME = "name"; + private String name; + + public static final String JSON_PROPERTY_VALUE = "value"; + private ObservabilityPipelineMetricValue value; + + public ObservabilityPipelineGeneratedMetric() {} + + @JsonCreator + public ObservabilityPipelineGeneratedMetric( + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + 
@JsonProperty(required = true, value = JSON_PROPERTY_METRIC_TYPE) + ObservabilityPipelineGeneratedMetricMetricType metricType, + @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name, + @JsonProperty(required = true, value = JSON_PROPERTY_VALUE) + ObservabilityPipelineMetricValue value) { + this.include = include; + this.metricType = metricType; + this.unparsed |= !metricType.isValid(); + this.name = name; + this.value = value; + this.unparsed |= value.unparsed; + } + + public ObservabilityPipelineGeneratedMetric groupBy(List groupBy) { + this.groupBy = groupBy; + return this; + } + + public ObservabilityPipelineGeneratedMetric addGroupByItem(String groupByItem) { + if (this.groupBy == null) { + this.groupBy = new ArrayList<>(); + } + this.groupBy.add(groupByItem); + return this; + } + + /** + * Optional fields used to group the metric series. + * + * @return groupBy + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_GROUP_BY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getGroupBy() { + return groupBy; + } + + public void setGroupBy(List groupBy) { + this.groupBy = groupBy; + } + + public ObservabilityPipelineGeneratedMetric include(String include) { + this.include = include; + return this; + } + + /** + * Datadog filter query to match logs for metric generation. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineGeneratedMetric metricType( + ObservabilityPipelineGeneratedMetricMetricType metricType) { + this.metricType = metricType; + this.unparsed |= !metricType.isValid(); + return this; + } + + /** + * Type of metric to create. 
+ * + * @return metricType + */ + @JsonProperty(JSON_PROPERTY_METRIC_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGeneratedMetricMetricType getMetricType() { + return metricType; + } + + public void setMetricType(ObservabilityPipelineGeneratedMetricMetricType metricType) { + if (!metricType.isValid()) { + this.unparsed = true; + } + this.metricType = metricType; + } + + public ObservabilityPipelineGeneratedMetric name(String name) { + this.name = name; + return this; + } + + /** + * Name of the custom metric to be created. + * + * @return name + */ + @JsonProperty(JSON_PROPERTY_NAME) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public ObservabilityPipelineGeneratedMetric value(ObservabilityPipelineMetricValue value) { + this.value = value; + this.unparsed |= value.unparsed; + return this; + } + + /** + * Specifies how the value of the generated metric is computed. + * + * @return value + */ + @JsonProperty(JSON_PROPERTY_VALUE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineMetricValue getValue() { + return value; + } + + public void setValue(ObservabilityPipelineMetricValue value) { + this.value = value; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineGeneratedMetric + */ + @JsonAnySetter + public ObservabilityPipelineGeneratedMetric putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineGeneratedMetric object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineGeneratedMetric observabilityPipelineGeneratedMetric = + (ObservabilityPipelineGeneratedMetric) o; + return Objects.equals(this.groupBy, observabilityPipelineGeneratedMetric.groupBy) + && Objects.equals(this.include, observabilityPipelineGeneratedMetric.include) + && Objects.equals(this.metricType, observabilityPipelineGeneratedMetric.metricType) + && Objects.equals(this.name, observabilityPipelineGeneratedMetric.name) + && Objects.equals(this.value, observabilityPipelineGeneratedMetric.value) + && Objects.equals( + this.additionalProperties, observabilityPipelineGeneratedMetric.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(groupBy, include, metricType, name, value, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineGeneratedMetric {\n"); + sb.append(" groupBy: ").append(toIndentedString(groupBy)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" metricType: ").append(toIndentedString(metricType)).append("\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" value: ").append(toIndentedString(value)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByField.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByField.java new file mode 100644 index 00000000000..a39670e82f2 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByField.java @@ -0,0 +1,188 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Strategy that increments a generated metric based on the value of a log field. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineGeneratedMetricIncrementByField.JSON_PROPERTY_FIELD, + ObservabilityPipelineGeneratedMetricIncrementByField.JSON_PROPERTY_STRATEGY +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineGeneratedMetricIncrementByField { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FIELD = "field"; + private String field; + + public static final String JSON_PROPERTY_STRATEGY = "strategy"; + private ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy strategy; + + public ObservabilityPipelineGeneratedMetricIncrementByField() {} + + @JsonCreator + public ObservabilityPipelineGeneratedMetricIncrementByField( + @JsonProperty(required = true, value = JSON_PROPERTY_FIELD) String field, + @JsonProperty(required = true, value = JSON_PROPERTY_STRATEGY) + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy strategy) { + this.field = field; + this.strategy = strategy; + this.unparsed |= !strategy.isValid(); + } + + public ObservabilityPipelineGeneratedMetricIncrementByField field(String field) { + this.field = field; + return this; + } + + /** + * Name of the log field containing the numeric value to increment the metric by. + * + * @return field + */ + @JsonProperty(JSON_PROPERTY_FIELD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public ObservabilityPipelineGeneratedMetricIncrementByField strategy( + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy strategy) { + this.strategy = strategy; + this.unparsed |= !strategy.isValid(); + return this; + } + + /** + * Uses a numeric field in the log event as the metric increment. 
+ * + * @return strategy + */ + @JsonProperty(JSON_PROPERTY_STRATEGY) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy getStrategy() { + return strategy; + } + + public void setStrategy(ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy strategy) { + if (!strategy.isValid()) { + this.unparsed = true; + } + this.strategy = strategy; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineGeneratedMetricIncrementByField + */ + @JsonAnySetter + public ObservabilityPipelineGeneratedMetricIncrementByField putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineGeneratedMetricIncrementByField object is equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineGeneratedMetricIncrementByField + observabilityPipelineGeneratedMetricIncrementByField = + (ObservabilityPipelineGeneratedMetricIncrementByField) o; + return Objects.equals(this.field, observabilityPipelineGeneratedMetricIncrementByField.field) + && Objects.equals( + this.strategy, observabilityPipelineGeneratedMetricIncrementByField.strategy) + && Objects.equals( + this.additionalProperties, + observabilityPipelineGeneratedMetricIncrementByField.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(field, strategy, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineGeneratedMetricIncrementByField {\n"); + sb.append(" field: ").append(toIndentedString(field)).append("\n"); + sb.append(" strategy: ").append(toIndentedString(strategy)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy.java new file mode 100644 index 00000000000..8b83fe24a38 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy.java @@ -0,0 +1,66 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Uses a numeric field in the log event as the metric increment. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy + .ObservabilityPipelineGeneratedMetricIncrementByFieldStrategySerializer.class) +public class ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy + extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("increment_by_field")); + + public static final ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy + INCREMENT_BY_FIELD = + new ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy("increment_by_field"); + + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGeneratedMetricIncrementByFieldStrategySerializer + extends StdSerializer { + public ObservabilityPipelineGeneratedMetricIncrementByFieldStrategySerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGeneratedMetricIncrementByFieldStrategySerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy fromValue( + String value) { + return new ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByOne.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByOne.java new file mode 100644 index 00000000000..e2b47649648 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByOne.java @@ -0,0 +1,158 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 
License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Strategy that increments a generated metric by one for each matching event. */ +@JsonPropertyOrder({ObservabilityPipelineGeneratedMetricIncrementByOne.JSON_PROPERTY_STRATEGY}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineGeneratedMetricIncrementByOne { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_STRATEGY = "strategy"; + private ObservabilityPipelineGeneratedMetricIncrementByOneStrategy strategy; + + public ObservabilityPipelineGeneratedMetricIncrementByOne() {} + + @JsonCreator + public ObservabilityPipelineGeneratedMetricIncrementByOne( + @JsonProperty(required = true, value = JSON_PROPERTY_STRATEGY) + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy strategy) { + this.strategy = strategy; + this.unparsed |= !strategy.isValid(); + } + + public ObservabilityPipelineGeneratedMetricIncrementByOne strategy( + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy strategy) { + this.strategy = strategy; + this.unparsed |= !strategy.isValid(); + return this; + } + + /** + * Increments the metric by 1 for each matching event. 
+ * + * @return strategy + */ + @JsonProperty(JSON_PROPERTY_STRATEGY) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGeneratedMetricIncrementByOneStrategy getStrategy() { + return strategy; + } + + public void setStrategy(ObservabilityPipelineGeneratedMetricIncrementByOneStrategy strategy) { + if (!strategy.isValid()) { + this.unparsed = true; + } + this.strategy = strategy; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineGeneratedMetricIncrementByOne + */ + @JsonAnySetter + public ObservabilityPipelineGeneratedMetricIncrementByOne putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineGeneratedMetricIncrementByOne object is equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineGeneratedMetricIncrementByOne + observabilityPipelineGeneratedMetricIncrementByOne = + (ObservabilityPipelineGeneratedMetricIncrementByOne) o; + return Objects.equals( + this.strategy, observabilityPipelineGeneratedMetricIncrementByOne.strategy) + && Objects.equals( + this.additionalProperties, + observabilityPipelineGeneratedMetricIncrementByOne.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(strategy, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineGeneratedMetricIncrementByOne {\n"); + sb.append(" strategy: ").append(toIndentedString(strategy)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByOneStrategy.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByOneStrategy.java new file mode 100644 index 00000000000..1f1f47ed1ad --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricIncrementByOneStrategy.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Increments the metric by 1 for each matching event. */ +@JsonSerialize( + using = + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy + .ObservabilityPipelineGeneratedMetricIncrementByOneStrategySerializer.class) +public class ObservabilityPipelineGeneratedMetricIncrementByOneStrategy extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("increment_by_one")); + + public static final ObservabilityPipelineGeneratedMetricIncrementByOneStrategy INCREMENT_BY_ONE = + new ObservabilityPipelineGeneratedMetricIncrementByOneStrategy("increment_by_one"); + + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGeneratedMetricIncrementByOneStrategySerializer + extends StdSerializer { + public ObservabilityPipelineGeneratedMetricIncrementByOneStrategySerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGeneratedMetricIncrementByOneStrategySerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGeneratedMetricIncrementByOneStrategy fromValue(String value) { + return new 
ObservabilityPipelineGeneratedMetricIncrementByOneStrategy(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricMetricType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricMetricType.java new file mode 100644 index 00000000000..62b2d2e15f9 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetricMetricType.java @@ -0,0 +1,67 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Type of metric to create. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineGeneratedMetricMetricType + .ObservabilityPipelineGeneratedMetricMetricTypeSerializer.class) +public class ObservabilityPipelineGeneratedMetricMetricType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("count", "gauge", "distribution")); + + public static final ObservabilityPipelineGeneratedMetricMetricType COUNT = + new ObservabilityPipelineGeneratedMetricMetricType("count"); + public static final ObservabilityPipelineGeneratedMetricMetricType GAUGE = + new ObservabilityPipelineGeneratedMetricMetricType("gauge"); + public static final ObservabilityPipelineGeneratedMetricMetricType DISTRIBUTION = + new ObservabilityPipelineGeneratedMetricMetricType("distribution"); + + ObservabilityPipelineGeneratedMetricMetricType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGeneratedMetricMetricTypeSerializer + extends StdSerializer { + public ObservabilityPipelineGeneratedMetricMetricTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGeneratedMetricMetricTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGeneratedMetricMetricType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGeneratedMetricMetricType fromValue(String value) { + return new ObservabilityPipelineGeneratedMetricMetricType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestination.java new file mode 100644 index 00000000000..59f67cbc635 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestination.java @@ -0,0 +1,340 @@ +/* + * 
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The google_chronicle destination sends logs to Google Chronicle. */ +@JsonPropertyOrder({ + ObservabilityPipelineGoogleChronicleDestination.JSON_PROPERTY_AUTH, + ObservabilityPipelineGoogleChronicleDestination.JSON_PROPERTY_CUSTOMER_ID, + ObservabilityPipelineGoogleChronicleDestination.JSON_PROPERTY_ENCODING, + ObservabilityPipelineGoogleChronicleDestination.JSON_PROPERTY_ID, + ObservabilityPipelineGoogleChronicleDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineGoogleChronicleDestination.JSON_PROPERTY_LOG_TYPE, + ObservabilityPipelineGoogleChronicleDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineGoogleChronicleDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_AUTH = "auth"; + private ObservabilityPipelineGcpAuth auth; + + public static final String JSON_PROPERTY_CUSTOMER_ID = "customer_id"; + private String customerId; + + public static final String JSON_PROPERTY_ENCODING = "encoding"; + private ObservabilityPipelineGoogleChronicleDestinationEncoding encoding; + + public 
static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_LOG_TYPE = "log_type"; + private String logType; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineGoogleChronicleDestinationType type = + ObservabilityPipelineGoogleChronicleDestinationType.GOOGLE_CHRONICLE; + + public ObservabilityPipelineGoogleChronicleDestination() {} + + @JsonCreator + public ObservabilityPipelineGoogleChronicleDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_AUTH) ObservabilityPipelineGcpAuth auth, + @JsonProperty(required = true, value = JSON_PROPERTY_CUSTOMER_ID) String customerId, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineGoogleChronicleDestinationType type) { + this.auth = auth; + this.unparsed |= auth.unparsed; + this.customerId = customerId; + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineGoogleChronicleDestination auth(ObservabilityPipelineGcpAuth auth) { + this.auth = auth; + this.unparsed |= auth.unparsed; + return this; + } + + /** + * GCP credentials used to authenticate with Google Cloud Storage. + * + * @return auth + */ + @JsonProperty(JSON_PROPERTY_AUTH) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGcpAuth getAuth() { + return auth; + } + + public void setAuth(ObservabilityPipelineGcpAuth auth) { + this.auth = auth; + } + + public ObservabilityPipelineGoogleChronicleDestination customerId(String customerId) { + this.customerId = customerId; + return this; + } + + /** + * The Google Chronicle customer ID. 
+ * + * @return customerId + */ + @JsonProperty(JSON_PROPERTY_CUSTOMER_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getCustomerId() { + return customerId; + } + + public void setCustomerId(String customerId) { + this.customerId = customerId; + } + + public ObservabilityPipelineGoogleChronicleDestination encoding( + ObservabilityPipelineGoogleChronicleDestinationEncoding encoding) { + this.encoding = encoding; + this.unparsed |= !encoding.isValid(); + return this; + } + + /** + * The encoding format for the logs sent to Chronicle. + * + * @return encoding + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ENCODING) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineGoogleChronicleDestinationEncoding getEncoding() { + return encoding; + } + + public void setEncoding(ObservabilityPipelineGoogleChronicleDestinationEncoding encoding) { + if (!encoding.isValid()) { + this.unparsed = true; + } + this.encoding = encoding; + } + + public ObservabilityPipelineGoogleChronicleDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineGoogleChronicleDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineGoogleChronicleDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineGoogleChronicleDestination logType(String logType) { + this.logType = logType; + return this; + } + + /** + * The log type metadata associated with the Chronicle destination. + * + * @return logType + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_LOG_TYPE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getLogType() { + return logType; + } + + public void setLogType(String logType) { + this.logType = logType; + } + + public ObservabilityPipelineGoogleChronicleDestination type( + ObservabilityPipelineGoogleChronicleDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be google_chronicle. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGoogleChronicleDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineGoogleChronicleDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineGoogleChronicleDestination + */ + @JsonAnySetter + public ObservabilityPipelineGoogleChronicleDestination putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineGoogleChronicleDestination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineGoogleChronicleDestination + observabilityPipelineGoogleChronicleDestination = + (ObservabilityPipelineGoogleChronicleDestination) o; + return Objects.equals(this.auth, observabilityPipelineGoogleChronicleDestination.auth) + && Objects.equals( + this.customerId, observabilityPipelineGoogleChronicleDestination.customerId) + && Objects.equals(this.encoding, observabilityPipelineGoogleChronicleDestination.encoding) + && Objects.equals(this.id, observabilityPipelineGoogleChronicleDestination.id) + && Objects.equals(this.inputs, observabilityPipelineGoogleChronicleDestination.inputs) + && Objects.equals(this.logType, observabilityPipelineGoogleChronicleDestination.logType) + && Objects.equals(this.type, observabilityPipelineGoogleChronicleDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineGoogleChronicleDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + auth, customerId, encoding, id, inputs, logType, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineGoogleChronicleDestination {\n"); + sb.append(" auth: ").append(toIndentedString(auth)).append("\n"); + sb.append(" customerId: ").append(toIndentedString(customerId)).append("\n"); + sb.append(" encoding: ").append(toIndentedString(encoding)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" logType: ").append(toIndentedString(logType)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + 
sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestinationEncoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestinationEncoding.java new file mode 100644 index 00000000000..e5605fd0748 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestinationEncoding.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The encoding format for the logs sent to Chronicle. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineGoogleChronicleDestinationEncoding + .ObservabilityPipelineGoogleChronicleDestinationEncodingSerializer.class) +public class ObservabilityPipelineGoogleChronicleDestinationEncoding extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("json", "raw_message")); + + public static final ObservabilityPipelineGoogleChronicleDestinationEncoding JSON = + new ObservabilityPipelineGoogleChronicleDestinationEncoding("json"); + public static final ObservabilityPipelineGoogleChronicleDestinationEncoding RAW_MESSAGE = + new ObservabilityPipelineGoogleChronicleDestinationEncoding("raw_message"); + + ObservabilityPipelineGoogleChronicleDestinationEncoding(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGoogleChronicleDestinationEncodingSerializer + extends StdSerializer { + public ObservabilityPipelineGoogleChronicleDestinationEncodingSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGoogleChronicleDestinationEncodingSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGoogleChronicleDestinationEncoding value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGoogleChronicleDestinationEncoding fromValue(String value) { + return new ObservabilityPipelineGoogleChronicleDestinationEncoding(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestinationType.java new file mode 100644 index 00000000000..870eea76225 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleChronicleDestinationType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly 
stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be google_chronicle. */ +@JsonSerialize( + using = + ObservabilityPipelineGoogleChronicleDestinationType + .ObservabilityPipelineGoogleChronicleDestinationTypeSerializer.class) +public class ObservabilityPipelineGoogleChronicleDestinationType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("google_chronicle")); + + public static final ObservabilityPipelineGoogleChronicleDestinationType GOOGLE_CHRONICLE = + new ObservabilityPipelineGoogleChronicleDestinationType("google_chronicle"); + + ObservabilityPipelineGoogleChronicleDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGoogleChronicleDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineGoogleChronicleDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGoogleChronicleDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGoogleChronicleDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + 
} + } + + @JsonCreator + public static ObservabilityPipelineGoogleChronicleDestinationType fromValue(String value) { + return new ObservabilityPipelineGoogleChronicleDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java new file mode 100644 index 00000000000..3a92e0f1f5b --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java @@ -0,0 +1,436 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) + * bucket. It requires a bucket name, GCP authentication, and metadata fields. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_ACL, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_AUTH, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_BUCKET, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_ID, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_KEY_PREFIX, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_METADATA, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_STORAGE_CLASS, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineGoogleCloudStorageDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ACL = "acl"; + private ObservabilityPipelineGoogleCloudStorageDestinationAcl acl; + + public static final String JSON_PROPERTY_AUTH = "auth"; + private ObservabilityPipelineGcpAuth auth; + + public static final String JSON_PROPERTY_BUCKET = "bucket"; + private String bucket; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_KEY_PREFIX = "key_prefix"; + private String keyPrefix; + + public static final String JSON_PROPERTY_METADATA = "metadata"; + private List metadata = new ArrayList<>(); + + public static final String JSON_PROPERTY_STORAGE_CLASS = "storage_class"; + private ObservabilityPipelineGoogleCloudStorageDestinationStorageClass storageClass; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineGoogleCloudStorageDestinationType type = + 
ObservabilityPipelineGoogleCloudStorageDestinationType.GOOGLE_CLOUD_STORAGE; + + public ObservabilityPipelineGoogleCloudStorageDestination() {} + + @JsonCreator + public ObservabilityPipelineGoogleCloudStorageDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ACL) + ObservabilityPipelineGoogleCloudStorageDestinationAcl acl, + @JsonProperty(required = true, value = JSON_PROPERTY_AUTH) ObservabilityPipelineGcpAuth auth, + @JsonProperty(required = true, value = JSON_PROPERTY_BUCKET) String bucket, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_METADATA) + List metadata, + @JsonProperty(required = true, value = JSON_PROPERTY_STORAGE_CLASS) + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass storageClass, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineGoogleCloudStorageDestinationType type) { + this.acl = acl; + this.unparsed |= !acl.isValid(); + this.auth = auth; + this.unparsed |= auth.unparsed; + this.bucket = bucket; + this.id = id; + this.inputs = inputs; + this.metadata = metadata; + this.storageClass = storageClass; + this.unparsed |= !storageClass.isValid(); + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineGoogleCloudStorageDestination acl( + ObservabilityPipelineGoogleCloudStorageDestinationAcl acl) { + this.acl = acl; + this.unparsed |= !acl.isValid(); + return this; + } + + /** + * Access control list setting for objects written to the bucket. 
+ * + * @return acl + */ + @JsonProperty(JSON_PROPERTY_ACL) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGoogleCloudStorageDestinationAcl getAcl() { + return acl; + } + + public void setAcl(ObservabilityPipelineGoogleCloudStorageDestinationAcl acl) { + if (!acl.isValid()) { + this.unparsed = true; + } + this.acl = acl; + } + + public ObservabilityPipelineGoogleCloudStorageDestination auth( + ObservabilityPipelineGcpAuth auth) { + this.auth = auth; + this.unparsed |= auth.unparsed; + return this; + } + + /** + * GCP credentials used to authenticate with Google Cloud Storage. + * + * @return auth + */ + @JsonProperty(JSON_PROPERTY_AUTH) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGcpAuth getAuth() { + return auth; + } + + public void setAuth(ObservabilityPipelineGcpAuth auth) { + this.auth = auth; + } + + public ObservabilityPipelineGoogleCloudStorageDestination bucket(String bucket) { + this.bucket = bucket; + return this; + } + + /** + * Name of the GCS bucket. + * + * @return bucket + */ + @JsonProperty(JSON_PROPERTY_BUCKET) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getBucket() { + return bucket; + } + + public void setBucket(String bucket) { + this.bucket = bucket; + } + + public ObservabilityPipelineGoogleCloudStorageDestination id(String id) { + this.id = id; + return this; + } + + /** + * Unique identifier for the destination component. 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineGoogleCloudStorageDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineGoogleCloudStorageDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineGoogleCloudStorageDestination keyPrefix(String keyPrefix) { + this.keyPrefix = keyPrefix; + return this; + } + + /** + * Optional prefix for object keys within the GCS bucket. + * + * @return keyPrefix + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_KEY_PREFIX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getKeyPrefix() { + return keyPrefix; + } + + public void setKeyPrefix(String keyPrefix) { + this.keyPrefix = keyPrefix; + } + + public ObservabilityPipelineGoogleCloudStorageDestination metadata( + List metadata) { + this.metadata = metadata; + for (ObservabilityPipelineMetadataEntry item : metadata) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineGoogleCloudStorageDestination addMetadataItem( + ObservabilityPipelineMetadataEntry metadataItem) { + this.metadata.add(metadataItem); + this.unparsed |= metadataItem.unparsed; + return this; + } + + /** + * Custom metadata key-value pairs added to each object. 
+ * + * @return metadata + */ + @JsonProperty(JSON_PROPERTY_METADATA) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getMetadata() { + return metadata; + } + + public void setMetadata(List metadata) { + this.metadata = metadata; + } + + public ObservabilityPipelineGoogleCloudStorageDestination storageClass( + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass storageClass) { + this.storageClass = storageClass; + this.unparsed |= !storageClass.isValid(); + return this; + } + + /** + * Storage class used for objects stored in GCS. + * + * @return storageClass + */ + @JsonProperty(JSON_PROPERTY_STORAGE_CLASS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGoogleCloudStorageDestinationStorageClass getStorageClass() { + return storageClass; + } + + public void setStorageClass( + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass storageClass) { + if (!storageClass.isValid()) { + this.unparsed = true; + } + this.storageClass = storageClass; + } + + public ObservabilityPipelineGoogleCloudStorageDestination type( + ObservabilityPipelineGoogleCloudStorageDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. Always google_cloud_storage. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGoogleCloudStorageDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineGoogleCloudStorageDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. 
If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineGoogleCloudStorageDestination + */ + @JsonAnySetter + public ObservabilityPipelineGoogleCloudStorageDestination putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineGoogleCloudStorageDestination object is equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineGoogleCloudStorageDestination + observabilityPipelineGoogleCloudStorageDestination = + (ObservabilityPipelineGoogleCloudStorageDestination) o; + return Objects.equals(this.acl, observabilityPipelineGoogleCloudStorageDestination.acl) + && Objects.equals(this.auth, observabilityPipelineGoogleCloudStorageDestination.auth) + && Objects.equals(this.bucket, observabilityPipelineGoogleCloudStorageDestination.bucket) + && Objects.equals(this.id, observabilityPipelineGoogleCloudStorageDestination.id) + && Objects.equals(this.inputs, observabilityPipelineGoogleCloudStorageDestination.inputs) + && Objects.equals( + this.keyPrefix, observabilityPipelineGoogleCloudStorageDestination.keyPrefix) + && Objects.equals( + this.metadata, observabilityPipelineGoogleCloudStorageDestination.metadata) + && Objects.equals( + this.storageClass, observabilityPipelineGoogleCloudStorageDestination.storageClass) + && Objects.equals(this.type, observabilityPipelineGoogleCloudStorageDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineGoogleCloudStorageDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + acl, + auth, + bucket, + id, + inputs, + keyPrefix, + metadata, + storageClass, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineGoogleCloudStorageDestination {\n"); + sb.append(" acl: ").append(toIndentedString(acl)).append("\n"); + sb.append(" auth: ").append(toIndentedString(auth)).append("\n"); + sb.append(" bucket: ").append(toIndentedString(bucket)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" 
keyPrefix: ").append(toIndentedString(keyPrefix)).append("\n"); + sb.append(" metadata: ").append(toIndentedString(metadata)).append("\n"); + sb.append(" storageClass: ").append(toIndentedString(storageClass)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationAcl.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationAcl.java new file mode 100644 index 00000000000..21213127a0a --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationAcl.java @@ -0,0 +1,82 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Access control list setting for objects written to the bucket. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineGoogleCloudStorageDestinationAcl + .ObservabilityPipelineGoogleCloudStorageDestinationAclSerializer.class) +public class ObservabilityPipelineGoogleCloudStorageDestinationAcl extends ModelEnum { + + private static final Set allowedValues = + new HashSet( + Arrays.asList( + "private", + "project-private", + "public-read", + "authenticated-read", + "bucket-owner-read", + "bucket-owner-full-control")); + + public static final ObservabilityPipelineGoogleCloudStorageDestinationAcl PRIVATE = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("private"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationAcl PROJECTNOT_PRIVATE = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("project-private"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationAcl PUBLICNOT_READ = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("public-read"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationAcl AUTHENTICATEDNOT_READ = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("authenticated-read"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationAcl + BUCKETNOT_OWNERNOT_READ = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("bucket-owner-read"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationAcl + BUCKETNOT_OWNERNOT_FULLNOT_CONTROL = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("bucket-owner-full-control"); + + ObservabilityPipelineGoogleCloudStorageDestinationAcl(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGoogleCloudStorageDestinationAclSerializer + extends StdSerializer { + public ObservabilityPipelineGoogleCloudStorageDestinationAclSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGoogleCloudStorageDestinationAclSerializer() { + this(null); + } + + @Override + public void serialize( + 
ObservabilityPipelineGoogleCloudStorageDestinationAcl value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGoogleCloudStorageDestinationAcl fromValue(String value) { + return new ObservabilityPipelineGoogleCloudStorageDestinationAcl(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationStorageClass.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationStorageClass.java new file mode 100644 index 00000000000..eac7fa16f47 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationStorageClass.java @@ -0,0 +1,71 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Storage class used for objects stored in GCS. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass + .ObservabilityPipelineGoogleCloudStorageDestinationStorageClassSerializer.class) +public class ObservabilityPipelineGoogleCloudStorageDestinationStorageClass + extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("STANDARD", "NEARLINE", "COLDLINE", "ARCHIVE")); + + public static final ObservabilityPipelineGoogleCloudStorageDestinationStorageClass STANDARD = + new ObservabilityPipelineGoogleCloudStorageDestinationStorageClass("STANDARD"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationStorageClass NEARLINE = + new ObservabilityPipelineGoogleCloudStorageDestinationStorageClass("NEARLINE"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationStorageClass COLDLINE = + new ObservabilityPipelineGoogleCloudStorageDestinationStorageClass("COLDLINE"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationStorageClass ARCHIVE = + new ObservabilityPipelineGoogleCloudStorageDestinationStorageClass("ARCHIVE"); + + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGoogleCloudStorageDestinationStorageClassSerializer + extends StdSerializer { + public ObservabilityPipelineGoogleCloudStorageDestinationStorageClassSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGoogleCloudStorageDestinationStorageClassSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGoogleCloudStorageDestinationStorageClass fromValue( + String value) { + return new 
ObservabilityPipelineGoogleCloudStorageDestinationStorageClass(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationType.java new file mode 100644 index 00000000000..85342e037af --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. Always google_cloud_storage. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineGoogleCloudStorageDestinationType + .ObservabilityPipelineGoogleCloudStorageDestinationTypeSerializer.class) +public class ObservabilityPipelineGoogleCloudStorageDestinationType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("google_cloud_storage")); + + public static final ObservabilityPipelineGoogleCloudStorageDestinationType GOOGLE_CLOUD_STORAGE = + new ObservabilityPipelineGoogleCloudStorageDestinationType("google_cloud_storage"); + + ObservabilityPipelineGoogleCloudStorageDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGoogleCloudStorageDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineGoogleCloudStorageDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGoogleCloudStorageDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGoogleCloudStorageDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGoogleCloudStorageDestinationType fromValue(String value) { + return new ObservabilityPipelineGoogleCloudStorageDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSource.java new file mode 100644 index 00000000000..42ef94907a5 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSource.java @@ -0,0 +1,333 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). 
+ * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription. */ +@JsonPropertyOrder({ + ObservabilityPipelineGooglePubSubSource.JSON_PROPERTY_AUTH, + ObservabilityPipelineGooglePubSubSource.JSON_PROPERTY_DECODING, + ObservabilityPipelineGooglePubSubSource.JSON_PROPERTY_ID, + ObservabilityPipelineGooglePubSubSource.JSON_PROPERTY_PROJECT, + ObservabilityPipelineGooglePubSubSource.JSON_PROPERTY_SUBSCRIPTION, + ObservabilityPipelineGooglePubSubSource.JSON_PROPERTY_TLS, + ObservabilityPipelineGooglePubSubSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineGooglePubSubSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_AUTH = "auth"; + private ObservabilityPipelineGcpAuth auth; + + public static final String JSON_PROPERTY_DECODING = "decoding"; + private ObservabilityPipelineDecoding decoding; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_PROJECT = "project"; + private String project; + + public static final String JSON_PROPERTY_SUBSCRIPTION = "subscription"; + private String subscription; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + 
private ObservabilityPipelineGooglePubSubSourceType type = + ObservabilityPipelineGooglePubSubSourceType.GOOGLE_PUBSUB; + + public ObservabilityPipelineGooglePubSubSource() {} + + @JsonCreator + public ObservabilityPipelineGooglePubSubSource( + @JsonProperty(required = true, value = JSON_PROPERTY_AUTH) ObservabilityPipelineGcpAuth auth, + @JsonProperty(required = true, value = JSON_PROPERTY_DECODING) + ObservabilityPipelineDecoding decoding, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_PROJECT) String project, + @JsonProperty(required = true, value = JSON_PROPERTY_SUBSCRIPTION) String subscription, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineGooglePubSubSourceType type) { + this.auth = auth; + this.unparsed |= auth.unparsed; + this.decoding = decoding; + this.unparsed |= !decoding.isValid(); + this.id = id; + this.project = project; + this.subscription = subscription; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineGooglePubSubSource auth(ObservabilityPipelineGcpAuth auth) { + this.auth = auth; + this.unparsed |= auth.unparsed; + return this; + } + + /** + * GCP credentials used to authenticate with Google Cloud Storage. + * + * @return auth + */ + @JsonProperty(JSON_PROPERTY_AUTH) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGcpAuth getAuth() { + return auth; + } + + public void setAuth(ObservabilityPipelineGcpAuth auth) { + this.auth = auth; + } + + public ObservabilityPipelineGooglePubSubSource decoding(ObservabilityPipelineDecoding decoding) { + this.decoding = decoding; + this.unparsed |= !decoding.isValid(); + return this; + } + + /** + * The decoding format used to interpret incoming logs. 
+ * + * @return decoding + */ + @JsonProperty(JSON_PROPERTY_DECODING) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineDecoding getDecoding() { + return decoding; + } + + public void setDecoding(ObservabilityPipelineDecoding decoding) { + if (!decoding.isValid()) { + this.unparsed = true; + } + this.decoding = decoding; + } + + public ObservabilityPipelineGooglePubSubSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineGooglePubSubSource project(String project) { + this.project = project; + return this; + } + + /** + * The GCP project ID that owns the Pub/Sub subscription. + * + * @return project + */ + @JsonProperty(JSON_PROPERTY_PROJECT) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getProject() { + return project; + } + + public void setProject(String project) { + this.project = project; + } + + public ObservabilityPipelineGooglePubSubSource subscription(String subscription) { + this.subscription = subscription; + return this; + } + + /** + * The Pub/Sub subscription name from which messages are consumed. 
+ * + * @return subscription + */ + @JsonProperty(JSON_PROPERTY_SUBSCRIPTION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getSubscription() { + return subscription; + } + + public void setSubscription(String subscription) { + this.subscription = subscription; + } + + public ObservabilityPipelineGooglePubSubSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineGooglePubSubSource type( + ObservabilityPipelineGooglePubSubSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be google_pubsub. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGooglePubSubSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineGooglePubSubSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineGooglePubSubSource + */ + @JsonAnySetter + public ObservabilityPipelineGooglePubSubSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineGooglePubSubSource object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineGooglePubSubSource observabilityPipelineGooglePubSubSource = + (ObservabilityPipelineGooglePubSubSource) o; + return Objects.equals(this.auth, observabilityPipelineGooglePubSubSource.auth) + && Objects.equals(this.decoding, observabilityPipelineGooglePubSubSource.decoding) + && Objects.equals(this.id, observabilityPipelineGooglePubSubSource.id) + && Objects.equals(this.project, observabilityPipelineGooglePubSubSource.project) + && Objects.equals(this.subscription, observabilityPipelineGooglePubSubSource.subscription) + && Objects.equals(this.tls, observabilityPipelineGooglePubSubSource.tls) + && Objects.equals(this.type, observabilityPipelineGooglePubSubSource.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineGooglePubSubSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(auth, decoding, id, project, subscription, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineGooglePubSubSource {\n"); + sb.append(" auth: ").append(toIndentedString(auth)).append("\n"); + sb.append(" decoding: ").append(toIndentedString(decoding)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" project: ").append(toIndentedString(project)).append("\n"); + sb.append(" subscription: ").append(toIndentedString(subscription)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 
spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSourceType.java new file mode 100644 index 00000000000..37a2e37e816 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGooglePubSubSourceType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. The value should always be google_pubsub. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineGooglePubSubSourceType + .ObservabilityPipelineGooglePubSubSourceTypeSerializer.class) +public class ObservabilityPipelineGooglePubSubSourceType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("google_pubsub")); + + public static final ObservabilityPipelineGooglePubSubSourceType GOOGLE_PUBSUB = + new ObservabilityPipelineGooglePubSubSourceType("google_pubsub"); + + ObservabilityPipelineGooglePubSubSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGooglePubSubSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineGooglePubSubSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGooglePubSubSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGooglePubSubSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGooglePubSubSourceType fromValue(String value) { + return new ObservabilityPipelineGooglePubSubSourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSource.java new file mode 100644 index 00000000000..9cabf897e0a --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSource.java @@ -0,0 +1,342 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The http_client source scrapes logs from HTTP endpoints at regular intervals. */ +@JsonPropertyOrder({ + ObservabilityPipelineHttpClientSource.JSON_PROPERTY_AUTH_STRATEGY, + ObservabilityPipelineHttpClientSource.JSON_PROPERTY_DECODING, + ObservabilityPipelineHttpClientSource.JSON_PROPERTY_ID, + ObservabilityPipelineHttpClientSource.JSON_PROPERTY_SCRAPE_INTERVAL_SECS, + ObservabilityPipelineHttpClientSource.JSON_PROPERTY_SCRAPE_TIMEOUT_SECS, + ObservabilityPipelineHttpClientSource.JSON_PROPERTY_TLS, + ObservabilityPipelineHttpClientSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineHttpClientSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_AUTH_STRATEGY = "auth_strategy"; + private ObservabilityPipelineHttpClientSourceAuthStrategy authStrategy; + + public static final String JSON_PROPERTY_DECODING = "decoding"; + private ObservabilityPipelineDecoding decoding; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_SCRAPE_INTERVAL_SECS = "scrape_interval_secs"; + private Long scrapeIntervalSecs; + + public static final String JSON_PROPERTY_SCRAPE_TIMEOUT_SECS = "scrape_timeout_secs"; + private Long scrapeTimeoutSecs; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls 
tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineHttpClientSourceType type = + ObservabilityPipelineHttpClientSourceType.HTTP_CLIENT; + + public ObservabilityPipelineHttpClientSource() {} + + @JsonCreator + public ObservabilityPipelineHttpClientSource( + @JsonProperty(required = true, value = JSON_PROPERTY_DECODING) + ObservabilityPipelineDecoding decoding, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineHttpClientSourceType type) { + this.decoding = decoding; + this.unparsed |= !decoding.isValid(); + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineHttpClientSource authStrategy( + ObservabilityPipelineHttpClientSourceAuthStrategy authStrategy) { + this.authStrategy = authStrategy; + this.unparsed |= !authStrategy.isValid(); + return this; + } + + /** + * Optional authentication strategy for HTTP requests. + * + * @return authStrategy + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_AUTH_STRATEGY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineHttpClientSourceAuthStrategy getAuthStrategy() { + return authStrategy; + } + + public void setAuthStrategy(ObservabilityPipelineHttpClientSourceAuthStrategy authStrategy) { + if (!authStrategy.isValid()) { + this.unparsed = true; + } + this.authStrategy = authStrategy; + } + + public ObservabilityPipelineHttpClientSource decoding(ObservabilityPipelineDecoding decoding) { + this.decoding = decoding; + this.unparsed |= !decoding.isValid(); + return this; + } + + /** + * The decoding format used to interpret incoming logs. 
+ * + * @return decoding + */ + @JsonProperty(JSON_PROPERTY_DECODING) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineDecoding getDecoding() { + return decoding; + } + + public void setDecoding(ObservabilityPipelineDecoding decoding) { + if (!decoding.isValid()) { + this.unparsed = true; + } + this.decoding = decoding; + } + + public ObservabilityPipelineHttpClientSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineHttpClientSource scrapeIntervalSecs(Long scrapeIntervalSecs) { + this.scrapeIntervalSecs = scrapeIntervalSecs; + return this; + } + + /** + * The interval (in seconds) between HTTP scrape requests. + * + * @return scrapeIntervalSecs + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SCRAPE_INTERVAL_SECS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getScrapeIntervalSecs() { + return scrapeIntervalSecs; + } + + public void setScrapeIntervalSecs(Long scrapeIntervalSecs) { + this.scrapeIntervalSecs = scrapeIntervalSecs; + } + + public ObservabilityPipelineHttpClientSource scrapeTimeoutSecs(Long scrapeTimeoutSecs) { + this.scrapeTimeoutSecs = scrapeTimeoutSecs; + return this; + } + + /** + * The timeout (in seconds) for each scrape request. 
+ * + * @return scrapeTimeoutSecs + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SCRAPE_TIMEOUT_SECS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getScrapeTimeoutSecs() { + return scrapeTimeoutSecs; + } + + public void setScrapeTimeoutSecs(Long scrapeTimeoutSecs) { + this.scrapeTimeoutSecs = scrapeTimeoutSecs; + } + + public ObservabilityPipelineHttpClientSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineHttpClientSource type( + ObservabilityPipelineHttpClientSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be http_client. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineHttpClientSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineHttpClientSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineHttpClientSource + */ + @JsonAnySetter + public ObservabilityPipelineHttpClientSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineHttpClientSource object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineHttpClientSource observabilityPipelineHttpClientSource = + (ObservabilityPipelineHttpClientSource) o; + return Objects.equals(this.authStrategy, observabilityPipelineHttpClientSource.authStrategy) + && Objects.equals(this.decoding, observabilityPipelineHttpClientSource.decoding) + && Objects.equals(this.id, observabilityPipelineHttpClientSource.id) + && Objects.equals( + this.scrapeIntervalSecs, observabilityPipelineHttpClientSource.scrapeIntervalSecs) + && Objects.equals( + this.scrapeTimeoutSecs, observabilityPipelineHttpClientSource.scrapeTimeoutSecs) + && Objects.equals(this.tls, observabilityPipelineHttpClientSource.tls) + && Objects.equals(this.type, observabilityPipelineHttpClientSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineHttpClientSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + authStrategy, + decoding, + id, + scrapeIntervalSecs, + scrapeTimeoutSecs, + tls, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineHttpClientSource {\n"); + sb.append(" authStrategy: ").append(toIndentedString(authStrategy)).append("\n"); + sb.append(" decoding: ").append(toIndentedString(decoding)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" scrapeIntervalSecs: ").append(toIndentedString(scrapeIntervalSecs)).append("\n"); + sb.append(" scrapeTimeoutSecs: ").append(toIndentedString(scrapeTimeoutSecs)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + 
sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSourceAuthStrategy.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSourceAuthStrategy.java new file mode 100644 index 00000000000..30e2ad6f59a --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSourceAuthStrategy.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Optional authentication strategy for HTTP requests. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineHttpClientSourceAuthStrategy + .ObservabilityPipelineHttpClientSourceAuthStrategySerializer.class) +public class ObservabilityPipelineHttpClientSourceAuthStrategy extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("basic", "bearer")); + + public static final ObservabilityPipelineHttpClientSourceAuthStrategy BASIC = + new ObservabilityPipelineHttpClientSourceAuthStrategy("basic"); + public static final ObservabilityPipelineHttpClientSourceAuthStrategy BEARER = + new ObservabilityPipelineHttpClientSourceAuthStrategy("bearer"); + + ObservabilityPipelineHttpClientSourceAuthStrategy(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineHttpClientSourceAuthStrategySerializer + extends StdSerializer { + public ObservabilityPipelineHttpClientSourceAuthStrategySerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineHttpClientSourceAuthStrategySerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineHttpClientSourceAuthStrategy value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineHttpClientSourceAuthStrategy fromValue(String value) { + return new ObservabilityPipelineHttpClientSourceAuthStrategy(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSourceType.java new file mode 100644 index 00000000000..75eeeb1e947 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpClientSourceType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. The value should always be http_client. */ +@JsonSerialize( + using = + ObservabilityPipelineHttpClientSourceType + .ObservabilityPipelineHttpClientSourceTypeSerializer.class) +public class ObservabilityPipelineHttpClientSourceType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("http_client")); + + public static final ObservabilityPipelineHttpClientSourceType HTTP_CLIENT = + new ObservabilityPipelineHttpClientSourceType("http_client"); + + ObservabilityPipelineHttpClientSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineHttpClientSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineHttpClientSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineHttpClientSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineHttpClientSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineHttpClientSourceType fromValue(String value) { + return new ObservabilityPipelineHttpClientSourceType(value); + } +} diff --git 
a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSource.java new file mode 100644 index 00000000000..a3b47a71c7e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSource.java @@ -0,0 +1,280 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The http_server source collects logs over HTTP POST from external services. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineHttpServerSource.JSON_PROPERTY_AUTH_STRATEGY, + ObservabilityPipelineHttpServerSource.JSON_PROPERTY_DECODING, + ObservabilityPipelineHttpServerSource.JSON_PROPERTY_ID, + ObservabilityPipelineHttpServerSource.JSON_PROPERTY_TLS, + ObservabilityPipelineHttpServerSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineHttpServerSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_AUTH_STRATEGY = "auth_strategy"; + private ObservabilityPipelineHttpServerSourceAuthStrategy authStrategy; + + public static final String JSON_PROPERTY_DECODING = "decoding"; + private ObservabilityPipelineDecoding decoding; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineHttpServerSourceType type = + ObservabilityPipelineHttpServerSourceType.HTTP_SERVER; + + public ObservabilityPipelineHttpServerSource() {} + + @JsonCreator + public ObservabilityPipelineHttpServerSource( + @JsonProperty(required = true, value = JSON_PROPERTY_AUTH_STRATEGY) + ObservabilityPipelineHttpServerSourceAuthStrategy authStrategy, + @JsonProperty(required = true, value = JSON_PROPERTY_DECODING) + ObservabilityPipelineDecoding decoding, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineHttpServerSourceType type) { + this.authStrategy = authStrategy; + this.unparsed |= !authStrategy.isValid(); + this.decoding = decoding; + this.unparsed |= !decoding.isValid(); + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineHttpServerSource 
authStrategy( + ObservabilityPipelineHttpServerSourceAuthStrategy authStrategy) { + this.authStrategy = authStrategy; + this.unparsed |= !authStrategy.isValid(); + return this; + } + + /** + * HTTP authentication method. + * + * @return authStrategy + */ + @JsonProperty(JSON_PROPERTY_AUTH_STRATEGY) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineHttpServerSourceAuthStrategy getAuthStrategy() { + return authStrategy; + } + + public void setAuthStrategy(ObservabilityPipelineHttpServerSourceAuthStrategy authStrategy) { + if (!authStrategy.isValid()) { + this.unparsed = true; + } + this.authStrategy = authStrategy; + } + + public ObservabilityPipelineHttpServerSource decoding(ObservabilityPipelineDecoding decoding) { + this.decoding = decoding; + this.unparsed |= !decoding.isValid(); + return this; + } + + /** + * The decoding format used to interpret incoming logs. + * + * @return decoding + */ + @JsonProperty(JSON_PROPERTY_DECODING) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineDecoding getDecoding() { + return decoding; + } + + public void setDecoding(ObservabilityPipelineDecoding decoding) { + if (!decoding.isValid()) { + this.unparsed = true; + } + this.decoding = decoding; + } + + public ObservabilityPipelineHttpServerSource id(String id) { + this.id = id; + return this; + } + + /** + * Unique ID for the HTTP server source. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineHttpServerSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. 
+ * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineHttpServerSource type( + ObservabilityPipelineHttpServerSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be http_server. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineHttpServerSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineHttpServerSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineHttpServerSource + */ + @JsonAnySetter + public ObservabilityPipelineHttpServerSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineHttpServerSource object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineHttpServerSource observabilityPipelineHttpServerSource = + (ObservabilityPipelineHttpServerSource) o; + return Objects.equals(this.authStrategy, observabilityPipelineHttpServerSource.authStrategy) + && Objects.equals(this.decoding, observabilityPipelineHttpServerSource.decoding) + && Objects.equals(this.id, observabilityPipelineHttpServerSource.id) + && Objects.equals(this.tls, observabilityPipelineHttpServerSource.tls) + && Objects.equals(this.type, observabilityPipelineHttpServerSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineHttpServerSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(authStrategy, decoding, id, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineHttpServerSource {\n"); + sb.append(" authStrategy: ").append(toIndentedString(authStrategy)).append("\n"); + sb.append(" decoding: ").append(toIndentedString(decoding)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with 
each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSourceAuthStrategy.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSourceAuthStrategy.java new file mode 100644 index 00000000000..9d68310ba83 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSourceAuthStrategy.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** HTTP authentication method. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineHttpServerSourceAuthStrategy + .ObservabilityPipelineHttpServerSourceAuthStrategySerializer.class) +public class ObservabilityPipelineHttpServerSourceAuthStrategy extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("none", "plain")); + + public static final ObservabilityPipelineHttpServerSourceAuthStrategy NONE = + new ObservabilityPipelineHttpServerSourceAuthStrategy("none"); + public static final ObservabilityPipelineHttpServerSourceAuthStrategy PLAIN = + new ObservabilityPipelineHttpServerSourceAuthStrategy("plain"); + + ObservabilityPipelineHttpServerSourceAuthStrategy(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineHttpServerSourceAuthStrategySerializer + extends StdSerializer { + public ObservabilityPipelineHttpServerSourceAuthStrategySerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineHttpServerSourceAuthStrategySerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineHttpServerSourceAuthStrategy value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineHttpServerSourceAuthStrategy fromValue(String value) { + return new ObservabilityPipelineHttpServerSourceAuthStrategy(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSourceType.java new file mode 100644 index 00000000000..7a2872e959e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineHttpServerSourceType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. The value should always be http_server. */ +@JsonSerialize( + using = + ObservabilityPipelineHttpServerSourceType + .ObservabilityPipelineHttpServerSourceTypeSerializer.class) +public class ObservabilityPipelineHttpServerSourceType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("http_server")); + + public static final ObservabilityPipelineHttpServerSourceType HTTP_SERVER = + new ObservabilityPipelineHttpServerSourceType("http_server"); + + ObservabilityPipelineHttpServerSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineHttpServerSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineHttpServerSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineHttpServerSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineHttpServerSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineHttpServerSourceType fromValue(String value) { + return new ObservabilityPipelineHttpServerSourceType(value); + } +} diff --git 
a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java index 6cedbd4e48b..d8c3ea6254d 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineKafkaSource.java @@ -176,7 +176,7 @@ public ObservabilityPipelineKafkaSource tls(ObservabilityPipelineTls tls) { } /** - * Configuration for enabling TLS encryption. + * Configuration for enabling TLS encryption between the pipeline component and external services. * * @return tls */ diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSource.java new file mode 100644 index 00000000000..18a531220ef --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSource.java @@ -0,0 +1,211 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The logstash source ingests logs from a Logstash forwarder. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineLogstashSource.JSON_PROPERTY_ID, + ObservabilityPipelineLogstashSource.JSON_PROPERTY_TLS, + ObservabilityPipelineLogstashSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineLogstashSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineLogstashSourceType type = + ObservabilityPipelineLogstashSourceType.LOGSTASH; + + public ObservabilityPipelineLogstashSource() {} + + @JsonCreator + public ObservabilityPipelineLogstashSource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineLogstashSourceType type) { + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineLogstashSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineLogstashSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. 
+ * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineLogstashSource type(ObservabilityPipelineLogstashSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be logstash. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineLogstashSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineLogstashSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineLogstashSource + */ + @JsonAnySetter + public ObservabilityPipelineLogstashSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineLogstashSource object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineLogstashSource observabilityPipelineLogstashSource = + (ObservabilityPipelineLogstashSource) o; + return Objects.equals(this.id, observabilityPipelineLogstashSource.id) + && Objects.equals(this.tls, observabilityPipelineLogstashSource.tls) + && Objects.equals(this.type, observabilityPipelineLogstashSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineLogstashSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineLogstashSource {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSourceType.java new file mode 100644 index 00000000000..aebea76cbae --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineLogstashSourceType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. The value should always be logstash. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineLogstashSourceType.ObservabilityPipelineLogstashSourceTypeSerializer + .class) +public class ObservabilityPipelineLogstashSourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("logstash")); + + public static final ObservabilityPipelineLogstashSourceType LOGSTASH = + new ObservabilityPipelineLogstashSourceType("logstash"); + + ObservabilityPipelineLogstashSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineLogstashSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineLogstashSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineLogstashSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineLogstashSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineLogstashSourceType fromValue(String value) { + return new ObservabilityPipelineLogstashSourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetadataEntry.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetadataEntry.java new file mode 100644 index 00000000000..9b7d800b53c --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetadataEntry.java @@ -0,0 +1,175 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** A custom metadata entry to attach to each object uploaded to the GCS bucket. */ +@JsonPropertyOrder({ + ObservabilityPipelineMetadataEntry.JSON_PROPERTY_NAME, + ObservabilityPipelineMetadataEntry.JSON_PROPERTY_VALUE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineMetadataEntry { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_NAME = "name"; + private String name; + + public static final String JSON_PROPERTY_VALUE = "value"; + private String value; + + public ObservabilityPipelineMetadataEntry() {} + + @JsonCreator + public ObservabilityPipelineMetadataEntry( + @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name, + @JsonProperty(required = true, value = JSON_PROPERTY_VALUE) String value) { + this.name = name; + this.value = value; + } + + public ObservabilityPipelineMetadataEntry name(String name) { + this.name = name; + return this; + } + + /** + * The metadata key. + * + * @return name + */ + @JsonProperty(JSON_PROPERTY_NAME) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public ObservabilityPipelineMetadataEntry value(String value) { + this.value = value; + return this; + } + + /** + * The metadata value. 
+ * + * @return value + */ + @JsonProperty(JSON_PROPERTY_VALUE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineMetadataEntry + */ + @JsonAnySetter + public ObservabilityPipelineMetadataEntry putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineMetadataEntry object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineMetadataEntry observabilityPipelineMetadataEntry = + (ObservabilityPipelineMetadataEntry) o; + return Objects.equals(this.name, observabilityPipelineMetadataEntry.name) + && Objects.equals(this.value, observabilityPipelineMetadataEntry.value) + && Objects.equals( + this.additionalProperties, observabilityPipelineMetadataEntry.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(name, value, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineMetadataEntry {\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" value: ").append(toIndentedString(value)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricValue.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricValue.java new file mode 100644 index 00000000000..fea24e8cb05 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricValue.java @@ -0,0 +1,325 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.AbstractOpenApiSchema; +import com.datadog.api.client.JSON; +import com.datadog.api.client.UnparsedObject; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import jakarta.ws.rs.core.GenericType; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +@JsonDeserialize( + using = ObservabilityPipelineMetricValue.ObservabilityPipelineMetricValueDeserializer.class) +@JsonSerialize( + using = ObservabilityPipelineMetricValue.ObservabilityPipelineMetricValueSerializer.class) +public class ObservabilityPipelineMetricValue extends AbstractOpenApiSchema { + private static final Logger log = + Logger.getLogger(ObservabilityPipelineMetricValue.class.getName()); + + @JsonIgnore public boolean unparsed = false; + + public static class ObservabilityPipelineMetricValueSerializer + extends 
StdSerializer { + public ObservabilityPipelineMetricValueSerializer(Class t) { + super(t); + } + + public ObservabilityPipelineMetricValueSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineMetricValue value, JsonGenerator jgen, SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.getActualInstance()); + } + } + + public static class ObservabilityPipelineMetricValueDeserializer + extends StdDeserializer { + public ObservabilityPipelineMetricValueDeserializer() { + this(ObservabilityPipelineMetricValue.class); + } + + public ObservabilityPipelineMetricValueDeserializer(Class vc) { + super(vc); + } + + @Override + public ObservabilityPipelineMetricValue deserialize(JsonParser jp, DeserializationContext ctxt) + throws IOException, JsonProcessingException { + JsonNode tree = jp.readValueAsTree(); + Object deserialized = null; + Object tmp = null; + boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); + int match = 0; + JsonToken token = tree.traverse(jp.getCodec()).nextToken(); + // deserialize ObservabilityPipelineGeneratedMetricIncrementByOne + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals(Integer.class) + || ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals(Long.class) + || ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals(Float.class) + || ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals(Double.class) + || ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals(Boolean.class) + || ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals(Integer.class) + || 
ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals( + Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals(Float.class) + || ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals( + Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineGeneratedMetricIncrementByOne.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineGeneratedMetricIncrementByOne.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineGeneratedMetricIncrementByOne) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineGeneratedMetricIncrementByOne'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineGeneratedMetricIncrementByOne'", + e); + } + + // deserialize ObservabilityPipelineGeneratedMetricIncrementByField + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineGeneratedMetricIncrementByField.class.equals(Integer.class) + || ObservabilityPipelineGeneratedMetricIncrementByField.class.equals(Long.class) + || ObservabilityPipelineGeneratedMetricIncrementByField.class.equals(Float.class) + || ObservabilityPipelineGeneratedMetricIncrementByField.class.equals(Double.class) + || ObservabilityPipelineGeneratedMetricIncrementByField.class.equals(Boolean.class) + || ObservabilityPipelineGeneratedMetricIncrementByField.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineGeneratedMetricIncrementByField.class.equals(Integer.class) + || ObservabilityPipelineGeneratedMetricIncrementByField.class.equals( + Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineGeneratedMetricIncrementByField.class.equals(Float.class) + || ObservabilityPipelineGeneratedMetricIncrementByField.class.equals( + Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineGeneratedMetricIncrementByField.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineGeneratedMetricIncrementByField.class.equals(String.class) + && token == 
JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineGeneratedMetricIncrementByField.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineGeneratedMetricIncrementByField) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineGeneratedMetricIncrementByField'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema" + + " 'ObservabilityPipelineGeneratedMetricIncrementByField'", + e); + } + + ObservabilityPipelineMetricValue ret = new ObservabilityPipelineMetricValue(); + if (match == 1) { + ret.setActualInstance(deserialized); + } else { + Map res = + new ObjectMapper() + .readValue( + tree.traverse(jp.getCodec()).readValueAsTree().toString(), + new TypeReference>() {}); + ret.setActualInstance(new UnparsedObject(res)); + } + return ret; + } + + /** Handle deserialization of the 'null' value. 
*/ + @Override + public ObservabilityPipelineMetricValue getNullValue(DeserializationContext ctxt) + throws JsonMappingException { + throw new JsonMappingException( + ctxt.getParser(), "ObservabilityPipelineMetricValue cannot be null"); + } + } + + // store a list of schema names defined in oneOf + public static final Map schemas = new HashMap(); + + public ObservabilityPipelineMetricValue() { + super("oneOf", Boolean.FALSE); + } + + public ObservabilityPipelineMetricValue(ObservabilityPipelineGeneratedMetricIncrementByOne o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineMetricValue(ObservabilityPipelineGeneratedMetricIncrementByField o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + static { + schemas.put( + "ObservabilityPipelineGeneratedMetricIncrementByOne", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineGeneratedMetricIncrementByField", + new GenericType() {}); + JSON.registerDescendants( + ObservabilityPipelineMetricValue.class, Collections.unmodifiableMap(schemas)); + } + + @Override + public Map getSchemas() { + return ObservabilityPipelineMetricValue.schemas; + } + + /** + * Set the instance that matches the oneOf child schema, check the instance parameter is valid + * against the oneOf child schemas: ObservabilityPipelineGeneratedMetricIncrementByOne, + * ObservabilityPipelineGeneratedMetricIncrementByField + * + *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a + * composed schema (allOf, anyOf, oneOf). + */ + @Override + public void setActualInstance(Object instance) { + if (JSON.isInstanceOf( + ObservabilityPipelineGeneratedMetricIncrementByOne.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineGeneratedMetricIncrementByField.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + + if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + throw new RuntimeException( + "Invalid instance type. Must be ObservabilityPipelineGeneratedMetricIncrementByOne," + + " ObservabilityPipelineGeneratedMetricIncrementByField"); + } + + /** + * Get the actual instance, which can be the following: + * ObservabilityPipelineGeneratedMetricIncrementByOne, + * ObservabilityPipelineGeneratedMetricIncrementByField + * + * @return The actual instance (ObservabilityPipelineGeneratedMetricIncrementByOne, + * ObservabilityPipelineGeneratedMetricIncrementByField) + */ + @Override + public Object getActualInstance() { + return super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineGeneratedMetricIncrementByOne`. If the actual + * instance is not `ObservabilityPipelineGeneratedMetricIncrementByOne`, the ClassCastException + * will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineGeneratedMetricIncrementByOne` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineGeneratedMetricIncrementByOne` + */ + public ObservabilityPipelineGeneratedMetricIncrementByOne + getObservabilityPipelineGeneratedMetricIncrementByOne() throws ClassCastException { + return (ObservabilityPipelineGeneratedMetricIncrementByOne) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineGeneratedMetricIncrementByField`. If the + * actual instance is not `ObservabilityPipelineGeneratedMetricIncrementByField`, the + * ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineGeneratedMetricIncrementByField` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineGeneratedMetricIncrementByField` + */ + public ObservabilityPipelineGeneratedMetricIncrementByField + getObservabilityPipelineGeneratedMetricIncrementByField() throws ClassCastException { + return (ObservabilityPipelineGeneratedMetricIncrementByField) super.getActualInstance(); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestination.java new file mode 100644 index 00000000000..014f7edd62a --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestination.java @@ -0,0 +1,254 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The new_relic destination sends logs to the New Relic platform. */ +@JsonPropertyOrder({ + ObservabilityPipelineNewRelicDestination.JSON_PROPERTY_ID, + ObservabilityPipelineNewRelicDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineNewRelicDestination.JSON_PROPERTY_REGION, + ObservabilityPipelineNewRelicDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineNewRelicDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_REGION = "region"; + private ObservabilityPipelineNewRelicDestinationRegion region; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineNewRelicDestinationType type = + ObservabilityPipelineNewRelicDestinationType.NEW_RELIC; + + public ObservabilityPipelineNewRelicDestination() {} + + @JsonCreator + public ObservabilityPipelineNewRelicDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_REGION) + 
ObservabilityPipelineNewRelicDestinationRegion region, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineNewRelicDestinationType type) { + this.id = id; + this.inputs = inputs; + this.region = region; + this.unparsed |= !region.isValid(); + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineNewRelicDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineNewRelicDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineNewRelicDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineNewRelicDestination region( + ObservabilityPipelineNewRelicDestinationRegion region) { + this.region = region; + this.unparsed |= !region.isValid(); + return this; + } + + /** + * The New Relic region. 
+ * + * @return region + */ + @JsonProperty(JSON_PROPERTY_REGION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineNewRelicDestinationRegion getRegion() { + return region; + } + + public void setRegion(ObservabilityPipelineNewRelicDestinationRegion region) { + if (!region.isValid()) { + this.unparsed = true; + } + this.region = region; + } + + public ObservabilityPipelineNewRelicDestination type( + ObservabilityPipelineNewRelicDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be new_relic. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineNewRelicDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineNewRelicDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineNewRelicDestination + */ + @JsonAnySetter + public ObservabilityPipelineNewRelicDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. 
+ * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineNewRelicDestination object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineNewRelicDestination observabilityPipelineNewRelicDestination = + (ObservabilityPipelineNewRelicDestination) o; + return Objects.equals(this.id, observabilityPipelineNewRelicDestination.id) + && Objects.equals(this.inputs, observabilityPipelineNewRelicDestination.inputs) + && Objects.equals(this.region, observabilityPipelineNewRelicDestination.region) + && Objects.equals(this.type, observabilityPipelineNewRelicDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineNewRelicDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, inputs, region, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineNewRelicDestination {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" region: ").append(toIndentedString(region)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return 
sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestinationRegion.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestinationRegion.java new file mode 100644 index 00000000000..2cb71f5dad4 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestinationRegion.java @@ -0,0 +1,64 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The New Relic region. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineNewRelicDestinationRegion + .ObservabilityPipelineNewRelicDestinationRegionSerializer.class) +public class ObservabilityPipelineNewRelicDestinationRegion extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("us", "eu")); + + public static final ObservabilityPipelineNewRelicDestinationRegion US = + new ObservabilityPipelineNewRelicDestinationRegion("us"); + public static final ObservabilityPipelineNewRelicDestinationRegion EU = + new ObservabilityPipelineNewRelicDestinationRegion("eu"); + + ObservabilityPipelineNewRelicDestinationRegion(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineNewRelicDestinationRegionSerializer + extends StdSerializer { + public ObservabilityPipelineNewRelicDestinationRegionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineNewRelicDestinationRegionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineNewRelicDestinationRegion value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineNewRelicDestinationRegion fromValue(String value) { + return new ObservabilityPipelineNewRelicDestinationRegion(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestinationType.java new file mode 100644 index 00000000000..dd35905c4d1 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineNewRelicDestinationType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). 
+ * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be new_relic. */ +@JsonSerialize( + using = + ObservabilityPipelineNewRelicDestinationType + .ObservabilityPipelineNewRelicDestinationTypeSerializer.class) +public class ObservabilityPipelineNewRelicDestinationType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("new_relic")); + + public static final ObservabilityPipelineNewRelicDestinationType NEW_RELIC = + new ObservabilityPipelineNewRelicDestinationType("new_relic"); + + ObservabilityPipelineNewRelicDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineNewRelicDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineNewRelicDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineNewRelicDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineNewRelicDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineNewRelicDestinationType fromValue(String value) { + return new ObservabilityPipelineNewRelicDestinationType(value); + } +} diff --git 
a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessor.java new file mode 100644 index 00000000000..ce5e0b78369 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessor.java @@ -0,0 +1,291 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The ocsf_mapper processor transforms logs into the OCSF schema using a predefined + * mapping configuration. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineOcsfMapperProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineOcsfMapperProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineOcsfMapperProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineOcsfMapperProcessor.JSON_PROPERTY_MAPPINGS, + ObservabilityPipelineOcsfMapperProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineOcsfMapperProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_MAPPINGS = "mappings"; + private List mappings = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineOcsfMapperProcessorType type = + ObservabilityPipelineOcsfMapperProcessorType.OCSF_MAPPER; + + public ObservabilityPipelineOcsfMapperProcessor() {} + + @JsonCreator + public ObservabilityPipelineOcsfMapperProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_MAPPINGS) + List mappings, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineOcsfMapperProcessorType type) { + this.id = id; + this.include = include; + this.inputs = inputs; + this.mappings = mappings; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineOcsfMapperProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this 
component. Used to reference this component in other parts of + * the pipeline. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineOcsfMapperProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineOcsfMapperProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineOcsfMapperProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this processor. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineOcsfMapperProcessor mappings( + List mappings) { + this.mappings = mappings; + for (ObservabilityPipelineOcsfMapperProcessorMapping item : mappings) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineOcsfMapperProcessor addMappingsItem( + ObservabilityPipelineOcsfMapperProcessorMapping mappingsItem) { + this.mappings.add(mappingsItem); + this.unparsed |= mappingsItem.unparsed; + return this; + } + + /** + * A list of mapping rules to convert events to the OCSF format. 
+ * + * @return mappings + */ + @JsonProperty(JSON_PROPERTY_MAPPINGS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getMappings() { + return mappings; + } + + public void setMappings(List mappings) { + this.mappings = mappings; + } + + public ObservabilityPipelineOcsfMapperProcessor type( + ObservabilityPipelineOcsfMapperProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be ocsf_mapper. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineOcsfMapperProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineOcsfMapperProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineOcsfMapperProcessor + */ + @JsonAnySetter + public ObservabilityPipelineOcsfMapperProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineOcsfMapperProcessor object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineOcsfMapperProcessor observabilityPipelineOcsfMapperProcessor = + (ObservabilityPipelineOcsfMapperProcessor) o; + return Objects.equals(this.id, observabilityPipelineOcsfMapperProcessor.id) + && Objects.equals(this.include, observabilityPipelineOcsfMapperProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineOcsfMapperProcessor.inputs) + && Objects.equals(this.mappings, observabilityPipelineOcsfMapperProcessor.mappings) + && Objects.equals(this.type, observabilityPipelineOcsfMapperProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineOcsfMapperProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, include, inputs, mappings, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineOcsfMapperProcessor {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" mappings: ").append(toIndentedString(mappings)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given 
object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessorMapping.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessorMapping.java new file mode 100644 index 00000000000..dc8ff5f743f --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessorMapping.java @@ -0,0 +1,182 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Defines how specific events are transformed to OCSF using a mapping configuration. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineOcsfMapperProcessorMapping.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineOcsfMapperProcessorMapping.JSON_PROPERTY_MAPPING +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineOcsfMapperProcessorMapping { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_MAPPING = "mapping"; + private ObservabilityPipelineOcsfMapperProcessorMappingMapping mapping; + + public ObservabilityPipelineOcsfMapperProcessorMapping() {} + + @JsonCreator + public ObservabilityPipelineOcsfMapperProcessorMapping( + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_MAPPING) + ObservabilityPipelineOcsfMapperProcessorMappingMapping mapping) { + this.include = include; + this.mapping = mapping; + this.unparsed |= mapping.unparsed; + } + + public ObservabilityPipelineOcsfMapperProcessorMapping include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to select the logs that this mapping should apply to. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineOcsfMapperProcessorMapping mapping( + ObservabilityPipelineOcsfMapperProcessorMappingMapping mapping) { + this.mapping = mapping; + this.unparsed |= mapping.unparsed; + return this; + } + + /** + * Defines a single mapping rule for transforming logs into the OCSF schema. 
+ * + * @return mapping + */ + @JsonProperty(JSON_PROPERTY_MAPPING) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineOcsfMapperProcessorMappingMapping getMapping() { + return mapping; + } + + public void setMapping(ObservabilityPipelineOcsfMapperProcessorMappingMapping mapping) { + this.mapping = mapping; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineOcsfMapperProcessorMapping + */ + @JsonAnySetter + public ObservabilityPipelineOcsfMapperProcessorMapping putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineOcsfMapperProcessorMapping object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineOcsfMapperProcessorMapping + observabilityPipelineOcsfMapperProcessorMapping = + (ObservabilityPipelineOcsfMapperProcessorMapping) o; + return Objects.equals(this.include, observabilityPipelineOcsfMapperProcessorMapping.include) + && Objects.equals(this.mapping, observabilityPipelineOcsfMapperProcessorMapping.mapping) + && Objects.equals( + this.additionalProperties, + observabilityPipelineOcsfMapperProcessorMapping.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(include, mapping, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineOcsfMapperProcessorMapping {\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" mapping: ").append(toIndentedString(mapping)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessorMappingMapping.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessorMappingMapping.java new file mode 100644 index 00000000000..437f7ca48ee --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessorMappingMapping.java @@ -0,0 +1,239 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.AbstractOpenApiSchema; +import com.datadog.api.client.JSON; +import com.datadog.api.client.UnparsedObject; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import jakarta.ws.rs.core.GenericType; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +@JsonDeserialize( + using = + ObservabilityPipelineOcsfMapperProcessorMappingMapping + .ObservabilityPipelineOcsfMapperProcessorMappingMappingDeserializer.class) +@JsonSerialize( + using = + ObservabilityPipelineOcsfMapperProcessorMappingMapping + .ObservabilityPipelineOcsfMapperProcessorMappingMappingSerializer.class) +public class ObservabilityPipelineOcsfMapperProcessorMappingMapping extends 
AbstractOpenApiSchema { + private static final Logger log = + Logger.getLogger(ObservabilityPipelineOcsfMapperProcessorMappingMapping.class.getName()); + + @JsonIgnore public boolean unparsed = false; + + public static class ObservabilityPipelineOcsfMapperProcessorMappingMappingSerializer + extends StdSerializer { + public ObservabilityPipelineOcsfMapperProcessorMappingMappingSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineOcsfMapperProcessorMappingMappingSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineOcsfMapperProcessorMappingMapping value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.getActualInstance()); + } + } + + public static class ObservabilityPipelineOcsfMapperProcessorMappingMappingDeserializer + extends StdDeserializer { + public ObservabilityPipelineOcsfMapperProcessorMappingMappingDeserializer() { + this(ObservabilityPipelineOcsfMapperProcessorMappingMapping.class); + } + + public ObservabilityPipelineOcsfMapperProcessorMappingMappingDeserializer(Class vc) { + super(vc); + } + + @Override + public ObservabilityPipelineOcsfMapperProcessorMappingMapping deserialize( + JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { + JsonNode tree = jp.readValueAsTree(); + Object deserialized = null; + Object tmp = null; + boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); + int match = 0; + JsonToken token = tree.traverse(jp.getCodec()).nextToken(); + // deserialize ObservabilityPipelineOcsfMappingLibrary + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineOcsfMappingLibrary.class.equals(Integer.class) + || ObservabilityPipelineOcsfMappingLibrary.class.equals(Long.class) + || ObservabilityPipelineOcsfMappingLibrary.class.equals(Float.class) + || 
ObservabilityPipelineOcsfMappingLibrary.class.equals(Double.class) + || ObservabilityPipelineOcsfMappingLibrary.class.equals(Boolean.class) + || ObservabilityPipelineOcsfMappingLibrary.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineOcsfMappingLibrary.class.equals(Integer.class) + || ObservabilityPipelineOcsfMappingLibrary.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineOcsfMappingLibrary.class.equals(Float.class) + || ObservabilityPipelineOcsfMappingLibrary.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineOcsfMappingLibrary.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineOcsfMappingLibrary.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineOcsfMappingLibrary.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ deserialized = tmp; + match++; + + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineOcsfMappingLibrary'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineOcsfMappingLibrary'", + e); + } + + ObservabilityPipelineOcsfMapperProcessorMappingMapping ret = + new ObservabilityPipelineOcsfMapperProcessorMappingMapping(); + if (match == 1) { + ret.setActualInstance(deserialized); + } else { + Map res = + new ObjectMapper() + .readValue( + tree.traverse(jp.getCodec()).readValueAsTree().toString(), + new TypeReference>() {}); + ret.setActualInstance(new UnparsedObject(res)); + } + return ret; + } + + /** Handle deserialization of the 'null' value. */ + @Override + public ObservabilityPipelineOcsfMapperProcessorMappingMapping getNullValue( + DeserializationContext ctxt) throws JsonMappingException { + throw new JsonMappingException( + ctxt.getParser(), + "ObservabilityPipelineOcsfMapperProcessorMappingMapping cannot be null"); + } + } + + // store a list of schema names defined in oneOf + public static final Map schemas = new HashMap(); + + public ObservabilityPipelineOcsfMapperProcessorMappingMapping() { + super("oneOf", Boolean.FALSE); + } + + public ObservabilityPipelineOcsfMapperProcessorMappingMapping( + ObservabilityPipelineOcsfMappingLibrary o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + static { + schemas.put( + "ObservabilityPipelineOcsfMappingLibrary", + new GenericType() {}); + JSON.registerDescendants( + ObservabilityPipelineOcsfMapperProcessorMappingMapping.class, + Collections.unmodifiableMap(schemas)); + } + + @Override + public Map getSchemas() { + return ObservabilityPipelineOcsfMapperProcessorMappingMapping.schemas; + } + + /** + * Set the instance that matches the oneOf child schema, check the instance parameter is valid + * against the oneOf child schemas: ObservabilityPipelineOcsfMappingLibrary + * + *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a + * composed schema (allOf, anyOf, oneOf). + */ + @Override + public void setActualInstance(Object instance) { + if (JSON.isInstanceOf( + ObservabilityPipelineOcsfMappingLibrary.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + + if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + throw new RuntimeException( + "Invalid instance type. Must be ObservabilityPipelineOcsfMappingLibrary"); + } + + /** + * Get the actual instance, which can be the following: ObservabilityPipelineOcsfMappingLibrary + * + * @return The actual instance (ObservabilityPipelineOcsfMappingLibrary) + */ + @Override + public Object getActualInstance() { + return super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineOcsfMappingLibrary`. If the actual instance is + * not `ObservabilityPipelineOcsfMappingLibrary`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineOcsfMappingLibrary` + * @throws ClassCastException if the instance is not `ObservabilityPipelineOcsfMappingLibrary` + */ + public ObservabilityPipelineOcsfMappingLibrary getObservabilityPipelineOcsfMappingLibrary() + throws ClassCastException { + return (ObservabilityPipelineOcsfMappingLibrary) super.getActualInstance(); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessorType.java new file mode 100644 index 00000000000..d0a5a65856e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMapperProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be ocsf_mapper. */ +@JsonSerialize( + using = + ObservabilityPipelineOcsfMapperProcessorType + .ObservabilityPipelineOcsfMapperProcessorTypeSerializer.class) +public class ObservabilityPipelineOcsfMapperProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("ocsf_mapper")); + + public static final ObservabilityPipelineOcsfMapperProcessorType OCSF_MAPPER = + new ObservabilityPipelineOcsfMapperProcessorType("ocsf_mapper"); + + ObservabilityPipelineOcsfMapperProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineOcsfMapperProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineOcsfMapperProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineOcsfMapperProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineOcsfMapperProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineOcsfMapperProcessorType fromValue(String value) { + return new ObservabilityPipelineOcsfMapperProcessorType(value); + } 
+} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMappingLibrary.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMappingLibrary.java new file mode 100644 index 00000000000..11c7480449c --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOcsfMappingLibrary.java @@ -0,0 +1,96 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Predefined library mappings for common log formats. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineOcsfMappingLibrary.ObservabilityPipelineOcsfMappingLibrarySerializer + .class) +public class ObservabilityPipelineOcsfMappingLibrary extends ModelEnum { + + private static final Set allowedValues = + new HashSet( + Arrays.asList( + "CloudTrail Account Change", + "GCP Cloud Audit CreateBucket", + "GCP Cloud Audit CreateSink", + "GCP Cloud Audit SetIamPolicy", + "GCP Cloud Audit UpdateSink", + "Github Audit Log API Activity", + "Google Workspace Admin Audit addPrivilege", + "Microsoft 365 Defender Incident", + "Microsoft 365 Defender UserLoggedIn", + "Okta System Log Authentication", + "Palo Alto Networks Firewall Traffic")); + + public static final ObservabilityPipelineOcsfMappingLibrary CLOUDTRAIL_ACCOUNT_CHANGE = + new ObservabilityPipelineOcsfMappingLibrary("CloudTrail Account Change"); + public static final ObservabilityPipelineOcsfMappingLibrary GCP_CLOUD_AUDIT_CREATEBUCKET = + new ObservabilityPipelineOcsfMappingLibrary("GCP Cloud Audit CreateBucket"); + public static final ObservabilityPipelineOcsfMappingLibrary GCP_CLOUD_AUDIT_CREATESINK = + new ObservabilityPipelineOcsfMappingLibrary("GCP Cloud Audit CreateSink"); + public static final ObservabilityPipelineOcsfMappingLibrary GCP_CLOUD_AUDIT_SETIAMPOLICY = + new ObservabilityPipelineOcsfMappingLibrary("GCP Cloud Audit SetIamPolicy"); + public static final ObservabilityPipelineOcsfMappingLibrary GCP_CLOUD_AUDIT_UPDATESINK = + new ObservabilityPipelineOcsfMappingLibrary("GCP Cloud Audit UpdateSink"); + public static final ObservabilityPipelineOcsfMappingLibrary GITHUB_AUDIT_LOG_API_ACTIVITY = + new ObservabilityPipelineOcsfMappingLibrary("Github Audit Log API Activity"); + public static final ObservabilityPipelineOcsfMappingLibrary + GOOGLE_WORKSPACE_ADMIN_AUDIT_ADDPRIVILEGE = + new ObservabilityPipelineOcsfMappingLibrary("Google Workspace Admin Audit addPrivilege"); + public static final ObservabilityPipelineOcsfMappingLibrary 
MICROSOFT_365_DEFENDER_INCIDENT = + new ObservabilityPipelineOcsfMappingLibrary("Microsoft 365 Defender Incident"); + public static final ObservabilityPipelineOcsfMappingLibrary MICROSOFT_365_DEFENDER_USERLOGGEDIN = + new ObservabilityPipelineOcsfMappingLibrary("Microsoft 365 Defender UserLoggedIn"); + public static final ObservabilityPipelineOcsfMappingLibrary OKTA_SYSTEM_LOG_AUTHENTICATION = + new ObservabilityPipelineOcsfMappingLibrary("Okta System Log Authentication"); + public static final ObservabilityPipelineOcsfMappingLibrary PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC = + new ObservabilityPipelineOcsfMappingLibrary("Palo Alto Networks Firewall Traffic"); + + ObservabilityPipelineOcsfMappingLibrary(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineOcsfMappingLibrarySerializer + extends StdSerializer { + public ObservabilityPipelineOcsfMappingLibrarySerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineOcsfMappingLibrarySerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineOcsfMappingLibrary value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineOcsfMappingLibrary fromValue(String value) { + return new ObservabilityPipelineOcsfMappingLibrary(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpenSearchDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpenSearchDestination.java new file mode 100644 index 00000000000..b9d0be60441 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpenSearchDestination.java @@ -0,0 +1,247 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The opensearch destination writes logs to an OpenSearch cluster. */ +@JsonPropertyOrder({ + ObservabilityPipelineOpenSearchDestination.JSON_PROPERTY_BULK_INDEX, + ObservabilityPipelineOpenSearchDestination.JSON_PROPERTY_ID, + ObservabilityPipelineOpenSearchDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineOpenSearchDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineOpenSearchDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_BULK_INDEX = "bulk_index"; + private String bulkIndex; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineOpenSearchDestinationType type = + ObservabilityPipelineOpenSearchDestinationType.OPENSEARCH; + + public ObservabilityPipelineOpenSearchDestination() {} + + @JsonCreator + public ObservabilityPipelineOpenSearchDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = 
JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineOpenSearchDestinationType type) { + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineOpenSearchDestination bulkIndex(String bulkIndex) { + this.bulkIndex = bulkIndex; + return this; + } + + /** + * The index to write logs to. + * + * @return bulkIndex + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_BULK_INDEX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getBulkIndex() { + return bulkIndex; + } + + public void setBulkIndex(String bulkIndex) { + this.bulkIndex = bulkIndex; + } + + public ObservabilityPipelineOpenSearchDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineOpenSearchDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineOpenSearchDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineOpenSearchDestination type( + ObservabilityPipelineOpenSearchDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be opensearch. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineOpenSearchDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineOpenSearchDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineOpenSearchDestination + */ + @JsonAnySetter + public ObservabilityPipelineOpenSearchDestination putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineOpenSearchDestination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineOpenSearchDestination observabilityPipelineOpenSearchDestination = + (ObservabilityPipelineOpenSearchDestination) o; + return Objects.equals(this.bulkIndex, observabilityPipelineOpenSearchDestination.bulkIndex) + && Objects.equals(this.id, observabilityPipelineOpenSearchDestination.id) + && Objects.equals(this.inputs, observabilityPipelineOpenSearchDestination.inputs) + && Objects.equals(this.type, observabilityPipelineOpenSearchDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineOpenSearchDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(bulkIndex, id, inputs, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineOpenSearchDestination {\n"); + sb.append(" bulkIndex: ").append(toIndentedString(bulkIndex)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpenSearchDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpenSearchDestinationType.java new file mode 100644 index 00000000000..2870dac8182 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineOpenSearchDestinationType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be opensearch. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineOpenSearchDestinationType + .ObservabilityPipelineOpenSearchDestinationTypeSerializer.class) +public class ObservabilityPipelineOpenSearchDestinationType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("opensearch")); + + public static final ObservabilityPipelineOpenSearchDestinationType OPENSEARCH = + new ObservabilityPipelineOpenSearchDestinationType("opensearch"); + + ObservabilityPipelineOpenSearchDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineOpenSearchDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineOpenSearchDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineOpenSearchDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineOpenSearchDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineOpenSearchDestinationType fromValue(String value) { + return new ObservabilityPipelineOpenSearchDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessor.java new file mode 100644 index 00000000000..4679da1d243 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessor.java @@ -0,0 +1,322 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The parse_grok processor extracts structured fields from unstructured log messages + * using Grok patterns. + */ +@JsonPropertyOrder({ + ObservabilityPipelineParseGrokProcessor.JSON_PROPERTY_DISABLE_LIBRARY_RULES, + ObservabilityPipelineParseGrokProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineParseGrokProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineParseGrokProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineParseGrokProcessor.JSON_PROPERTY_RULES, + ObservabilityPipelineParseGrokProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineParseGrokProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_DISABLE_LIBRARY_RULES = "disable_library_rules"; + private Boolean disableLibraryRules = false; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_RULES = "rules"; + private List rules = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineParseGrokProcessorType type = + 
ObservabilityPipelineParseGrokProcessorType.PARSE_GROK; + + public ObservabilityPipelineParseGrokProcessor() {} + + @JsonCreator + public ObservabilityPipelineParseGrokProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_RULES) + List rules, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineParseGrokProcessorType type) { + this.id = id; + this.include = include; + this.inputs = inputs; + this.rules = rules; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineParseGrokProcessor disableLibraryRules(Boolean disableLibraryRules) { + this.disableLibraryRules = disableLibraryRules; + return this; + } + + /** + * If set to true, disables the default Grok rules provided by Datadog. + * + * @return disableLibraryRules + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_DISABLE_LIBRARY_RULES) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getDisableLibraryRules() { + return disableLibraryRules; + } + + public void setDisableLibraryRules(Boolean disableLibraryRules) { + this.disableLibraryRules = disableLibraryRules; + } + + public ObservabilityPipelineParseGrokProcessor id(String id) { + this.id = id; + return this; + } + + /** + * A unique identifier for this processor. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineParseGrokProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. 
+ * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineParseGrokProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineParseGrokProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineParseGrokProcessor rules( + List rules) { + this.rules = rules; + for (ObservabilityPipelineParseGrokProcessorRule item : rules) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineParseGrokProcessor addRulesItem( + ObservabilityPipelineParseGrokProcessorRule rulesItem) { + this.rules.add(rulesItem); + this.unparsed |= rulesItem.unparsed; + return this; + } + + /** + * The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in + * order. The first successful match is applied. + * + * @return rules + */ + @JsonProperty(JSON_PROPERTY_RULES) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getRules() { + return rules; + } + + public void setRules(List rules) { + this.rules = rules; + } + + public ObservabilityPipelineParseGrokProcessor type( + ObservabilityPipelineParseGrokProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be parse_grok. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineParseGrokProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineParseGrokProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineParseGrokProcessor + */ + @JsonAnySetter + public ObservabilityPipelineParseGrokProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineParseGrokProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineParseGrokProcessor observabilityPipelineParseGrokProcessor = + (ObservabilityPipelineParseGrokProcessor) o; + return Objects.equals( + this.disableLibraryRules, observabilityPipelineParseGrokProcessor.disableLibraryRules) + && Objects.equals(this.id, observabilityPipelineParseGrokProcessor.id) + && Objects.equals(this.include, observabilityPipelineParseGrokProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineParseGrokProcessor.inputs) + && Objects.equals(this.rules, observabilityPipelineParseGrokProcessor.rules) + && Objects.equals(this.type, observabilityPipelineParseGrokProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineParseGrokProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + disableLibraryRules, id, include, inputs, rules, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineParseGrokProcessor {\n"); + sb.append(" disableLibraryRules: ") + .append(toIndentedString(disableLibraryRules)) + .append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" rules: ").append(toIndentedString(rules)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorRule.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorRule.java new file mode 100644 index 00000000000..9d42ccaa1f4 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorRule.java @@ -0,0 +1,238 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * A Grok parsing rule used in the parse_grok processor. Each rule defines how to + * extract structured fields from a specific log field using Grok patterns. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineParseGrokProcessorRule.JSON_PROPERTY_MATCH_RULES, + ObservabilityPipelineParseGrokProcessorRule.JSON_PROPERTY_SOURCE, + ObservabilityPipelineParseGrokProcessorRule.JSON_PROPERTY_SUPPORT_RULES +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineParseGrokProcessorRule { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_MATCH_RULES = "match_rules"; + private List matchRules = new ArrayList<>(); + + public static final String JSON_PROPERTY_SOURCE = "source"; + private String source; + + public static final String JSON_PROPERTY_SUPPORT_RULES = "support_rules"; + private List supportRules = + new ArrayList<>(); + + public ObservabilityPipelineParseGrokProcessorRule() {} + + @JsonCreator + public ObservabilityPipelineParseGrokProcessorRule( + @JsonProperty(required = true, value = JSON_PROPERTY_MATCH_RULES) + List matchRules, + @JsonProperty(required = true, value = JSON_PROPERTY_SOURCE) String source, + @JsonProperty(required = true, value = JSON_PROPERTY_SUPPORT_RULES) + List supportRules) { + this.matchRules = matchRules; + this.source = source; + this.supportRules = supportRules; + } + + public ObservabilityPipelineParseGrokProcessorRule matchRules( + List matchRules) { + this.matchRules = matchRules; + for (ObservabilityPipelineParseGrokProcessorRuleMatchRule item : matchRules) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineParseGrokProcessorRule addMatchRulesItem( + ObservabilityPipelineParseGrokProcessorRuleMatchRule matchRulesItem) { + this.matchRules.add(matchRulesItem); + this.unparsed |= matchRulesItem.unparsed; + return this; + } + + /** + * A list of Grok parsing rules that define how to extract fields from the source field. Each rule + * must contain a name and a valid Grok pattern. 
+ * + * @return matchRules + */ + @JsonProperty(JSON_PROPERTY_MATCH_RULES) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getMatchRules() { + return matchRules; + } + + public void setMatchRules(List matchRules) { + this.matchRules = matchRules; + } + + public ObservabilityPipelineParseGrokProcessorRule source(String source) { + this.source = source; + return this; + } + + /** + * The name of the field in the log event to apply the Grok rules to. + * + * @return source + */ + @JsonProperty(JSON_PROPERTY_SOURCE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public ObservabilityPipelineParseGrokProcessorRule supportRules( + List supportRules) { + this.supportRules = supportRules; + for (ObservabilityPipelineParseGrokProcessorRuleSupportRule item : supportRules) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineParseGrokProcessorRule addSupportRulesItem( + ObservabilityPipelineParseGrokProcessorRuleSupportRule supportRulesItem) { + this.supportRules.add(supportRulesItem); + this.unparsed |= supportRulesItem.unparsed; + return this; + } + + /** + * A list of Grok helper rules that can be referenced by the parsing rules. + * + * @return supportRules + */ + @JsonProperty(JSON_PROPERTY_SUPPORT_RULES) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getSupportRules() { + return supportRules; + } + + public void setSupportRules( + List supportRules) { + this.supportRules = supportRules; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. 
If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineParseGrokProcessorRule + */ + @JsonAnySetter + public ObservabilityPipelineParseGrokProcessorRule putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineParseGrokProcessorRule object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineParseGrokProcessorRule observabilityPipelineParseGrokProcessorRule = + (ObservabilityPipelineParseGrokProcessorRule) o; + return Objects.equals(this.matchRules, observabilityPipelineParseGrokProcessorRule.matchRules) + && Objects.equals(this.source, observabilityPipelineParseGrokProcessorRule.source) + && Objects.equals( + this.supportRules, observabilityPipelineParseGrokProcessorRule.supportRules) + && Objects.equals( + this.additionalProperties, + observabilityPipelineParseGrokProcessorRule.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(matchRules, source, supportRules, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineParseGrokProcessorRule {\n"); + sb.append(" matchRules: ").append(toIndentedString(matchRules)).append("\n"); + sb.append(" source: ").append(toIndentedString(source)).append("\n"); + sb.append(" supportRules: ").append(toIndentedString(supportRules)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorRuleMatchRule.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorRuleMatchRule.java new file mode 100644 index 00000000000..6b5706cdae1 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorRuleMatchRule.java @@ -0,0 +1,184 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * Defines a Grok parsing rule, which extracts structured fields from log content using named Grok + * patterns. Each rule must have a unique name and a valid Datadog Grok pattern that will be applied + * to the source field. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineParseGrokProcessorRuleMatchRule.JSON_PROPERTY_NAME, + ObservabilityPipelineParseGrokProcessorRuleMatchRule.JSON_PROPERTY_RULE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineParseGrokProcessorRuleMatchRule { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_NAME = "name"; + private String name; + + public static final String JSON_PROPERTY_RULE = "rule"; + private String rule; + + public ObservabilityPipelineParseGrokProcessorRuleMatchRule() {} + + @JsonCreator + public ObservabilityPipelineParseGrokProcessorRuleMatchRule( + @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name, + @JsonProperty(required = true, value = JSON_PROPERTY_RULE) String rule) { + this.name = name; + this.rule = rule; + } + + public ObservabilityPipelineParseGrokProcessorRuleMatchRule name(String name) { + this.name = name; + return this; + } + + /** + * The name of the rule. + * + * @return name + */ + @JsonProperty(JSON_PROPERTY_NAME) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public ObservabilityPipelineParseGrokProcessorRuleMatchRule rule(String rule) { + this.rule = rule; + return this; + } + + /** + * The definition of the Grok rule. + * + * @return rule + */ + @JsonProperty(JSON_PROPERTY_RULE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getRule() { + return rule; + } + + public void setRule(String rule) { + this.rule = rule; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. 
+ */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineParseGrokProcessorRuleMatchRule + */ + @JsonAnySetter + public ObservabilityPipelineParseGrokProcessorRuleMatchRule putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineParseGrokProcessorRuleMatchRule object is equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineParseGrokProcessorRuleMatchRule + observabilityPipelineParseGrokProcessorRuleMatchRule = + (ObservabilityPipelineParseGrokProcessorRuleMatchRule) o; + return Objects.equals(this.name, observabilityPipelineParseGrokProcessorRuleMatchRule.name) + && Objects.equals(this.rule, observabilityPipelineParseGrokProcessorRuleMatchRule.rule) + && Objects.equals( + this.additionalProperties, + observabilityPipelineParseGrokProcessorRuleMatchRule.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(name, rule, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineParseGrokProcessorRuleMatchRule {\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" rule: ").append(toIndentedString(rule)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorRuleSupportRule.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorRuleSupportRule.java new file mode 100644 index 00000000000..c5d7bc4d94d --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorRuleSupportRule.java @@ -0,0 +1,181 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The Grok helper rule referenced in the parsing rules. */ +@JsonPropertyOrder({ + ObservabilityPipelineParseGrokProcessorRuleSupportRule.JSON_PROPERTY_NAME, + ObservabilityPipelineParseGrokProcessorRuleSupportRule.JSON_PROPERTY_RULE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineParseGrokProcessorRuleSupportRule { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_NAME = "name"; + private String name; + + public static final String JSON_PROPERTY_RULE = "rule"; + private String rule; + + public ObservabilityPipelineParseGrokProcessorRuleSupportRule() {} + + @JsonCreator + public ObservabilityPipelineParseGrokProcessorRuleSupportRule( + @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name, + @JsonProperty(required = true, value = JSON_PROPERTY_RULE) String rule) { + this.name = name; + this.rule = rule; + } + + public ObservabilityPipelineParseGrokProcessorRuleSupportRule name(String name) { + this.name = name; + return this; + } + + /** + * The name of the Grok helper rule. 
+ * + * @return name + */ + @JsonProperty(JSON_PROPERTY_NAME) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public ObservabilityPipelineParseGrokProcessorRuleSupportRule rule(String rule) { + this.rule = rule; + return this; + } + + /** + * The definition of the Grok helper rule. + * + * @return rule + */ + @JsonProperty(JSON_PROPERTY_RULE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getRule() { + return rule; + } + + public void setRule(String rule) { + this.rule = rule; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineParseGrokProcessorRuleSupportRule + */ + @JsonAnySetter + public ObservabilityPipelineParseGrokProcessorRuleSupportRule putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineParseGrokProcessorRuleSupportRule object is equal to + * o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineParseGrokProcessorRuleSupportRule + observabilityPipelineParseGrokProcessorRuleSupportRule = + (ObservabilityPipelineParseGrokProcessorRuleSupportRule) o; + return Objects.equals(this.name, observabilityPipelineParseGrokProcessorRuleSupportRule.name) + && Objects.equals(this.rule, observabilityPipelineParseGrokProcessorRuleSupportRule.rule) + && Objects.equals( + this.additionalProperties, + observabilityPipelineParseGrokProcessorRuleSupportRule.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(name, rule, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineParseGrokProcessorRuleSupportRule {\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" rule: ").append(toIndentedString(rule)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorType.java new file mode 100644 index 00000000000..381bddb9519 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineParseGrokProcessorType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be parse_grok. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineParseGrokProcessorType + .ObservabilityPipelineParseGrokProcessorTypeSerializer.class) +public class ObservabilityPipelineParseGrokProcessorType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("parse_grok")); + + public static final ObservabilityPipelineParseGrokProcessorType PARSE_GROK = + new ObservabilityPipelineParseGrokProcessorType("parse_grok"); + + ObservabilityPipelineParseGrokProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineParseGrokProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineParseGrokProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineParseGrokProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineParseGrokProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineParseGrokProcessorType fromValue(String value) { + return new ObservabilityPipelineParseGrokProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java index d9101e2ccfc..589a76b0604 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessor.java @@ -31,6 +31,7 @@ ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_INPUTS, ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_LIMIT, ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_NAME, + ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_OVERFLOW_ACTION, ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_OVERRIDES, 
ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_PARTITION_FIELDS, ObservabilityPipelineQuotaProcessor.JSON_PROPERTY_TYPE @@ -61,6 +62,9 @@ public class ObservabilityPipelineQuotaProcessor { public static final String JSON_PROPERTY_NAME = "name"; private String name; + public static final String JSON_PROPERTY_OVERFLOW_ACTION = "overflow_action"; + private ObservabilityPipelineQuotaProcessorOverflowAction overflowAction; + public static final String JSON_PROPERTY_OVERRIDES = "overrides"; private List overrides = null; @@ -234,7 +238,7 @@ public ObservabilityPipelineQuotaProcessor name(String name) { } /** - * Name for identifying the processor. + * Name of the quota. * * @return name */ @@ -248,6 +252,34 @@ public void setName(String name) { this.name = name; } + public ObservabilityPipelineQuotaProcessor overflowAction( + ObservabilityPipelineQuotaProcessorOverflowAction overflowAction) { + this.overflowAction = overflowAction; + this.unparsed |= !overflowAction.isValid(); + return this; + } + + /** + * The action to take when the quota is exceeded. Options: - drop: Drop the event. - + * no_action: Let the event pass through. - overflow_routing: Route to + * an overflow destination. 
+ * + * @return overflowAction + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_OVERFLOW_ACTION) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineQuotaProcessorOverflowAction getOverflowAction() { + return overflowAction; + } + + public void setOverflowAction(ObservabilityPipelineQuotaProcessorOverflowAction overflowAction) { + if (!overflowAction.isValid()) { + this.unparsed = true; + } + this.overflowAction = overflowAction; + } + public ObservabilityPipelineQuotaProcessor overrides( List overrides) { this.overrides = overrides; @@ -404,6 +436,7 @@ public boolean equals(Object o) { && Objects.equals(this.inputs, observabilityPipelineQuotaProcessor.inputs) && Objects.equals(this.limit, observabilityPipelineQuotaProcessor.limit) && Objects.equals(this.name, observabilityPipelineQuotaProcessor.name) + && Objects.equals(this.overflowAction, observabilityPipelineQuotaProcessor.overflowAction) && Objects.equals(this.overrides, observabilityPipelineQuotaProcessor.overrides) && Objects.equals(this.partitionFields, observabilityPipelineQuotaProcessor.partitionFields) && Objects.equals(this.type, observabilityPipelineQuotaProcessor.type) @@ -421,6 +454,7 @@ public int hashCode() { inputs, limit, name, + overflowAction, overrides, partitionFields, type, @@ -440,6 +474,7 @@ public String toString() { sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); sb.append(" limit: ").append(toIndentedString(limit)).append("\n"); sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" overflowAction: ").append(toIndentedString(overflowAction)).append("\n"); sb.append(" overrides: ").append(toIndentedString(overrides)).append("\n"); sb.append(" partitionFields: ").append(toIndentedString(partitionFields)).append("\n"); sb.append(" type: ").append(toIndentedString(type)).append("\n"); diff --git 
a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverflowAction.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverflowAction.java new file mode 100644 index 00000000000..ddb04343a50 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineQuotaProcessorOverflowAction.java @@ -0,0 +1,71 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** + * The action to take when the quota is exceeded. Options: - drop: Drop the event. - + * no_action: Let the event pass through. - overflow_routing: Route to an + * overflow destination. 
+ */ +@JsonSerialize( + using = + ObservabilityPipelineQuotaProcessorOverflowAction + .ObservabilityPipelineQuotaProcessorOverflowActionSerializer.class) +public class ObservabilityPipelineQuotaProcessorOverflowAction extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("drop", "no_action", "overflow_routing")); + + public static final ObservabilityPipelineQuotaProcessorOverflowAction DROP = + new ObservabilityPipelineQuotaProcessorOverflowAction("drop"); + public static final ObservabilityPipelineQuotaProcessorOverflowAction NO_ACTION = + new ObservabilityPipelineQuotaProcessorOverflowAction("no_action"); + public static final ObservabilityPipelineQuotaProcessorOverflowAction OVERFLOW_ROUTING = + new ObservabilityPipelineQuotaProcessorOverflowAction("overflow_routing"); + + ObservabilityPipelineQuotaProcessorOverflowAction(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineQuotaProcessorOverflowActionSerializer + extends StdSerializer { + public ObservabilityPipelineQuotaProcessorOverflowActionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineQuotaProcessorOverflowActionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineQuotaProcessorOverflowAction value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineQuotaProcessorOverflowAction fromValue(String value) { + return new ObservabilityPipelineQuotaProcessorOverflowAction(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessor.java new file mode 100644 index 00000000000..ed70215b4b3 --- /dev/null +++ 
b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessor.java @@ -0,0 +1,324 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The reduce processor aggregates and merges logs based on matching keys and merge + * strategies. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineReduceProcessor.JSON_PROPERTY_GROUP_BY, + ObservabilityPipelineReduceProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineReduceProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineReduceProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineReduceProcessor.JSON_PROPERTY_MERGE_STRATEGIES, + ObservabilityPipelineReduceProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineReduceProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_GROUP_BY = "group_by"; + private List groupBy = new ArrayList<>(); + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_MERGE_STRATEGIES = "merge_strategies"; + private List mergeStrategies = + new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineReduceProcessorType type = + ObservabilityPipelineReduceProcessorType.REDUCE; + + public ObservabilityPipelineReduceProcessor() {} + + @JsonCreator + public ObservabilityPipelineReduceProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_GROUP_BY) List groupBy, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_MERGE_STRATEGIES) + List mergeStrategies, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineReduceProcessorType type) { + this.groupBy = groupBy; + this.id = id; + 
this.include = include; + this.inputs = inputs; + this.mergeStrategies = mergeStrategies; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineReduceProcessor groupBy(List groupBy) { + this.groupBy = groupBy; + return this; + } + + public ObservabilityPipelineReduceProcessor addGroupByItem(String groupByItem) { + this.groupBy.add(groupByItem); + return this; + } + + /** + * A list of fields used to group log events for merging. + * + * @return groupBy + */ + @JsonProperty(JSON_PROPERTY_GROUP_BY) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getGroupBy() { + return groupBy; + } + + public void setGroupBy(List groupBy) { + this.groupBy = groupBy; + } + + public ObservabilityPipelineReduceProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this processor. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineReduceProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineReduceProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineReduceProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this processor. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineReduceProcessor mergeStrategies( + List mergeStrategies) { + this.mergeStrategies = mergeStrategies; + for (ObservabilityPipelineReduceProcessorMergeStrategy item : mergeStrategies) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineReduceProcessor addMergeStrategiesItem( + ObservabilityPipelineReduceProcessorMergeStrategy mergeStrategiesItem) { + this.mergeStrategies.add(mergeStrategiesItem); + this.unparsed |= mergeStrategiesItem.unparsed; + return this; + } + + /** + * List of merge strategies defining how values from grouped events should be combined. + * + * @return mergeStrategies + */ + @JsonProperty(JSON_PROPERTY_MERGE_STRATEGIES) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getMergeStrategies() { + return mergeStrategies; + } + + public void setMergeStrategies( + List mergeStrategies) { + this.mergeStrategies = mergeStrategies; + } + + public ObservabilityPipelineReduceProcessor type(ObservabilityPipelineReduceProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be reduce. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineReduceProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineReduceProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. 
+ */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineReduceProcessor + */ + @JsonAnySetter + public ObservabilityPipelineReduceProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineReduceProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineReduceProcessor observabilityPipelineReduceProcessor = + (ObservabilityPipelineReduceProcessor) o; + return Objects.equals(this.groupBy, observabilityPipelineReduceProcessor.groupBy) + && Objects.equals(this.id, observabilityPipelineReduceProcessor.id) + && Objects.equals(this.include, observabilityPipelineReduceProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineReduceProcessor.inputs) + && Objects.equals( + this.mergeStrategies, observabilityPipelineReduceProcessor.mergeStrategies) + && Objects.equals(this.type, observabilityPipelineReduceProcessor.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineReduceProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(groupBy, id, include, inputs, mergeStrategies, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineReduceProcessor {\n"); + sb.append(" groupBy: ").append(toIndentedString(groupBy)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" mergeStrategies: ").append(toIndentedString(mergeStrategies)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessorMergeStrategy.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessorMergeStrategy.java new file mode 100644 index 00000000000..64f910fff0e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessorMergeStrategy.java @@ -0,0 +1,185 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Defines how a specific field should be merged across grouped events. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineReduceProcessorMergeStrategy.JSON_PROPERTY_PATH, + ObservabilityPipelineReduceProcessorMergeStrategy.JSON_PROPERTY_STRATEGY +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineReduceProcessorMergeStrategy { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_PATH = "path"; + private String path; + + public static final String JSON_PROPERTY_STRATEGY = "strategy"; + private ObservabilityPipelineReduceProcessorMergeStrategyStrategy strategy; + + public ObservabilityPipelineReduceProcessorMergeStrategy() {} + + @JsonCreator + public ObservabilityPipelineReduceProcessorMergeStrategy( + @JsonProperty(required = true, value = JSON_PROPERTY_PATH) String path, + @JsonProperty(required = true, value = JSON_PROPERTY_STRATEGY) + ObservabilityPipelineReduceProcessorMergeStrategyStrategy strategy) { + this.path = path; + this.strategy = strategy; + this.unparsed |= !strategy.isValid(); + } + + public ObservabilityPipelineReduceProcessorMergeStrategy path(String path) { + this.path = path; + return this; + } + + /** + * The field path in the log event. + * + * @return path + */ + @JsonProperty(JSON_PROPERTY_PATH) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + public ObservabilityPipelineReduceProcessorMergeStrategy strategy( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy strategy) { + this.strategy = strategy; + this.unparsed |= !strategy.isValid(); + return this; + } + + /** + * The merge strategy to apply. 
+ * + * @return strategy + */ + @JsonProperty(JSON_PROPERTY_STRATEGY) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineReduceProcessorMergeStrategyStrategy getStrategy() { + return strategy; + } + + public void setStrategy(ObservabilityPipelineReduceProcessorMergeStrategyStrategy strategy) { + if (!strategy.isValid()) { + this.unparsed = true; + } + this.strategy = strategy; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineReduceProcessorMergeStrategy + */ + @JsonAnySetter + public ObservabilityPipelineReduceProcessorMergeStrategy putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineReduceProcessorMergeStrategy object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineReduceProcessorMergeStrategy + observabilityPipelineReduceProcessorMergeStrategy = + (ObservabilityPipelineReduceProcessorMergeStrategy) o; + return Objects.equals(this.path, observabilityPipelineReduceProcessorMergeStrategy.path) + && Objects.equals(this.strategy, observabilityPipelineReduceProcessorMergeStrategy.strategy) + && Objects.equals( + this.additionalProperties, + observabilityPipelineReduceProcessorMergeStrategy.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(path, strategy, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineReduceProcessorMergeStrategy {\n"); + sb.append(" path: ").append(toIndentedString(path)).append("\n"); + sb.append(" strategy: ").append(toIndentedString(strategy)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessorMergeStrategyStrategy.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessorMergeStrategyStrategy.java new file mode 100644 index 00000000000..59ea0297894 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessorMergeStrategyStrategy.java @@ -0,0 +1,98 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The merge strategy to apply. */ +@JsonSerialize( + using = + ObservabilityPipelineReduceProcessorMergeStrategyStrategy + .ObservabilityPipelineReduceProcessorMergeStrategyStrategySerializer.class) +public class ObservabilityPipelineReduceProcessorMergeStrategyStrategy extends ModelEnum { + + private static final Set allowedValues = + new HashSet( + Arrays.asList( + "discard", + "retain", + "sum", + "max", + "min", + "array", + "concat", + "concat_newline", + "concat_raw", + "shortest_array", + "longest_array", + "flat_unique")); + + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy DISCARD = + new ObservabilityPipelineReduceProcessorMergeStrategyStrategy("discard"); + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy RETAIN = + new ObservabilityPipelineReduceProcessorMergeStrategyStrategy("retain"); + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy SUM = + new ObservabilityPipelineReduceProcessorMergeStrategyStrategy("sum"); + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy MAX = + new ObservabilityPipelineReduceProcessorMergeStrategyStrategy("max"); + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy MIN = + new 
ObservabilityPipelineReduceProcessorMergeStrategyStrategy("min"); + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy ARRAY = + new ObservabilityPipelineReduceProcessorMergeStrategyStrategy("array"); + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy CONCAT = + new ObservabilityPipelineReduceProcessorMergeStrategyStrategy("concat"); + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy CONCAT_NEWLINE = + new ObservabilityPipelineReduceProcessorMergeStrategyStrategy("concat_newline"); + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy CONCAT_RAW = + new ObservabilityPipelineReduceProcessorMergeStrategyStrategy("concat_raw"); + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy SHORTEST_ARRAY = + new ObservabilityPipelineReduceProcessorMergeStrategyStrategy("shortest_array"); + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy LONGEST_ARRAY = + new ObservabilityPipelineReduceProcessorMergeStrategyStrategy("longest_array"); + public static final ObservabilityPipelineReduceProcessorMergeStrategyStrategy FLAT_UNIQUE = + new ObservabilityPipelineReduceProcessorMergeStrategyStrategy("flat_unique"); + + ObservabilityPipelineReduceProcessorMergeStrategyStrategy(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineReduceProcessorMergeStrategyStrategySerializer + extends StdSerializer { + public ObservabilityPipelineReduceProcessorMergeStrategyStrategySerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineReduceProcessorMergeStrategyStrategySerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static 
ObservabilityPipelineReduceProcessorMergeStrategyStrategy fromValue(String value) {
+    return new ObservabilityPipelineReduceProcessorMergeStrategyStrategy(value);
+  }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessorType.java
new file mode 100644
index 00000000000..0013f4487f5
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineReduceProcessorType.java
@@ -0,0 +1,62 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The processor type. The value should always be <code>reduce</code>. */
+@JsonSerialize(
+    using =
+        ObservabilityPipelineReduceProcessorType.ObservabilityPipelineReduceProcessorTypeSerializer
+            .class)
+public class ObservabilityPipelineReduceProcessorType extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues = new HashSet<String>(Arrays.asList("reduce"));
+
+  public static final ObservabilityPipelineReduceProcessorType REDUCE =
+      new ObservabilityPipelineReduceProcessorType("reduce");
+
+  ObservabilityPipelineReduceProcessorType(String value) {
+    super(value, allowedValues);
+  }
+
+  public static class ObservabilityPipelineReduceProcessorTypeSerializer
+      extends StdSerializer<ObservabilityPipelineReduceProcessorType> {
+    public ObservabilityPipelineReduceProcessorTypeSerializer(
+        Class<ObservabilityPipelineReduceProcessorType> t) {
+      super(t);
+    }
+
+    public ObservabilityPipelineReduceProcessorTypeSerializer() {
+      this(null);
+    }
+
+    @Override
+    public void serialize(
+        ObservabilityPipelineReduceProcessorType value,
+        JsonGenerator jgen,
+        SerializerProvider provider)
+        throws IOException, JsonProcessingException {
+      jgen.writeObject(value.value);
+    }
+  }
+
+  @JsonCreator
+  public static ObservabilityPipelineReduceProcessorType fromValue(String value) {
+    return new ObservabilityPipelineReduceProcessorType(value);
+  }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogDestination.java
new file mode 100644
index 00000000000..3a2cd8d24a8
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogDestination.java
@@ -0,0 +1,277 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The rsyslog destination forwards logs to an external rsyslog server + * over TCP or UDP using the syslog protocol. + */ +@JsonPropertyOrder({ + ObservabilityPipelineRsyslogDestination.JSON_PROPERTY_ID, + ObservabilityPipelineRsyslogDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineRsyslogDestination.JSON_PROPERTY_KEEPALIVE, + ObservabilityPipelineRsyslogDestination.JSON_PROPERTY_TLS, + ObservabilityPipelineRsyslogDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineRsyslogDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_KEEPALIVE = "keepalive"; + private Long keepalive; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineRsyslogDestinationType type = + ObservabilityPipelineRsyslogDestinationType.RSYSLOG; + + public ObservabilityPipelineRsyslogDestination() {} + + @JsonCreator + public ObservabilityPipelineRsyslogDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) 
String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineRsyslogDestinationType type) { + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineRsyslogDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineRsyslogDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineRsyslogDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineRsyslogDestination keepalive(Long keepalive) { + this.keepalive = keepalive; + return this; + } + + /** + * Optional socket keepalive duration in milliseconds. 
minimum: 0 + * + * @return keepalive + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_KEEPALIVE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getKeepalive() { + return keepalive; + } + + public void setKeepalive(Long keepalive) { + this.keepalive = keepalive; + } + + public ObservabilityPipelineRsyslogDestination tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineRsyslogDestination type( + ObservabilityPipelineRsyslogDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be rsyslog. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineRsyslogDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineRsyslogDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineRsyslogDestination + */ + @JsonAnySetter + public ObservabilityPipelineRsyslogDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineRsyslogDestination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineRsyslogDestination observabilityPipelineRsyslogDestination = + (ObservabilityPipelineRsyslogDestination) o; + return Objects.equals(this.id, observabilityPipelineRsyslogDestination.id) + && Objects.equals(this.inputs, observabilityPipelineRsyslogDestination.inputs) + && Objects.equals(this.keepalive, observabilityPipelineRsyslogDestination.keepalive) + && Objects.equals(this.tls, observabilityPipelineRsyslogDestination.tls) + && Objects.equals(this.type, observabilityPipelineRsyslogDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineRsyslogDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, inputs, keepalive, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineRsyslogDestination {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" keepalive: ").append(toIndentedString(keepalive)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+   */
+  private String toIndentedString(Object o) {
+    if (o == null) {
+      return "null";
+    }
+    return o.toString().replace("\n", "\n    ");
+  }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogDestinationType.java
new file mode 100644
index 00000000000..ae603976d82
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogDestinationType.java
@@ -0,0 +1,62 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */
+
+package com.datadog.api.client.v2.model;
+
+import com.datadog.api.client.ModelEnum;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/** The destination type. The value should always be <code>rsyslog</code>. */
+@JsonSerialize(
+    using =
+        ObservabilityPipelineRsyslogDestinationType
+            .ObservabilityPipelineRsyslogDestinationTypeSerializer.class)
+public class ObservabilityPipelineRsyslogDestinationType extends ModelEnum<String> {
+
+  private static final Set<String> allowedValues = new HashSet<String>(Arrays.asList("rsyslog"));
+
+  public static final ObservabilityPipelineRsyslogDestinationType RSYSLOG =
+      new ObservabilityPipelineRsyslogDestinationType("rsyslog");
+
+  ObservabilityPipelineRsyslogDestinationType(String value) {
+    super(value, allowedValues);
+  }
+
+  public static class ObservabilityPipelineRsyslogDestinationTypeSerializer
+      extends StdSerializer<ObservabilityPipelineRsyslogDestinationType> {
+    public ObservabilityPipelineRsyslogDestinationTypeSerializer(
+        Class<ObservabilityPipelineRsyslogDestinationType> t) {
+      super(t);
+    }
+
+    public ObservabilityPipelineRsyslogDestinationTypeSerializer() {
+      this(null);
+    }
+
+    @Override
+    public void serialize(
+        ObservabilityPipelineRsyslogDestinationType value,
+        JsonGenerator jgen,
+        SerializerProvider provider)
+        throws IOException, JsonProcessingException {
+      jgen.writeObject(value.value);
+    }
+  }
+
+  @JsonCreator
+  public static ObservabilityPipelineRsyslogDestinationType fromValue(String value) {
+    return new ObservabilityPipelineRsyslogDestinationType(value);
+  }
+}
diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogSource.java
new file mode 100644
index 00000000000..ecf24cdb4f3
--- /dev/null
+++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogSource.java
@@ -0,0 +1,248 @@
+/*
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ * This product includes software developed at Datadog (https://www.datadoghq.com/).
+ * Copyright 2019-Present Datadog, Inc.
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The rsyslog source listens for logs over TCP or UDP from an rsyslog + * server using the syslog protocol. + */ +@JsonPropertyOrder({ + ObservabilityPipelineRsyslogSource.JSON_PROPERTY_ID, + ObservabilityPipelineRsyslogSource.JSON_PROPERTY_MODE, + ObservabilityPipelineRsyslogSource.JSON_PROPERTY_TLS, + ObservabilityPipelineRsyslogSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineRsyslogSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_MODE = "mode"; + private ObservabilityPipelineSyslogSourceMode mode; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineRsyslogSourceType type = + ObservabilityPipelineRsyslogSourceType.RSYSLOG; + + public ObservabilityPipelineRsyslogSource() {} + + @JsonCreator + public ObservabilityPipelineRsyslogSource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_MODE) + ObservabilityPipelineSyslogSourceMode mode, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineRsyslogSourceType type) { + this.id = id; + this.mode = mode; + 
this.unparsed |= !mode.isValid(); + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineRsyslogSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineRsyslogSource mode(ObservabilityPipelineSyslogSourceMode mode) { + this.mode = mode; + this.unparsed |= !mode.isValid(); + return this; + } + + /** + * Protocol used by the syslog source to receive messages. + * + * @return mode + */ + @JsonProperty(JSON_PROPERTY_MODE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSyslogSourceMode getMode() { + return mode; + } + + public void setMode(ObservabilityPipelineSyslogSourceMode mode) { + if (!mode.isValid()) { + this.unparsed = true; + } + this.mode = mode; + } + + public ObservabilityPipelineRsyslogSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineRsyslogSource type(ObservabilityPipelineRsyslogSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be rsyslog. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineRsyslogSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineRsyslogSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineRsyslogSource + */ + @JsonAnySetter + public ObservabilityPipelineRsyslogSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineRsyslogSource object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineRsyslogSource observabilityPipelineRsyslogSource = + (ObservabilityPipelineRsyslogSource) o; + return Objects.equals(this.id, observabilityPipelineRsyslogSource.id) + && Objects.equals(this.mode, observabilityPipelineRsyslogSource.mode) + && Objects.equals(this.tls, observabilityPipelineRsyslogSource.tls) + && Objects.equals(this.type, observabilityPipelineRsyslogSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineRsyslogSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, mode, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineRsyslogSource {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" mode: ").append(toIndentedString(mode)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogSourceType.java new file mode 100644 index 00000000000..9b71c382a5b --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineRsyslogSourceType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. The value should always be rsyslog. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineRsyslogSourceType.ObservabilityPipelineRsyslogSourceTypeSerializer + .class) +public class ObservabilityPipelineRsyslogSourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("rsyslog")); + + public static final ObservabilityPipelineRsyslogSourceType RSYSLOG = + new ObservabilityPipelineRsyslogSourceType("rsyslog"); + + ObservabilityPipelineRsyslogSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineRsyslogSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineRsyslogSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineRsyslogSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineRsyslogSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineRsyslogSourceType fromValue(String value) { + return new ObservabilityPipelineRsyslogSourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessor.java new file mode 100644 index 00000000000..c865cfaf352 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessor.java @@ -0,0 +1,300 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The sample processor allows probabilistic sampling of logs at a fixed rate. */ +@JsonPropertyOrder({ + ObservabilityPipelineSampleProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineSampleProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineSampleProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineSampleProcessor.JSON_PROPERTY_PERCENTAGE, + ObservabilityPipelineSampleProcessor.JSON_PROPERTY_RATE, + ObservabilityPipelineSampleProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSampleProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_PERCENTAGE = "percentage"; + private Double percentage; + + public static final String JSON_PROPERTY_RATE = "rate"; + private Long rate; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSampleProcessorType type = + ObservabilityPipelineSampleProcessorType.SAMPLE; + + public ObservabilityPipelineSampleProcessor() {} + + @JsonCreator + public 
ObservabilityPipelineSampleProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSampleProcessorType type) { + this.id = id; + this.include = include; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSampleProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (for example, as the input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSampleProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineSampleProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineSampleProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineSampleProcessor percentage(Double percentage) { + this.percentage = percentage; + return this; + } + + /** + * The percentage of logs to sample. + * + * @return percentage + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_PERCENTAGE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Double getPercentage() { + return percentage; + } + + public void setPercentage(Double percentage) { + this.percentage = percentage; + } + + public ObservabilityPipelineSampleProcessor rate(Long rate) { + this.rate = rate; + return this; + } + + /** + * Number of events to sample (1 in N). minimum: 1 + * + * @return rate + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_RATE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getRate() { + return rate; + } + + public void setRate(Long rate) { + this.rate = rate; + } + + public ObservabilityPipelineSampleProcessor type(ObservabilityPipelineSampleProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be sample. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSampleProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineSampleProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. 
+ */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSampleProcessor + */ + @JsonAnySetter + public ObservabilityPipelineSampleProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSampleProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSampleProcessor observabilityPipelineSampleProcessor = + (ObservabilityPipelineSampleProcessor) o; + return Objects.equals(this.id, observabilityPipelineSampleProcessor.id) + && Objects.equals(this.include, observabilityPipelineSampleProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineSampleProcessor.inputs) + && Objects.equals(this.percentage, observabilityPipelineSampleProcessor.percentage) + && Objects.equals(this.rate, observabilityPipelineSampleProcessor.rate) + && Objects.equals(this.type, observabilityPipelineSampleProcessor.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineSampleProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, include, inputs, percentage, rate, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSampleProcessor {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" percentage: ").append(toIndentedString(percentage)).append("\n"); + sb.append(" rate: ").append(toIndentedString(rate)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessorType.java new file mode 100644 index 00000000000..cc93cb749aa --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSampleProcessorType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be sample. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSampleProcessorType.ObservabilityPipelineSampleProcessorTypeSerializer + .class) +public class ObservabilityPipelineSampleProcessorType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("sample")); + + public static final ObservabilityPipelineSampleProcessorType SAMPLE = + new ObservabilityPipelineSampleProcessorType("sample"); + + ObservabilityPipelineSampleProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSampleProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSampleProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSampleProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSampleProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSampleProcessorType fromValue(String value) { + return new ObservabilityPipelineSampleProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessor.java new file mode 100644 index 00000000000..95be5eb4465 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessor.java @@ -0,0 +1,295 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The sensitive_data_scanner processor detects and optionally redacts sensitive data + * in log events. + */ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineSensitiveDataScannerProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineSensitiveDataScannerProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineSensitiveDataScannerProcessor.JSON_PROPERTY_RULES, + ObservabilityPipelineSensitiveDataScannerProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_RULES = "rules"; + private List rules = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSensitiveDataScannerProcessorType type = + ObservabilityPipelineSensitiveDataScannerProcessorType.SENSITIVE_DATA_SCANNER; + + public ObservabilityPipelineSensitiveDataScannerProcessor() {} + + @JsonCreator + public 
ObservabilityPipelineSensitiveDataScannerProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_RULES) + List rules, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSensitiveDataScannerProcessorType type) { + this.id = id; + this.include = include; + this.inputs = inputs; + this.rules = rules; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSensitiveDataScannerProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSensitiveDataScannerProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineSensitiveDataScannerProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineSensitiveDataScannerProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineSensitiveDataScannerProcessor rules( + List rules) { + this.rules = rules; + for (ObservabilityPipelineSensitiveDataScannerProcessorRule item : rules) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineSensitiveDataScannerProcessor addRulesItem( + ObservabilityPipelineSensitiveDataScannerProcessorRule rulesItem) { + this.rules.add(rulesItem); + this.unparsed |= rulesItem.unparsed; + return this; + } + + /** + * A list of rules for identifying and acting on sensitive data patterns. + * + * @return rules + */ + @JsonProperty(JSON_PROPERTY_RULES) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getRules() { + return rules; + } + + public void setRules(List rules) { + this.rules = rules; + } + + public ObservabilityPipelineSensitiveDataScannerProcessor type( + ObservabilityPipelineSensitiveDataScannerProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be sensitive_data_scanner. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineSensitiveDataScannerProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. 
If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessor + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessor putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessor object is equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessor + observabilityPipelineSensitiveDataScannerProcessor = + (ObservabilityPipelineSensitiveDataScannerProcessor) o; + return Objects.equals(this.id, observabilityPipelineSensitiveDataScannerProcessor.id) + && Objects.equals(this.include, observabilityPipelineSensitiveDataScannerProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineSensitiveDataScannerProcessor.inputs) + && Objects.equals(this.rules, observabilityPipelineSensitiveDataScannerProcessor.rules) + && Objects.equals(this.type, observabilityPipelineSensitiveDataScannerProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, include, inputs, rules, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessor {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" rules: ").append(toIndentedString(rules)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorAction.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorAction.java new file mode 100644 index 00000000000..daaff8a5f10 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorAction.java @@ -0,0 +1,473 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.AbstractOpenApiSchema; +import com.datadog.api.client.JSON; +import com.datadog.api.client.UnparsedObject; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import jakarta.ws.rs.core.GenericType; +import java.io.IOException; +import 
java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +@JsonDeserialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorAction + .ObservabilityPipelineSensitiveDataScannerProcessorActionDeserializer.class) +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorAction + .ObservabilityPipelineSensitiveDataScannerProcessorActionSerializer.class) +public class ObservabilityPipelineSensitiveDataScannerProcessorAction + extends AbstractOpenApiSchema { + private static final Logger log = + Logger.getLogger(ObservabilityPipelineSensitiveDataScannerProcessorAction.class.getName()); + + @JsonIgnore public boolean unparsed = false; + + public static class ObservabilityPipelineSensitiveDataScannerProcessorActionSerializer + extends StdSerializer { + public ObservabilityPipelineSensitiveDataScannerProcessorActionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorAction value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.getActualInstance()); + } + } + + public static class ObservabilityPipelineSensitiveDataScannerProcessorActionDeserializer + extends StdDeserializer { + public ObservabilityPipelineSensitiveDataScannerProcessorActionDeserializer() { + this(ObservabilityPipelineSensitiveDataScannerProcessorAction.class); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionDeserializer(Class vc) { + super(vc); + } + + @Override + public ObservabilityPipelineSensitiveDataScannerProcessorAction deserialize( + JsonParser jp, 
DeserializationContext ctxt) throws IOException, JsonProcessingException { + JsonNode tree = jp.readValueAsTree(); + Object deserialized = null; + Object tmp = null; + boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); + int match = 0; + JsonToken token = tree.traverse(jp.getCodec()).nextToken(); + // deserialize ObservabilityPipelineSensitiveDataScannerProcessorActionRedact + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class.equals( + Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class.equals( + Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class.equals( + Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class.equals( + String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class + .equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class + .equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class.equals( + Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + 
(ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class.equals( + String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSensitiveDataScannerProcessorActionRedact) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorActionRedact'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorActionRedact'", + e); + } + + // deserialize ObservabilityPipelineSensitiveDataScannerProcessorActionHash + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class.equals(Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class.equals(Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class.equals( + Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class.equals( + Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class.equals( + String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class + 
.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class + .equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class.equals( + Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class.equals( + String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSensitiveDataScannerProcessorActionHash) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorActionHash'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorActionHash'", + e); + } + + // deserialize ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class.equals( + Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class.equals( + Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class.equals( + Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class.equals( + String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class + .equals(Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact + .class + .equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class + .equals(Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact + .class + .equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + 
(ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class.equals( + Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class.equals( + String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact) tmp) + .unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact'", + e); + } + + ObservabilityPipelineSensitiveDataScannerProcessorAction ret = + new ObservabilityPipelineSensitiveDataScannerProcessorAction(); + if (match == 1) { + ret.setActualInstance(deserialized); + } else { + Map res = + new ObjectMapper() + .readValue( + tree.traverse(jp.getCodec()).readValueAsTree().toString(), + new TypeReference>() {}); + ret.setActualInstance(new UnparsedObject(res)); + } + return ret; + } + + /** Handle deserialization of the 'null' value. 
*/ + @Override + public ObservabilityPipelineSensitiveDataScannerProcessorAction getNullValue( + DeserializationContext ctxt) throws JsonMappingException { + throw new JsonMappingException( + ctxt.getParser(), + "ObservabilityPipelineSensitiveDataScannerProcessorAction cannot be null"); + } + } + + // store a list of schema names defined in oneOf + public static final Map schemas = new HashMap(); + + public ObservabilityPipelineSensitiveDataScannerProcessorAction() { + super("oneOf", Boolean.FALSE); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorAction( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorAction( + ObservabilityPipelineSensitiveDataScannerProcessorActionHash o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorAction( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + static { + schemas.put( + "ObservabilityPipelineSensitiveDataScannerProcessorActionRedact", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSensitiveDataScannerProcessorActionHash", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact", + new GenericType< + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact>() {}); + JSON.registerDescendants( + ObservabilityPipelineSensitiveDataScannerProcessorAction.class, + Collections.unmodifiableMap(schemas)); + } + + @Override + public Map getSchemas() { + return ObservabilityPipelineSensitiveDataScannerProcessorAction.schemas; + } + + /** + * Set the instance that matches the oneOf child schema, check the instance parameter is valid + * against the oneOf child schemas: + * ObservabilityPipelineSensitiveDataScannerProcessorActionRedact, 
+ * ObservabilityPipelineSensitiveDataScannerProcessorActionHash, + * ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact + * + *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a + * composed schema (allOf, anyOf, oneOf). + */ + @Override + public void setActualInstance(Object instance) { + if (JSON.isInstanceOf( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSensitiveDataScannerProcessorActionHash.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + + if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + throw new RuntimeException( + "Invalid instance type. Must be" + + " ObservabilityPipelineSensitiveDataScannerProcessorActionRedact," + + " ObservabilityPipelineSensitiveDataScannerProcessorActionHash," + + " ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact"); + } + + /** + * Get the actual instance, which can be the following: + * ObservabilityPipelineSensitiveDataScannerProcessorActionRedact, + * ObservabilityPipelineSensitiveDataScannerProcessorActionHash, + * ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact + * + * @return The actual instance (ObservabilityPipelineSensitiveDataScannerProcessorActionRedact, + * ObservabilityPipelineSensitiveDataScannerProcessorActionHash, + * ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact) + */ + @Override + public Object getActualInstance() { + return super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorActionRedact`. 
If + * the actual instance is not `ObservabilityPipelineSensitiveDataScannerProcessorActionRedact`, + * the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorActionRedact` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessorActionRedact` + */ + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedact + getObservabilityPipelineSensitiveDataScannerProcessorActionRedact() + throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessorActionRedact) + super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorActionHash`. If + * the actual instance is not `ObservabilityPipelineSensitiveDataScannerProcessorActionHash`, the + * ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorActionHash` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessorActionHash` + */ + public ObservabilityPipelineSensitiveDataScannerProcessorActionHash + getObservabilityPipelineSensitiveDataScannerProcessorActionHash() throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessorActionHash) super.getActualInstance(); + } + + /** + * Get the actual instance of + * `ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact`. If the actual instance + * is not `ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact`, the + * ClassCastException will be thrown. 
+ * + * @return The actual instance of + * `ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact` + */ + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact + getObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact() + throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact) + super.getActualInstance(); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionHash.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionHash.java new file mode 100644 index 00000000000..3968f93975d --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionHash.java @@ -0,0 +1,191 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Configuration for hashing matched sensitive values. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorActionHash.JSON_PROPERTY_ACTION, + ObservabilityPipelineSensitiveDataScannerProcessorActionHash.JSON_PROPERTY_OPTIONS +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorActionHash { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ACTION = "action"; + private ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction action; + + public static final String JSON_PROPERTY_OPTIONS = "options"; + private Object options; + + public ObservabilityPipelineSensitiveDataScannerProcessorActionHash() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorActionHash( + @JsonProperty(required = true, value = JSON_PROPERTY_ACTION) + ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction action) { + this.action = action; + this.unparsed |= !action.isValid(); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionHash action( + ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction action) { + this.action = action; + this.unparsed |= !action.isValid(); + return this; + } + + /** + * Action type that replaces the matched sensitive data with a hashed representation, preserving + * structure while securing content. 
+ * + * @return action + */ + @JsonProperty(JSON_PROPERTY_ACTION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction getAction() { + return action; + } + + public void setAction(ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction action) { + if (!action.isValid()) { + this.unparsed = true; + } + this.action = action; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionHash options(Object options) { + this.options = options; + return this; + } + + /** + * The ObservabilityPipelineSensitiveDataScannerProcessorActionHash options + * . + * + * @return options + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_OPTIONS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Object getOptions() { + return options; + } + + public void setOptions(Object options) { + this.options = options; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorActionHash + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorActionHash putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. 
+ * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorActionHash object is + * equal to o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorActionHash + observabilityPipelineSensitiveDataScannerProcessorActionHash = + (ObservabilityPipelineSensitiveDataScannerProcessorActionHash) o; + return Objects.equals( + this.action, observabilityPipelineSensitiveDataScannerProcessorActionHash.action) + && Objects.equals( + this.options, observabilityPipelineSensitiveDataScannerProcessorActionHash.options) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorActionHash.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(action, options, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorActionHash {\n"); + sb.append(" action: ").append(toIndentedString(action)).append("\n"); + sb.append(" options: ").append(toIndentedString(options)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the 
first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction.java new file mode 100644 index 00000000000..3cfd54dc147 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction.java @@ -0,0 +1,67 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** + * Action type that replaces the matched sensitive data with a hashed representation, preserving + * structure while securing content. 
+ */ +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction + .ObservabilityPipelineSensitiveDataScannerProcessorActionHashActionSerializer.class) +public class ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction + extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("hash")); + + public static final ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction HASH = + new ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction("hash"); + + ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSensitiveDataScannerProcessorActionHashActionSerializer + extends StdSerializer { + public ObservabilityPipelineSensitiveDataScannerProcessorActionHashActionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionHashActionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction fromValue( + String value) { + return new ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.java new file mode 100644 index 00000000000..2a89fbc1f66 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.java @@ -0,0 +1,200 @@ +/* + * Unless 
explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Configuration for partially redacting matched sensitive data. */ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.JSON_PROPERTY_ACTION, + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.JSON_PROPERTY_OPTIONS +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ACTION = "action"; + private ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction action; + + public static final String JSON_PROPERTY_OPTIONS = "options"; + private ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions options; + + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact( + @JsonProperty(required = true, value = JSON_PROPERTY_ACTION) + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction action, + @JsonProperty(required = true, value = JSON_PROPERTY_OPTIONS) + 
ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions options) { + this.action = action; + this.unparsed |= !action.isValid(); + this.options = options; + this.unparsed |= options.unparsed; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact action( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction action) { + this.action = action; + this.unparsed |= !action.isValid(); + return this; + } + + /** + * Action type that redacts part of the sensitive data while preserving a configurable number of + * characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). + * + * @return action + */ + @JsonProperty(JSON_PROPERTY_ACTION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction getAction() { + return action; + } + + public void setAction( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction action) { + if (!action.isValid()) { + this.unparsed = true; + } + this.action = action; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact options( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions options) { + this.options = options; + this.unparsed |= options.unparsed; + return this; + } + + /** + * Controls how partial redaction is applied, including character count and direction. + * + * @return options + */ + @JsonProperty(JSON_PROPERTY_OPTIONS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions getOptions() { + return options; + } + + public void setOptions( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions options) { + this.options = options; + } + + /** + * A container for additional, undeclared properties. 
This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact + putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact + * object is equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact + observabilityPipelineSensitiveDataScannerProcessorActionPartialRedact = + (ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact) o; + return Objects.equals( + this.action, + observabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.action) + && Objects.equals( + this.options, + observabilityPipelineSensitiveDataScannerProcessorActionPartialRedact.options) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorActionPartialRedact + .additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(action, options, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact {\n"); + sb.append(" action: ").append(toIndentedString(action)).append("\n"); + sb.append(" options: ").append(toIndentedString(options)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction.java new file mode 100644 index 00000000000..db7248cad3e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction.java @@ -0,0 +1,73 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** + * Action type that redacts part of the sensitive data while preserving a configurable number of + * characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). 
+ */ +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction + .ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactActionSerializer + .class) +public class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction + extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("partial_redact")); + + public static final ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction + PARTIAL_REDACT = + new ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction( + "partial_redact"); + + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction(String value) { + super(value, allowedValues); + } + + public static + class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactActionSerializer + extends StdSerializer< + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction> { + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactActionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactActionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction + fromValue(String value) { + return new ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions.java 
b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions.java new file mode 100644 index 00000000000..657b182e322 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions.java @@ -0,0 +1,205 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Controls how partial redaction is applied, including character count and direction. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions + .JSON_PROPERTY_CHARACTERS, + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions + .JSON_PROPERTY_DIRECTION +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_CHARACTERS = "characters"; + private Long characters; + + public static final String JSON_PROPERTY_DIRECTION = "direction"; + private ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection + direction; + + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions( + @JsonProperty(required = true, value = JSON_PROPERTY_CHARACTERS) Long characters, + @JsonProperty(required = true, value = JSON_PROPERTY_DIRECTION) + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection + direction) { + this.characters = characters; + this.direction = direction; + this.unparsed |= !direction.isValid(); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions characters( + Long characters) { + this.characters = characters; + return this; + } + + /** + * The ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions + * characters. 
+ * + * @return characters + */ + @JsonProperty(JSON_PROPERTY_CHARACTERS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Long getCharacters() { + return characters; + } + + public void setCharacters(Long characters) { + this.characters = characters; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions direction( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection + direction) { + this.direction = direction; + this.unparsed |= !direction.isValid(); + return this; + } + + /** + * Indicates whether to redact characters from the first or last part of the matched value. + * + * @return direction + */ + @JsonProperty(JSON_PROPERTY_DIRECTION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection + getDirection() { + return direction; + } + + public void setDirection( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection + direction) { + if (!direction.isValid()) { + this.unparsed = true; + } + this.direction = direction; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions + putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this + * ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions object is equal to + * o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions + observabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions = + (ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions) o; + return Objects.equals( + this.characters, + observabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions.characters) + && Objects.equals( + this.direction, + observabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions.direction) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions + .additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(characters, direction, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append( + "class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions {\n"); + sb.append(" characters: ").append(toIndentedString(characters)).append("\n"); + sb.append(" direction: ").append(toIndentedString(direction)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection.java new file mode 100644 index 00000000000..5b4c2928389 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection.java @@ -0,0 +1,80 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Indicates whether to redact characters from the first or last part of the matched value. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection + .ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirectionSerializer + .class) +public class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection + extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("first", "last")); + + public static final + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection FIRST = + new ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection( + "first"); + public static final + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection LAST = + new ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection( + "last"); + + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection( + String value) { + super(value, allowedValues); + } + + public static + class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirectionSerializer + extends StdSerializer< + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection> { + public + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirectionSerializer( + Class + t) { + super(t); + } + + public + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirectionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection fromValue( + String value) { + return new 
ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection( + value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.java new file mode 100644 index 00000000000..8b744ae8182 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.java @@ -0,0 +1,197 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Configuration for completely redacting matched sensitive data. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.JSON_PROPERTY_ACTION, + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact.JSON_PROPERTY_OPTIONS +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorActionRedact { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ACTION = "action"; + private ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction action; + + public static final String JSON_PROPERTY_OPTIONS = "options"; + private ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions options; + + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedact() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedact( + @JsonProperty(required = true, value = JSON_PROPERTY_ACTION) + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction action, + @JsonProperty(required = true, value = JSON_PROPERTY_OPTIONS) + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions options) { + this.action = action; + this.unparsed |= !action.isValid(); + this.options = options; + this.unparsed |= options.unparsed; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedact action( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction action) { + this.action = action; + this.unparsed |= !action.isValid(); + return this; + } + + /** + * Action type that completely replaces the matched sensitive data with a fixed replacement string + * to remove all visibility. 
+ * + * @return action + */ + @JsonProperty(JSON_PROPERTY_ACTION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction getAction() { + return action; + } + + public void setAction( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction action) { + if (!action.isValid()) { + this.unparsed = true; + } + this.action = action; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedact options( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions options) { + this.options = options; + this.unparsed |= options.unparsed; + return this; + } + + /** + * Configuration for fully redacting sensitive data. + * + * @return options + */ + @JsonProperty(JSON_PROPERTY_OPTIONS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions getOptions() { + return options; + } + + public void setOptions( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions options) { + this.options = options; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorActionRedact + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedact putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorActionRedact object is + * equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact + observabilityPipelineSensitiveDataScannerProcessorActionRedact = + (ObservabilityPipelineSensitiveDataScannerProcessorActionRedact) o; + return Objects.equals( + this.action, observabilityPipelineSensitiveDataScannerProcessorActionRedact.action) + && Objects.equals( + this.options, observabilityPipelineSensitiveDataScannerProcessorActionRedact.options) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorActionRedact.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(action, options, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorActionRedact {\n"); + sb.append(" action: ").append(toIndentedString(action)).append("\n"); + sb.append(" options: ").append(toIndentedString(options)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction.java new file mode 100644 index 00000000000..ad333668f92 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction.java @@ -0,0 +1,67 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** + * Action type that completely replaces the matched sensitive data with a fixed replacement string + * to remove all visibility. 
+ */ +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction + .ObservabilityPipelineSensitiveDataScannerProcessorActionRedactActionSerializer.class) +public class ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction + extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("redact")); + + public static final ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction REDACT = + new ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction("redact"); + + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSensitiveDataScannerProcessorActionRedactActionSerializer + extends StdSerializer { + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedactActionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedactActionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction fromValue( + String value) { + return new ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions.java new file mode 100644 index 00000000000..3e0ad6654b0 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions.java @@ -0,0 
+1,158 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Configuration for fully redacting sensitive data. */ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions.JSON_PROPERTY_REPLACE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_REPLACE = "replace"; + private String replace; + + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions( + @JsonProperty(required = true, value = JSON_PROPERTY_REPLACE) String replace) { + this.replace = replace; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions replace( + String replace) { + this.replace = replace; + return this; + } + + /** + * The ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions + * replace. 
+ * + * @return replace + */ + @JsonProperty(JSON_PROPERTY_REPLACE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getReplace() { + return replace; + } + + public void setReplace(String replace) { + this.replace = replace; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions + putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions + * object is equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions + observabilityPipelineSensitiveDataScannerProcessorActionRedactOptions = + (ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions) o; + return Objects.equals( + this.replace, + observabilityPipelineSensitiveDataScannerProcessorActionRedactOptions.replace) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorActionRedactOptions + .additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(replace, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions {\n"); + sb.append(" replace: ").append(toIndentedString(replace)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.java new file mode 100644 index 00000000000..629fa895e61 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.java @@ -0,0 +1,195 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Defines a custom regex-based pattern for identifying sensitive data in logs. */ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.JSON_PROPERTY_OPTIONS, + ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_OPTIONS = "options"; + private ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions options; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType type; + + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern( + @JsonProperty(required = true, value = JSON_PROPERTY_OPTIONS) + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions options, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType type) { + this.options = options; + this.unparsed |= options.unparsed; + 
this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern options( + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions options) { + this.options = options; + this.unparsed |= options.unparsed; + return this; + } + + /** + * Options for defining a custom regex pattern. + * + * @return options + */ + @JsonProperty(JSON_PROPERTY_OPTIONS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions getOptions() { + return options; + } + + public void setOptions( + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions options) { + this.options = options; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern type( + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * Indicates a custom regular expression is used for matching. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType getType() { + return type; + } + + public void setType(ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern object is + * equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern + observabilityPipelineSensitiveDataScannerProcessorCustomPattern = + (ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern) o; + return Objects.equals( + this.options, observabilityPipelineSensitiveDataScannerProcessorCustomPattern.options) + && Objects.equals( + this.type, observabilityPipelineSensitiveDataScannerProcessorCustomPattern.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorCustomPattern.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(options, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern {\n"); + sb.append(" options: ").append(toIndentedString(options)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.java new file mode 100644 index 00000000000..e74c5ffd699 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.java @@ -0,0 +1,155 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Options for defining a custom regex pattern. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.JSON_PROPERTY_RULE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_RULE = "rule"; + private String rule; + + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions( + @JsonProperty(required = true, value = JSON_PROPERTY_RULE) String rule) { + this.rule = rule; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions rule(String rule) { + this.rule = rule; + return this; + } + + /** + * A regular expression used to detect sensitive values. Must be a valid regex. + * + * @return rule + */ + @JsonProperty(JSON_PROPERTY_RULE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getRule() { + return rule; + } + + public void setRule(String rule) { + this.rule = rule; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions + putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions + * object is equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions + observabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions = + (ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions) o; + return Objects.equals( + this.rule, observabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.rule) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions + .additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(rule, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions {\n"); + sb.append(" rule: ").append(toIndentedString(rule)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType.java new file mode 100644 index 00000000000..986deec9a33 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType.java @@ -0,0 +1,64 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Indicates a custom regular expression is used for matching. */ +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType + .ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternTypeSerializer.class) +public class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType + extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("custom")); + + public static final ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType CUSTOM = + new ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType("custom"); + + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { 
+ jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType fromValue( + String value) { + return new ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions.java new file mode 100644 index 00000000000..cc308046f9c --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions.java @@ -0,0 +1,195 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** Configuration for keywords used to reinforce sensitive data pattern detection. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions.JSON_PROPERTY_KEYWORDS, + ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions.JSON_PROPERTY_PROXIMITY +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_KEYWORDS = "keywords"; + private List keywords = new ArrayList<>(); + + public static final String JSON_PROPERTY_PROXIMITY = "proximity"; + private Long proximity; + + public ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions( + @JsonProperty(required = true, value = JSON_PROPERTY_KEYWORDS) List keywords, + @JsonProperty(required = true, value = JSON_PROPERTY_PROXIMITY) Long proximity) { + this.keywords = keywords; + this.proximity = proximity; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions keywords( + List keywords) { + this.keywords = keywords; + return this; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions addKeywordsItem( + String keywordsItem) { + this.keywords.add(keywordsItem); + return this; + } + + /** + * A list of keywords to match near the sensitive pattern. + * + * @return keywords + */ + @JsonProperty(JSON_PROPERTY_KEYWORDS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getKeywords() { + return keywords; + } + + public void setKeywords(List keywords) { + this.keywords = keywords; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions proximity( + Long proximity) { + this.proximity = proximity; + return this; + } + + /** + * Maximum number of tokens between a keyword and a sensitive value match. 
+ * + * @return proximity + */ + @JsonProperty(JSON_PROPERTY_PROXIMITY) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Long getProximity() { + return proximity; + } + + public void setProximity(Long proximity) { + this.proximity = proximity; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions object is + * equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions + observabilityPipelineSensitiveDataScannerProcessorKeywordOptions = + (ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions) o; + return Objects.equals( + this.keywords, + observabilityPipelineSensitiveDataScannerProcessorKeywordOptions.keywords) + && Objects.equals( + this.proximity, + observabilityPipelineSensitiveDataScannerProcessorKeywordOptions.proximity) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorKeywordOptions.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(keywords, proximity, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions {\n"); + sb.append(" keywords: ").append(toIndentedString(keywords)).append("\n"); + sb.append(" proximity: ").append(toIndentedString(proximity)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.java new file mode 100644 index 00000000000..422fd5d2b3c --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.java @@ -0,0 +1,198 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data + * types. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.JSON_PROPERTY_OPTIONS, + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_OPTIONS = "options"; + private ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions options; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType type; + + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern( + @JsonProperty(required = true, value = JSON_PROPERTY_OPTIONS) + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions options, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType type) { + this.options = options; + this.unparsed |= options.unparsed; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern options( + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions options) { + this.options = options; + this.unparsed |= options.unparsed; + return this; + } + + /** + * Options for selecting a predefined library pattern and enabling keyword support. 
+ * + * @return options + */ + @JsonProperty(JSON_PROPERTY_OPTIONS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions getOptions() { + return options; + } + + public void setOptions( + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions options) { + this.options = options; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern type( + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * Indicates that a predefined library pattern is used. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType getType() { + return type; + } + + public void setType(ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. 
+ * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern object is + * equal to o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern + observabilityPipelineSensitiveDataScannerProcessorLibraryPattern = + (ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern) o; + return Objects.equals( + this.options, observabilityPipelineSensitiveDataScannerProcessorLibraryPattern.options) + && Objects.equals( + this.type, observabilityPipelineSensitiveDataScannerProcessorLibraryPattern.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorLibraryPattern.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(options, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern {\n"); + sb.append(" options: ").append(toIndentedString(options)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 
4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.java new file mode 100644 index 00000000000..76f9882783c --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.java @@ -0,0 +1,189 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Options for selecting a predefined library pattern and enabling keyword support. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.JSON_PROPERTY_ID, + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions + .JSON_PROPERTY_USE_RECOMMENDED_KEYWORDS +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_USE_RECOMMENDED_KEYWORDS = "use_recommended_keywords"; + private Boolean useRecommendedKeywords; + + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id) { + this.id = id; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions id(String id) { + this.id = id; + return this; + } + + /** + * Identifier for a predefined pattern from the sensitive data scanner pattern library. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions + useRecommendedKeywords(Boolean useRecommendedKeywords) { + this.useRecommendedKeywords = useRecommendedKeywords; + return this; + } + + /** + * Whether to augment the pattern with recommended keywords (optional). 
+ * + * @return useRecommendedKeywords + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_USE_RECOMMENDED_KEYWORDS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getUseRecommendedKeywords() { + return useRecommendedKeywords; + } + + public void setUseRecommendedKeywords(Boolean useRecommendedKeywords) { + this.useRecommendedKeywords = useRecommendedKeywords; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions + putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions + * object is equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions + observabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions = + (ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions) o; + return Objects.equals( + this.id, observabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.id) + && Objects.equals( + this.useRecommendedKeywords, + observabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions + .useRecommendedKeywords) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions + .additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, useRecommendedKeywords, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" useRecommendedKeywords: ") + .append(toIndentedString(useRecommendedKeywords)) + .append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType.java new file mode 100644 index 00000000000..457dcb0283b --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType.java @@ -0,0 +1,64 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Indicates that a predefined library pattern is used. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType + .ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternTypeSerializer.class) +public class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType + extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("library")); + + public static final ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType LIBRARY = + new ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType("library"); + + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType fromValue( + String value) { + return new ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorPattern.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorPattern.java new file mode 100644 index 00000000000..4124d1ff91a --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorPattern.java @@ -0,0 +1,373 @@ +/* + * Unless explicitly 
stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.AbstractOpenApiSchema; +import com.datadog.api.client.JSON; +import com.datadog.api.client.UnparsedObject; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import jakarta.ws.rs.core.GenericType; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +@JsonDeserialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorPattern + .ObservabilityPipelineSensitiveDataScannerProcessorPatternDeserializer.class) +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorPattern + 
.ObservabilityPipelineSensitiveDataScannerProcessorPatternSerializer.class) +public class ObservabilityPipelineSensitiveDataScannerProcessorPattern + extends AbstractOpenApiSchema { + private static final Logger log = + Logger.getLogger(ObservabilityPipelineSensitiveDataScannerProcessorPattern.class.getName()); + + @JsonIgnore public boolean unparsed = false; + + public static class ObservabilityPipelineSensitiveDataScannerProcessorPatternSerializer + extends StdSerializer { + public ObservabilityPipelineSensitiveDataScannerProcessorPatternSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorPatternSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorPattern value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.getActualInstance()); + } + } + + public static class ObservabilityPipelineSensitiveDataScannerProcessorPatternDeserializer + extends StdDeserializer { + public ObservabilityPipelineSensitiveDataScannerProcessorPatternDeserializer() { + this(ObservabilityPipelineSensitiveDataScannerProcessorPattern.class); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorPatternDeserializer(Class vc) { + super(vc); + } + + @Override + public ObservabilityPipelineSensitiveDataScannerProcessorPattern deserialize( + JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { + JsonNode tree = jp.readValueAsTree(); + Object deserialized = null; + Object tmp = null; + boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); + int match = 0; + JsonToken token = tree.traverse(jp.getCodec()).nextToken(); + // deserialize ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if 
(ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class.equals( + Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class.equals( + Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class.equals( + Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class.equals( + String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class + .equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class + .equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class.equals( + Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class.equals( + String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs( + ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern'", + e); + } + + // deserialize ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class.equals( + Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class.equals( + Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class.equals( + Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class.equals( + String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class + .equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class + .equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + 
(ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class.equals( + Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class.equals( + String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs( + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern'", + e); + } + + ObservabilityPipelineSensitiveDataScannerProcessorPattern ret = + new ObservabilityPipelineSensitiveDataScannerProcessorPattern(); + if (match == 1) { + ret.setActualInstance(deserialized); + } else { + Map res = + new ObjectMapper() + .readValue( + tree.traverse(jp.getCodec()).readValueAsTree().toString(), + new TypeReference>() {}); + ret.setActualInstance(new UnparsedObject(res)); + } + return ret; + } + + /** Handle deserialization of the 'null' value. 
*/ + @Override + public ObservabilityPipelineSensitiveDataScannerProcessorPattern getNullValue( + DeserializationContext ctxt) throws JsonMappingException { + throw new JsonMappingException( + ctxt.getParser(), + "ObservabilityPipelineSensitiveDataScannerProcessorPattern cannot be null"); + } + } + + // store a list of schema names defined in oneOf + public static final Map schemas = new HashMap(); + + public ObservabilityPipelineSensitiveDataScannerProcessorPattern() { + super("oneOf", Boolean.FALSE); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorPattern( + ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorPattern( + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + static { + schemas.put( + "ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern", + new GenericType() {}); + JSON.registerDescendants( + ObservabilityPipelineSensitiveDataScannerProcessorPattern.class, + Collections.unmodifiableMap(schemas)); + } + + @Override + public Map getSchemas() { + return ObservabilityPipelineSensitiveDataScannerProcessorPattern.schemas; + } + + /** + * Set the instance that matches the oneOf child schema, check the instance parameter is valid + * against the oneOf child schemas: + * ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern, + * ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern + * + *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a + * composed schema (allOf, anyOf, oneOf). + */ + @Override + public void setActualInstance(Object instance) { + if (JSON.isInstanceOf( + ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + + if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + throw new RuntimeException( + "Invalid instance type. Must be" + + " ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern," + + " ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern"); + } + + /** + * Get the actual instance, which can be the following: + * ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern, + * ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern + * + * @return The actual instance (ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern, + * ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern) + */ + @Override + public Object getActualInstance() { + return super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern`. + * If the actual instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern`, the ClassCastException will + * be thrown. 
+ * + * @return The actual instance of + * `ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern` + */ + public ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern + getObservabilityPipelineSensitiveDataScannerProcessorCustomPattern() + throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern) + super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern`. + * If the actual instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern`, the ClassCastException will + * be thrown. + * + * @return The actual instance of + * `ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern` + */ + public ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern + getObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern() + throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern) + super.getActualInstance(); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorRule.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorRule.java new file mode 100644 index 00000000000..a9b3f4635b9 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorRule.java @@ -0,0 +1,322 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Defines a rule for detecting sensitive data, including matching pattern, scope, and the action to + * take. + */ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorRule.JSON_PROPERTY_KEYWORD_OPTIONS, + ObservabilityPipelineSensitiveDataScannerProcessorRule.JSON_PROPERTY_NAME, + ObservabilityPipelineSensitiveDataScannerProcessorRule.JSON_PROPERTY_ON_MATCH, + ObservabilityPipelineSensitiveDataScannerProcessorRule.JSON_PROPERTY_PATTERN, + ObservabilityPipelineSensitiveDataScannerProcessorRule.JSON_PROPERTY_SCOPE, + ObservabilityPipelineSensitiveDataScannerProcessorRule.JSON_PROPERTY_TAGS +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorRule { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_KEYWORD_OPTIONS = "keyword_options"; + private ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions keywordOptions; + + public static final String JSON_PROPERTY_NAME = "name"; + private String name; + + public static final String JSON_PROPERTY_ON_MATCH = "on_match"; + private ObservabilityPipelineSensitiveDataScannerProcessorAction onMatch; + + public static final String JSON_PROPERTY_PATTERN = "pattern"; + private ObservabilityPipelineSensitiveDataScannerProcessorPattern pattern; + + public static 
final String JSON_PROPERTY_SCOPE = "scope"; + private ObservabilityPipelineSensitiveDataScannerProcessorScope scope; + + public static final String JSON_PROPERTY_TAGS = "tags"; + private List tags = new ArrayList<>(); + + public ObservabilityPipelineSensitiveDataScannerProcessorRule() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorRule( + @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name, + @JsonProperty(required = true, value = JSON_PROPERTY_ON_MATCH) + ObservabilityPipelineSensitiveDataScannerProcessorAction onMatch, + @JsonProperty(required = true, value = JSON_PROPERTY_PATTERN) + ObservabilityPipelineSensitiveDataScannerProcessorPattern pattern, + @JsonProperty(required = true, value = JSON_PROPERTY_SCOPE) + ObservabilityPipelineSensitiveDataScannerProcessorScope scope, + @JsonProperty(required = true, value = JSON_PROPERTY_TAGS) List tags) { + this.name = name; + this.onMatch = onMatch; + this.unparsed |= onMatch.unparsed; + this.pattern = pattern; + this.unparsed |= pattern.unparsed; + this.scope = scope; + this.unparsed |= scope.unparsed; + this.tags = tags; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorRule keywordOptions( + ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions keywordOptions) { + this.keywordOptions = keywordOptions; + this.unparsed |= keywordOptions.unparsed; + return this; + } + + /** + * Configuration for keywords used to reinforce sensitive data pattern detection. 
+ * + * @return keywordOptions + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_KEYWORD_OPTIONS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions getKeywordOptions() { + return keywordOptions; + } + + public void setKeywordOptions( + ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions keywordOptions) { + this.keywordOptions = keywordOptions; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorRule name(String name) { + this.name = name; + return this; + } + + /** + * A name identifying the rule. + * + * @return name + */ + @JsonProperty(JSON_PROPERTY_NAME) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorRule onMatch( + ObservabilityPipelineSensitiveDataScannerProcessorAction onMatch) { + this.onMatch = onMatch; + this.unparsed |= onMatch.unparsed; + return this; + } + + /** + * Defines what action to take when sensitive data is matched. + * + * @return onMatch + */ + @JsonProperty(JSON_PROPERTY_ON_MATCH) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorAction getOnMatch() { + return onMatch; + } + + public void setOnMatch(ObservabilityPipelineSensitiveDataScannerProcessorAction onMatch) { + this.onMatch = onMatch; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorRule pattern( + ObservabilityPipelineSensitiveDataScannerProcessorPattern pattern) { + this.pattern = pattern; + this.unparsed |= pattern.unparsed; + return this; + } + + /** + * Pattern detection configuration for identifying sensitive data using either a custom regex or a + * library reference. 
+ * + * @return pattern + */ + @JsonProperty(JSON_PROPERTY_PATTERN) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorPattern getPattern() { + return pattern; + } + + public void setPattern(ObservabilityPipelineSensitiveDataScannerProcessorPattern pattern) { + this.pattern = pattern; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorRule scope( + ObservabilityPipelineSensitiveDataScannerProcessorScope scope) { + this.scope = scope; + this.unparsed |= scope.unparsed; + return this; + } + + /** + * Determines which parts of the log the pattern-matching rule should be applied to. + * + * @return scope + */ + @JsonProperty(JSON_PROPERTY_SCOPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorScope getScope() { + return scope; + } + + public void setScope(ObservabilityPipelineSensitiveDataScannerProcessorScope scope) { + this.scope = scope; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorRule tags(List tags) { + this.tags = tags; + return this; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorRule addTagsItem(String tagsItem) { + this.tags.add(tagsItem); + return this; + } + + /** + * Tags assigned to this rule for filtering and classification. + * + * @return tags + */ + @JsonProperty(JSON_PROPERTY_TAGS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getTags() { + return tags; + } + + public void setTags(List tags) { + this.tags = tags; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorRule + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorRule putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorRule object is equal to + * o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorRule + observabilityPipelineSensitiveDataScannerProcessorRule = + (ObservabilityPipelineSensitiveDataScannerProcessorRule) o; + return Objects.equals( + this.keywordOptions, + observabilityPipelineSensitiveDataScannerProcessorRule.keywordOptions) + && Objects.equals(this.name, observabilityPipelineSensitiveDataScannerProcessorRule.name) + && Objects.equals( + this.onMatch, observabilityPipelineSensitiveDataScannerProcessorRule.onMatch) + && Objects.equals( + this.pattern, observabilityPipelineSensitiveDataScannerProcessorRule.pattern) + && Objects.equals(this.scope, observabilityPipelineSensitiveDataScannerProcessorRule.scope) + && Objects.equals(this.tags, observabilityPipelineSensitiveDataScannerProcessorRule.tags) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorRule.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(keywordOptions, name, onMatch, pattern, scope, tags, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorRule {\n"); + sb.append(" keywordOptions: ").append(toIndentedString(keywordOptions)).append("\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" onMatch: ").append(toIndentedString(onMatch)).append("\n"); + sb.append(" pattern: ").append(toIndentedString(pattern)).append("\n"); + sb.append(" scope: ").append(toIndentedString(scope)).append("\n"); + sb.append(" tags: ").append(toIndentedString(tags)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the 
given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScope.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScope.java new file mode 100644 index 00000000000..26e0e2050f6 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScope.java @@ -0,0 +1,464 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.AbstractOpenApiSchema; +import com.datadog.api.client.JSON; +import com.datadog.api.client.UnparsedObject; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MapperFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; 
+import jakarta.ws.rs.core.GenericType; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +@JsonDeserialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorScope + .ObservabilityPipelineSensitiveDataScannerProcessorScopeDeserializer.class) +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorScope + .ObservabilityPipelineSensitiveDataScannerProcessorScopeSerializer.class) +public class ObservabilityPipelineSensitiveDataScannerProcessorScope extends AbstractOpenApiSchema { + private static final Logger log = + Logger.getLogger(ObservabilityPipelineSensitiveDataScannerProcessorScope.class.getName()); + + @JsonIgnore public boolean unparsed = false; + + public static class ObservabilityPipelineSensitiveDataScannerProcessorScopeSerializer + extends StdSerializer { + public ObservabilityPipelineSensitiveDataScannerProcessorScopeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorScope value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.getActualInstance()); + } + } + + public static class ObservabilityPipelineSensitiveDataScannerProcessorScopeDeserializer + extends StdDeserializer { + public ObservabilityPipelineSensitiveDataScannerProcessorScopeDeserializer() { + this(ObservabilityPipelineSensitiveDataScannerProcessorScope.class); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeDeserializer(Class vc) { + super(vc); + } + + @Override + public 
ObservabilityPipelineSensitiveDataScannerProcessorScope deserialize( + JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { + JsonNode tree = jp.readValueAsTree(); + Object deserialized = null; + Object tmp = null; + boolean typeCoercion = ctxt.isEnabled(MapperFeature.ALLOW_COERCION_OF_SCALARS); + int match = 0; + JsonToken token = tree.traverse(jp.getCodec()).nextToken(); + // deserialize ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class.equals( + Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class.equals( + Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class.equals( + Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class.equals( + String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class + .equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class + .equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class.equals( + Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == 
JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class.equals( + String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs( + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude'", + e); + } + + // deserialize ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class.equals( + Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class.equals( + Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class.equals( + Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class.equals( + String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class.equals( + Integer.class) + || 
ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class + .equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class + .equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class.equals( + Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class.equals( + String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude'", + e); + } + + // deserialize ObservabilityPipelineSensitiveDataScannerProcessorScopeAll + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals(Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals(Long.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals(Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals(Double.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals( + Boolean.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals( + String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals( + Integer.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals( + Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals( + Float.class) + || ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals( + Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals( + Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == 
JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class.equals( + String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSensitiveDataScannerProcessorScopeAll) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorScopeAll'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema" + + " 'ObservabilityPipelineSensitiveDataScannerProcessorScopeAll'", + e); + } + + ObservabilityPipelineSensitiveDataScannerProcessorScope ret = + new ObservabilityPipelineSensitiveDataScannerProcessorScope(); + if (match == 1) { + ret.setActualInstance(deserialized); + } else { + Map res = + new ObjectMapper() + .readValue( + tree.traverse(jp.getCodec()).readValueAsTree().toString(), + new TypeReference>() {}); + ret.setActualInstance(new UnparsedObject(res)); + } + return ret; + } + + /** Handle deserialization of the 'null' value. 
*/ + @Override + public ObservabilityPipelineSensitiveDataScannerProcessorScope getNullValue( + DeserializationContext ctxt) throws JsonMappingException { + throw new JsonMappingException( + ctxt.getParser(), + "ObservabilityPipelineSensitiveDataScannerProcessorScope cannot be null"); + } + } + + // store a list of schema names defined in oneOf + public static final Map schemas = new HashMap(); + + public ObservabilityPipelineSensitiveDataScannerProcessorScope() { + super("oneOf", Boolean.FALSE); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScope( + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScope( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScope( + ObservabilityPipelineSensitiveDataScannerProcessorScopeAll o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + static { + schemas.put( + "ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSensitiveDataScannerProcessorScopeAll", + new GenericType() {}); + JSON.registerDescendants( + ObservabilityPipelineSensitiveDataScannerProcessorScope.class, + Collections.unmodifiableMap(schemas)); + } + + @Override + public Map getSchemas() { + return ObservabilityPipelineSensitiveDataScannerProcessorScope.schemas; + } + + /** + * Set the instance that matches the oneOf child schema, check the instance parameter is valid + * against the oneOf child schemas: + * ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude, + * ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude, + * 
ObservabilityPipelineSensitiveDataScannerProcessorScopeAll + * + *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a + * composed schema (allOf, anyOf, oneOf). + */ + @Override + public void setActualInstance(Object instance) { + if (JSON.isInstanceOf( + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + + if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + throw new RuntimeException( + "Invalid instance type. Must be" + + " ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude," + + " ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude," + + " ObservabilityPipelineSensitiveDataScannerProcessorScopeAll"); + } + + /** + * Get the actual instance, which can be the following: + * ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude, + * ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude, + * ObservabilityPipelineSensitiveDataScannerProcessorScopeAll + * + * @return The actual instance (ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude, + * ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude, + * ObservabilityPipelineSensitiveDataScannerProcessorScopeAll) + */ + @Override + public Object getActualInstance() { + return super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude`. If + * the actual instance is not `ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude`, + * the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude` + */ + public ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude + getObservabilityPipelineSensitiveDataScannerProcessorScopeInclude() + throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude) + super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude`. If + * the actual instance is not `ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude`, + * the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude` + */ + public ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude + getObservabilityPipelineSensitiveDataScannerProcessorScopeExclude() + throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude) + super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorScopeAll`. If the + * actual instance is not `ObservabilityPipelineSensitiveDataScannerProcessorScopeAll`, the + * ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineSensitiveDataScannerProcessorScopeAll` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineSensitiveDataScannerProcessorScopeAll` + */ + public ObservabilityPipelineSensitiveDataScannerProcessorScopeAll + getObservabilityPipelineSensitiveDataScannerProcessorScopeAll() throws ClassCastException { + return (ObservabilityPipelineSensitiveDataScannerProcessorScopeAll) super.getActualInstance(); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.java new file mode 100644 index 00000000000..8551b7a18e8 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.java @@ -0,0 +1,161 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Applies scanning across all available fields. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorScopeAll.JSON_PROPERTY_TARGET +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorScopeAll { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_TARGET = "target"; + private ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget target; + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeAll() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorScopeAll( + @JsonProperty(required = true, value = JSON_PROPERTY_TARGET) + ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget target) { + this.target = target; + this.unparsed |= !target.isValid(); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeAll target( + ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget target) { + this.target = target; + this.unparsed |= !target.isValid(); + return this; + } + + /** + * Applies the rule to all fields. + * + * @return target + */ + @JsonProperty(JSON_PROPERTY_TARGET) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget getTarget() { + return target; + } + + public void setTarget(ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget target) { + if (!target.isValid()) { + this.unparsed = true; + } + this.target = target; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorScopeAll + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorScopeAll putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorScopeAll object is equal + * to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorScopeAll + observabilityPipelineSensitiveDataScannerProcessorScopeAll = + (ObservabilityPipelineSensitiveDataScannerProcessorScopeAll) o; + return Objects.equals( + this.target, observabilityPipelineSensitiveDataScannerProcessorScopeAll.target) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorScopeAll.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(target, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorScopeAll {\n"); + sb.append(" target: ").append(toIndentedString(target)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget.java new file mode 100644 index 00000000000..de4a1a2f792 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget.java @@ -0,0 +1,64 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). 
+ * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Applies the rule to all fields. */ +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget + .ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTargetSerializer.class) +public class ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget + extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("all")); + + public static final ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget ALL = + new ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget("all"); + + ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTargetSerializer + extends StdSerializer { + public ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTargetSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTargetSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget fromValue( + 
String value) { + return new ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.java new file mode 100644 index 00000000000..31d8611b50b --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.java @@ -0,0 +1,195 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Excludes specific fields from sensitive data scanning. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.JSON_PROPERTY_OPTIONS, + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude.JSON_PROPERTY_TARGET +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_OPTIONS = "options"; + private ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions options; + + public static final String JSON_PROPERTY_TARGET = "target"; + private ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget target; + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude( + @JsonProperty(required = true, value = JSON_PROPERTY_OPTIONS) + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions options, + @JsonProperty(required = true, value = JSON_PROPERTY_TARGET) + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget target) { + this.options = options; + this.unparsed |= options.unparsed; + this.target = target; + this.unparsed |= !target.isValid(); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude options( + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions options) { + this.options = options; + this.unparsed |= options.unparsed; + return this; + } + + /** + * Fields to which the scope rule applies. 
+ * + * @return options + */ + @JsonProperty(JSON_PROPERTY_OPTIONS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions getOptions() { + return options; + } + + public void setOptions(ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions options) { + this.options = options; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude target( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget target) { + this.target = target; + this.unparsed |= !target.isValid(); + return this; + } + + /** + * Excludes specific fields from processing. + * + * @return target + */ + @JsonProperty(JSON_PROPERTY_TARGET) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget getTarget() { + return target; + } + + public void setTarget( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget target) { + if (!target.isValid()) { + this.unparsed = true; + } + this.target = target; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. 
+ * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude object is + * equal to o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude + observabilityPipelineSensitiveDataScannerProcessorScopeExclude = + (ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude) o; + return Objects.equals( + this.options, observabilityPipelineSensitiveDataScannerProcessorScopeExclude.options) + && Objects.equals( + this.target, observabilityPipelineSensitiveDataScannerProcessorScopeExclude.target) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorScopeExclude.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(options, target, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude {\n"); + sb.append(" options: ").append(toIndentedString(options)).append("\n"); + sb.append(" target: ").append(toIndentedString(target)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 
spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget.java new file mode 100644 index 00000000000..7453fa93f43 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget.java @@ -0,0 +1,64 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Excludes specific fields from processing. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget + .ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTargetSerializer.class) +public class ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget + extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("exclude")); + + public static final ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget EXCLUDE = + new ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget("exclude"); + + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTargetSerializer + extends StdSerializer { + public ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTargetSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTargetSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget fromValue( + String value) { + return new ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.java new file mode 100644 index 00000000000..033aca97a1e --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.java @@ -0,0 +1,195 @@ +/* + * 
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Includes only specific fields for sensitive data scanning. */ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.JSON_PROPERTY_OPTIONS, + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude.JSON_PROPERTY_TARGET +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_OPTIONS = "options"; + private ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions options; + + public static final String JSON_PROPERTY_TARGET = "target"; + private ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget target; + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude( + @JsonProperty(required = true, value = JSON_PROPERTY_OPTIONS) + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions options, + @JsonProperty(required = true, value = JSON_PROPERTY_TARGET) + 
ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget target) { + this.options = options; + this.unparsed |= options.unparsed; + this.target = target; + this.unparsed |= !target.isValid(); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude options( + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions options) { + this.options = options; + this.unparsed |= options.unparsed; + return this; + } + + /** + * Fields to which the scope rule applies. + * + * @return options + */ + @JsonProperty(JSON_PROPERTY_OPTIONS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions getOptions() { + return options; + } + + public void setOptions(ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions options) { + this.options = options; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude target( + ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget target) { + this.target = target; + this.unparsed |= !target.isValid(); + return this; + } + + /** + * Applies the rule only to included fields. + * + * @return target + */ + @JsonProperty(JSON_PROPERTY_TARGET) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget getTarget() { + return target; + } + + public void setTarget( + ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget target) { + if (!target.isValid()) { + this.unparsed = true; + } + this.target = target; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude object is + * equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude + observabilityPipelineSensitiveDataScannerProcessorScopeInclude = + (ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude) o; + return Objects.equals( + this.options, observabilityPipelineSensitiveDataScannerProcessorScopeInclude.options) + && Objects.equals( + this.target, observabilityPipelineSensitiveDataScannerProcessorScopeInclude.target) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorScopeInclude.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(options, target, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude {\n"); + sb.append(" options: ").append(toIndentedString(options)).append("\n"); + sb.append(" target: ").append(toIndentedString(target)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget.java new file mode 100644 index 00000000000..d1f5cb524c9 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget.java @@ -0,0 +1,64 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Applies the rule only to included fields. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget + .ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTargetSerializer.class) +public class ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget + extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("include")); + + public static final ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget INCLUDE = + new ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget("include"); + + ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTargetSerializer + extends StdSerializer { + public ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTargetSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTargetSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget fromValue( + String value) { + return new ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions.java new file mode 100644 index 00000000000..38dd5fa88db --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions.java @@ -0,0 +1,164 @@ +/* + * 
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** Fields to which the scope rule applies. */ +@JsonPropertyOrder({ + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions.JSON_PROPERTY_FIELDS +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FIELDS = "fields"; + private List fields = new ArrayList<>(); + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions() {} + + @JsonCreator + public ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions( + @JsonProperty(required = true, value = JSON_PROPERTY_FIELDS) List fields) { + this.fields = fields; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions fields( + List fields) { + this.fields = fields; + return this; + } + + public ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions addFieldsItem( + String fieldsItem) { + this.fields.add(fieldsItem); + return this; + } + + /** + * The ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions fields + * . 
+ * + * @return fields + */ + @JsonProperty(JSON_PROPERTY_FIELDS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getFields() { + return fields; + } + + public void setFields(List fields) { + this.fields = fields; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions + */ + @JsonAnySetter + public ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions object is + * equal to o. 
+ */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions + observabilityPipelineSensitiveDataScannerProcessorScopeOptions = + (ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions) o; + return Objects.equals( + this.fields, observabilityPipelineSensitiveDataScannerProcessorScopeOptions.fields) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSensitiveDataScannerProcessorScopeOptions.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(fields, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions {\n"); + sb.append(" fields: ").append(toIndentedString(fields)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorType.java new file mode 100644 index 00000000000..a4b05f84e75 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSensitiveDataScannerProcessorType.java @@ -0,0 +1,64 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). 
+ * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be sensitive_data_scanner. */ +@JsonSerialize( + using = + ObservabilityPipelineSensitiveDataScannerProcessorType + .ObservabilityPipelineSensitiveDataScannerProcessorTypeSerializer.class) +public class ObservabilityPipelineSensitiveDataScannerProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("sensitive_data_scanner")); + + public static final ObservabilityPipelineSensitiveDataScannerProcessorType + SENSITIVE_DATA_SCANNER = + new ObservabilityPipelineSensitiveDataScannerProcessorType("sensitive_data_scanner"); + + ObservabilityPipelineSensitiveDataScannerProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSensitiveDataScannerProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSensitiveDataScannerProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSensitiveDataScannerProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSensitiveDataScannerProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSensitiveDataScannerProcessorType fromValue(String value) { 
+ return new ObservabilityPipelineSensitiveDataScannerProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestination.java new file mode 100644 index 00000000000..01f5acac7ee --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestination.java @@ -0,0 +1,255 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The sentinel_one destination sends logs to SentinelOne. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSentinelOneDestination.JSON_PROPERTY_ID, + ObservabilityPipelineSentinelOneDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineSentinelOneDestination.JSON_PROPERTY_REGION, + ObservabilityPipelineSentinelOneDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSentinelOneDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_REGION = "region"; + private ObservabilityPipelineSentinelOneDestinationRegion region; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSentinelOneDestinationType type = + ObservabilityPipelineSentinelOneDestinationType.SENTINEL_ONE; + + public ObservabilityPipelineSentinelOneDestination() {} + + @JsonCreator + public ObservabilityPipelineSentinelOneDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_REGION) + ObservabilityPipelineSentinelOneDestinationRegion region, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSentinelOneDestinationType type) { + this.id = id; + this.inputs = inputs; + this.region = region; + this.unparsed |= !region.isValid(); + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSentinelOneDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSentinelOneDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineSentinelOneDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineSentinelOneDestination region( + ObservabilityPipelineSentinelOneDestinationRegion region) { + this.region = region; + this.unparsed |= !region.isValid(); + return this; + } + + /** + * The SentinelOne region to send logs to. + * + * @return region + */ + @JsonProperty(JSON_PROPERTY_REGION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSentinelOneDestinationRegion getRegion() { + return region; + } + + public void setRegion(ObservabilityPipelineSentinelOneDestinationRegion region) { + if (!region.isValid()) { + this.unparsed = true; + } + this.region = region; + } + + public ObservabilityPipelineSentinelOneDestination type( + ObservabilityPipelineSentinelOneDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be sentinel_one. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSentinelOneDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineSentinelOneDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSentinelOneDestination + */ + @JsonAnySetter + public ObservabilityPipelineSentinelOneDestination putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSentinelOneDestination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSentinelOneDestination observabilityPipelineSentinelOneDestination = + (ObservabilityPipelineSentinelOneDestination) o; + return Objects.equals(this.id, observabilityPipelineSentinelOneDestination.id) + && Objects.equals(this.inputs, observabilityPipelineSentinelOneDestination.inputs) + && Objects.equals(this.region, observabilityPipelineSentinelOneDestination.region) + && Objects.equals(this.type, observabilityPipelineSentinelOneDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSentinelOneDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, inputs, region, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSentinelOneDestination {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" region: ").append(toIndentedString(region)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestinationRegion.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestinationRegion.java new file mode 100644 index 00000000000..a63286f6e06 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestinationRegion.java @@ -0,0 +1,69 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The SentinelOne region to send logs to. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSentinelOneDestinationRegion + .ObservabilityPipelineSentinelOneDestinationRegionSerializer.class) +public class ObservabilityPipelineSentinelOneDestinationRegion extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("us", "eu", "ca", "data_set_us")); + + public static final ObservabilityPipelineSentinelOneDestinationRegion US = + new ObservabilityPipelineSentinelOneDestinationRegion("us"); + public static final ObservabilityPipelineSentinelOneDestinationRegion EU = + new ObservabilityPipelineSentinelOneDestinationRegion("eu"); + public static final ObservabilityPipelineSentinelOneDestinationRegion CA = + new ObservabilityPipelineSentinelOneDestinationRegion("ca"); + public static final ObservabilityPipelineSentinelOneDestinationRegion DATA_SET_US = + new ObservabilityPipelineSentinelOneDestinationRegion("data_set_us"); + + ObservabilityPipelineSentinelOneDestinationRegion(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSentinelOneDestinationRegionSerializer + extends StdSerializer { + public ObservabilityPipelineSentinelOneDestinationRegionSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSentinelOneDestinationRegionSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSentinelOneDestinationRegion value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSentinelOneDestinationRegion fromValue(String value) { + return new ObservabilityPipelineSentinelOneDestinationRegion(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestinationType.java new file mode 100644 index 
00000000000..bc33542b270 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSentinelOneDestinationType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be sentinel_one. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSentinelOneDestinationType + .ObservabilityPipelineSentinelOneDestinationTypeSerializer.class) +public class ObservabilityPipelineSentinelOneDestinationType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("sentinel_one")); + + public static final ObservabilityPipelineSentinelOneDestinationType SENTINEL_ONE = + new ObservabilityPipelineSentinelOneDestinationType("sentinel_one"); + + ObservabilityPipelineSentinelOneDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSentinelOneDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSentinelOneDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSentinelOneDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSentinelOneDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSentinelOneDestinationType fromValue(String value) { + return new ObservabilityPipelineSentinelOneDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequest.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSpec.java similarity index 73% rename from src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequest.java rename to src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSpec.java index 8f865cc65d3..3ddc01f5e45 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequest.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSpec.java @@ -17,43 +17,46 @@ import java.util.Map; import java.util.Objects; -/** Top-level schema representing 
a pipeline. */ -@JsonPropertyOrder({ObservabilityPipelineCreateRequest.JSON_PROPERTY_DATA}) +/** + * Input schema representing an observability pipeline configuration. Used in create and validate + * requests. + */ +@JsonPropertyOrder({ObservabilityPipelineSpec.JSON_PROPERTY_DATA}) @jakarta.annotation.Generated( value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") -public class ObservabilityPipelineCreateRequest { +public class ObservabilityPipelineSpec { @JsonIgnore public boolean unparsed = false; public static final String JSON_PROPERTY_DATA = "data"; - private ObservabilityPipelineCreateRequestData data; + private ObservabilityPipelineSpecData data; - public ObservabilityPipelineCreateRequest() {} + public ObservabilityPipelineSpec() {} @JsonCreator - public ObservabilityPipelineCreateRequest( + public ObservabilityPipelineSpec( @JsonProperty(required = true, value = JSON_PROPERTY_DATA) - ObservabilityPipelineCreateRequestData data) { + ObservabilityPipelineSpecData data) { this.data = data; this.unparsed |= data.unparsed; } - public ObservabilityPipelineCreateRequest data(ObservabilityPipelineCreateRequestData data) { + public ObservabilityPipelineSpec data(ObservabilityPipelineSpecData data) { this.data = data; this.unparsed |= data.unparsed; return this; } /** - * Contains the pipeline’s ID, type, and configuration attributes. + * Contains the the pipeline configuration. 
* * @return data */ @JsonProperty(JSON_PROPERTY_DATA) @JsonInclude(value = JsonInclude.Include.ALWAYS) - public ObservabilityPipelineCreateRequestData getData() { + public ObservabilityPipelineSpecData getData() { return data; } - public void setData(ObservabilityPipelineCreateRequestData data) { + public void setData(ObservabilityPipelineSpecData data) { this.data = data; } @@ -69,10 +72,10 @@ public void setData(ObservabilityPipelineCreateRequestData data) { * * @param key The arbitrary key to set * @param value The associated value - * @return ObservabilityPipelineCreateRequest + * @return ObservabilityPipelineSpec */ @JsonAnySetter - public ObservabilityPipelineCreateRequest putAdditionalProperty(String key, Object value) { + public ObservabilityPipelineSpec putAdditionalProperty(String key, Object value) { if (this.additionalProperties == null) { this.additionalProperties = new HashMap(); } @@ -103,7 +106,7 @@ public Object getAdditionalProperty(String key) { return this.additionalProperties.get(key); } - /** Return true if this ObservabilityPipelineCreateRequest object is equal to o. */ + /** Return true if this ObservabilityPipelineSpec object is equal to o. 
*/ @Override public boolean equals(Object o) { if (this == o) { @@ -112,11 +115,10 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ObservabilityPipelineCreateRequest observabilityPipelineCreateRequest = - (ObservabilityPipelineCreateRequest) o; - return Objects.equals(this.data, observabilityPipelineCreateRequest.data) + ObservabilityPipelineSpec observabilityPipelineSpec = (ObservabilityPipelineSpec) o; + return Objects.equals(this.data, observabilityPipelineSpec.data) && Objects.equals( - this.additionalProperties, observabilityPipelineCreateRequest.additionalProperties); + this.additionalProperties, observabilityPipelineSpec.additionalProperties); } @Override @@ -127,7 +129,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class ObservabilityPipelineCreateRequest {\n"); + sb.append("class ObservabilityPipelineSpec {\n"); sb.append(" data: ").append(toIndentedString(data)).append("\n"); sb.append(" additionalProperties: ") .append(toIndentedString(additionalProperties)) diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequestData.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSpecData.java similarity index 79% rename from src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequestData.java rename to src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSpecData.java index a0145ea17d1..a119068ad5e 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineCreateRequestData.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSpecData.java @@ -17,14 +17,14 @@ import java.util.Map; import java.util.Objects; -/** Contains the pipeline’s ID, type, and configuration attributes. */ +/** Contains the the pipeline configuration. 
*/ @JsonPropertyOrder({ - ObservabilityPipelineCreateRequestData.JSON_PROPERTY_ATTRIBUTES, - ObservabilityPipelineCreateRequestData.JSON_PROPERTY_TYPE + ObservabilityPipelineSpecData.JSON_PROPERTY_ATTRIBUTES, + ObservabilityPipelineSpecData.JSON_PROPERTY_TYPE }) @jakarta.annotation.Generated( value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") -public class ObservabilityPipelineCreateRequestData { +public class ObservabilityPipelineSpecData { @JsonIgnore public boolean unparsed = false; public static final String JSON_PROPERTY_ATTRIBUTES = "attributes"; private ObservabilityPipelineDataAttributes attributes; @@ -32,10 +32,10 @@ public class ObservabilityPipelineCreateRequestData { public static final String JSON_PROPERTY_TYPE = "type"; private String type = "pipelines"; - public ObservabilityPipelineCreateRequestData() {} + public ObservabilityPipelineSpecData() {} @JsonCreator - public ObservabilityPipelineCreateRequestData( + public ObservabilityPipelineSpecData( @JsonProperty(required = true, value = JSON_PROPERTY_ATTRIBUTES) ObservabilityPipelineDataAttributes attributes, @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) String type) { @@ -44,8 +44,7 @@ public ObservabilityPipelineCreateRequestData( this.type = type; } - public ObservabilityPipelineCreateRequestData attributes( - ObservabilityPipelineDataAttributes attributes) { + public ObservabilityPipelineSpecData attributes(ObservabilityPipelineDataAttributes attributes) { this.attributes = attributes; this.unparsed |= attributes.unparsed; return this; @@ -66,7 +65,7 @@ public void setAttributes(ObservabilityPipelineDataAttributes attributes) { this.attributes = attributes; } - public ObservabilityPipelineCreateRequestData type(String type) { + public ObservabilityPipelineSpecData type(String type) { this.type = type; return this; } @@ -99,10 +98,10 @@ public void setType(String type) { * * @param key The arbitrary key to set * @param value The associated value - * 
@return ObservabilityPipelineCreateRequestData + * @return ObservabilityPipelineSpecData */ @JsonAnySetter - public ObservabilityPipelineCreateRequestData putAdditionalProperty(String key, Object value) { + public ObservabilityPipelineSpecData putAdditionalProperty(String key, Object value) { if (this.additionalProperties == null) { this.additionalProperties = new HashMap(); } @@ -133,7 +132,7 @@ public Object getAdditionalProperty(String key) { return this.additionalProperties.get(key); } - /** Return true if this ObservabilityPipelineCreateRequestData object is equal to o. */ + /** Return true if this ObservabilityPipelineSpecData object is equal to o. */ @Override public boolean equals(Object o) { if (this == o) { @@ -142,12 +141,11 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } - ObservabilityPipelineCreateRequestData observabilityPipelineCreateRequestData = - (ObservabilityPipelineCreateRequestData) o; - return Objects.equals(this.attributes, observabilityPipelineCreateRequestData.attributes) - && Objects.equals(this.type, observabilityPipelineCreateRequestData.type) + ObservabilityPipelineSpecData observabilityPipelineSpecData = (ObservabilityPipelineSpecData) o; + return Objects.equals(this.attributes, observabilityPipelineSpecData.attributes) + && Objects.equals(this.type, observabilityPipelineSpecData.type) && Objects.equals( - this.additionalProperties, observabilityPipelineCreateRequestData.additionalProperties); + this.additionalProperties, observabilityPipelineSpecData.additionalProperties); } @Override @@ -158,7 +156,7 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("class ObservabilityPipelineCreateRequestData {\n"); + sb.append("class ObservabilityPipelineSpecData {\n"); sb.append(" attributes: ").append(toIndentedString(attributes)).append("\n"); sb.append(" type: ").append(toIndentedString(type)).append("\n"); sb.append(" 
additionalProperties: ") diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestination.java new file mode 100644 index 00000000000..3eafb768273 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestination.java @@ -0,0 +1,343 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector + * (HEC). 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_AUTO_EXTRACT_TIMESTAMP, + ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_ENCODING, + ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_ID, + ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_INDEX, + ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_SOURCETYPE, + ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSplunkHecDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_AUTO_EXTRACT_TIMESTAMP = "auto_extract_timestamp"; + private Boolean autoExtractTimestamp; + + public static final String JSON_PROPERTY_ENCODING = "encoding"; + private ObservabilityPipelineSplunkHecDestinationEncoding encoding; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INDEX = "index"; + private String index; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_SOURCETYPE = "sourcetype"; + private String sourcetype; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSplunkHecDestinationType type = + ObservabilityPipelineSplunkHecDestinationType.SPLUNK_HEC; + + public ObservabilityPipelineSplunkHecDestination() {} + + @JsonCreator + public ObservabilityPipelineSplunkHecDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSplunkHecDestinationType type) { + this.id = id; + this.inputs = inputs; + this.type 
= type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSplunkHecDestination autoExtractTimestamp( + Boolean autoExtractTimestamp) { + this.autoExtractTimestamp = autoExtractTimestamp; + return this; + } + + /** + * If true, Splunk tries to extract timestamps from incoming log events. If + * false, Splunk assigns the time the event was received. + * + * @return autoExtractTimestamp + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_AUTO_EXTRACT_TIMESTAMP) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Boolean getAutoExtractTimestamp() { + return autoExtractTimestamp; + } + + public void setAutoExtractTimestamp(Boolean autoExtractTimestamp) { + this.autoExtractTimestamp = autoExtractTimestamp; + } + + public ObservabilityPipelineSplunkHecDestination encoding( + ObservabilityPipelineSplunkHecDestinationEncoding encoding) { + this.encoding = encoding; + this.unparsed |= !encoding.isValid(); + return this; + } + + /** + * Encoding format for log events. + * + * @return encoding + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ENCODING) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineSplunkHecDestinationEncoding getEncoding() { + return encoding; + } + + public void setEncoding(ObservabilityPipelineSplunkHecDestinationEncoding encoding) { + if (!encoding.isValid()) { + this.unparsed = true; + } + this.encoding = encoding; + } + + public ObservabilityPipelineSplunkHecDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSplunkHecDestination index(String index) { + this.index = index; + return this; + } + + /** + * Optional name of the Splunk index where logs are written. + * + * @return index + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_INDEX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getIndex() { + return index; + } + + public void setIndex(String index) { + this.index = index; + } + + public ObservabilityPipelineSplunkHecDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineSplunkHecDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineSplunkHecDestination sourcetype(String sourcetype) { + this.sourcetype = sourcetype; + return this; + } + + /** + * The Splunk sourcetype to assign to log events. + * + * @return sourcetype + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_SOURCETYPE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getSourcetype() { + return sourcetype; + } + + public void setSourcetype(String sourcetype) { + this.sourcetype = sourcetype; + } + + public ObservabilityPipelineSplunkHecDestination type( + ObservabilityPipelineSplunkHecDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. Always splunk_hec. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSplunkHecDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineSplunkHecDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSplunkHecDestination + */ + @JsonAnySetter + public ObservabilityPipelineSplunkHecDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSplunkHecDestination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSplunkHecDestination observabilityPipelineSplunkHecDestination = + (ObservabilityPipelineSplunkHecDestination) o; + return Objects.equals( + this.autoExtractTimestamp, + observabilityPipelineSplunkHecDestination.autoExtractTimestamp) + && Objects.equals(this.encoding, observabilityPipelineSplunkHecDestination.encoding) + && Objects.equals(this.id, observabilityPipelineSplunkHecDestination.id) + && Objects.equals(this.index, observabilityPipelineSplunkHecDestination.index) + && Objects.equals(this.inputs, observabilityPipelineSplunkHecDestination.inputs) + && Objects.equals(this.sourcetype, observabilityPipelineSplunkHecDestination.sourcetype) + && Objects.equals(this.type, observabilityPipelineSplunkHecDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSplunkHecDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + autoExtractTimestamp, encoding, id, index, inputs, sourcetype, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSplunkHecDestination {\n"); + sb.append(" autoExtractTimestamp: ") + .append(toIndentedString(autoExtractTimestamp)) + .append("\n"); + sb.append(" encoding: ").append(toIndentedString(encoding)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" index: ").append(toIndentedString(index)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" sourcetype: ").append(toIndentedString(sourcetype)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + 
sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestinationEncoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestinationEncoding.java new file mode 100644 index 00000000000..b523d269664 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestinationEncoding.java @@ -0,0 +1,65 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Encoding format for log events. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSplunkHecDestinationEncoding + .ObservabilityPipelineSplunkHecDestinationEncodingSerializer.class) +public class ObservabilityPipelineSplunkHecDestinationEncoding extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("json", "raw_message")); + + public static final ObservabilityPipelineSplunkHecDestinationEncoding JSON = + new ObservabilityPipelineSplunkHecDestinationEncoding("json"); + public static final ObservabilityPipelineSplunkHecDestinationEncoding RAW_MESSAGE = + new ObservabilityPipelineSplunkHecDestinationEncoding("raw_message"); + + ObservabilityPipelineSplunkHecDestinationEncoding(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSplunkHecDestinationEncodingSerializer + extends StdSerializer { + public ObservabilityPipelineSplunkHecDestinationEncodingSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSplunkHecDestinationEncodingSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSplunkHecDestinationEncoding value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSplunkHecDestinationEncoding fromValue(String value) { + return new ObservabilityPipelineSplunkHecDestinationEncoding(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestinationType.java new file mode 100644 index 00000000000..1051d259645 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecDestinationType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. Always splunk_hec. */ +@JsonSerialize( + using = + ObservabilityPipelineSplunkHecDestinationType + .ObservabilityPipelineSplunkHecDestinationTypeSerializer.class) +public class ObservabilityPipelineSplunkHecDestinationType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("splunk_hec")); + + public static final ObservabilityPipelineSplunkHecDestinationType SPLUNK_HEC = + new ObservabilityPipelineSplunkHecDestinationType("splunk_hec"); + + ObservabilityPipelineSplunkHecDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSplunkHecDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSplunkHecDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSplunkHecDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSplunkHecDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSplunkHecDestinationType fromValue(String value) { + return new ObservabilityPipelineSplunkHecDestinationType(value); + } +} diff 
--git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecSource.java new file mode 100644 index 00000000000..a554df706a2 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecSource.java @@ -0,0 +1,211 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSplunkHecSource.JSON_PROPERTY_ID, + ObservabilityPipelineSplunkHecSource.JSON_PROPERTY_TLS, + ObservabilityPipelineSplunkHecSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSplunkHecSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSplunkHecSourceType type = + ObservabilityPipelineSplunkHecSourceType.SPLUNK_HEC; + + public ObservabilityPipelineSplunkHecSource() {} + + @JsonCreator + public ObservabilityPipelineSplunkHecSource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSplunkHecSourceType type) { + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSplunkHecSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSplunkHecSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. 
+ * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineSplunkHecSource type(ObservabilityPipelineSplunkHecSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. Always splunk_hec. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSplunkHecSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineSplunkHecSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSplunkHecSource + */ + @JsonAnySetter + public ObservabilityPipelineSplunkHecSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSplunkHecSource object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSplunkHecSource observabilityPipelineSplunkHecSource = + (ObservabilityPipelineSplunkHecSource) o; + return Objects.equals(this.id, observabilityPipelineSplunkHecSource.id) + && Objects.equals(this.tls, observabilityPipelineSplunkHecSource.tls) + && Objects.equals(this.type, observabilityPipelineSplunkHecSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineSplunkHecSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSplunkHecSource {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecSourceType.java new file mode 100644 index 00000000000..a366d7383ff --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkHecSourceType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. Always splunk_hec. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSplunkHecSourceType.ObservabilityPipelineSplunkHecSourceTypeSerializer + .class) +public class ObservabilityPipelineSplunkHecSourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("splunk_hec")); + + public static final ObservabilityPipelineSplunkHecSourceType SPLUNK_HEC = + new ObservabilityPipelineSplunkHecSourceType("splunk_hec"); + + ObservabilityPipelineSplunkHecSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSplunkHecSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSplunkHecSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSplunkHecSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSplunkHecSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSplunkHecSourceType fromValue(String value) { + return new ObservabilityPipelineSplunkHecSourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSource.java new file mode 100644 index 00000000000..8589db20825 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSource.java @@ -0,0 +1,214 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS + * is supported for secure transmission. + */ +@JsonPropertyOrder({ + ObservabilityPipelineSplunkTcpSource.JSON_PROPERTY_ID, + ObservabilityPipelineSplunkTcpSource.JSON_PROPERTY_TLS, + ObservabilityPipelineSplunkTcpSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSplunkTcpSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSplunkTcpSourceType type = + ObservabilityPipelineSplunkTcpSourceType.SPLUNK_TCP; + + public ObservabilityPipelineSplunkTcpSource() {} + + @JsonCreator + public ObservabilityPipelineSplunkTcpSource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSplunkTcpSourceType type) { + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSplunkTcpSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. 
Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSplunkTcpSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineSplunkTcpSource type(ObservabilityPipelineSplunkTcpSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. Always splunk_tcp. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSplunkTcpSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineSplunkTcpSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSplunkTcpSource + */ + @JsonAnySetter + public ObservabilityPipelineSplunkTcpSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSplunkTcpSource object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSplunkTcpSource observabilityPipelineSplunkTcpSource = + (ObservabilityPipelineSplunkTcpSource) o; + return Objects.equals(this.id, observabilityPipelineSplunkTcpSource.id) + && Objects.equals(this.tls, observabilityPipelineSplunkTcpSource.tls) + && Objects.equals(this.type, observabilityPipelineSplunkTcpSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineSplunkTcpSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSplunkTcpSource {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSourceType.java new file mode 100644 index 00000000000..ecd01f1deb8 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSourceType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. Always splunk_tcp. */ +@JsonSerialize( + using = + ObservabilityPipelineSplunkTcpSourceType.ObservabilityPipelineSplunkTcpSourceTypeSerializer + .class) +public class ObservabilityPipelineSplunkTcpSourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("splunk_tcp")); + + public static final ObservabilityPipelineSplunkTcpSourceType SPLUNK_TCP = + new ObservabilityPipelineSplunkTcpSourceType("splunk_tcp"); + + ObservabilityPipelineSplunkTcpSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSplunkTcpSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSplunkTcpSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSplunkTcpSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSplunkTcpSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSplunkTcpSourceType fromValue(String value) { + return new ObservabilityPipelineSplunkTcpSourceType(value); + } +} diff --git 
a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestination.java new file mode 100644 index 00000000000..ae5f61dcbce --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestination.java @@ -0,0 +1,394 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** The sumo_logic destination forwards logs to Sumo Logic. 
*/ +@JsonPropertyOrder({ + ObservabilityPipelineSumoLogicDestination.JSON_PROPERTY_ENCODING, + ObservabilityPipelineSumoLogicDestination.JSON_PROPERTY_HEADER_CUSTOM_FIELDS, + ObservabilityPipelineSumoLogicDestination.JSON_PROPERTY_HEADER_HOST_NAME, + ObservabilityPipelineSumoLogicDestination.JSON_PROPERTY_HEADER_SOURCE_CATEGORY, + ObservabilityPipelineSumoLogicDestination.JSON_PROPERTY_HEADER_SOURCE_NAME, + ObservabilityPipelineSumoLogicDestination.JSON_PROPERTY_ID, + ObservabilityPipelineSumoLogicDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineSumoLogicDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSumoLogicDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ENCODING = "encoding"; + private ObservabilityPipelineSumoLogicDestinationEncoding encoding; + + public static final String JSON_PROPERTY_HEADER_CUSTOM_FIELDS = "header_custom_fields"; + private List headerCustomFields = + null; + + public static final String JSON_PROPERTY_HEADER_HOST_NAME = "header_host_name"; + private String headerHostName; + + public static final String JSON_PROPERTY_HEADER_SOURCE_CATEGORY = "header_source_category"; + private String headerSourceCategory; + + public static final String JSON_PROPERTY_HEADER_SOURCE_NAME = "header_source_name"; + private String headerSourceName; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSumoLogicDestinationType type = + ObservabilityPipelineSumoLogicDestinationType.SUMO_LOGIC; + + public ObservabilityPipelineSumoLogicDestination() {} + + @JsonCreator + public ObservabilityPipelineSumoLogicDestination( + @JsonProperty(required = 
true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSumoLogicDestinationType type) { + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSumoLogicDestination encoding( + ObservabilityPipelineSumoLogicDestinationEncoding encoding) { + this.encoding = encoding; + this.unparsed |= !encoding.isValid(); + return this; + } + + /** + * The output encoding format. + * + * @return encoding + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ENCODING) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineSumoLogicDestinationEncoding getEncoding() { + return encoding; + } + + public void setEncoding(ObservabilityPipelineSumoLogicDestinationEncoding encoding) { + if (!encoding.isValid()) { + this.unparsed = true; + } + this.encoding = encoding; + } + + public ObservabilityPipelineSumoLogicDestination headerCustomFields( + List headerCustomFields) { + this.headerCustomFields = headerCustomFields; + for (ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem item : + headerCustomFields) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineSumoLogicDestination addHeaderCustomFieldsItem( + ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem headerCustomFieldsItem) { + if (this.headerCustomFields == null) { + this.headerCustomFields = new ArrayList<>(); + } + this.headerCustomFields.add(headerCustomFieldsItem); + this.unparsed |= headerCustomFieldsItem.unparsed; + return this; + } + + /** + * A list of custom headers to include in the request to Sumo Logic. 
+ * + * @return headerCustomFields + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_HEADER_CUSTOM_FIELDS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List + getHeaderCustomFields() { + return headerCustomFields; + } + + public void setHeaderCustomFields( + List headerCustomFields) { + this.headerCustomFields = headerCustomFields; + } + + public ObservabilityPipelineSumoLogicDestination headerHostName(String headerHostName) { + this.headerHostName = headerHostName; + return this; + } + + /** + * Optional override for the host name header. + * + * @return headerHostName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_HEADER_HOST_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getHeaderHostName() { + return headerHostName; + } + + public void setHeaderHostName(String headerHostName) { + this.headerHostName = headerHostName; + } + + public ObservabilityPipelineSumoLogicDestination headerSourceCategory( + String headerSourceCategory) { + this.headerSourceCategory = headerSourceCategory; + return this; + } + + /** + * Optional override for the source category header. + * + * @return headerSourceCategory + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_HEADER_SOURCE_CATEGORY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getHeaderSourceCategory() { + return headerSourceCategory; + } + + public void setHeaderSourceCategory(String headerSourceCategory) { + this.headerSourceCategory = headerSourceCategory; + } + + public ObservabilityPipelineSumoLogicDestination headerSourceName(String headerSourceName) { + this.headerSourceName = headerSourceName; + return this; + } + + /** + * Optional override for the source name header. 
+ * + * @return headerSourceName + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_HEADER_SOURCE_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getHeaderSourceName() { + return headerSourceName; + } + + public void setHeaderSourceName(String headerSourceName) { + this.headerSourceName = headerSourceName; + } + + public ObservabilityPipelineSumoLogicDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSumoLogicDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineSumoLogicDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineSumoLogicDestination type( + ObservabilityPipelineSumoLogicDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be sumo_logic. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSumoLogicDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineSumoLogicDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. 
This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSumoLogicDestination + */ + @JsonAnySetter + public ObservabilityPipelineSumoLogicDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSumoLogicDestination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSumoLogicDestination observabilityPipelineSumoLogicDestination = + (ObservabilityPipelineSumoLogicDestination) o; + return Objects.equals(this.encoding, observabilityPipelineSumoLogicDestination.encoding) + && Objects.equals( + this.headerCustomFields, observabilityPipelineSumoLogicDestination.headerCustomFields) + && Objects.equals( + this.headerHostName, observabilityPipelineSumoLogicDestination.headerHostName) + && Objects.equals( + this.headerSourceCategory, + observabilityPipelineSumoLogicDestination.headerSourceCategory) + && Objects.equals( + this.headerSourceName, observabilityPipelineSumoLogicDestination.headerSourceName) + && Objects.equals(this.id, observabilityPipelineSumoLogicDestination.id) + && Objects.equals(this.inputs, observabilityPipelineSumoLogicDestination.inputs) + && Objects.equals(this.type, observabilityPipelineSumoLogicDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSumoLogicDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + encoding, + headerCustomFields, + headerHostName, + headerSourceCategory, + headerSourceName, + id, + inputs, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSumoLogicDestination {\n"); + sb.append(" encoding: ").append(toIndentedString(encoding)).append("\n"); + sb.append(" headerCustomFields: ").append(toIndentedString(headerCustomFields)).append("\n"); + sb.append(" headerHostName: ").append(toIndentedString(headerHostName)).append("\n"); + sb.append(" headerSourceCategory: ") + .append(toIndentedString(headerSourceCategory)) + .append("\n"); + sb.append(" headerSourceName: 
").append(toIndentedString(headerSourceName)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestinationEncoding.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestinationEncoding.java new file mode 100644 index 00000000000..47b9254d3fc --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestinationEncoding.java @@ -0,0 +1,67 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The output encoding format. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSumoLogicDestinationEncoding + .ObservabilityPipelineSumoLogicDestinationEncodingSerializer.class) +public class ObservabilityPipelineSumoLogicDestinationEncoding extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("json", "raw_message", "logfmt")); + + public static final ObservabilityPipelineSumoLogicDestinationEncoding JSON = + new ObservabilityPipelineSumoLogicDestinationEncoding("json"); + public static final ObservabilityPipelineSumoLogicDestinationEncoding RAW_MESSAGE = + new ObservabilityPipelineSumoLogicDestinationEncoding("raw_message"); + public static final ObservabilityPipelineSumoLogicDestinationEncoding LOGFMT = + new ObservabilityPipelineSumoLogicDestinationEncoding("logfmt"); + + ObservabilityPipelineSumoLogicDestinationEncoding(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSumoLogicDestinationEncodingSerializer + extends StdSerializer { + public ObservabilityPipelineSumoLogicDestinationEncodingSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSumoLogicDestinationEncodingSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSumoLogicDestinationEncoding value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSumoLogicDestinationEncoding fromValue(String value) { + return new ObservabilityPipelineSumoLogicDestinationEncoding(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem.java new file mode 100644 index 00000000000..d5b352b6dd2 --- /dev/null +++ 
b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem.java @@ -0,0 +1,183 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Single key-value pair used as a custom log header for Sumo Logic. */ +@JsonPropertyOrder({ + ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem.JSON_PROPERTY_NAME, + ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem.JSON_PROPERTY_VALUE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_NAME = "name"; + private String name; + + public static final String JSON_PROPERTY_VALUE = "value"; + private String value; + + public ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem() {} + + @JsonCreator + public ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem( + @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name, + @JsonProperty(required = true, value = JSON_PROPERTY_VALUE) String value) { + this.name = name; + this.value = value; + } + + public 
ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem name(String name) { + this.name = name; + return this; + } + + /** + * The header field name. + * + * @return name + */ + @JsonProperty(JSON_PROPERTY_NAME) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem value(String value) { + this.value = value; + return this; + } + + /** + * The header field value. + * + * @return value + */ + @JsonProperty(JSON_PROPERTY_VALUE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem + */ + @JsonAnySetter + public ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem object is + * equal to o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem + observabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem = + (ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem) o; + return Objects.equals( + this.name, observabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem.name) + && Objects.equals( + this.value, observabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem.value) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(name, value, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem {\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" value: ").append(toIndentedString(value)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestinationType.java new file mode 100644 index 00000000000..4c99bdb4968 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicDestinationType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be sumo_logic. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSumoLogicDestinationType + .ObservabilityPipelineSumoLogicDestinationTypeSerializer.class) +public class ObservabilityPipelineSumoLogicDestinationType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("sumo_logic")); + + public static final ObservabilityPipelineSumoLogicDestinationType SUMO_LOGIC = + new ObservabilityPipelineSumoLogicDestinationType("sumo_logic"); + + ObservabilityPipelineSumoLogicDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSumoLogicDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSumoLogicDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSumoLogicDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSumoLogicDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSumoLogicDestinationType fromValue(String value) { + return new ObservabilityPipelineSumoLogicDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicSource.java new file mode 100644 index 00000000000..cfdcae3a4d3 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicSource.java @@ -0,0 +1,183 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** The sumo_logic source receives logs from Sumo Logic collectors. */ +@JsonPropertyOrder({ + ObservabilityPipelineSumoLogicSource.JSON_PROPERTY_ID, + ObservabilityPipelineSumoLogicSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSumoLogicSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSumoLogicSourceType type = + ObservabilityPipelineSumoLogicSourceType.SUMO_LOGIC; + + public ObservabilityPipelineSumoLogicSource() {} + + @JsonCreator + public ObservabilityPipelineSumoLogicSource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSumoLogicSourceType type) { + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSumoLogicSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSumoLogicSource type(ObservabilityPipelineSumoLogicSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be sumo_logic. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSumoLogicSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineSumoLogicSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSumoLogicSource + */ + @JsonAnySetter + public ObservabilityPipelineSumoLogicSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSumoLogicSource object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSumoLogicSource observabilityPipelineSumoLogicSource = + (ObservabilityPipelineSumoLogicSource) o; + return Objects.equals(this.id, observabilityPipelineSumoLogicSource.id) + && Objects.equals(this.type, observabilityPipelineSumoLogicSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineSumoLogicSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSumoLogicSource {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicSourceType.java new file mode 100644 index 00000000000..f4a5a0d4b3a --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSumoLogicSourceType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. The value should always be sumo_logic. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSumoLogicSourceType.ObservabilityPipelineSumoLogicSourceTypeSerializer + .class) +public class ObservabilityPipelineSumoLogicSourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("sumo_logic")); + + public static final ObservabilityPipelineSumoLogicSourceType SUMO_LOGIC = + new ObservabilityPipelineSumoLogicSourceType("sumo_logic"); + + ObservabilityPipelineSumoLogicSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSumoLogicSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSumoLogicSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSumoLogicSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSumoLogicSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSumoLogicSourceType fromValue(String value) { + return new ObservabilityPipelineSumoLogicSourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgDestination.java new file mode 100644 index 00000000000..b8b70fe8139 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgDestination.java @@ -0,0 +1,277 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The syslog_ng destination forwards logs to an external syslog-ng server + * over TCP or UDP using the syslog protocol. + */ +@JsonPropertyOrder({ + ObservabilityPipelineSyslogNgDestination.JSON_PROPERTY_ID, + ObservabilityPipelineSyslogNgDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineSyslogNgDestination.JSON_PROPERTY_KEEPALIVE, + ObservabilityPipelineSyslogNgDestination.JSON_PROPERTY_TLS, + ObservabilityPipelineSyslogNgDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSyslogNgDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_KEEPALIVE = "keepalive"; + private Long keepalive; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSyslogNgDestinationType type = + ObservabilityPipelineSyslogNgDestinationType.SYSLOG_NG; + + public ObservabilityPipelineSyslogNgDestination() {} + + @JsonCreator + public ObservabilityPipelineSyslogNgDestination( + @JsonProperty(required = true, value = 
JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSyslogNgDestinationType type) { + this.id = id; + this.inputs = inputs; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSyslogNgDestination id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSyslogNgDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineSyslogNgDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineSyslogNgDestination keepalive(Long keepalive) { + this.keepalive = keepalive; + return this; + } + + /** + * Optional socket keepalive duration in milliseconds. 
minimum: 0 + * + * @return keepalive + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_KEEPALIVE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public Long getKeepalive() { + return keepalive; + } + + public void setKeepalive(Long keepalive) { + this.keepalive = keepalive; + } + + public ObservabilityPipelineSyslogNgDestination tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineSyslogNgDestination type( + ObservabilityPipelineSyslogNgDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. The value should always be syslog_ng. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSyslogNgDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineSyslogNgDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSyslogNgDestination + */ + @JsonAnySetter + public ObservabilityPipelineSyslogNgDestination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSyslogNgDestination object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSyslogNgDestination observabilityPipelineSyslogNgDestination = + (ObservabilityPipelineSyslogNgDestination) o; + return Objects.equals(this.id, observabilityPipelineSyslogNgDestination.id) + && Objects.equals(this.inputs, observabilityPipelineSyslogNgDestination.inputs) + && Objects.equals(this.keepalive, observabilityPipelineSyslogNgDestination.keepalive) + && Objects.equals(this.tls, observabilityPipelineSyslogNgDestination.tls) + && Objects.equals(this.type, observabilityPipelineSyslogNgDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineSyslogNgDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, inputs, keepalive, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSyslogNgDestination {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" keepalive: ").append(toIndentedString(keepalive)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgDestinationType.java new file mode 100644 index 00000000000..7a464468928 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgDestinationType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. The value should always be syslog_ng. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSyslogNgDestinationType + .ObservabilityPipelineSyslogNgDestinationTypeSerializer.class) +public class ObservabilityPipelineSyslogNgDestinationType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("syslog_ng")); + + public static final ObservabilityPipelineSyslogNgDestinationType SYSLOG_NG = + new ObservabilityPipelineSyslogNgDestinationType("syslog_ng"); + + ObservabilityPipelineSyslogNgDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSyslogNgDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSyslogNgDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSyslogNgDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSyslogNgDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSyslogNgDestinationType fromValue(String value) { + return new ObservabilityPipelineSyslogNgDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgSource.java new file mode 100644 index 00000000000..6f371c6e840 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgSource.java @@ -0,0 +1,248 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The syslog_ng source listens for logs over TCP or UDP from a syslog-ng + * server using the syslog protocol. + */ +@JsonPropertyOrder({ + ObservabilityPipelineSyslogNgSource.JSON_PROPERTY_ID, + ObservabilityPipelineSyslogNgSource.JSON_PROPERTY_MODE, + ObservabilityPipelineSyslogNgSource.JSON_PROPERTY_TLS, + ObservabilityPipelineSyslogNgSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSyslogNgSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_MODE = "mode"; + private ObservabilityPipelineSyslogSourceMode mode; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSyslogNgSourceType type = + ObservabilityPipelineSyslogNgSourceType.SYSLOG_NG; + + public ObservabilityPipelineSyslogNgSource() {} + + @JsonCreator + public ObservabilityPipelineSyslogNgSource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_MODE) + ObservabilityPipelineSyslogSourceMode mode, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSyslogNgSourceType type) { + this.id = id; + 
this.mode = mode; + this.unparsed |= !mode.isValid(); + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSyslogNgSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSyslogNgSource mode(ObservabilityPipelineSyslogSourceMode mode) { + this.mode = mode; + this.unparsed |= !mode.isValid(); + return this; + } + + /** + * Protocol used by the syslog source to receive messages. + * + * @return mode + */ + @JsonProperty(JSON_PROPERTY_MODE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSyslogSourceMode getMode() { + return mode; + } + + public void setMode(ObservabilityPipelineSyslogSourceMode mode) { + if (!mode.isValid()) { + this.unparsed = true; + } + this.mode = mode; + } + + public ObservabilityPipelineSyslogNgSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption between the pipeline component and external services. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineSyslogNgSource type(ObservabilityPipelineSyslogNgSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. The value should always be syslog_ng. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSyslogNgSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineSyslogNgSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSyslogNgSource + */ + @JsonAnySetter + public ObservabilityPipelineSyslogNgSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSyslogNgSource object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSyslogNgSource observabilityPipelineSyslogNgSource = + (ObservabilityPipelineSyslogNgSource) o; + return Objects.equals(this.id, observabilityPipelineSyslogNgSource.id) + && Objects.equals(this.mode, observabilityPipelineSyslogNgSource.mode) + && Objects.equals(this.tls, observabilityPipelineSyslogNgSource.tls) + && Objects.equals(this.type, observabilityPipelineSyslogNgSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineSyslogNgSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, mode, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSyslogNgSource {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" mode: ").append(toIndentedString(mode)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgSourceType.java new file mode 100644 index 00000000000..67855713f42 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogNgSourceType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. The value should always be syslog_ng. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineSyslogNgSourceType.ObservabilityPipelineSyslogNgSourceTypeSerializer + .class) +public class ObservabilityPipelineSyslogNgSourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("syslog_ng")); + + public static final ObservabilityPipelineSyslogNgSourceType SYSLOG_NG = + new ObservabilityPipelineSyslogNgSourceType("syslog_ng"); + + ObservabilityPipelineSyslogNgSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSyslogNgSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSyslogNgSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSyslogNgSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSyslogNgSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSyslogNgSourceType fromValue(String value) { + return new ObservabilityPipelineSyslogNgSourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogSourceMode.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogSourceMode.java new file mode 100644 index 00000000000..923ddb4b02b --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSyslogSourceMode.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Protocol used by the syslog source to receive messages. */ +@JsonSerialize( + using = + ObservabilityPipelineSyslogSourceMode.ObservabilityPipelineSyslogSourceModeSerializer.class) +public class ObservabilityPipelineSyslogSourceMode extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("tcp", "udp")); + + public static final ObservabilityPipelineSyslogSourceMode TCP = + new ObservabilityPipelineSyslogSourceMode("tcp"); + public static final ObservabilityPipelineSyslogSourceMode UDP = + new ObservabilityPipelineSyslogSourceMode("udp"); + + ObservabilityPipelineSyslogSourceMode(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSyslogSourceModeSerializer + extends StdSerializer { + public ObservabilityPipelineSyslogSourceModeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSyslogSourceModeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSyslogSourceMode value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSyslogSourceMode fromValue(String value) { + return new ObservabilityPipelineSyslogSourceMode(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineThrottleProcessor.java 
b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineThrottleProcessor.java new file mode 100644 index 00000000000..6110d8603d0 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineThrottleProcessor.java @@ -0,0 +1,342 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The throttle processor limits the number of events that pass through over a given + * time window. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineThrottleProcessor.JSON_PROPERTY_GROUP_BY, + ObservabilityPipelineThrottleProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineThrottleProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineThrottleProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineThrottleProcessor.JSON_PROPERTY_THRESHOLD, + ObservabilityPipelineThrottleProcessor.JSON_PROPERTY_TYPE, + ObservabilityPipelineThrottleProcessor.JSON_PROPERTY_WINDOW +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineThrottleProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_GROUP_BY = "group_by"; + private List groupBy = null; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_THRESHOLD = "threshold"; + private Long threshold; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineThrottleProcessorType type = + ObservabilityPipelineThrottleProcessorType.THROTTLE; + + public static final String JSON_PROPERTY_WINDOW = "window"; + private Double window; + + public ObservabilityPipelineThrottleProcessor() {} + + @JsonCreator + public ObservabilityPipelineThrottleProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_THRESHOLD) Long threshold, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineThrottleProcessorType type, + @JsonProperty(required = true, 
value = JSON_PROPERTY_WINDOW) Double window) { + this.id = id; + this.include = include; + this.inputs = inputs; + this.threshold = threshold; + this.type = type; + this.unparsed |= !type.isValid(); + this.window = window; + } + + public ObservabilityPipelineThrottleProcessor groupBy(List groupBy) { + this.groupBy = groupBy; + return this; + } + + public ObservabilityPipelineThrottleProcessor addGroupByItem(String groupByItem) { + if (this.groupBy == null) { + this.groupBy = new ArrayList<>(); + } + this.groupBy.add(groupByItem); + return this; + } + + /** + * Optional list of fields used to group events before the threshold has been reached. + * + * @return groupBy + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_GROUP_BY) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getGroupBy() { + return groupBy; + } + + public void setGroupBy(List groupBy) { + this.groupBy = groupBy; + } + + public ObservabilityPipelineThrottleProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this processor. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineThrottleProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. 
+ * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineThrottleProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineThrottleProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this processor. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineThrottleProcessor threshold(Long threshold) { + this.threshold = threshold; + return this; + } + + /** + * The number of events allowed in a given time window. Events sent after the threshold has been + * reached are dropped. + * + * @return threshold + */ + @JsonProperty(JSON_PROPERTY_THRESHOLD) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Long getThreshold() { + return threshold; + } + + public void setThreshold(Long threshold) { + this.threshold = threshold; + } + + public ObservabilityPipelineThrottleProcessor type( + ObservabilityPipelineThrottleProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. The value should always be throttle. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineThrottleProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineThrottleProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + public ObservabilityPipelineThrottleProcessor window(Double window) { + this.window = window; + return this; + } + + /** + * The time window in seconds over which the threshold applies. + * + * @return window + */ + @JsonProperty(JSON_PROPERTY_WINDOW) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public Double getWindow() { + return window; + } + + public void setWindow(Double window) { + this.window = window; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineThrottleProcessor + */ + @JsonAnySetter + public ObservabilityPipelineThrottleProcessor putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineThrottleProcessor object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineThrottleProcessor observabilityPipelineThrottleProcessor = + (ObservabilityPipelineThrottleProcessor) o; + return Objects.equals(this.groupBy, observabilityPipelineThrottleProcessor.groupBy) + && Objects.equals(this.id, observabilityPipelineThrottleProcessor.id) + && Objects.equals(this.include, observabilityPipelineThrottleProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineThrottleProcessor.inputs) + && Objects.equals(this.threshold, observabilityPipelineThrottleProcessor.threshold) + && Objects.equals(this.type, observabilityPipelineThrottleProcessor.type) + && Objects.equals(this.window, observabilityPipelineThrottleProcessor.window) + && Objects.equals( + this.additionalProperties, observabilityPipelineThrottleProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + groupBy, id, include, inputs, threshold, type, window, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineThrottleProcessor {\n"); + sb.append(" groupBy: ").append(toIndentedString(groupBy)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" threshold: ").append(toIndentedString(threshold)).append("\n"); + 
sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" window: ").append(toIndentedString(window)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineThrottleProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineThrottleProcessorType.java new file mode 100644 index 00000000000..ff038f0c190 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineThrottleProcessorType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. The value should always be throttle. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineThrottleProcessorType + .ObservabilityPipelineThrottleProcessorTypeSerializer.class) +public class ObservabilityPipelineThrottleProcessorType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("throttle")); + + public static final ObservabilityPipelineThrottleProcessorType THROTTLE = + new ObservabilityPipelineThrottleProcessorType("throttle"); + + ObservabilityPipelineThrottleProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineThrottleProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineThrottleProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineThrottleProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineThrottleProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineThrottleProcessorType fromValue(String value) { + return new ObservabilityPipelineThrottleProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineTls.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineTls.java index e737cc86e11..1c68391dd39 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineTls.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineTls.java @@ -17,7 +17,9 @@ import java.util.Map; import java.util.Objects; -/** Configuration for enabling TLS encryption. */ +/** + * Configuration for enabling TLS encryption between the pipeline component and external services. 
+ */ @JsonPropertyOrder({ ObservabilityPipelineTls.JSON_PROPERTY_CA_FILE, ObservabilityPipelineTls.JSON_PROPERTY_CRT_FILE, diff --git a/src/main/java/com/datadog/api/client/v2/model/ValidationError.java b/src/main/java/com/datadog/api/client/v2/model/ValidationError.java new file mode 100644 index 00000000000..ab36fcd7f4c --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ValidationError.java @@ -0,0 +1,172 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** Represents a single validation error, including a human-readable title and metadata. 
*/ +@JsonPropertyOrder({ValidationError.JSON_PROPERTY_META, ValidationError.JSON_PROPERTY_TITLE}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ValidationError { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_META = "meta"; + private ValidationErrorMeta meta; + + public static final String JSON_PROPERTY_TITLE = "title"; + private String title; + + public ValidationError() {} + + @JsonCreator + public ValidationError( + @JsonProperty(required = true, value = JSON_PROPERTY_META) ValidationErrorMeta meta, + @JsonProperty(required = true, value = JSON_PROPERTY_TITLE) String title) { + this.meta = meta; + this.unparsed |= meta.unparsed; + this.title = title; + } + + public ValidationError meta(ValidationErrorMeta meta) { + this.meta = meta; + this.unparsed |= meta.unparsed; + return this; + } + + /** + * Describes additional metadata for validation errors, including field names and error messages. + * + * @return meta + */ + @JsonProperty(JSON_PROPERTY_META) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ValidationErrorMeta getMeta() { + return meta; + } + + public void setMeta(ValidationErrorMeta meta) { + this.meta = meta; + } + + public ValidationError title(String title) { + this.title = title; + return this; + } + + /** + * A short, human-readable summary of the error. + * + * @return title + */ + @JsonProperty(JSON_PROPERTY_TITLE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. 
If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ValidationError + */ + @JsonAnySetter + public ValidationError putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ValidationError object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ValidationError validationError = (ValidationError) o; + return Objects.equals(this.meta, validationError.meta) + && Objects.equals(this.title, validationError.title) + && Objects.equals(this.additionalProperties, validationError.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(meta, title, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ValidationError {\n"); + sb.append(" meta: ").append(toIndentedString(meta)).append("\n"); + sb.append(" title: ").append(toIndentedString(title)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ValidationErrorMeta.java b/src/main/java/com/datadog/api/client/v2/model/ValidationErrorMeta.java new file mode 100644 index 00000000000..7bed6ba84e3 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ValidationErrorMeta.java @@ -0,0 +1,201 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * Describes additional metadata for validation errors, including field names and error messages. + */ +@JsonPropertyOrder({ + ValidationErrorMeta.JSON_PROPERTY_FIELD, + ValidationErrorMeta.JSON_PROPERTY_ID, + ValidationErrorMeta.JSON_PROPERTY_MESSAGE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ValidationErrorMeta { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_FIELD = "field"; + private String field; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_MESSAGE = "message"; + private String message; + + public ValidationErrorMeta() {} + + @JsonCreator + public ValidationErrorMeta( + @JsonProperty(required = true, value = JSON_PROPERTY_MESSAGE) String message) { + this.message = message; + } + + public ValidationErrorMeta field(String field) { + this.field = field; + return this; + } + + /** + * The field name that caused the error. + * + * @return field + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_FIELD) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public ValidationErrorMeta id(String id) { + this.id = id; + return this; + } + + /** + * The ID of the component in which the error occurred. 
+ * + * @return id + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ValidationErrorMeta message(String message) { + this.message = message; + return this; + } + + /** + * The detailed error message. + * + * @return message + */ + @JsonProperty(JSON_PROPERTY_MESSAGE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ValidationErrorMeta + */ + @JsonAnySetter + public ValidationErrorMeta putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ValidationErrorMeta object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ValidationErrorMeta validationErrorMeta = (ValidationErrorMeta) o; + return Objects.equals(this.field, validationErrorMeta.field) + && Objects.equals(this.id, validationErrorMeta.id) + && Objects.equals(this.message, validationErrorMeta.message) + && Objects.equals(this.additionalProperties, validationErrorMeta.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(field, id, message, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ValidationErrorMeta {\n"); + sb.append(" field: ").append(toIndentedString(field)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" message: ").append(toIndentedString(message)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ValidationResponse.java b/src/main/java/com/datadog/api/client/v2/model/ValidationResponse.java new file mode 100644 index 00000000000..0a9b2e0b6ff --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ValidationResponse.java @@ -0,0 +1,149 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** Response containing validation errors. 
*/ +@JsonPropertyOrder({ValidationResponse.JSON_PROPERTY_ERRORS}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ValidationResponse { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ERRORS = "errors"; + private List errors = null; + + public ValidationResponse errors(List errors) { + this.errors = errors; + for (ValidationError item : errors) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ValidationResponse addErrorsItem(ValidationError errorsItem) { + if (this.errors == null) { + this.errors = new ArrayList<>(); + } + this.errors.add(errorsItem); + this.unparsed |= errorsItem.unparsed; + return this; + } + + /** + * The ValidationResponse errors. + * + * @return errors + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_ERRORS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public List getErrors() { + return errors; + } + + public void setErrors(List errors) { + this.errors = errors; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ValidationResponse + */ + @JsonAnySetter + public ValidationResponse putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. 
+ * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ValidationResponse object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ValidationResponse validationResponse = (ValidationResponse) o; + return Objects.equals(this.errors, validationResponse.errors) + && Objects.equals(this.additionalProperties, validationResponse.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(errors, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ValidationResponse {\n"); + sb.append(" errors: ").append(toIndentedString(errors)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.freeze b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.freeze index d1b8c94c601..71a6a094850 100644 --- a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.freeze +++ b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_Bad_Request_response.freeze @@ -1 +1 @@ -2025-04-04T10:36:58.031Z \ No newline at end of file +2025-04-25T17:29:21.625Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.freeze index 0fc9b398ff7..a6261e736fd 100644 --- a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.freeze +++ b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.freeze @@ -1 +1 @@ -2025-04-04T10:36:58.537Z \ No newline at end of file +2025-04-25T17:29:23.837Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.json index c86254f34da..f2acc5bb0bc 100644 --- a/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.json +++ b/src/test/resources/cassettes/features/v2/Create_a_new_pipeline_returns_OK_response.json @@ -12,7 +12,7 @@ "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"be354bf0-1140-11f0-9a6b-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"d891e45c-21fa-11f0-96dc-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/json" @@ -27,13 +27,13 @@ "timeToLive": { "unlimited": true }, - "id": "ccedcb24-9a42-7cb7-5505-c852f00f431a" + "id": "ccedcb24-9a42-7cb7-5505-c852f00f431b" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/be354bf0-1140-11f0-9a6b-da7ad0900002", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/d891e45c-21fa-11f0-96dc-da7ad0900002", "keepAlive": false, "secure": true }, @@ -52,6 +52,6 @@ "timeToLive": { "unlimited": true }, - "id": "212e06b7-2870-882f-a7c6-ff252ecb2958" + "id": "1cf1cc77-a690-6f28-4206-62c129b23adb" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.freeze b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.freeze index 4f32c245d61..033a0755d0b 100644 --- a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.freeze +++ b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_Not_Found_response.freeze @@ -1 +1 
@@ -2025-04-04T10:36:59.510Z \ No newline at end of file +2025-04-25T17:29:25.292Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.freeze index 5489af2d1a6..9cbc728e003 100644 --- a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.freeze +++ b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.freeze @@ -1 +1 @@ -2025-04-04T10:37:00.184Z \ No newline at end of file +2025-04-25T17:29:26.847Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.json index 8f003a7382c..83345417a11 100644 --- a/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.json +++ b/src/test/resources/cassettes/features/v2/Delete_a_pipeline_returns_OK_response.json @@ -12,7 +12,7 @@ "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"bf2c23da-1140-11f0-9a95-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"da5f2218-21fa-11f0-96de-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/json" @@ -33,7 +33,7 @@ "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/bf2c23da-1140-11f0-9a95-da7ad0900002", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/da5f2218-21fa-11f0-96de-da7ad0900002", "keepAlive": false, "secure": true }, @@ -52,13 +52,13 @@ "timeToLive": { "unlimited": true }, - "id": "0445a79a-5afd-bfc6-6aad-bf9e83e08d8f" + "id": "598cf7c6-bf15-0219-bc90-925671f48c4a" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/bf2c23da-1140-11f0-9a95-da7ad0900002", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/da5f2218-21fa-11f0-96de-da7ad0900002", "keepAlive": false, "secure": true }, @@ -78,6 +78,6 @@ "timeToLive": { "unlimited": true }, - "id": "0445a79a-5afd-bfc6-6aad-bf9e83e08d90" + "id": "598cf7c6-bf15-0219-bc90-925671f48c4b" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.freeze index 50029945824..f863fd1ac2d 100644 --- a/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.freeze +++ b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.freeze @@ -1 +1 @@ -2025-04-04T10:37:02.118Z \ No newline at end of file +2025-04-25T17:29:29.196Z \ No newline at end of file diff --git 
a/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.json index eb906a33d2c..9f37ce0de88 100644 --- a/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.json +++ b/src/test/resources/cassettes/features/v2/Get_a_specific_pipeline_returns_OK_response.json @@ -12,7 +12,7 @@ "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"c06831ee-1140-11f0-9fe5-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"dbbff3d0-21fa-11f0-96e0-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/json" @@ -27,18 +27,18 @@ "timeToLive": { "unlimited": true }, - "id": "ccedcb24-9a42-7cb7-5505-c852f00f431c" + "id": "ccedcb24-9a42-7cb7-5505-c852f00f431d" }, { "httpRequest": { "headers": {}, "method": "GET", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c06831ee-1140-11f0-9fe5-da7ad0900002", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/dbbff3d0-21fa-11f0-96e0-da7ad0900002", "keepAlive": false, "secure": true }, "httpResponse": { - 
"body": "{\"data\":{\"id\":\"c06831ee-1140-11f0-9fe5-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"dbbff3d0-21fa-11f0-96e0-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/json" @@ -53,13 +53,13 @@ "timeToLive": { "unlimited": true }, - "id": "1b08c3e8-e45d-c983-c7d1-8fa834c00d39" + "id": "556b6b39-efde-699b-7097-b64716e70c01" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c06831ee-1140-11f0-9fe5-da7ad0900002", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/dbbff3d0-21fa-11f0-96e0-da7ad0900002", "keepAlive": false, "secure": true }, @@ -78,6 +78,6 @@ "timeToLive": { "unlimited": true }, - "id": "157c5e04-ad54-8e72-7615-8bd9095245a7" + "id": "91b13ea6-a00d-aad4-d3c5-b58c06e0c937" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.freeze b/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.freeze new file mode 100644 index 00000000000..3a7978da8a7 --- /dev/null +++ 
b/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.freeze @@ -0,0 +1 @@ +2025-04-25T17:29:31.769Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.json b/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.json new file mode 100644 index 00000000000..3b86c9b32e2 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/List_pipelines_returns_Bad_Request_response.json @@ -0,0 +1,33 @@ +[ + { + "httpRequest": { + "headers": {}, + "method": "GET", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "queryStringParameters": { + "page[size]": [ + "0" + ] + }, + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"errors\":[{\"title\":\"page[size] must be a number between 1 and 50\"}]}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 400, + "reasonPhrase": "Bad Request" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "986c80b1-2bab-98f5-4c61-f0c3e4c7fb8c" + } +] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.freeze new file mode 100644 index 00000000000..4c9f3cfcbc3 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.freeze @@ -0,0 +1 @@ +2025-04-25T17:29:32.322Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.json b/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.json new file mode 100644 index 00000000000..477cb7b8511 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/List_pipelines_returns_OK_response.json @@ -0,0 +1,83 @@ +[ + { + "httpRequest": { + "body": { + "type": "JSON", + "json": 
"{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Main Observability Pipeline\"},\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "POST", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"data\":{\"id\":\"dd87c652-21fa-11f0-96e2-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 201, + "reasonPhrase": "Created" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "ccedcb24-9a42-7cb7-5505-c852f00f431a" + }, + { + "httpRequest": { + "headers": {}, + "method": "GET", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"data\":[{\"id\":\"8d85d864-0f09-11f0-9711-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"3f339054-10ab-11f0-88a2-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"b8c068de-10ab-11f0-88a8-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"32e2f90a-1139-11f0-8501-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"67e79020-1139-11f0-98d1-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"30966e06-113a-11f0-98e1-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"3e3f9382-138f-11f0-8cf0-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"test pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"parser-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"field\":\"message\",\"id\":\"parser-1\",\"include\":\"service:my-service\",\"inputs\":[\"source-1\"],\"type\":\"parse_json\"}],\"sources\":[{\"id\":\"source-1\",\"tls\":{\"crt_file\":\"/path/to/cert.crt\"},\"type\":\"datadog_agent\"}]}}},{\"id\":\"42159650-138f-11f0-a2aa-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"test pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"parser-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"field\":\"message\",\"id\":\"parser-1\",\"include\":\"service:my-service\",\"inputs\":[\"source-1\"],\"type\":\"parse_json\"}],\"sources\":[{\"id\":\"source-1\",\"tls\":{\"crt_file\":\"/path/to/cert.crt\"},\"type\":\"datadog_agent\"}]}}},{\"id\":\"6cc001f8-1392-11f0-9e35-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"agent with 
tls\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"source-with-tls\"],\"type\":\"datadog_logs\"}],\"processors\":[],\"sources\":[{\"id\":\"source-with-tls\",\"tls\":{\"ca_file\":\"/etc/certs/ca.crt\",\"crt_file\":\"/etc/certs/agent.crt\",\"key_file\":\"/etc/certs/agent.key\"},\"type\":\"datadog_agent\"}]}}},{\"id\":\"bdf5078e-139d-11f0-8e4f-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"e67402d2-139d-11f0-8e51-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"e9849f22-139d-11f0-8e53-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"f9845bb0-139d-11f0-b101-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_log
s\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"2ebd82ca-139e-11f0-b103-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"31ba5a34-139e-11f0-8e55-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"3aa802c2-139e-11f0-8e57-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"7b3daaf8-139e-11f0-8e59-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-worl
d\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"7e72d32e-139e-11f0-8e5b-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"818a8728-139e-11f0-8e5d-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"bed86e88-139e-11f0-8e5f-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"c202b050-139e-11f0-b107-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\
"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"c5111084-139e-11f0-8e61-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"e01ae6de-139e-11f0-8e63-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"e33ad356-139e-11f0-b109-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"e6783e78-139e-11f0-b10b-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\
"id\":\"04407bfa-139f-11f0-8e65-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"126be958-139f-11f0-8e67-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"97607354-139f-11f0-8e6b-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"305f9a88-13a2-11f0-8dd9-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"parse-json-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"parser-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"field\":\"message\",\"id\":\"parser-1\",\"include\":\"env:parse\",\"inputs\":[\"source-1\"],\"type\":\"parse_json\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"45669120-13a2-11f0-8ddb-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"parse-json-pipeline\",\"config\"
:{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"parser-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"field\":\"message\",\"id\":\"parser-1\",\"include\":\"env:parse\",\"inputs\":[\"source-1\"],\"type\":\"parse_json\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"c00f7548-13a4-11f0-9315-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"test pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"parser-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"field\":\"message\",\"id\":\"parser-1\",\"include\":\"service:my-service\",\"inputs\":[\"source-1\"],\"type\":\"parse_json\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"c07cf206-13a6-11f0-949d-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"test pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"parser-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"field\":\"message\",\"id\":\"parser-1\",\"include\":\"service:my-service\",\"inputs\":[\"source-1\"],\"type\":\"parse_json\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"8ff06e36-13ad-11f0-8243-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"add-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"add-fields-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"name\":\"custom.field\",\"value\":\"hello-world\"}],\"id\":\"add-fields-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"add_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"8ff1a508-13ad-11f0-b934-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"agent with 
tls\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"source-with-tls\"],\"type\":\"datadog_logs\"}],\"processors\":[],\"sources\":[{\"id\":\"source-with-tls\",\"tls\":{\"crt_file\":\"/etc/certs/agent.crt\",\"key_file\":\"/etc/certs/agent.key\"},\"type\":\"datadog_agent\"}]}}},{\"id\":\"8ff38a08-13ad-11f0-8245-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"parse-json-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"parser-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"field\":\"message\",\"id\":\"parser-1\",\"include\":\"env:parse\",\"inputs\":[\"source-1\"],\"type\":\"parse_json\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"8ff54f0a-13ad-11f0-8247-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"kafka pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"kafka-source-1\"],\"type\":\"datadog_logs\"}],\"processors\":[],\"sources\":[{\"group_id\":\"consumer-group-1\",\"id\":\"kafka-source-1\",\"sasl\":{\"mechanism\":\"PLAIN\"},\"tls\":{\"ca_file\":\"\",\"crt_file\":\"/path/to/kafka.crt\"},\"topics\":[\"topic-a\",\"topic-b\"],\"type\":\"kafka\"}]}}},{\"id\":\"f814235e-13ad-11f0-b942-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"agent with 
tls\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"source-with-tls\"],\"type\":\"datadog_logs\"}],\"processors\":[],\"sources\":[{\"id\":\"source-with-tls\",\"tls\":{\"crt_file\":\"/etc/certs/agent.crt\",\"key_file\":\"/etc/certs/agent.key\"},\"type\":\"datadog_agent\"}]}}},{\"id\":\"5b1a9314-13ce-11f0-a9de-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"rename-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"rename-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[{\"destination\":\"new.field\",\"preserve_source\":true,\"source\":\"old.field\"}],\"id\":\"rename-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"rename_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"1e356802-1419-11f0-8834-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"14156a86-142b-11f0-96f3-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"a5280a06-143a-11f0-aac4-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"5005f6c2-1481-11f0-8faa-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"remove-fields-pipeline\",\"config\":{\"destinations\":[{\"id\":\"destination-1\",\"inputs\":[\"remove-1\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"fields\":[\"temp.debug\",\"internal.trace_id\"],\"id\":\"remove-1\",\"include\":\"*\",\"inputs\":[\"source-1\"],\"type\":\"remove_fields\"}],\"sources\":[{\"id\":\"source-1\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"9c1776c0-14d7-11f0-87c9-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"b1e4c58c-1501-11f0-b0bd-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"43a8a4cc-15c3-11f0-b111-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"45042a58-15c3-11f0-b113-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"f9de3956-15cc-11f0-ac43-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"b3efaa52-1693-11f0-89e0-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"24c83620-1696-11f0-89e6-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability 
Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}},{\"id\":\"5d2875d6-17a2-11f0-9bd1-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}],\"meta\":{\"totalCount\":128}}\n", + "headers": { + "Content-Type": [ + "application/vnd.api+json" + ] + }, + "statusCode": 200, + "reasonPhrase": "OK" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "2a1ecc5b-42fa-71d4-1e8a-9990a3446289" + }, + { + "httpRequest": { + "headers": {}, + "method": "DELETE", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/dd87c652-21fa-11f0-96e2-da7ad0900002", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 204, + "reasonPhrase": "No Content" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "365d8eb6-69ce-7c00-a088-8b8c6211d176" + } +] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.freeze b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.freeze index 01305de98f7..a8aa863f11e 100644 --- a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.freeze +++ 
b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.freeze @@ -1 +1 @@ -2025-04-04T10:37:04.190Z \ No newline at end of file +2025-04-25T17:29:34.672Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.json b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.json index e4f9a29e621..c20ac247e5f 100644 --- a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.json +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Bad_Request_response.json @@ -12,7 +12,7 @@ "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"c193d7da-1140-11f0-9a6d-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"deeea5f6-21fa-11f0-96e4-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/json" @@ -27,7 +27,7 @@ "timeToLive": { "unlimited": true }, - "id": "ccedcb24-9a42-7cb7-5505-c852f00f431d" + "id": "ccedcb24-9a42-7cb7-5505-c852f00f431e" }, { "httpRequest": { @@ -37,7 +37,7 @@ }, "headers": {}, "method": "PUT", - "path": 
"/api/v2/remote_config/products/obs_pipelines/pipelines/c193d7da-1140-11f0-9a6d-da7ad0900002", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/deeea5f6-21fa-11f0-96e4-da7ad0900002", "keepAlive": false, "secure": true }, @@ -57,13 +57,13 @@ "timeToLive": { "unlimited": true }, - "id": "6afff49a-081f-0b40-67f4-ae1d3de8cfe5" + "id": "0e70fa04-3ccb-96c7-2483-c1dd21d002d5" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c193d7da-1140-11f0-9a6d-da7ad0900002", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/deeea5f6-21fa-11f0-96e4-da7ad0900002", "keepAlive": false, "secure": true }, @@ -82,6 +82,6 @@ "timeToLive": { "unlimited": true }, - "id": "6d34d800-bf5f-73da-5d16-d90b90837ea3" + "id": "b56473f4-0b11-d086-9576-a13195a8ddb5" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.freeze b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.freeze index b453d330d37..81821e325e0 100644 --- a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.freeze +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_Not_Found_response.freeze @@ -1 +1 @@ -2025-04-04T10:37:06.031Z \ No newline at end of file +2025-04-25T17:29:36.564Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.freeze index 4fba4617c1e..8195f92c72e 100644 --- a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.freeze +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.freeze @@ -1 +1 @@ -2025-04-04T10:37:06.485Z \ No newline at end of file +2025-04-25T17:29:36.987Z \ No newline at end of file diff --git 
a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.json index b635dab320d..0a589d53a09 100644 --- a/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.json +++ b/src/test/resources/cassettes/features/v2/Update_a_pipeline_returns_OK_response.json @@ -12,7 +12,7 @@ "secure": true }, "httpResponse": { - "body": "{\"data\":{\"id\":\"c2ee25ae-1140-11f0-9fe7-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"e04d5230-21fa-11f0-96e6-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Main Observability Pipeline\",\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/json" @@ -27,7 +27,7 @@ "timeToLive": { "unlimited": true }, - "id": "ccedcb24-9a42-7cb7-5505-c852f00f431b" + "id": "ccedcb24-9a42-7cb7-5505-c852f00f431c" }, { "httpRequest": { @@ -37,12 +37,12 @@ }, "headers": {}, "method": "PUT", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c2ee25ae-1140-11f0-9fe7-da7ad0900002", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/e04d5230-21fa-11f0-96e6-da7ad0900002", "keepAlive": false, "secure": true }, "httpResponse": { - "body": 
"{\"data\":{\"id\":\"c2ee25ae-1140-11f0-9fe7-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Updated Pipeline Name\",\"config\":{\"destinations\":[{\"id\":\"updated-datadog-logs-destination-id\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", + "body": "{\"data\":{\"id\":\"e04d5230-21fa-11f0-96e6-da7ad0900002\",\"type\":\"pipelines\",\"attributes\":{\"name\":\"Updated Pipeline Name\",\"config\":{\"destinations\":[{\"id\":\"updated-datadog-logs-destination-id\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]}}}}\n", "headers": { "Content-Type": [ "application/json" @@ -57,13 +57,13 @@ "timeToLive": { "unlimited": true }, - "id": "050e84d7-7940-6295-f299-faa6f7c0e81d" + "id": "622ee817-795a-322e-6bd4-8da965872af3" }, { "httpRequest": { "headers": {}, "method": "DELETE", - "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/c2ee25ae-1140-11f0-9fe7-da7ad0900002", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/e04d5230-21fa-11f0-96e6-da7ad0900002", "keepAlive": false, "secure": true }, @@ -82,6 +82,6 @@ "timeToLive": { "unlimited": true }, - "id": "2d68f649-589e-1e0c-78cd-02a141246b4e" + "id": "147650d8-60f4-a341-4d1a-b17f29d4fcee" } ] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.freeze b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.freeze new file mode 100644 index 00000000000..f19e635b279 --- /dev/null +++ 
b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.freeze @@ -0,0 +1 @@ +2025-04-25T17:29:39.236Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.json b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.json new file mode 100644 index 00000000000..97a18d217ff --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_Bad_Request_response.json @@ -0,0 +1,32 @@ +[ + { + "httpRequest": { + "body": { + "type": "JSON", + "json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Main Observability Pipeline\"},\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "POST", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/validate", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"errors\":[{\"title\":\"Field 'include' is required\",\"meta\":{\"field\":\"include\",\"id\":\"filter-processor\",\"message\":\"Field 'include' is required\"}}]}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 400, + "reasonPhrase": "Bad Request" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "be868b77-7a39-d125-df95-70b7226efe40" + } +] \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.freeze b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.freeze new file mode 100644 index 
00000000000..c2e1e3c7367 --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.freeze @@ -0,0 +1 @@ +2025-04-25T17:29:39.613Z \ No newline at end of file diff --git a/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.json b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.json new file mode 100644 index 00000000000..6a30865ed2c --- /dev/null +++ b/src/test/resources/cassettes/features/v2/Validate_an_observability_pipeline_returns_OK_response.json @@ -0,0 +1,32 @@ +[ + { + "httpRequest": { + "body": { + "type": "JSON", + "json": "{\"data\":{\"attributes\":{\"config\":{\"destinations\":[{\"id\":\"datadog-logs-destination\",\"inputs\":[\"filter-processor\"],\"type\":\"datadog_logs\"}],\"processors\":[{\"id\":\"filter-processor\",\"include\":\"service:my-service\",\"inputs\":[\"datadog-agent-source\"],\"type\":\"filter\"}],\"sources\":[{\"id\":\"datadog-agent-source\",\"type\":\"datadog_agent\"}]},\"name\":\"Main Observability Pipeline\"},\"type\":\"pipelines\"}}" + }, + "headers": {}, + "method": "POST", + "path": "/api/v2/remote_config/products/obs_pipelines/pipelines/validate", + "keepAlive": false, + "secure": true + }, + "httpResponse": { + "body": "{\"errors\":[]}\n", + "headers": { + "Content-Type": [ + "application/json" + ] + }, + "statusCode": 200, + "reasonPhrase": "OK" + }, + "times": { + "remainingTimes": 1 + }, + "timeToLive": { + "unlimited": true + }, + "id": "78f615a7-dcd1-8ec7-7251-44f1fe9ba135" + } +] \ No newline at end of file diff --git a/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature b/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature index f66f9bc6185..c7149b1e4fa 100644 --- a/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature +++ 
b/src/test/resources/com/datadog/api/client/v2/api/observability_pipelines.feature @@ -78,6 +78,28 @@ Feature: Observability Pipelines And the response "data.attributes.config.processors" has length 1 And the response "data.attributes.config.destinations" has length 1 + @team:DataDog/observability-pipelines + Scenario: List pipelines returns "Bad Request" response + Given operation "ListPipelines" enabled + And new "ListPipelines" request + And request contains "page[size]" parameter with value 0 + When the request is sent + Then the response status is 400 Bad Request + + @team:DataDog/observability-pipelines + Scenario: List pipelines returns "OK" response + Given operation "ListPipelines" enabled + And there is a valid "pipeline" in the system + And new "ListPipelines" request + When the request is sent + Then the response status is 200 OK + And the response "data[0]" has field "id" + And the response "data[0].type" is equal to "pipelines" + And the response "data[0].attributes.name" is equal to "Main Observability Pipeline" + And the response "data[0].attributes.config.sources" has length 1 + And the response "data[0].attributes.config.processors" has length 1 + And the response "data[0].attributes.config.destinations" has length 1 + @team:DataDog/observability-pipelines Scenario: Update a pipeline returns "Bad Request" response Given operation "UpdatePipeline" enabled @@ -122,3 +144,24 @@ Feature: Observability Pipelines And the response "data.attributes.config.processors" has length 1 And the response "data.attributes.config.destinations" has length 1 And the response "data.attributes.config.destinations[0].id" is equal to "updated-datadog-logs-destination-id" + + @team:DataDog/observability-pipelines + Scenario: Validate an observability pipeline returns "Bad Request" response + Given operation "ValidatePipeline" enabled + And new "ValidatePipeline" request + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": 
"datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"id": "filter-processor", "inputs": ["datadog-agent-source"], "type": "filter"}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + When the request is sent + Then the response status is 400 Bad Request + And the response "errors[0].title" is equal to "Field 'include' is required" + And the response "errors[0].meta.field" is equal to "include" + And the response "errors[0].meta.id" is equal to "filter-processor" + And the response "errors[0].meta.message" is equal to "Field 'include' is required" + + @team:DataDog/observability-pipelines + Scenario: Validate an observability pipeline returns "OK" response + Given operation "ValidatePipeline" enabled + And new "ValidatePipeline" request + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"id": "filter-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "type": "filter"}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + When the request is sent + Then the response status is 200 OK + And the response "errors" has length 0 diff --git a/src/test/resources/com/datadog/api/client/v2/api/undo.json b/src/test/resources/com/datadog/api/client/v2/api/undo.json index 67dca81fb69..4aba315c34e 100644 --- a/src/test/resources/com/datadog/api/client/v2/api/undo.json +++ b/src/test/resources/com/datadog/api/client/v2/api/undo.json @@ -2098,6 +2098,12 @@ "type": "safe" } }, + "ListPipelines": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, "CreatePipeline": { "tag": "Observability Pipelines", "undo": { @@ -2111,6 +2117,12 @@ "type": "unsafe" } }, + "ValidatePipeline": { + "tag": 
"Observability Pipelines", + "undo": { + "type": "safe" + } + }, "DeletePipeline": { "tag": "Observability Pipelines", "undo": {