diff --git a/.apigentools-info b/.apigentools-info index 89fe27797f1..7eec375c25c 100644 --- a/.apigentools-info +++ b/.apigentools-info @@ -4,13 +4,13 @@ "spec_versions": { "v1": { "apigentools_version": "1.6.6", - "regenerated": "2025-04-17 13:26:18.289754", - "spec_repo_commit": "12ab5180" + "regenerated": "2025-04-21 09:00:05.176820", + "spec_repo_commit": "fcdf7cbc" }, "v2": { "apigentools_version": "1.6.6", - "regenerated": "2025-04-17 13:26:18.308287", - "spec_repo_commit": "12ab5180" + "regenerated": "2025-04-21 09:00:05.192305", + "spec_repo_commit": "fcdf7cbc" } } } \ No newline at end of file diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index a191ada020a..a29e5c79cc2 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -22541,6 +22541,139 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAmazonS3Destination: + description: The `amazon_s3` destination sends your logs in Datadog-rehydratable + format to an Amazon S3 bucket for archiving. + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' + bucket: + description: S3 bucket name. + example: error-logs + type: string + id: + description: Unique identifier for the destination component. + example: amazon-s3-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + key_prefix: + description: Optional prefix for object keys. + nullable: true + type: string + region: + description: AWS region of the S3 bucket. 
+ example: us-east-1 + type: string + storage_class: + $ref: '#/components/schemas/ObservabilityPipelineAmazonS3DestinationStorageClass' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineAmazonS3DestinationType' + required: + - id + - type + - inputs + - bucket + - region + - storage_class + type: object + ObservabilityPipelineAmazonS3DestinationStorageClass: + description: S3 storage class. + enum: + - STANDARD + - REDUCED_REDUNDANCY + - INTELLIGENT_TIERING + - STANDARD_IA + - EXPRESS_ONEZONE + - ONEZONE_IA + - GLACIER + - GLACIER_IR + - DEEP_ARCHIVE + example: STANDARD + type: string + x-enum-varnames: + - STANDARD + - REDUCED_REDUNDANCY + - INTELLIGENT_TIERING + - STANDARD_IA + - EXPRESS_ONEZONE + - ONEZONE_IA + - GLACIER + - GLACIER_IR + - DEEP_ARCHIVE + ObservabilityPipelineAmazonS3DestinationType: + default: amazon_s3 + description: The destination type. Always `amazon_s3`. + enum: + - amazon_s3 + example: amazon_s3 + type: string + x-enum-varnames: + - AMAZON_S3 + ObservabilityPipelineAmazonS3Source: + description: 'The `amazon_s3` source ingests logs from an Amazon S3 bucket. + + It supports AWS authentication and TLS encryption. + + ' + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: aws-s3-source + type: string + region: + description: AWS region where the S3 bucket resides. + example: us-east-1 + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineAmazonS3SourceType' + required: + - id + - type + - region + type: object + ObservabilityPipelineAmazonS3SourceType: + default: amazon_s3 + description: The source type. Always `amazon_s3`. 
+ enum: + - amazon_s3 + example: amazon_s3 + type: string + x-enum-varnames: + - AMAZON_S3 + ObservabilityPipelineAwsAuth: + description: "AWS authentication credentials used for accessing AWS services + such as S3.\nIf omitted, the system\u2019s default credentials are used (for + example, the IAM role and environment variables).\n" + properties: + assume_role: + description: The Amazon Resource Name (ARN) of the role to assume. + nullable: true + type: string + external_id: + description: A unique identifier for cross-account role assumption. + nullable: true + type: string + session_name: + description: A session identifier used for logging and tracing the assumed + role session. + nullable: true + type: string + type: object ObservabilityPipelineConfig: description: Specifies the pipeline's configuration, including its sources, processors, and destinations. @@ -22583,6 +22716,9 @@ components: description: A destination for the pipeline. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' + - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' ObservabilityPipelineConfigProcessorItem: description: A processor for the pipeline. oneOf: @@ -22592,11 +22728,15 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. 
oneOf: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Source' ObservabilityPipelineCreateRequest: description: Top-level schema representing a pipeline. properties: @@ -22768,6 +22908,246 @@ components: type: string x-enum-varnames: - FILTER + ObservabilityPipelineGcpAuth: + description: 'GCP credentials used to authenticate with Google Cloud Storage. + + ' + properties: + credentials_file: + description: Path to the GCP service account key file. + example: /var/secrets/gcp-credentials.json + type: string + required: + - credentials_file + type: object + ObservabilityPipelineGenerateMetricsProcessor: + description: 'The `generate_datadog_metrics` processor creates custom metrics + from logs and sends them to Datadog. + + Metrics can be counters, gauges, or distributions and optionally grouped by + log fields. + + ' + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline. + example: generate-metrics-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this processor. + example: + - source-id + items: + type: string + type: array + metrics: + description: Configuration for generating individual metrics. 
+ items: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetric' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessorType' + required: + - id + - type + - inputs + - include + - metrics + type: object + ObservabilityPipelineGenerateMetricsProcessorType: + default: generate_datadog_metrics + description: The processor type. Always `generate_datadog_metrics`. + enum: + - generate_datadog_metrics + example: generate_datadog_metrics + type: string + x-enum-varnames: + - GENERATE_DATADOG_METRICS + ObservabilityPipelineGeneratedMetric: + description: 'Defines a log-based custom metric, including its name, type, filter, + value computation strategy, + + and optional grouping fields. + + ' + properties: + group_by: + description: Optional fields used to group the metric series. + example: + - service + - env + items: + type: string + type: array + include: + description: Datadog filter query to match logs for metric generation. + example: service:billing + type: string + metric_type: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricMetricType' + name: + description: Name of the custom metric to be created. + example: logs.processed + type: string + value: + $ref: '#/components/schemas/ObservabilityPipelineMetricValue' + required: + - name + - include + - metric_type + - value + type: object + ObservabilityPipelineGeneratedMetricIncrementByField: + description: The definition of `ObservabilityPipelineGeneratedMetricIncrementByField` + object. + properties: + field: + description: Name of the log field containing the numeric value to increment + the metric by. + example: errors + type: string + strategy: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy' + required: + - strategy + - field + type: object + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy: + description: Uses a numeric field in the log event as the metric increment. 
+ enum: + - increment_by_field + example: increment_by_field + type: string + x-enum-varnames: + - INCREMENT_BY_FIELD + ObservabilityPipelineGeneratedMetricIncrementByOne: + description: The definition of `ObservabilityPipelineGeneratedMetricIncrementByOne` + object. + properties: + strategy: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOneStrategy' + required: + - strategy + type: object + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy: + description: Increments the metric by 1 for each matching event. + enum: + - increment_by_one + example: increment_by_one + type: string + x-enum-varnames: + - INCREMENT_BY_ONE + ObservabilityPipelineGeneratedMetricMetricType: + description: Type of metric to create. + enum: + - count + - gauge + - distribution + example: count + type: string + x-enum-varnames: + - COUNT + - GAUGE + - DISTRIBUTION + ObservabilityPipelineGoogleCloudStorageDestination: + description: 'The `google_cloud_storage` destination stores logs in a Google + Cloud Storage (GCS) bucket. + + It requires a bucket name, GCP authentication, and metadata fields. + + ' + properties: + acl: + $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationAcl' + auth: + $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + bucket: + description: Name of the GCS bucket. + example: error-logs + type: string + id: + description: Unique identifier for the destination component. + example: gcs-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + key_prefix: + description: Optional prefix for object keys within the GCS bucket. + nullable: true + type: string + metadata: + description: Custom metadata key-value pairs added to each object. 
+ items: + $ref: '#/components/schemas/ObservabilityPipelineMetadataEntry' + type: array + storage_class: + $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationStorageClass' + type: + $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationType' + required: + - id + - type + - inputs + - bucket + - auth + - storage_class + - acl + - metadata + type: object + ObservabilityPipelineGoogleCloudStorageDestinationAcl: + description: Access control list setting for objects written to the bucket. + enum: + - private + - project-private + - public-read + - authenticated-read + - bucket-owner-read + - bucket-owner-full-control + example: private + type: string + x-enum-varnames: + - PRIVATE + - PROJECTNOT_PRIVATE + - PUBLICNOT_READ + - AUTHENTICATEDNOT_READ + - BUCKETNOT_OWNERNOT_READ + - BUCKETNOT_OWNERNOT_FULLNOT_CONTROL + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass: + description: Storage class used for objects stored in GCS. + enum: + - STANDARD + - NEARLINE + - COLDLINE + - ARCHIVE + example: STANDARD + type: string + x-enum-varnames: + - STANDARD + - NEARLINE + - COLDLINE + - ARCHIVE + ObservabilityPipelineGoogleCloudStorageDestinationType: + default: google_cloud_storage + description: The destination type. Always `google_cloud_storage`. + enum: + - google_cloud_storage + example: google_cloud_storage + type: string + x-enum-varnames: + - GOOGLE_CLOUD_STORAGE ObservabilityPipelineKafkaSource: description: The `kafka` source ingests data from Apache Kafka topics. properties: @@ -22841,6 +23221,27 @@ components: type: string x-enum-varnames: - KAFKA + ObservabilityPipelineMetadataEntry: + description: A custom metadata entry to attach to each object uploaded to the + GCS bucket. + properties: + name: + description: The metadata key. + example: environment + type: string + value: + description: The metadata value. 
+ example: production + type: string + required: + - name + - value + type: object + ObservabilityPipelineMetricValue: + description: Specifies how the value of the generated metric is computed. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOne' + - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByField' ObservabilityPipelineParseJSONProcessor: description: The `parse_json` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded @@ -23130,6 +23531,131 @@ components: type: string x-enum-varnames: - RENAME_FIELDS + ObservabilityPipelineSplunkHecDestination: + description: 'The `splunk_hec` destination forwards logs to Splunk using the + HTTP Event Collector (HEC). + + ' + properties: + auto_extract_timestamp: + description: 'If `true`, Splunk tries to extract timestamps from incoming + log events. + + If `false`, Splunk assigns the time the event was received. + + ' + example: true + type: boolean + encoding: + $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding' + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: splunk-hec-destination + type: string + index: + description: Optional name of the Splunk index where logs are written. + example: main + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + sourcetype: + description: The Splunk sourcetype to assign to log events. 
+ example: custom_sourcetype + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineSplunkHecDestinationEncoding: + description: Encoding format for log events. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineSplunkHecDestinationType: + default: splunk_hec + description: The destination type. Always `splunk_hec`. + enum: + - splunk_hec + example: splunk_hec + type: string + x-enum-varnames: + - SPLUNK_HEC + ObservabilityPipelineSplunkHecSource: + description: 'The `splunk_hec` source implements the Splunk HTTP Event Collector + (HEC) API. + + ' + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: splunk-hec-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSourceType' + required: + - id + - type + type: object + ObservabilityPipelineSplunkHecSourceType: + default: splunk_hec + description: The source type. Always `splunk_hec`. + enum: + - splunk_hec + example: splunk_hec + type: string + x-enum-varnames: + - SPLUNK_HEC + ObservabilityPipelineSplunkTcpSource: + description: 'The `splunk_tcp` source receives logs from a Splunk Universal + Forwarder over TCP. + + TLS is supported for secure transmission. + + ' + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). 
+ example: splunk-tcp-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSourceType' + required: + - id + - type + type: object + ObservabilityPipelineSplunkTcpSourceType: + default: splunk_tcp + description: The source type. Always `splunk_tcp`. + enum: + - splunk_tcp + example: splunk_tcp + type: string + x-enum-varnames: + - SPLUNK_TCP ObservabilityPipelineTls: description: Configuration for enabling TLS encryption. properties: diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java new file mode 100644 index 00000000000..d2e1d6522dd --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Destination.java @@ -0,0 +1,409 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import org.openapitools.jackson.nullable.JsonNullable; + +/** + * The amazon_s3 destination sends your logs in Datadog-rehydratable format to an + * Amazon S3 bucket for archiving. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_AUTH, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_BUCKET, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_ID, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_KEY_PREFIX, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_REGION, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_STORAGE_CLASS, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_TLS, + ObservabilityPipelineAmazonS3Destination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAmazonS3Destination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_AUTH = "auth"; + private ObservabilityPipelineAwsAuth auth; + + public static final String JSON_PROPERTY_BUCKET = "bucket"; + private String bucket; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_KEY_PREFIX = "key_prefix"; + private JsonNullable keyPrefix = JsonNullable.undefined(); + + public static final String JSON_PROPERTY_REGION = "region"; + private String region; + + public static final String JSON_PROPERTY_STORAGE_CLASS = "storage_class"; + private ObservabilityPipelineAmazonS3DestinationStorageClass storageClass; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineAmazonS3DestinationType type = + ObservabilityPipelineAmazonS3DestinationType.AMAZON_S3; + + public ObservabilityPipelineAmazonS3Destination() {} + + @JsonCreator + public ObservabilityPipelineAmazonS3Destination( + 
@JsonProperty(required = true, value = JSON_PROPERTY_BUCKET) String bucket, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_REGION) String region, + @JsonProperty(required = true, value = JSON_PROPERTY_STORAGE_CLASS) + ObservabilityPipelineAmazonS3DestinationStorageClass storageClass, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineAmazonS3DestinationType type) { + this.bucket = bucket; + this.id = id; + this.inputs = inputs; + this.region = region; + this.storageClass = storageClass; + this.unparsed |= !storageClass.isValid(); + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineAmazonS3Destination auth(ObservabilityPipelineAwsAuth auth) { + this.auth = auth; + this.unparsed |= auth.unparsed; + return this; + } + + /** + * AWS authentication credentials used for accessing AWS services such as S3. If omitted, the + * system’s default credentials are used (for example, the IAM role and environment variables). + * + * @return auth + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_AUTH) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineAwsAuth getAuth() { + return auth; + } + + public void setAuth(ObservabilityPipelineAwsAuth auth) { + this.auth = auth; + } + + public ObservabilityPipelineAmazonS3Destination bucket(String bucket) { + this.bucket = bucket; + return this; + } + + /** + * S3 bucket name. 
+ * + * @return bucket + */ + @JsonProperty(JSON_PROPERTY_BUCKET) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getBucket() { + return bucket; + } + + public void setBucket(String bucket) { + this.bucket = bucket; + } + + public ObservabilityPipelineAmazonS3Destination id(String id) { + this.id = id; + return this; + } + + /** + * Unique identifier for the destination component. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineAmazonS3Destination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineAmazonS3Destination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineAmazonS3Destination keyPrefix(String keyPrefix) { + this.keyPrefix = JsonNullable.of(keyPrefix); + return this; + } + + /** + * Optional prefix for object keys. 
+ * + * @return keyPrefix + */ + @jakarta.annotation.Nullable + @JsonIgnore + public String getKeyPrefix() { + return keyPrefix.orElse(null); + } + + @JsonProperty(JSON_PROPERTY_KEY_PREFIX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public JsonNullable getKeyPrefix_JsonNullable() { + return keyPrefix; + } + + @JsonProperty(JSON_PROPERTY_KEY_PREFIX) + public void setKeyPrefix_JsonNullable(JsonNullable keyPrefix) { + this.keyPrefix = keyPrefix; + } + + public void setKeyPrefix(String keyPrefix) { + this.keyPrefix = JsonNullable.of(keyPrefix); + } + + public ObservabilityPipelineAmazonS3Destination region(String region) { + this.region = region; + return this; + } + + /** + * AWS region of the S3 bucket. + * + * @return region + */ + @JsonProperty(JSON_PROPERTY_REGION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getRegion() { + return region; + } + + public void setRegion(String region) { + this.region = region; + } + + public ObservabilityPipelineAmazonS3Destination storageClass( + ObservabilityPipelineAmazonS3DestinationStorageClass storageClass) { + this.storageClass = storageClass; + this.unparsed |= !storageClass.isValid(); + return this; + } + + /** + * S3 storage class. + * + * @return storageClass + */ + @JsonProperty(JSON_PROPERTY_STORAGE_CLASS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAmazonS3DestinationStorageClass getStorageClass() { + return storageClass; + } + + public void setStorageClass(ObservabilityPipelineAmazonS3DestinationStorageClass storageClass) { + if (!storageClass.isValid()) { + this.unparsed = true; + } + this.storageClass = storageClass; + } + + public ObservabilityPipelineAmazonS3Destination tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption. 
+ * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineAmazonS3Destination type( + ObservabilityPipelineAmazonS3DestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. Always amazon_s3. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAmazonS3DestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineAmazonS3DestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAmazonS3Destination + */ + @JsonAnySetter + public ObservabilityPipelineAmazonS3Destination putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineAmazonS3Destination object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAmazonS3Destination observabilityPipelineAmazonS3Destination = + (ObservabilityPipelineAmazonS3Destination) o; + return Objects.equals(this.auth, observabilityPipelineAmazonS3Destination.auth) + && Objects.equals(this.bucket, observabilityPipelineAmazonS3Destination.bucket) + && Objects.equals(this.id, observabilityPipelineAmazonS3Destination.id) + && Objects.equals(this.inputs, observabilityPipelineAmazonS3Destination.inputs) + && Objects.equals(this.keyPrefix, observabilityPipelineAmazonS3Destination.keyPrefix) + && Objects.equals(this.region, observabilityPipelineAmazonS3Destination.region) + && Objects.equals(this.storageClass, observabilityPipelineAmazonS3Destination.storageClass) + && Objects.equals(this.tls, observabilityPipelineAmazonS3Destination.tls) + && Objects.equals(this.type, observabilityPipelineAmazonS3Destination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineAmazonS3Destination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + auth, bucket, id, inputs, keyPrefix, region, storageClass, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAmazonS3Destination {\n"); + sb.append(" auth: ").append(toIndentedString(auth)).append("\n"); + sb.append(" bucket: ").append(toIndentedString(bucket)).append("\n"); + sb.append(" id: 
").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" keyPrefix: ").append(toIndentedString(keyPrefix)).append("\n"); + sb.append(" region: ").append(toIndentedString(region)).append("\n"); + sb.append(" storageClass: ").append(toIndentedString(storageClass)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationStorageClass.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationStorageClass.java new file mode 100644 index 00000000000..52c457c7840 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationStorageClass.java @@ -0,0 +1,89 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** S3 storage class. */ +@JsonSerialize( + using = + ObservabilityPipelineAmazonS3DestinationStorageClass + .ObservabilityPipelineAmazonS3DestinationStorageClassSerializer.class) +public class ObservabilityPipelineAmazonS3DestinationStorageClass extends ModelEnum { + + private static final Set allowedValues = + new HashSet( + Arrays.asList( + "STANDARD", + "REDUCED_REDUNDANCY", + "INTELLIGENT_TIERING", + "STANDARD_IA", + "EXPRESS_ONEZONE", + "ONEZONE_IA", + "GLACIER", + "GLACIER_IR", + "DEEP_ARCHIVE")); + + public static final ObservabilityPipelineAmazonS3DestinationStorageClass STANDARD = + new ObservabilityPipelineAmazonS3DestinationStorageClass("STANDARD"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass REDUCED_REDUNDANCY = + new ObservabilityPipelineAmazonS3DestinationStorageClass("REDUCED_REDUNDANCY"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass INTELLIGENT_TIERING = + new ObservabilityPipelineAmazonS3DestinationStorageClass("INTELLIGENT_TIERING"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass STANDARD_IA = + new ObservabilityPipelineAmazonS3DestinationStorageClass("STANDARD_IA"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass EXPRESS_ONEZONE = + new ObservabilityPipelineAmazonS3DestinationStorageClass("EXPRESS_ONEZONE"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass 
ONEZONE_IA = + new ObservabilityPipelineAmazonS3DestinationStorageClass("ONEZONE_IA"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass GLACIER = + new ObservabilityPipelineAmazonS3DestinationStorageClass("GLACIER"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass GLACIER_IR = + new ObservabilityPipelineAmazonS3DestinationStorageClass("GLACIER_IR"); + public static final ObservabilityPipelineAmazonS3DestinationStorageClass DEEP_ARCHIVE = + new ObservabilityPipelineAmazonS3DestinationStorageClass("DEEP_ARCHIVE"); + + ObservabilityPipelineAmazonS3DestinationStorageClass(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAmazonS3DestinationStorageClassSerializer + extends StdSerializer { + public ObservabilityPipelineAmazonS3DestinationStorageClassSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAmazonS3DestinationStorageClassSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineAmazonS3DestinationStorageClass value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAmazonS3DestinationStorageClass fromValue(String value) { + return new ObservabilityPipelineAmazonS3DestinationStorageClass(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationType.java new file mode 100644 index 00000000000..045b92c06b3 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3DestinationType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. Always amazon_s3. */ +@JsonSerialize( + using = + ObservabilityPipelineAmazonS3DestinationType + .ObservabilityPipelineAmazonS3DestinationTypeSerializer.class) +public class ObservabilityPipelineAmazonS3DestinationType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("amazon_s3")); + + public static final ObservabilityPipelineAmazonS3DestinationType AMAZON_S3 = + new ObservabilityPipelineAmazonS3DestinationType("amazon_s3"); + + ObservabilityPipelineAmazonS3DestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAmazonS3DestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineAmazonS3DestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAmazonS3DestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineAmazonS3DestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAmazonS3DestinationType fromValue(String value) { + return new ObservabilityPipelineAmazonS3DestinationType(value); + } +} diff --git 
a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java new file mode 100644 index 00000000000..a5c7842d06c --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3Source.java @@ -0,0 +1,271 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS + * authentication and TLS encryption. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_AUTH, + ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_ID, + ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_REGION, + ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_TLS, + ObservabilityPipelineAmazonS3Source.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAmazonS3Source { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_AUTH = "auth"; + private ObservabilityPipelineAwsAuth auth; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_REGION = "region"; + private String region; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineAmazonS3SourceType type = + ObservabilityPipelineAmazonS3SourceType.AMAZON_S3; + + public ObservabilityPipelineAmazonS3Source() {} + + @JsonCreator + public ObservabilityPipelineAmazonS3Source( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_REGION) String region, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineAmazonS3SourceType type) { + this.id = id; + this.region = region; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineAmazonS3Source auth(ObservabilityPipelineAwsAuth auth) { + this.auth = auth; + this.unparsed |= auth.unparsed; + return this; + } + + /** + * AWS authentication credentials used for accessing AWS services such as S3. If omitted, the + * system’s default credentials are used (for example, the IAM role and environment variables). 
+ * + * @return auth + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_AUTH) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineAwsAuth getAuth() { + return auth; + } + + public void setAuth(ObservabilityPipelineAwsAuth auth) { + this.auth = auth; + } + + public ObservabilityPipelineAmazonS3Source id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineAmazonS3Source region(String region) { + this.region = region; + return this; + } + + /** + * AWS region where the S3 bucket resides. + * + * @return region + */ + @JsonProperty(JSON_PROPERTY_REGION) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getRegion() { + return region; + } + + public void setRegion(String region) { + this.region = region; + } + + public ObservabilityPipelineAmazonS3Source tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineAmazonS3Source type(ObservabilityPipelineAmazonS3SourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. Always amazon_s3. 
+ * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineAmazonS3SourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineAmazonS3SourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAmazonS3Source + */ + @JsonAnySetter + public ObservabilityPipelineAmazonS3Source putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineAmazonS3Source object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAmazonS3Source observabilityPipelineAmazonS3Source = + (ObservabilityPipelineAmazonS3Source) o; + return Objects.equals(this.auth, observabilityPipelineAmazonS3Source.auth) + && Objects.equals(this.id, observabilityPipelineAmazonS3Source.id) + && Objects.equals(this.region, observabilityPipelineAmazonS3Source.region) + && Objects.equals(this.tls, observabilityPipelineAmazonS3Source.tls) + && Objects.equals(this.type, observabilityPipelineAmazonS3Source.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineAmazonS3Source.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(auth, id, region, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAmazonS3Source {\n"); + sb.append(" auth: ").append(toIndentedString(auth)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" region: ").append(toIndentedString(region)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3SourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3SourceType.java new file mode 100644 index 00000000000..8546c2103ba --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAmazonS3SourceType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. Always amazon_s3. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineAmazonS3SourceType.ObservabilityPipelineAmazonS3SourceTypeSerializer + .class) +public class ObservabilityPipelineAmazonS3SourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("amazon_s3")); + + public static final ObservabilityPipelineAmazonS3SourceType AMAZON_S3 = + new ObservabilityPipelineAmazonS3SourceType("amazon_s3"); + + ObservabilityPipelineAmazonS3SourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineAmazonS3SourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineAmazonS3SourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineAmazonS3SourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineAmazonS3SourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineAmazonS3SourceType fromValue(String value) { + return new ObservabilityPipelineAmazonS3SourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAwsAuth.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAwsAuth.java new file mode 100644 index 00000000000..ef25ed486c4 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineAwsAuth.java @@ -0,0 +1,226 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import org.openapitools.jackson.nullable.JsonNullable; + +/** + * AWS authentication credentials used for accessing AWS services such as S3. If omitted, the + * system’s default credentials are used (for example, the IAM role and environment variables). + */ +@JsonPropertyOrder({ + ObservabilityPipelineAwsAuth.JSON_PROPERTY_ASSUME_ROLE, + ObservabilityPipelineAwsAuth.JSON_PROPERTY_EXTERNAL_ID, + ObservabilityPipelineAwsAuth.JSON_PROPERTY_SESSION_NAME +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineAwsAuth { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ASSUME_ROLE = "assume_role"; + private JsonNullable assumeRole = JsonNullable.undefined(); + + public static final String JSON_PROPERTY_EXTERNAL_ID = "external_id"; + private JsonNullable externalId = JsonNullable.undefined(); + + public static final String JSON_PROPERTY_SESSION_NAME = "session_name"; + private JsonNullable sessionName = JsonNullable.undefined(); + + public ObservabilityPipelineAwsAuth assumeRole(String assumeRole) { + this.assumeRole = JsonNullable.of(assumeRole); + return this; + } + + /** + * The Amazon Resource Name (ARN) of the role to assume. 
+ * + * @return assumeRole + */ + @jakarta.annotation.Nullable + @JsonIgnore + public String getAssumeRole() { + return assumeRole.orElse(null); + } + + @JsonProperty(JSON_PROPERTY_ASSUME_ROLE) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public JsonNullable getAssumeRole_JsonNullable() { + return assumeRole; + } + + @JsonProperty(JSON_PROPERTY_ASSUME_ROLE) + public void setAssumeRole_JsonNullable(JsonNullable assumeRole) { + this.assumeRole = assumeRole; + } + + public void setAssumeRole(String assumeRole) { + this.assumeRole = JsonNullable.of(assumeRole); + } + + public ObservabilityPipelineAwsAuth externalId(String externalId) { + this.externalId = JsonNullable.of(externalId); + return this; + } + + /** + * A unique identifier for cross-account role assumption. + * + * @return externalId + */ + @jakarta.annotation.Nullable + @JsonIgnore + public String getExternalId() { + return externalId.orElse(null); + } + + @JsonProperty(JSON_PROPERTY_EXTERNAL_ID) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public JsonNullable getExternalId_JsonNullable() { + return externalId; + } + + @JsonProperty(JSON_PROPERTY_EXTERNAL_ID) + public void setExternalId_JsonNullable(JsonNullable externalId) { + this.externalId = externalId; + } + + public void setExternalId(String externalId) { + this.externalId = JsonNullable.of(externalId); + } + + public ObservabilityPipelineAwsAuth sessionName(String sessionName) { + this.sessionName = JsonNullable.of(sessionName); + return this; + } + + /** + * A session identifier used for logging and tracing the assumed role session. 
+ * + * @return sessionName + */ + @jakarta.annotation.Nullable + @JsonIgnore + public String getSessionName() { + return sessionName.orElse(null); + } + + @JsonProperty(JSON_PROPERTY_SESSION_NAME) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public JsonNullable getSessionName_JsonNullable() { + return sessionName; + } + + @JsonProperty(JSON_PROPERTY_SESSION_NAME) + public void setSessionName_JsonNullable(JsonNullable sessionName) { + this.sessionName = sessionName; + } + + public void setSessionName(String sessionName) { + this.sessionName = JsonNullable.of(sessionName); + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineAwsAuth + */ + @JsonAnySetter + public ObservabilityPipelineAwsAuth putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. 
+ * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineAwsAuth object is equal to o. */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineAwsAuth observabilityPipelineAwsAuth = (ObservabilityPipelineAwsAuth) o; + return Objects.equals(this.assumeRole, observabilityPipelineAwsAuth.assumeRole) + && Objects.equals(this.externalId, observabilityPipelineAwsAuth.externalId) + && Objects.equals(this.sessionName, observabilityPipelineAwsAuth.sessionName) + && Objects.equals( + this.additionalProperties, observabilityPipelineAwsAuth.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(assumeRole, externalId, sessionName, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineAwsAuth {\n"); + sb.append(" assumeRole: ").append(toIndentedString(assumeRole)).append("\n"); + sb.append(" externalId: ").append(toIndentedString(externalId)).append("\n"); + sb.append(" sessionName: ").append(toIndentedString(sessionName)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java index f6d74a9f38a..e20a6cac7d0 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigDestinationItem.java @@ -142,6 +142,162 @@ public ObservabilityPipelineConfigDestinationItem deserialize( e); } + // deserialize ObservabilityPipelineAmazonS3Destination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineAmazonS3Destination.class.equals(Integer.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Long.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Float.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Double.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Boolean.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineAmazonS3Destination.class.equals(Integer.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineAmazonS3Destination.class.equals(Float.class) + || ObservabilityPipelineAmazonS3Destination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineAmazonS3Destination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + 
(ObservabilityPipelineAmazonS3Destination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineAmazonS3Destination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineAmazonS3Destination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineAmazonS3Destination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineAmazonS3Destination'", + e); + } + + // deserialize ObservabilityPipelineGoogleCloudStorageDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Long.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Double.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Integer.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals( + Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Float.class) + || ObservabilityPipelineGoogleCloudStorageDestination.class.equals( + 
Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineGoogleCloudStorageDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineGoogleCloudStorageDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineGoogleCloudStorageDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineGoogleCloudStorageDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineGoogleCloudStorageDestination'", + e); + } + + // deserialize ObservabilityPipelineSplunkHecDestination + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(Long.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(Float.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSplunkHecDestination.class.equals(Integer.class) + || 
ObservabilityPipelineSplunkHecDestination.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSplunkHecDestination.class.equals(Float.class) + || ObservabilityPipelineSplunkHecDestination.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSplunkHecDestination.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSplunkHecDestination.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineSplunkHecDestination.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineSplunkHecDestination) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecDestination'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSplunkHecDestination'", + e); + } + ObservabilityPipelineConfigDestinationItem ret = new ObservabilityPipelineConfigDestinationItem(); if (match == 1) { @@ -178,10 +334,35 @@ public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineDatadogLo setActualInstance(o); } + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineAmazonS3Destination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem( + ObservabilityPipelineGoogleCloudStorageDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigDestinationItem(ObservabilityPipelineSplunkHecDestination o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + static { schemas.put( "ObservabilityPipelineDatadogLogsDestination", new GenericType() {}); + schemas.put( + "ObservabilityPipelineAmazonS3Destination", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineGoogleCloudStorageDestination", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSplunkHecDestination", + new GenericType() {}); JSON.registerDescendants( ObservabilityPipelineConfigDestinationItem.class, Collections.unmodifiableMap(schemas)); } @@ -193,7 +374,9 @@ public Map getSchemas() { /** * Set the instance that matches the oneOf child schema, check the instance parameter is valid - * against the oneOf child schemas: ObservabilityPipelineDatadogLogsDestination + * against the oneOf child schemas: ObservabilityPipelineDatadogLogsDestination, + * ObservabilityPipelineAmazonS3Destination, 
ObservabilityPipelineGoogleCloudStorageDestination, + * ObservabilityPipelineSplunkHecDestination * *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -205,20 +388,44 @@ public void setActualInstance(Object instance) { super.setActualInstance(instance); return; } + if (JSON.isInstanceOf( + ObservabilityPipelineAmazonS3Destination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineGoogleCloudStorageDestination.class, + instance, + new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSplunkHecDestination.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { super.setActualInstance(instance); return; } throw new RuntimeException( - "Invalid instance type. Must be ObservabilityPipelineDatadogLogsDestination"); + "Invalid instance type. Must be ObservabilityPipelineDatadogLogsDestination," + + " ObservabilityPipelineAmazonS3Destination," + + " ObservabilityPipelineGoogleCloudStorageDestination," + + " ObservabilityPipelineSplunkHecDestination"); } /** * Get the actual instance, which can be the following: - * ObservabilityPipelineDatadogLogsDestination + * ObservabilityPipelineDatadogLogsDestination, ObservabilityPipelineAmazonS3Destination, + * ObservabilityPipelineGoogleCloudStorageDestination, ObservabilityPipelineSplunkHecDestination * - * @return The actual instance (ObservabilityPipelineDatadogLogsDestination) + * @return The actual instance (ObservabilityPipelineDatadogLogsDestination, + * ObservabilityPipelineAmazonS3Destination, + * ObservabilityPipelineGoogleCloudStorageDestination, + * ObservabilityPipelineSplunkHecDestination) */ @Override public Object getActualInstance() { @@ -237,4 +444,42 @@ public Object getActualInstance() { getObservabilityPipelineDatadogLogsDestination() throws ClassCastException { return 
(ObservabilityPipelineDatadogLogsDestination) super.getActualInstance(); } + + /** + * Get the actual instance of `ObservabilityPipelineAmazonS3Destination`. If the actual instance + * is not `ObservabilityPipelineAmazonS3Destination`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineAmazonS3Destination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAmazonS3Destination` + */ + public ObservabilityPipelineAmazonS3Destination getObservabilityPipelineAmazonS3Destination() + throws ClassCastException { + return (ObservabilityPipelineAmazonS3Destination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineGoogleCloudStorageDestination`. If the actual + * instance is not `ObservabilityPipelineGoogleCloudStorageDestination`, the ClassCastException + * will be thrown. + * + * @return The actual instance of `ObservabilityPipelineGoogleCloudStorageDestination` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineGoogleCloudStorageDestination` + */ + public ObservabilityPipelineGoogleCloudStorageDestination + getObservabilityPipelineGoogleCloudStorageDestination() throws ClassCastException { + return (ObservabilityPipelineGoogleCloudStorageDestination) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSplunkHecDestination`. If the actual instance + * is not `ObservabilityPipelineSplunkHecDestination`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineSplunkHecDestination` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecDestination` + */ + public ObservabilityPipelineSplunkHecDestination getObservabilityPipelineSplunkHecDestination() + throws ClassCastException { + return (ObservabilityPipelineSplunkHecDestination) super.getActualInstance(); + } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java index aa99b3edb23..19ff3f3b473 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigProcessorItem.java @@ -393,6 +393,58 @@ public ObservabilityPipelineConfigProcessorItem deserialize( e); } + // deserialize ObservabilityPipelineGenerateMetricsProcessor + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Integer.class) + || ObservabilityPipelineGenerateMetricsProcessor.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineGenerateMetricsProcessor.class.equals(Float.class) + || 
ObservabilityPipelineGenerateMetricsProcessor.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineGenerateMetricsProcessor.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineGenerateMetricsProcessor.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()) + .readValueAs(ObservabilityPipelineGenerateMetricsProcessor.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineGenerateMetricsProcessor) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log( + Level.FINER, + "Input data matches schema 'ObservabilityPipelineGenerateMetricsProcessor'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineGenerateMetricsProcessor'", + e); + } + ObservabilityPipelineConfigProcessorItem ret = new ObservabilityPipelineConfigProcessorItem(); if (match == 1) { ret.setActualInstance(deserialized); @@ -453,6 +505,11 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameField setActualInstance(o); } + public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineGenerateMetricsProcessor o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + static { schemas.put( "ObservabilityPipelineFilterProcessor", @@ -472,6 +529,9 @@ public ObservabilityPipelineConfigProcessorItem(ObservabilityPipelineRenameField schemas.put( "ObservabilityPipelineRenameFieldsProcessor", new GenericType() {}); + schemas.put( + "ObservabilityPipelineGenerateMetricsProcessor", + new 
GenericType() {}); JSON.registerDescendants( ObservabilityPipelineConfigProcessorItem.class, Collections.unmodifiableMap(schemas)); } @@ -486,7 +546,7 @@ public Map getSchemas() { * against the oneOf child schemas: ObservabilityPipelineFilterProcessor, * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor + * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor * *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -523,6 +583,11 @@ public void setActualInstance(Object instance) { super.setActualInstance(instance); return; } + if (JSON.isInstanceOf( + ObservabilityPipelineGenerateMetricsProcessor.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { super.setActualInstance(instance); @@ -533,19 +598,20 @@ public void setActualInstance(Object instance) { + " ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor," + " ObservabilityPipelineAddFieldsProcessor," + " ObservabilityPipelineRemoveFieldsProcessor," - + " ObservabilityPipelineRenameFieldsProcessor"); + + " ObservabilityPipelineRenameFieldsProcessor," + + " ObservabilityPipelineGenerateMetricsProcessor"); } /** * Get the actual instance, which can be the following: ObservabilityPipelineFilterProcessor, * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor + * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor * * @return The actual instance (ObservabilityPipelineFilterProcessor, * ObservabilityPipelineParseJSONProcessor, ObservabilityPipelineQuotaProcessor, * ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, - * ObservabilityPipelineRenameFieldsProcessor) + * ObservabilityPipelineRenameFieldsProcessor, ObservabilityPipelineGenerateMetricsProcessor) */ @Override public Object getActualInstance() { @@ -623,4 +689,18 @@ public ObservabilityPipelineRenameFieldsProcessor getObservabilityPipelineRename throws ClassCastException { return (ObservabilityPipelineRenameFieldsProcessor) super.getActualInstance(); } + + /** + * Get the actual 
instance of `ObservabilityPipelineGenerateMetricsProcessor`. If the actual + * instance is not `ObservabilityPipelineGenerateMetricsProcessor`, the ClassCastException will be + * thrown. + * + * @return The actual instance of `ObservabilityPipelineGenerateMetricsProcessor` + * @throws ClassCastException if the instance is not + * `ObservabilityPipelineGenerateMetricsProcessor` + */ + public ObservabilityPipelineGenerateMetricsProcessor + getObservabilityPipelineGenerateMetricsProcessor() throws ClassCastException { + return (ObservabilityPipelineGenerateMetricsProcessor) super.getActualInstance(); + } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java index 68261e72ac1..8accb8006c9 100644 --- a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineConfigSourceItem.java @@ -186,6 +186,152 @@ public ObservabilityPipelineConfigSourceItem deserialize( e); } + // deserialize ObservabilityPipelineSplunkTcpSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Float.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSplunkTcpSource.class.equals(Integer.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + 
((ObservabilityPipelineSplunkTcpSource.class.equals(Float.class) + || ObservabilityPipelineSplunkTcpSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSplunkTcpSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSplunkTcpSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkTcpSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSplunkTcpSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkTcpSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSplunkTcpSource'", + e); + } + + // deserialize ObservabilityPipelineSplunkHecSource + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineSplunkHecSource.class.equals(Integer.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Long.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Float.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Double.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class) + || ObservabilityPipelineSplunkHecSource.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + attemptParsing |= + ((ObservabilityPipelineSplunkHecSource.class.equals(Integer.class) + || 
ObservabilityPipelineSplunkHecSource.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineSplunkHecSource.class.equals(Float.class) + || ObservabilityPipelineSplunkHecSource.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineSplunkHecSource.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineSplunkHecSource.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = + tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineSplunkHecSource.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. + if (!((ObservabilityPipelineSplunkHecSource) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineSplunkHecSource'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineSplunkHecSource'", + e); + } + + // deserialize ObservabilityPipelineAmazonS3Source + try { + boolean attemptParsing = true; + // ensure that we respect type coercion as set on the client ObjectMapper + if (ObservabilityPipelineAmazonS3Source.class.equals(Integer.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Long.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Float.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Double.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class) + || ObservabilityPipelineAmazonS3Source.class.equals(String.class)) { + attemptParsing = typeCoercion; + if (!attemptParsing) { + 
attemptParsing |= + ((ObservabilityPipelineAmazonS3Source.class.equals(Integer.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Long.class)) + && token == JsonToken.VALUE_NUMBER_INT); + attemptParsing |= + ((ObservabilityPipelineAmazonS3Source.class.equals(Float.class) + || ObservabilityPipelineAmazonS3Source.class.equals(Double.class)) + && (token == JsonToken.VALUE_NUMBER_FLOAT + || token == JsonToken.VALUE_NUMBER_INT)); + attemptParsing |= + (ObservabilityPipelineAmazonS3Source.class.equals(Boolean.class) + && (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE)); + attemptParsing |= + (ObservabilityPipelineAmazonS3Source.class.equals(String.class) + && token == JsonToken.VALUE_STRING); + } + } + if (attemptParsing) { + tmp = tree.traverse(jp.getCodec()).readValueAs(ObservabilityPipelineAmazonS3Source.class); + // TODO: there is no validation against JSON schema constraints + // (min, max, enum, pattern...), this does not perform a strict JSON + // validation, which means the 'match' count may be higher than it should be. 
+ if (!((ObservabilityPipelineAmazonS3Source) tmp).unparsed) { + deserialized = tmp; + match++; + } + log.log(Level.FINER, "Input data matches schema 'ObservabilityPipelineAmazonS3Source'"); + } + } catch (Exception e) { + // deserialization failed, continue + log.log( + Level.FINER, + "Input data does not match schema 'ObservabilityPipelineAmazonS3Source'", + e); + } + ObservabilityPipelineConfigSourceItem ret = new ObservabilityPipelineConfigSourceItem(); if (match == 1) { ret.setActualInstance(deserialized); @@ -226,12 +372,36 @@ public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineDatadogAgentSo setActualInstance(o); } + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkTcpSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineSplunkHecSource o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + + public ObservabilityPipelineConfigSourceItem(ObservabilityPipelineAmazonS3Source o) { + super("oneOf", Boolean.FALSE); + setActualInstance(o); + } + static { schemas.put( "ObservabilityPipelineKafkaSource", new GenericType() {}); schemas.put( "ObservabilityPipelineDatadogAgentSource", new GenericType() {}); + schemas.put( + "ObservabilityPipelineSplunkTcpSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineSplunkHecSource", + new GenericType() {}); + schemas.put( + "ObservabilityPipelineAmazonS3Source", + new GenericType() {}); JSON.registerDescendants( ObservabilityPipelineConfigSourceItem.class, Collections.unmodifiableMap(schemas)); } @@ -244,7 +414,8 @@ public Map getSchemas() { /** * Set the instance that matches the oneOf child schema, check the instance parameter is valid * against the oneOf child schemas: ObservabilityPipelineKafkaSource, - * ObservabilityPipelineDatadogAgentSource + * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource, + * ObservabilityPipelineSplunkHecSource, 
ObservabilityPipelineAmazonS3Source * *

It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a * composed schema (allOf, anyOf, oneOf). @@ -261,6 +432,21 @@ public void setActualInstance(Object instance) { super.setActualInstance(instance); return; } + if (JSON.isInstanceOf( + ObservabilityPipelineSplunkTcpSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineSplunkHecSource.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } + if (JSON.isInstanceOf( + ObservabilityPipelineAmazonS3Source.class, instance, new HashSet>())) { + super.setActualInstance(instance); + return; + } if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet>())) { super.setActualInstance(instance); @@ -268,15 +454,18 @@ public void setActualInstance(Object instance) { } throw new RuntimeException( "Invalid instance type. Must be ObservabilityPipelineKafkaSource," - + " ObservabilityPipelineDatadogAgentSource"); + + " ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource," + + " ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source"); } /** * Get the actual instance, which can be the following: ObservabilityPipelineKafkaSource, - * ObservabilityPipelineDatadogAgentSource + * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource, + * ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source * * @return The actual instance (ObservabilityPipelineKafkaSource, - * ObservabilityPipelineDatadogAgentSource) + * ObservabilityPipelineDatadogAgentSource, ObservabilityPipelineSplunkTcpSource, + * ObservabilityPipelineSplunkHecSource, ObservabilityPipelineAmazonS3Source) */ @Override public Object getActualInstance() { @@ -306,4 +495,40 @@ public ObservabilityPipelineDatadogAgentSource getObservabilityPipelineDatadogAg throws ClassCastException { return (ObservabilityPipelineDatadogAgentSource) 
super.getActualInstance(); } + + /** + * Get the actual instance of `ObservabilityPipelineSplunkTcpSource`. If the actual instance is + * not `ObservabilityPipelineSplunkTcpSource`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSplunkTcpSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkTcpSource` + */ + public ObservabilityPipelineSplunkTcpSource getObservabilityPipelineSplunkTcpSource() + throws ClassCastException { + return (ObservabilityPipelineSplunkTcpSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineSplunkHecSource`. If the actual instance is + * not `ObservabilityPipelineSplunkHecSource`, the ClassCastException will be thrown. + * + * @return The actual instance of `ObservabilityPipelineSplunkHecSource` + * @throws ClassCastException if the instance is not `ObservabilityPipelineSplunkHecSource` + */ + public ObservabilityPipelineSplunkHecSource getObservabilityPipelineSplunkHecSource() + throws ClassCastException { + return (ObservabilityPipelineSplunkHecSource) super.getActualInstance(); + } + + /** + * Get the actual instance of `ObservabilityPipelineAmazonS3Source`. If the actual instance is not + * `ObservabilityPipelineAmazonS3Source`, the ClassCastException will be thrown. 
+ * + * @return The actual instance of `ObservabilityPipelineAmazonS3Source` + * @throws ClassCastException if the instance is not `ObservabilityPipelineAmazonS3Source` + */ + public ObservabilityPipelineAmazonS3Source getObservabilityPipelineAmazonS3Source() + throws ClassCastException { + return (ObservabilityPipelineAmazonS3Source) super.getActualInstance(); + } } diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGcpAuth.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGcpAuth.java new file mode 100644 index 00000000000..6e65889c195 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGcpAuth.java @@ -0,0 +1,145 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** GCP credentials used to authenticate with Google Cloud Storage. 
*/ +@JsonPropertyOrder({ObservabilityPipelineGcpAuth.JSON_PROPERTY_CREDENTIALS_FILE}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineGcpAuth { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_CREDENTIALS_FILE = "credentials_file"; + private String credentialsFile; + + public ObservabilityPipelineGcpAuth() {} + + @JsonCreator + public ObservabilityPipelineGcpAuth( + @JsonProperty(required = true, value = JSON_PROPERTY_CREDENTIALS_FILE) + String credentialsFile) { + this.credentialsFile = credentialsFile; + } + + public ObservabilityPipelineGcpAuth credentialsFile(String credentialsFile) { + this.credentialsFile = credentialsFile; + return this; + } + + /** + * Path to the GCP service account key file. + * + * @return credentialsFile + */ + @JsonProperty(JSON_PROPERTY_CREDENTIALS_FILE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getCredentialsFile() { + return credentialsFile; + } + + public void setCredentialsFile(String credentialsFile) { + this.credentialsFile = credentialsFile; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineGcpAuth + */ + @JsonAnySetter + public ObservabilityPipelineGcpAuth putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineGcpAuth object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineGcpAuth observabilityPipelineGcpAuth = (ObservabilityPipelineGcpAuth) o; + return Objects.equals(this.credentialsFile, observabilityPipelineGcpAuth.credentialsFile) + && Objects.equals( + this.additionalProperties, observabilityPipelineGcpAuth.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(credentialsFile, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineGcpAuth {\n"); + sb.append(" credentialsFile: ").append(toIndentedString(credentialsFile)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java new file mode 100644 index 00000000000..9a17ba776eb --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessor.java @@ -0,0 +1,293 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * The generate_datadog_metrics processor creates custom metrics from logs and sends + * them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log + * fields. + */ +@JsonPropertyOrder({ + ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_ID, + ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_INCLUDE, + ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_INPUTS, + ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_METRICS, + ObservabilityPipelineGenerateMetricsProcessor.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineGenerateMetricsProcessor { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INCLUDE = "include"; + private String include; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_METRICS = "metrics"; + private List metrics = new ArrayList<>(); + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineGenerateMetricsProcessorType type = + ObservabilityPipelineGenerateMetricsProcessorType.GENERATE_DATADOG_METRICS; + + public 
ObservabilityPipelineGenerateMetricsProcessor() {} + + @JsonCreator + public ObservabilityPipelineGenerateMetricsProcessor( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_METRICS) + List metrics, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineGenerateMetricsProcessorType type) { + this.id = id; + this.include = include; + this.inputs = inputs; + this.metrics = metrics; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineGenerateMetricsProcessor id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. Used to reference this component in other parts of + * the pipeline. + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineGenerateMetricsProcessor include(String include) { + this.include = include; + return this; + } + + /** + * A Datadog search query used to determine which logs this processor targets. + * + * @return include + */ + @JsonProperty(JSON_PROPERTY_INCLUDE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getInclude() { + return include; + } + + public void setInclude(String include) { + this.include = include; + } + + public ObservabilityPipelineGenerateMetricsProcessor inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineGenerateMetricsProcessor addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this processor. 
+ * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineGenerateMetricsProcessor metrics( + List metrics) { + this.metrics = metrics; + for (ObservabilityPipelineGeneratedMetric item : metrics) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineGenerateMetricsProcessor addMetricsItem( + ObservabilityPipelineGeneratedMetric metricsItem) { + this.metrics.add(metricsItem); + this.unparsed |= metricsItem.unparsed; + return this; + } + + /** + * Configuration for generating individual metrics. + * + * @return metrics + */ + @JsonProperty(JSON_PROPERTY_METRICS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getMetrics() { + return metrics; + } + + public void setMetrics(List metrics) { + this.metrics = metrics; + } + + public ObservabilityPipelineGenerateMetricsProcessor type( + ObservabilityPipelineGenerateMetricsProcessorType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The processor type. Always generate_datadog_metrics. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGenerateMetricsProcessorType getType() { + return type; + } + + public void setType(ObservabilityPipelineGenerateMetricsProcessorType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. 
If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineGenerateMetricsProcessor + */ + @JsonAnySetter + public ObservabilityPipelineGenerateMetricsProcessor putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineGenerateMetricsProcessor object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineGenerateMetricsProcessor observabilityPipelineGenerateMetricsProcessor = + (ObservabilityPipelineGenerateMetricsProcessor) o; + return Objects.equals(this.id, observabilityPipelineGenerateMetricsProcessor.id) + && Objects.equals(this.include, observabilityPipelineGenerateMetricsProcessor.include) + && Objects.equals(this.inputs, observabilityPipelineGenerateMetricsProcessor.inputs) + && Objects.equals(this.metrics, observabilityPipelineGenerateMetricsProcessor.metrics) + && Objects.equals(this.type, observabilityPipelineGenerateMetricsProcessor.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineGenerateMetricsProcessor.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, include, inputs, metrics, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineGenerateMetricsProcessor {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" include: ").append(toIndentedString(include)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" metrics: ").append(toIndentedString(metrics)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
+ */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessorType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessorType.java new file mode 100644 index 00000000000..63639261867 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGenerateMetricsProcessorType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The processor type. Always generate_datadog_metrics. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineGenerateMetricsProcessorType + .ObservabilityPipelineGenerateMetricsProcessorTypeSerializer.class) +public class ObservabilityPipelineGenerateMetricsProcessorType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("generate_datadog_metrics")); + + public static final ObservabilityPipelineGenerateMetricsProcessorType GENERATE_DATADOG_METRICS = + new ObservabilityPipelineGenerateMetricsProcessorType("generate_datadog_metrics"); + + ObservabilityPipelineGenerateMetricsProcessorType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGenerateMetricsProcessorTypeSerializer + extends StdSerializer { + public ObservabilityPipelineGenerateMetricsProcessorTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGenerateMetricsProcessorTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGenerateMetricsProcessorType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGenerateMetricsProcessorType fromValue(String value) { + return new ObservabilityPipelineGenerateMetricsProcessorType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetric.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetric.java new file mode 100644 index 00000000000..ccd6b71a7b7 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGeneratedMetric.java @@ -0,0 +1,281 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
package com.datadog.api.client.v2.model;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * Defines a log-based custom metric, including its name, type, filter, value computation strategy,
 * and optional grouping fields.
 */
@JsonPropertyOrder({
  ObservabilityPipelineGeneratedMetric.JSON_PROPERTY_GROUP_BY,
  ObservabilityPipelineGeneratedMetric.JSON_PROPERTY_INCLUDE,
  ObservabilityPipelineGeneratedMetric.JSON_PROPERTY_METRIC_TYPE,
  ObservabilityPipelineGeneratedMetric.JSON_PROPERTY_NAME,
  ObservabilityPipelineGeneratedMetric.JSON_PROPERTY_VALUE
})
@jakarta.annotation.Generated(
    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class ObservabilityPipelineGeneratedMetric {
  // Set when the payload contained values this client version could not parse.
  @JsonIgnore public boolean unparsed = false;
  public static final String JSON_PROPERTY_GROUP_BY = "group_by";
  private List<String> groupBy = null;

  public static final String JSON_PROPERTY_INCLUDE = "include";
  private String include;

  public static final String JSON_PROPERTY_METRIC_TYPE = "metric_type";
  private ObservabilityPipelineGeneratedMetricMetricType metricType;

  public static final String JSON_PROPERTY_NAME = "name";
  private String name;

  public static final String JSON_PROPERTY_VALUE = "value";
  private ObservabilityPipelineMetricValue value;

  /** No-arg constructor used by Jackson during deserialization. */
  public ObservabilityPipelineGeneratedMetric() {}

  /**
   * Constructor covering all required properties.
   *
   * @param include Datadog filter query to match logs for metric generation
   * @param metricType type of metric to create
   * @param name name of the custom metric to be created
   * @param value how the value of the generated metric is computed
   */
  @JsonCreator
  public ObservabilityPipelineGeneratedMetric(
      @JsonProperty(required = true, value = JSON_PROPERTY_INCLUDE) String include,
      @JsonProperty(required = true, value = JSON_PROPERTY_METRIC_TYPE)
          ObservabilityPipelineGeneratedMetricMetricType metricType,
      @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name,
      @JsonProperty(required = true, value = JSON_PROPERTY_VALUE)
          ObservabilityPipelineMetricValue value) {
    this.include = include;
    this.metricType = metricType;
    // Unknown enum values and unparsed sub-models flag the whole model as unparsed.
    this.unparsed |= !metricType.isValid();
    this.name = name;
    this.value = value;
    this.unparsed |= value.unparsed;
  }

  /** Fluent setter for {@code groupBy}. */
  public ObservabilityPipelineGeneratedMetric groupBy(List<String> groupBy) {
    this.groupBy = groupBy;
    return this;
  }

  /** Appends a single group-by field, lazily creating the list. */
  public ObservabilityPipelineGeneratedMetric addGroupByItem(String groupByItem) {
    if (this.groupBy == null) {
      this.groupBy = new ArrayList<>();
    }
    this.groupBy.add(groupByItem);
    return this;
  }

  /**
   * Optional fields used to group the metric series.
   *
   * @return groupBy
   */
  @jakarta.annotation.Nullable
  @JsonProperty(JSON_PROPERTY_GROUP_BY)
  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
  public List<String> getGroupBy() {
    return groupBy;
  }

  public void setGroupBy(List<String> groupBy) {
    this.groupBy = groupBy;
  }

  /** Fluent setter for {@code include}. */
  public ObservabilityPipelineGeneratedMetric include(String include) {
    this.include = include;
    return this;
  }

  /**
   * Datadog filter query to match logs for metric generation.
   *
   * @return include
   */
  @JsonProperty(JSON_PROPERTY_INCLUDE)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getInclude() {
    return include;
  }

  public void setInclude(String include) {
    this.include = include;
  }

  /** Fluent setter for {@code metricType}. */
  public ObservabilityPipelineGeneratedMetric metricType(
      ObservabilityPipelineGeneratedMetricMetricType metricType) {
    this.metricType = metricType;
    this.unparsed |= !metricType.isValid();
    return this;
  }

  /**
   * Type of metric to create.
   *
   * @return metricType
   */
  @JsonProperty(JSON_PROPERTY_METRIC_TYPE)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public ObservabilityPipelineGeneratedMetricMetricType getMetricType() {
    return metricType;
  }

  public void setMetricType(ObservabilityPipelineGeneratedMetricMetricType metricType) {
    if (!metricType.isValid()) {
      this.unparsed = true;
    }
    this.metricType = metricType;
  }

  /** Fluent setter for {@code name}. */
  public ObservabilityPipelineGeneratedMetric name(String name) {
    this.name = name;
    return this;
  }

  /**
   * Name of the custom metric to be created.
   *
   * @return name
   */
  @JsonProperty(JSON_PROPERTY_NAME)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  /** Fluent setter for {@code value}; propagates the unparsed flag from the sub-model. */
  public ObservabilityPipelineGeneratedMetric value(ObservabilityPipelineMetricValue value) {
    this.value = value;
    this.unparsed |= value.unparsed;
    return this;
  }

  /**
   * Specifies how the value of the generated metric is computed.
   *
   * @return value
   */
  @JsonProperty(JSON_PROPERTY_VALUE)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public ObservabilityPipelineMetricValue getValue() {
    return value;
  }

  public void setValue(ObservabilityPipelineMetricValue value) {
    this.value = value;
  }

  /**
   * A container for additional, undeclared properties. This is a holder for any undeclared
   * properties as specified with the 'additionalProperties' keyword in the OAS document.
   */
  private Map<String, Object> additionalProperties;

  /**
   * Set the additional (undeclared) property with the specified name and value. If the property
   * does not already exist, create it otherwise replace it.
   *
   * @param key The arbitrary key to set
   * @param value The associated value
   * @return ObservabilityPipelineGeneratedMetric
   */
  @JsonAnySetter
  public ObservabilityPipelineGeneratedMetric putAdditionalProperty(String key, Object value) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new HashMap<String, Object>();
    }
    this.additionalProperties.put(key, value);
    return this;
  }

  /**
   * Return the additional (undeclared) properties.
   *
   * @return The additional properties
   */
  @JsonAnyGetter
  public Map<String, Object> getAdditionalProperties() {
    return additionalProperties;
  }

  /**
   * Return the additional (undeclared) property with the specified name.
   *
   * @param key The arbitrary key to get
   * @return The specific additional property for the given key
   */
  public Object getAdditionalProperty(String key) {
    if (this.additionalProperties == null) {
      return null;
    }
    return this.additionalProperties.get(key);
  }

  /** Return true if this ObservabilityPipelineGeneratedMetric object is equal to o. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ObservabilityPipelineGeneratedMetric observabilityPipelineGeneratedMetric =
        (ObservabilityPipelineGeneratedMetric) o;
    return Objects.equals(this.groupBy, observabilityPipelineGeneratedMetric.groupBy)
        && Objects.equals(this.include, observabilityPipelineGeneratedMetric.include)
        && Objects.equals(this.metricType, observabilityPipelineGeneratedMetric.metricType)
        && Objects.equals(this.name, observabilityPipelineGeneratedMetric.name)
        && Objects.equals(this.value, observabilityPipelineGeneratedMetric.value)
        && Objects.equals(
            this.additionalProperties, observabilityPipelineGeneratedMetric.additionalProperties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(groupBy, include, metricType, name, value, additionalProperties);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ObservabilityPipelineGeneratedMetric {\n");
    sb.append("    groupBy: ").append(toIndentedString(groupBy)).append("\n");
    sb.append("    include: ").append(toIndentedString(include)).append("\n");
    sb.append("    metricType: ").append(toIndentedString(metricType)).append("\n");
    sb.append("    name: ").append(toIndentedString(name)).append("\n");
    sb.append("    value: ").append(toIndentedString(value)).append("\n");
    sb.append("    additionalProperties: ")
        .append(toIndentedString(additionalProperties))
        .append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
package com.datadog.api.client.v2.model;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * Strategy that increments a generated metric by the numeric value found in a named log field.
 */
@JsonPropertyOrder({
  ObservabilityPipelineGeneratedMetricIncrementByField.JSON_PROPERTY_FIELD,
  ObservabilityPipelineGeneratedMetricIncrementByField.JSON_PROPERTY_STRATEGY
})
@jakarta.annotation.Generated(
    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class ObservabilityPipelineGeneratedMetricIncrementByField {
  // Set when the payload contained values this client version could not parse.
  @JsonIgnore public boolean unparsed = false;
  public static final String JSON_PROPERTY_FIELD = "field";
  private String field;

  public static final String JSON_PROPERTY_STRATEGY = "strategy";
  private ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy strategy;

  /** No-arg constructor used by Jackson during deserialization. */
  public ObservabilityPipelineGeneratedMetricIncrementByField() {}

  /**
   * Constructor covering all required properties.
   *
   * @param field name of the log field containing the numeric value to increment the metric by
   * @param strategy increment strategy; always {@code increment_by_field}
   */
  @JsonCreator
  public ObservabilityPipelineGeneratedMetricIncrementByField(
      @JsonProperty(required = true, value = JSON_PROPERTY_FIELD) String field,
      @JsonProperty(required = true, value = JSON_PROPERTY_STRATEGY)
          ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy strategy) {
    this.field = field;
    this.strategy = strategy;
    // An unrecognized enum value flags the whole model as unparsed.
    this.unparsed |= !strategy.isValid();
  }

  /** Fluent setter for {@code field}. */
  public ObservabilityPipelineGeneratedMetricIncrementByField field(String field) {
    this.field = field;
    return this;
  }

  /**
   * Name of the log field containing the numeric value to increment the metric by.
   *
   * @return field
   */
  @JsonProperty(JSON_PROPERTY_FIELD)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getField() {
    return field;
  }

  public void setField(String field) {
    this.field = field;
  }

  /** Fluent setter for {@code strategy}. */
  public ObservabilityPipelineGeneratedMetricIncrementByField strategy(
      ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy strategy) {
    this.strategy = strategy;
    this.unparsed |= !strategy.isValid();
    return this;
  }

  /**
   * Uses a numeric field in the log event as the metric increment.
   *
   * @return strategy
   */
  @JsonProperty(JSON_PROPERTY_STRATEGY)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy getStrategy() {
    return strategy;
  }

  public void setStrategy(ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy strategy) {
    if (!strategy.isValid()) {
      this.unparsed = true;
    }
    this.strategy = strategy;
  }

  /**
   * A container for additional, undeclared properties. This is a holder for any undeclared
   * properties as specified with the 'additionalProperties' keyword in the OAS document.
   */
  private Map<String, Object> additionalProperties;

  /**
   * Set the additional (undeclared) property with the specified name and value. If the property
   * does not already exist, create it otherwise replace it.
   *
   * @param key The arbitrary key to set
   * @param value The associated value
   * @return ObservabilityPipelineGeneratedMetricIncrementByField
   */
  @JsonAnySetter
  public ObservabilityPipelineGeneratedMetricIncrementByField putAdditionalProperty(
      String key, Object value) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new HashMap<String, Object>();
    }
    this.additionalProperties.put(key, value);
    return this;
  }

  /**
   * Return the additional (undeclared) properties.
   *
   * @return The additional properties
   */
  @JsonAnyGetter
  public Map<String, Object> getAdditionalProperties() {
    return additionalProperties;
  }

  /**
   * Return the additional (undeclared) property with the specified name.
   *
   * @param key The arbitrary key to get
   * @return The specific additional property for the given key
   */
  public Object getAdditionalProperty(String key) {
    if (this.additionalProperties == null) {
      return null;
    }
    return this.additionalProperties.get(key);
  }

  /**
   * Return true if this ObservabilityPipelineGeneratedMetricIncrementByField object is equal to o.
   */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ObservabilityPipelineGeneratedMetricIncrementByField
        observabilityPipelineGeneratedMetricIncrementByField =
            (ObservabilityPipelineGeneratedMetricIncrementByField) o;
    return Objects.equals(this.field, observabilityPipelineGeneratedMetricIncrementByField.field)
        && Objects.equals(
            this.strategy, observabilityPipelineGeneratedMetricIncrementByField.strategy)
        && Objects.equals(
            this.additionalProperties,
            observabilityPipelineGeneratedMetricIncrementByField.additionalProperties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(field, strategy, additionalProperties);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ObservabilityPipelineGeneratedMetricIncrementByField {\n");
    sb.append("    field: ").append(toIndentedString(field)).append("\n");
    sb.append("    strategy: ").append(toIndentedString(strategy)).append("\n");
    sb.append("    additionalProperties: ")
        .append(toIndentedString(additionalProperties))
        .append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
package com.datadog.api.client.v2.model;

import com.datadog.api.client.ModelEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/** Uses a numeric field in the log event as the metric increment. */
@JsonSerialize(
    using =
        ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy
            .ObservabilityPipelineGeneratedMetricIncrementByFieldStrategySerializer.class)
public class ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy
    extends ModelEnum<String> {

  private static final Set<String> allowedValues =
      new HashSet<String>(Arrays.asList("increment_by_field"));

  public static final ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy
      INCREMENT_BY_FIELD =
          new ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy("increment_by_field");

  ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy(String value) {
    super(value, allowedValues);
  }

  /** Serializes the enum wrapper as its raw string value. */
  public static class ObservabilityPipelineGeneratedMetricIncrementByFieldStrategySerializer
      extends StdSerializer<ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy> {
    public ObservabilityPipelineGeneratedMetricIncrementByFieldStrategySerializer(
        Class<ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy> t) {
      super(t);
    }

    public ObservabilityPipelineGeneratedMetricIncrementByFieldStrategySerializer() {
      this(null);
    }

    @Override
    public void serialize(
        ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy value,
        JsonGenerator jgen,
        SerializerProvider provider)
        throws IOException, JsonProcessingException {
      jgen.writeObject(value.value);
    }
  }

  /**
   * Creates an instance from a raw string. Unknown values are retained so round-tripping works,
   * but {@code isValid()} reports them as invalid.
   */
  @JsonCreator
  public static ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy fromValue(
      String value) {
    return new ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy(value);
  }
}
package com.datadog.api.client.v2.model;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/** Strategy that increments a generated metric by 1 for each matching log event. */
@JsonPropertyOrder({ObservabilityPipelineGeneratedMetricIncrementByOne.JSON_PROPERTY_STRATEGY})
@jakarta.annotation.Generated(
    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class ObservabilityPipelineGeneratedMetricIncrementByOne {
  // Set when the payload contained values this client version could not parse.
  @JsonIgnore public boolean unparsed = false;
  public static final String JSON_PROPERTY_STRATEGY = "strategy";
  private ObservabilityPipelineGeneratedMetricIncrementByOneStrategy strategy;

  /** No-arg constructor used by Jackson during deserialization. */
  public ObservabilityPipelineGeneratedMetricIncrementByOne() {}

  /**
   * Constructor covering the required property.
   *
   * @param strategy increment strategy; always {@code increment_by_one}
   */
  @JsonCreator
  public ObservabilityPipelineGeneratedMetricIncrementByOne(
      @JsonProperty(required = true, value = JSON_PROPERTY_STRATEGY)
          ObservabilityPipelineGeneratedMetricIncrementByOneStrategy strategy) {
    this.strategy = strategy;
    // An unrecognized enum value flags the whole model as unparsed.
    this.unparsed |= !strategy.isValid();
  }

  /** Fluent setter for {@code strategy}. */
  public ObservabilityPipelineGeneratedMetricIncrementByOne strategy(
      ObservabilityPipelineGeneratedMetricIncrementByOneStrategy strategy) {
    this.strategy = strategy;
    this.unparsed |= !strategy.isValid();
    return this;
  }

  /**
   * Increments the metric by 1 for each matching event.
   *
   * @return strategy
   */
  @JsonProperty(JSON_PROPERTY_STRATEGY)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public ObservabilityPipelineGeneratedMetricIncrementByOneStrategy getStrategy() {
    return strategy;
  }

  public void setStrategy(ObservabilityPipelineGeneratedMetricIncrementByOneStrategy strategy) {
    if (!strategy.isValid()) {
      this.unparsed = true;
    }
    this.strategy = strategy;
  }

  /**
   * A container for additional, undeclared properties. This is a holder for any undeclared
   * properties as specified with the 'additionalProperties' keyword in the OAS document.
   */
  private Map<String, Object> additionalProperties;

  /**
   * Set the additional (undeclared) property with the specified name and value. If the property
   * does not already exist, create it otherwise replace it.
   *
   * @param key The arbitrary key to set
   * @param value The associated value
   * @return ObservabilityPipelineGeneratedMetricIncrementByOne
   */
  @JsonAnySetter
  public ObservabilityPipelineGeneratedMetricIncrementByOne putAdditionalProperty(
      String key, Object value) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new HashMap<String, Object>();
    }
    this.additionalProperties.put(key, value);
    return this;
  }

  /**
   * Return the additional (undeclared) properties.
   *
   * @return The additional properties
   */
  @JsonAnyGetter
  public Map<String, Object> getAdditionalProperties() {
    return additionalProperties;
  }

  /**
   * Return the additional (undeclared) property with the specified name.
   *
   * @param key The arbitrary key to get
   * @return The specific additional property for the given key
   */
  public Object getAdditionalProperty(String key) {
    if (this.additionalProperties == null) {
      return null;
    }
    return this.additionalProperties.get(key);
  }

  /**
   * Return true if this ObservabilityPipelineGeneratedMetricIncrementByOne object is equal to o.
   */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ObservabilityPipelineGeneratedMetricIncrementByOne
        observabilityPipelineGeneratedMetricIncrementByOne =
            (ObservabilityPipelineGeneratedMetricIncrementByOne) o;
    return Objects.equals(
            this.strategy, observabilityPipelineGeneratedMetricIncrementByOne.strategy)
        && Objects.equals(
            this.additionalProperties,
            observabilityPipelineGeneratedMetricIncrementByOne.additionalProperties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(strategy, additionalProperties);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ObservabilityPipelineGeneratedMetricIncrementByOne {\n");
    sb.append("    strategy: ").append(toIndentedString(strategy)).append("\n");
    sb.append("    additionalProperties: ")
        .append(toIndentedString(additionalProperties))
        .append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
package com.datadog.api.client.v2.model;

import com.datadog.api.client.ModelEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/** Increments the metric by 1 for each matching event. */
@JsonSerialize(
    using =
        ObservabilityPipelineGeneratedMetricIncrementByOneStrategy
            .ObservabilityPipelineGeneratedMetricIncrementByOneStrategySerializer.class)
public class ObservabilityPipelineGeneratedMetricIncrementByOneStrategy extends ModelEnum<String> {

  private static final Set<String> allowedValues =
      new HashSet<String>(Arrays.asList("increment_by_one"));

  public static final ObservabilityPipelineGeneratedMetricIncrementByOneStrategy INCREMENT_BY_ONE =
      new ObservabilityPipelineGeneratedMetricIncrementByOneStrategy("increment_by_one");

  ObservabilityPipelineGeneratedMetricIncrementByOneStrategy(String value) {
    super(value, allowedValues);
  }

  /** Serializes the enum wrapper as its raw string value. */
  public static class ObservabilityPipelineGeneratedMetricIncrementByOneStrategySerializer
      extends StdSerializer<ObservabilityPipelineGeneratedMetricIncrementByOneStrategy> {
    public ObservabilityPipelineGeneratedMetricIncrementByOneStrategySerializer(
        Class<ObservabilityPipelineGeneratedMetricIncrementByOneStrategy> t) {
      super(t);
    }

    public ObservabilityPipelineGeneratedMetricIncrementByOneStrategySerializer() {
      this(null);
    }

    @Override
    public void serialize(
        ObservabilityPipelineGeneratedMetricIncrementByOneStrategy value,
        JsonGenerator jgen,
        SerializerProvider provider)
        throws IOException, JsonProcessingException {
      jgen.writeObject(value.value);
    }
  }

  /**
   * Creates an instance from a raw string. Unknown values are retained so round-tripping works,
   * but {@code isValid()} reports them as invalid.
   */
  @JsonCreator
  public static ObservabilityPipelineGeneratedMetricIncrementByOneStrategy fromValue(String value) {
    return new ObservabilityPipelineGeneratedMetricIncrementByOneStrategy(value);
  }
}
*/ +@JsonSerialize( + using = + ObservabilityPipelineGeneratedMetricMetricType + .ObservabilityPipelineGeneratedMetricMetricTypeSerializer.class) +public class ObservabilityPipelineGeneratedMetricMetricType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("count", "gauge", "distribution")); + + public static final ObservabilityPipelineGeneratedMetricMetricType COUNT = + new ObservabilityPipelineGeneratedMetricMetricType("count"); + public static final ObservabilityPipelineGeneratedMetricMetricType GAUGE = + new ObservabilityPipelineGeneratedMetricMetricType("gauge"); + public static final ObservabilityPipelineGeneratedMetricMetricType DISTRIBUTION = + new ObservabilityPipelineGeneratedMetricMetricType("distribution"); + + ObservabilityPipelineGeneratedMetricMetricType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGeneratedMetricMetricTypeSerializer + extends StdSerializer { + public ObservabilityPipelineGeneratedMetricMetricTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGeneratedMetricMetricTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGeneratedMetricMetricType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGeneratedMetricMetricType fromValue(String value) { + return new ObservabilityPipelineGeneratedMetricMetricType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java new file mode 100644 index 00000000000..9e3e233f2ce --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestination.java @@ -0,0 +1,447 @@ +/* 
+ * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import org.openapitools.jackson.nullable.JsonNullable; + +/** + * The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) + * bucket. It requires a bucket name, GCP authentication, and metadata fields. 
+ */ +@JsonPropertyOrder({ + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_ACL, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_AUTH, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_BUCKET, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_ID, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_INPUTS, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_KEY_PREFIX, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_METADATA, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_STORAGE_CLASS, + ObservabilityPipelineGoogleCloudStorageDestination.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineGoogleCloudStorageDestination { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ACL = "acl"; + private ObservabilityPipelineGoogleCloudStorageDestinationAcl acl; + + public static final String JSON_PROPERTY_AUTH = "auth"; + private ObservabilityPipelineGcpAuth auth; + + public static final String JSON_PROPERTY_BUCKET = "bucket"; + private String bucket; + + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_INPUTS = "inputs"; + private List inputs = new ArrayList<>(); + + public static final String JSON_PROPERTY_KEY_PREFIX = "key_prefix"; + private JsonNullable keyPrefix = JsonNullable.undefined(); + + public static final String JSON_PROPERTY_METADATA = "metadata"; + private List metadata = new ArrayList<>(); + + public static final String JSON_PROPERTY_STORAGE_CLASS = "storage_class"; + private ObservabilityPipelineGoogleCloudStorageDestinationStorageClass storageClass; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineGoogleCloudStorageDestinationType type = + 
ObservabilityPipelineGoogleCloudStorageDestinationType.GOOGLE_CLOUD_STORAGE; + + public ObservabilityPipelineGoogleCloudStorageDestination() {} + + @JsonCreator + public ObservabilityPipelineGoogleCloudStorageDestination( + @JsonProperty(required = true, value = JSON_PROPERTY_ACL) + ObservabilityPipelineGoogleCloudStorageDestinationAcl acl, + @JsonProperty(required = true, value = JSON_PROPERTY_AUTH) ObservabilityPipelineGcpAuth auth, + @JsonProperty(required = true, value = JSON_PROPERTY_BUCKET) String bucket, + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List inputs, + @JsonProperty(required = true, value = JSON_PROPERTY_METADATA) + List metadata, + @JsonProperty(required = true, value = JSON_PROPERTY_STORAGE_CLASS) + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass storageClass, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineGoogleCloudStorageDestinationType type) { + this.acl = acl; + this.unparsed |= !acl.isValid(); + this.auth = auth; + this.unparsed |= auth.unparsed; + this.bucket = bucket; + this.id = id; + this.inputs = inputs; + this.metadata = metadata; + this.storageClass = storageClass; + this.unparsed |= !storageClass.isValid(); + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineGoogleCloudStorageDestination acl( + ObservabilityPipelineGoogleCloudStorageDestinationAcl acl) { + this.acl = acl; + this.unparsed |= !acl.isValid(); + return this; + } + + /** + * Access control list setting for objects written to the bucket. 
+ * + * @return acl + */ + @JsonProperty(JSON_PROPERTY_ACL) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGoogleCloudStorageDestinationAcl getAcl() { + return acl; + } + + public void setAcl(ObservabilityPipelineGoogleCloudStorageDestinationAcl acl) { + if (!acl.isValid()) { + this.unparsed = true; + } + this.acl = acl; + } + + public ObservabilityPipelineGoogleCloudStorageDestination auth( + ObservabilityPipelineGcpAuth auth) { + this.auth = auth; + this.unparsed |= auth.unparsed; + return this; + } + + /** + * GCP credentials used to authenticate with Google Cloud Storage. + * + * @return auth + */ + @JsonProperty(JSON_PROPERTY_AUTH) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGcpAuth getAuth() { + return auth; + } + + public void setAuth(ObservabilityPipelineGcpAuth auth) { + this.auth = auth; + } + + public ObservabilityPipelineGoogleCloudStorageDestination bucket(String bucket) { + this.bucket = bucket; + return this; + } + + /** + * Name of the GCS bucket. + * + * @return bucket + */ + @JsonProperty(JSON_PROPERTY_BUCKET) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getBucket() { + return bucket; + } + + public void setBucket(String bucket) { + this.bucket = bucket; + } + + public ObservabilityPipelineGoogleCloudStorageDestination id(String id) { + this.id = id; + return this; + } + + /** + * Unique identifier for the destination component. 
+ * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineGoogleCloudStorageDestination inputs(List inputs) { + this.inputs = inputs; + return this; + } + + public ObservabilityPipelineGoogleCloudStorageDestination addInputsItem(String inputsItem) { + this.inputs.add(inputsItem); + return this; + } + + /** + * A list of component IDs whose output is used as the input for this component. + * + * @return inputs + */ + @JsonProperty(JSON_PROPERTY_INPUTS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getInputs() { + return inputs; + } + + public void setInputs(List inputs) { + this.inputs = inputs; + } + + public ObservabilityPipelineGoogleCloudStorageDestination keyPrefix(String keyPrefix) { + this.keyPrefix = JsonNullable.of(keyPrefix); + return this; + } + + /** + * Optional prefix for object keys within the GCS bucket. 
+ * + * @return keyPrefix + */ + @jakarta.annotation.Nullable + @JsonIgnore + public String getKeyPrefix() { + return keyPrefix.orElse(null); + } + + @JsonProperty(JSON_PROPERTY_KEY_PREFIX) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public JsonNullable getKeyPrefix_JsonNullable() { + return keyPrefix; + } + + @JsonProperty(JSON_PROPERTY_KEY_PREFIX) + public void setKeyPrefix_JsonNullable(JsonNullable keyPrefix) { + this.keyPrefix = keyPrefix; + } + + public void setKeyPrefix(String keyPrefix) { + this.keyPrefix = JsonNullable.of(keyPrefix); + } + + public ObservabilityPipelineGoogleCloudStorageDestination metadata( + List metadata) { + this.metadata = metadata; + for (ObservabilityPipelineMetadataEntry item : metadata) { + this.unparsed |= item.unparsed; + } + return this; + } + + public ObservabilityPipelineGoogleCloudStorageDestination addMetadataItem( + ObservabilityPipelineMetadataEntry metadataItem) { + this.metadata.add(metadataItem); + this.unparsed |= metadataItem.unparsed; + return this; + } + + /** + * Custom metadata key-value pairs added to each object. + * + * @return metadata + */ + @JsonProperty(JSON_PROPERTY_METADATA) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public List getMetadata() { + return metadata; + } + + public void setMetadata(List metadata) { + this.metadata = metadata; + } + + public ObservabilityPipelineGoogleCloudStorageDestination storageClass( + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass storageClass) { + this.storageClass = storageClass; + this.unparsed |= !storageClass.isValid(); + return this; + } + + /** + * Storage class used for objects stored in GCS. 
+ * + * @return storageClass + */ + @JsonProperty(JSON_PROPERTY_STORAGE_CLASS) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGoogleCloudStorageDestinationStorageClass getStorageClass() { + return storageClass; + } + + public void setStorageClass( + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass storageClass) { + if (!storageClass.isValid()) { + this.unparsed = true; + } + this.storageClass = storageClass; + } + + public ObservabilityPipelineGoogleCloudStorageDestination type( + ObservabilityPipelineGoogleCloudStorageDestinationType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The destination type. Always google_cloud_storage. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineGoogleCloudStorageDestinationType getType() { + return type; + } + + public void setType(ObservabilityPipelineGoogleCloudStorageDestinationType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineGoogleCloudStorageDestination + */ + @JsonAnySetter + public ObservabilityPipelineGoogleCloudStorageDestination putAdditionalProperty( + String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. 
+ * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** + * Return true if this ObservabilityPipelineGoogleCloudStorageDestination object is equal to o. + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineGoogleCloudStorageDestination + observabilityPipelineGoogleCloudStorageDestination = + (ObservabilityPipelineGoogleCloudStorageDestination) o; + return Objects.equals(this.acl, observabilityPipelineGoogleCloudStorageDestination.acl) + && Objects.equals(this.auth, observabilityPipelineGoogleCloudStorageDestination.auth) + && Objects.equals(this.bucket, observabilityPipelineGoogleCloudStorageDestination.bucket) + && Objects.equals(this.id, observabilityPipelineGoogleCloudStorageDestination.id) + && Objects.equals(this.inputs, observabilityPipelineGoogleCloudStorageDestination.inputs) + && Objects.equals( + this.keyPrefix, observabilityPipelineGoogleCloudStorageDestination.keyPrefix) + && Objects.equals( + this.metadata, observabilityPipelineGoogleCloudStorageDestination.metadata) + && Objects.equals( + this.storageClass, observabilityPipelineGoogleCloudStorageDestination.storageClass) + && Objects.equals(this.type, observabilityPipelineGoogleCloudStorageDestination.type) + && Objects.equals( + this.additionalProperties, + observabilityPipelineGoogleCloudStorageDestination.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash( + acl, + auth, + bucket, + id, + 
inputs, + keyPrefix, + metadata, + storageClass, + type, + additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineGoogleCloudStorageDestination {\n"); + sb.append(" acl: ").append(toIndentedString(acl)).append("\n"); + sb.append(" auth: ").append(toIndentedString(auth)).append("\n"); + sb.append(" bucket: ").append(toIndentedString(bucket)).append("\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" inputs: ").append(toIndentedString(inputs)).append("\n"); + sb.append(" keyPrefix: ").append(toIndentedString(keyPrefix)).append("\n"); + sb.append(" metadata: ").append(toIndentedString(metadata)).append("\n"); + sb.append(" storageClass: ").append(toIndentedString(storageClass)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationAcl.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationAcl.java new file mode 100644 index 00000000000..21213127a0a --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationAcl.java @@ -0,0 +1,82 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Access control list setting for objects written to the bucket. */ +@JsonSerialize( + using = + ObservabilityPipelineGoogleCloudStorageDestinationAcl + .ObservabilityPipelineGoogleCloudStorageDestinationAclSerializer.class) +public class ObservabilityPipelineGoogleCloudStorageDestinationAcl extends ModelEnum { + + private static final Set allowedValues = + new HashSet( + Arrays.asList( + "private", + "project-private", + "public-read", + "authenticated-read", + "bucket-owner-read", + "bucket-owner-full-control")); + + public static final ObservabilityPipelineGoogleCloudStorageDestinationAcl PRIVATE = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("private"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationAcl PROJECTNOT_PRIVATE = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("project-private"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationAcl PUBLICNOT_READ = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("public-read"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationAcl AUTHENTICATEDNOT_READ = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("authenticated-read"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationAcl + BUCKETNOT_OWNERNOT_READ = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("bucket-owner-read"); + public static final 
ObservabilityPipelineGoogleCloudStorageDestinationAcl + BUCKETNOT_OWNERNOT_FULLNOT_CONTROL = + new ObservabilityPipelineGoogleCloudStorageDestinationAcl("bucket-owner-full-control"); + + ObservabilityPipelineGoogleCloudStorageDestinationAcl(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGoogleCloudStorageDestinationAclSerializer + extends StdSerializer { + public ObservabilityPipelineGoogleCloudStorageDestinationAclSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGoogleCloudStorageDestinationAclSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGoogleCloudStorageDestinationAcl value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGoogleCloudStorageDestinationAcl fromValue(String value) { + return new ObservabilityPipelineGoogleCloudStorageDestinationAcl(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationStorageClass.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationStorageClass.java new file mode 100644 index 00000000000..eac7fa16f47 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationStorageClass.java @@ -0,0 +1,71 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** Storage class used for objects stored in GCS. */ +@JsonSerialize( + using = + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass + .ObservabilityPipelineGoogleCloudStorageDestinationStorageClassSerializer.class) +public class ObservabilityPipelineGoogleCloudStorageDestinationStorageClass + extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("STANDARD", "NEARLINE", "COLDLINE", "ARCHIVE")); + + public static final ObservabilityPipelineGoogleCloudStorageDestinationStorageClass STANDARD = + new ObservabilityPipelineGoogleCloudStorageDestinationStorageClass("STANDARD"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationStorageClass NEARLINE = + new ObservabilityPipelineGoogleCloudStorageDestinationStorageClass("NEARLINE"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationStorageClass COLDLINE = + new ObservabilityPipelineGoogleCloudStorageDestinationStorageClass("COLDLINE"); + public static final ObservabilityPipelineGoogleCloudStorageDestinationStorageClass ARCHIVE = + new ObservabilityPipelineGoogleCloudStorageDestinationStorageClass("ARCHIVE"); + + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGoogleCloudStorageDestinationStorageClassSerializer + extends StdSerializer { + public 
ObservabilityPipelineGoogleCloudStorageDestinationStorageClassSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGoogleCloudStorageDestinationStorageClassSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGoogleCloudStorageDestinationStorageClass fromValue( + String value) { + return new ObservabilityPipelineGoogleCloudStorageDestinationStorageClass(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationType.java new file mode 100644 index 00000000000..85342e037af --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineGoogleCloudStorageDestinationType.java @@ -0,0 +1,63 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The destination type. Always google_cloud_storage. 
*/ +@JsonSerialize( + using = + ObservabilityPipelineGoogleCloudStorageDestinationType + .ObservabilityPipelineGoogleCloudStorageDestinationTypeSerializer.class) +public class ObservabilityPipelineGoogleCloudStorageDestinationType extends ModelEnum { + + private static final Set allowedValues = + new HashSet(Arrays.asList("google_cloud_storage")); + + public static final ObservabilityPipelineGoogleCloudStorageDestinationType GOOGLE_CLOUD_STORAGE = + new ObservabilityPipelineGoogleCloudStorageDestinationType("google_cloud_storage"); + + ObservabilityPipelineGoogleCloudStorageDestinationType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineGoogleCloudStorageDestinationTypeSerializer + extends StdSerializer { + public ObservabilityPipelineGoogleCloudStorageDestinationTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineGoogleCloudStorageDestinationTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineGoogleCloudStorageDestinationType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineGoogleCloudStorageDestinationType fromValue(String value) { + return new ObservabilityPipelineGoogleCloudStorageDestinationType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetadataEntry.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetadataEntry.java new file mode 100644 index 00000000000..9b7d800b53c --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetadataEntry.java @@ -0,0 +1,175 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). 
+ * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** A custom metadata entry to attach to each object uploaded to the GCS bucket. */ +@JsonPropertyOrder({ + ObservabilityPipelineMetadataEntry.JSON_PROPERTY_NAME, + ObservabilityPipelineMetadataEntry.JSON_PROPERTY_VALUE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineMetadataEntry { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_NAME = "name"; + private String name; + + public static final String JSON_PROPERTY_VALUE = "value"; + private String value; + + public ObservabilityPipelineMetadataEntry() {} + + @JsonCreator + public ObservabilityPipelineMetadataEntry( + @JsonProperty(required = true, value = JSON_PROPERTY_NAME) String name, + @JsonProperty(required = true, value = JSON_PROPERTY_VALUE) String value) { + this.name = name; + this.value = value; + } + + public ObservabilityPipelineMetadataEntry name(String name) { + this.name = name; + return this; + } + + /** + * The metadata key. + * + * @return name + */ + @JsonProperty(JSON_PROPERTY_NAME) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public ObservabilityPipelineMetadataEntry value(String value) { + this.value = value; + return this; + } + + /** + * The metadata value. 
+ * + * @return value + */ + @JsonProperty(JSON_PROPERTY_VALUE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. + * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineMetadataEntry + */ + @JsonAnySetter + public ObservabilityPipelineMetadataEntry putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineMetadataEntry object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineMetadataEntry observabilityPipelineMetadataEntry = + (ObservabilityPipelineMetadataEntry) o; + return Objects.equals(this.name, observabilityPipelineMetadataEntry.name) + && Objects.equals(this.value, observabilityPipelineMetadataEntry.value) + && Objects.equals( + this.additionalProperties, observabilityPipelineMetadataEntry.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(name, value, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineMetadataEntry {\n"); + sb.append(" name: ").append(toIndentedString(name)).append("\n"); + sb.append(" value: ").append(toIndentedString(value)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricValue.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricValue.java new file mode 100644 index 00000000000..fea24e8cb05 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineMetricValue.java @@ -0,0 +1,325 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.datadog.api.client.AbstractOpenApiSchema;
import com.datadog.api.client.JSON;
import com.datadog.api.client.UnparsedObject;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import jakarta.ws.rs.core.GenericType;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * oneOf wrapper over the two generated-metric value strategies:
 * {@link ObservabilityPipelineGeneratedMetricIncrementByOne} and {@link
 * ObservabilityPipelineGeneratedMetricIncrementByField}. Holds exactly one actual instance; a
 * payload matching zero or multiple child schemas is preserved as an {@link UnparsedObject}.
 */
@jakarta.annotation.Generated(
    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
@JsonDeserialize(
    using = ObservabilityPipelineMetricValue.ObservabilityPipelineMetricValueDeserializer.class)
@JsonSerialize(
    using = ObservabilityPipelineMetricValue.ObservabilityPipelineMetricValueSerializer.class)
public class ObservabilityPipelineMetricValue extends AbstractOpenApiSchema {
  private static final Logger log =
      Logger.getLogger(ObservabilityPipelineMetricValue.class.getName());

  @JsonIgnore public boolean unparsed = false;

  /** Serializes the wrapper by emitting whichever oneOf instance is currently held. */
  public static class ObservabilityPipelineMetricValueSerializer
      extends StdSerializer<ObservabilityPipelineMetricValue> {
    public ObservabilityPipelineMetricValueSerializer(Class<ObservabilityPipelineMetricValue> t) {
      super(t);
    }

    public ObservabilityPipelineMetricValueSerializer() {
      this(null);
    }

    @Override
    public void serialize(
        ObservabilityPipelineMetricValue value, JsonGenerator jgen, SerializerProvider provider)
        throws IOException, JsonProcessingException {
      jgen.writeObject(value.getActualInstance());
    }
  }

  /** Attempts each oneOf child schema and keeps the result only when exactly one matches. */
  public static class ObservabilityPipelineMetricValueDeserializer
      extends StdDeserializer<ObservabilityPipelineMetricValue> {
    public ObservabilityPipelineMetricValueDeserializer() {
      this(ObservabilityPipelineMetricValue.class);
    }

    public ObservabilityPipelineMetricValueDeserializer(Class<?> vc) {
      super(vc);
    }

    @Override
    public ObservabilityPipelineMetricValue deserialize(JsonParser jp, DeserializationContext ctxt)
        throws IOException, JsonProcessingException {
      JsonNode tree = jp.readValueAsTree();
      Object deserialized = null;
      int match = 0;
      // Both oneOf children are object schemas, so the generator's scalar type-coercion checks
      // (comparisons of the model class against Integer.class, String.class, ...) were always
      // false and have been removed; parsing is attempted unconditionally for each child.

      // deserialize ObservabilityPipelineGeneratedMetricIncrementByOne
      try {
        Object tmp =
            tree.traverse(jp.getCodec())
                .readValueAs(ObservabilityPipelineGeneratedMetricIncrementByOne.class);
        // Note: there is no validation against JSON schema constraints (min, max, enum,
        // pattern...), so the 'match' count may be higher than it should be.
        if (!((ObservabilityPipelineGeneratedMetricIncrementByOne) tmp).unparsed) {
          deserialized = tmp;
          match++;
        }
        log.log(
            Level.FINER,
            "Input data matches schema 'ObservabilityPipelineGeneratedMetricIncrementByOne'");
      } catch (Exception e) {
        // deserialization failed, continue with the next candidate schema
        log.log(
            Level.FINER,
            "Input data does not match schema 'ObservabilityPipelineGeneratedMetricIncrementByOne'",
            e);
      }

      // deserialize ObservabilityPipelineGeneratedMetricIncrementByField
      try {
        Object tmp =
            tree.traverse(jp.getCodec())
                .readValueAs(ObservabilityPipelineGeneratedMetricIncrementByField.class);
        if (!((ObservabilityPipelineGeneratedMetricIncrementByField) tmp).unparsed) {
          deserialized = tmp;
          match++;
        }
        log.log(
            Level.FINER,
            "Input data matches schema 'ObservabilityPipelineGeneratedMetricIncrementByField'");
      } catch (Exception e) {
        // deserialization failed, continue
        log.log(
            Level.FINER,
            "Input data does not match schema"
                + " 'ObservabilityPipelineGeneratedMetricIncrementByField'",
            e);
      }

      ObservabilityPipelineMetricValue ret = new ObservabilityPipelineMetricValue();
      if (match == 1) {
        ret.setActualInstance(deserialized);
      } else {
        // Zero or multiple matches: preserve the raw payload so callers can inspect it.
        Map<String, Object> res =
            new ObjectMapper()
                .readValue(
                    tree.traverse(jp.getCodec()).readValueAsTree().toString(),
                    new TypeReference<Map<String, Object>>() {});
        ret.setActualInstance(new UnparsedObject(res));
      }
      return ret;
    }

    /** Handle deserialization of the 'null' value: this oneOf is never nullable. */
    @Override
    public ObservabilityPipelineMetricValue getNullValue(DeserializationContext ctxt)
        throws JsonMappingException {
      throw new JsonMappingException(
          ctxt.getParser(), "ObservabilityPipelineMetricValue cannot be null");
    }
  }

  // store a list of schema names defined in oneOf
  public static final Map<String, GenericType> schemas = new HashMap<String, GenericType>();

  public ObservabilityPipelineMetricValue() {
    super("oneOf", Boolean.FALSE);
  }

  public ObservabilityPipelineMetricValue(ObservabilityPipelineGeneratedMetricIncrementByOne o) {
    super("oneOf", Boolean.FALSE);
    setActualInstance(o);
  }

  public ObservabilityPipelineMetricValue(ObservabilityPipelineGeneratedMetricIncrementByField o) {
    super("oneOf", Boolean.FALSE);
    setActualInstance(o);
  }

  static {
    schemas.put(
        "ObservabilityPipelineGeneratedMetricIncrementByOne",
        new GenericType<ObservabilityPipelineGeneratedMetricIncrementByOne>() {});
    schemas.put(
        "ObservabilityPipelineGeneratedMetricIncrementByField",
        new GenericType<ObservabilityPipelineGeneratedMetricIncrementByField>() {});
    JSON.registerDescendants(
        ObservabilityPipelineMetricValue.class, Collections.unmodifiableMap(schemas));
  }

  @Override
  public Map<String, GenericType> getSchemas() {
    return ObservabilityPipelineMetricValue.schemas;
  }

  /**
   * Set the instance that matches the oneOf child schema, check the instance parameter is valid
   * against the oneOf child schemas: ObservabilityPipelineGeneratedMetricIncrementByOne,
   * ObservabilityPipelineGeneratedMetricIncrementByField
   *
   * <p>It could be an instance of the 'oneOf' schemas. The oneOf child schemas may themselves be a
   * composed schema (allOf, anyOf, oneOf).
   */
  @Override
  public void setActualInstance(Object instance) {
    if (JSON.isInstanceOf(
        ObservabilityPipelineGeneratedMetricIncrementByOne.class,
        instance,
        new HashSet<Class<?>>())) {
      super.setActualInstance(instance);
      return;
    }
    if (JSON.isInstanceOf(
        ObservabilityPipelineGeneratedMetricIncrementByField.class,
        instance,
        new HashSet<Class<?>>())) {
      super.setActualInstance(instance);
      return;
    }

    if (JSON.isInstanceOf(UnparsedObject.class, instance, new HashSet<Class<?>>())) {
      super.setActualInstance(instance);
      return;
    }
    throw new RuntimeException(
        "Invalid instance type. Must be ObservabilityPipelineGeneratedMetricIncrementByOne,"
            + " ObservabilityPipelineGeneratedMetricIncrementByField");
  }

  /**
   * Get the actual instance, which can be the following:
   * ObservabilityPipelineGeneratedMetricIncrementByOne,
   * ObservabilityPipelineGeneratedMetricIncrementByField
   *
   * @return The actual instance (ObservabilityPipelineGeneratedMetricIncrementByOne,
   *     ObservabilityPipelineGeneratedMetricIncrementByField)
   */
  @Override
  public Object getActualInstance() {
    return super.getActualInstance();
  }

  /**
   * Get the actual instance of `ObservabilityPipelineGeneratedMetricIncrementByOne`. If the actual
   * instance is not `ObservabilityPipelineGeneratedMetricIncrementByOne`, the ClassCastException
   * will be thrown.
   *
   * @return The actual instance of `ObservabilityPipelineGeneratedMetricIncrementByOne`
   * @throws ClassCastException if the instance is not
   *     `ObservabilityPipelineGeneratedMetricIncrementByOne`
   */
  public ObservabilityPipelineGeneratedMetricIncrementByOne
      getObservabilityPipelineGeneratedMetricIncrementByOne() throws ClassCastException {
    return (ObservabilityPipelineGeneratedMetricIncrementByOne) super.getActualInstance();
  }

  /**
   * Get the actual instance of `ObservabilityPipelineGeneratedMetricIncrementByField`. If the
   * actual instance is not `ObservabilityPipelineGeneratedMetricIncrementByField`, the
   * ClassCastException will be thrown.
   *
   * @return The actual instance of `ObservabilityPipelineGeneratedMetricIncrementByField`
   * @throws ClassCastException if the instance is not
   *     `ObservabilityPipelineGeneratedMetricIncrementByField`
   */
  public ObservabilityPipelineGeneratedMetricIncrementByField
      getObservabilityPipelineGeneratedMetricIncrementByField() throws ClassCastException {
    return (ObservabilityPipelineGeneratedMetricIncrementByField) super.getActualInstance();
  }
}
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * The <code>splunk_hec</code> destination forwards logs to Splunk using the HTTP Event Collector
 * (HEC).
 */
@JsonPropertyOrder({
  ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_AUTO_EXTRACT_TIMESTAMP,
  ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_ENCODING,
  ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_ID,
  ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_INDEX,
  ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_INPUTS,
  ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_SOURCETYPE,
  ObservabilityPipelineSplunkHecDestination.JSON_PROPERTY_TYPE
})
@jakarta.annotation.Generated(
    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class ObservabilityPipelineSplunkHecDestination {
  // Set when any enum-typed member carried a value outside its allowed set.
  @JsonIgnore public boolean unparsed = false;

  public static final String JSON_PROPERTY_AUTO_EXTRACT_TIMESTAMP = "auto_extract_timestamp";
  private Boolean autoExtractTimestamp;

  public static final String JSON_PROPERTY_ENCODING = "encoding";
  private ObservabilityPipelineSplunkHecDestinationEncoding encoding;

  public static final String JSON_PROPERTY_ID = "id";
  private String id;

  public static final String JSON_PROPERTY_INDEX = "index";
  private String index;

  public static final String JSON_PROPERTY_INPUTS = "inputs";
  private List<String> inputs = new ArrayList<>();

  public static final String JSON_PROPERTY_SOURCETYPE = "sourcetype";
  private String sourcetype;

  public static final String JSON_PROPERTY_TYPE = "type";
  private ObservabilityPipelineSplunkHecDestinationType type =
      ObservabilityPipelineSplunkHecDestinationType.SPLUNK_HEC;

  public ObservabilityPipelineSplunkHecDestination() {}

  @JsonCreator
  public ObservabilityPipelineSplunkHecDestination(
      @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id,
      @JsonProperty(required = true, value = JSON_PROPERTY_INPUTS) List<String> inputs,
      @JsonProperty(required = true, value = JSON_PROPERTY_TYPE)
          ObservabilityPipelineSplunkHecDestinationType type) {
    this.id = id;
    this.inputs = inputs;
    this.type = type;
    this.unparsed |= !type.isValid();
  }

  public ObservabilityPipelineSplunkHecDestination autoExtractTimestamp(
      Boolean autoExtractTimestamp) {
    this.autoExtractTimestamp = autoExtractTimestamp;
    return this;
  }

  /**
   * If <code>true</code>, Splunk tries to extract timestamps from incoming log events. If <code>
   * false</code>, Splunk assigns the time the event was received.
   *
   * @return autoExtractTimestamp
   */
  @jakarta.annotation.Nullable
  @JsonProperty(JSON_PROPERTY_AUTO_EXTRACT_TIMESTAMP)
  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
  public Boolean getAutoExtractTimestamp() {
    return autoExtractTimestamp;
  }

  public void setAutoExtractTimestamp(Boolean autoExtractTimestamp) {
    this.autoExtractTimestamp = autoExtractTimestamp;
  }

  public ObservabilityPipelineSplunkHecDestination encoding(
      ObservabilityPipelineSplunkHecDestinationEncoding encoding) {
    this.encoding = encoding;
    // Fix: `encoding` is an optional (nullable) field; guard before dereferencing to avoid NPE.
    if (encoding != null) {
      this.unparsed |= !encoding.isValid();
    }
    return this;
  }

  /**
   * Encoding format for log events.
   *
   * @return encoding
   */
  @jakarta.annotation.Nullable
  @JsonProperty(JSON_PROPERTY_ENCODING)
  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
  public ObservabilityPipelineSplunkHecDestinationEncoding getEncoding() {
    return encoding;
  }

  public void setEncoding(ObservabilityPipelineSplunkHecDestinationEncoding encoding) {
    // Fix: tolerate null for this optional field (previously threw NullPointerException).
    if (encoding != null && !encoding.isValid()) {
      this.unparsed = true;
    }
    this.encoding = encoding;
  }

  public ObservabilityPipelineSplunkHecDestination id(String id) {
    this.id = id;
    return this;
  }

  /**
   * The unique identifier for this component. Used to reference this component in other parts of
   * the pipeline (e.g., as input to downstream components).
   *
   * @return id
   */
  @JsonProperty(JSON_PROPERTY_ID)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getId() {
    return id;
  }

  public void setId(String id) {
    this.id = id;
  }

  public ObservabilityPipelineSplunkHecDestination index(String index) {
    this.index = index;
    return this;
  }

  /**
   * Optional name of the Splunk index where logs are written.
   *
   * @return index
   */
  @jakarta.annotation.Nullable
  @JsonProperty(JSON_PROPERTY_INDEX)
  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
  public String getIndex() {
    return index;
  }

  public void setIndex(String index) {
    this.index = index;
  }

  public ObservabilityPipelineSplunkHecDestination inputs(List<String> inputs) {
    this.inputs = inputs;
    return this;
  }

  public ObservabilityPipelineSplunkHecDestination addInputsItem(String inputsItem) {
    this.inputs.add(inputsItem);
    return this;
  }

  /**
   * A list of component IDs whose output is used as the <code>input</code> for this component.
   *
   * @return inputs
   */
  @JsonProperty(JSON_PROPERTY_INPUTS)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public List<String> getInputs() {
    return inputs;
  }

  public void setInputs(List<String> inputs) {
    this.inputs = inputs;
  }

  public ObservabilityPipelineSplunkHecDestination sourcetype(String sourcetype) {
    this.sourcetype = sourcetype;
    return this;
  }

  /**
   * The Splunk sourcetype to assign to log events.
   *
   * @return sourcetype
   */
  @jakarta.annotation.Nullable
  @JsonProperty(JSON_PROPERTY_SOURCETYPE)
  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
  public String getSourcetype() {
    return sourcetype;
  }

  public void setSourcetype(String sourcetype) {
    this.sourcetype = sourcetype;
  }

  public ObservabilityPipelineSplunkHecDestination type(
      ObservabilityPipelineSplunkHecDestinationType type) {
    this.type = type;
    this.unparsed |= !type.isValid();
    return this;
  }

  /**
   * The destination type. Always <code>splunk_hec</code>.
   *
   * @return type
   */
  @JsonProperty(JSON_PROPERTY_TYPE)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public ObservabilityPipelineSplunkHecDestinationType getType() {
    return type;
  }

  public void setType(ObservabilityPipelineSplunkHecDestinationType type) {
    if (!type.isValid()) {
      this.unparsed = true;
    }
    this.type = type;
  }

  /**
   * A container for additional, undeclared properties. This is a holder for any undeclared
   * properties as specified with the 'additionalProperties' keyword in the OAS document.
   */
  private Map<String, Object> additionalProperties;

  /**
   * Set the additional (undeclared) property with the specified name and value. If the property
   * does not already exist, create it otherwise replace it.
   *
   * @param key The arbitrary key to set
   * @param value The associated value
   * @return ObservabilityPipelineSplunkHecDestination
   */
  @JsonAnySetter
  public ObservabilityPipelineSplunkHecDestination putAdditionalProperty(String key, Object value) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new HashMap<String, Object>();
    }
    this.additionalProperties.put(key, value);
    return this;
  }

  /**
   * Return the additional (undeclared) property.
   *
   * @return The additional properties
   */
  @JsonAnyGetter
  public Map<String, Object> getAdditionalProperties() {
    return additionalProperties;
  }

  /**
   * Return the additional (undeclared) property with the specified name.
   *
   * @param key The arbitrary key to get
   * @return The specific additional property for the given key
   */
  public Object getAdditionalProperty(String key) {
    if (this.additionalProperties == null) {
      return null;
    }
    return this.additionalProperties.get(key);
  }

  /** Return true if this ObservabilityPipelineSplunkHecDestination object is equal to o. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ObservabilityPipelineSplunkHecDestination observabilityPipelineSplunkHecDestination =
        (ObservabilityPipelineSplunkHecDestination) o;
    return Objects.equals(
            this.autoExtractTimestamp,
            observabilityPipelineSplunkHecDestination.autoExtractTimestamp)
        && Objects.equals(this.encoding, observabilityPipelineSplunkHecDestination.encoding)
        && Objects.equals(this.id, observabilityPipelineSplunkHecDestination.id)
        && Objects.equals(this.index, observabilityPipelineSplunkHecDestination.index)
        && Objects.equals(this.inputs, observabilityPipelineSplunkHecDestination.inputs)
        && Objects.equals(this.sourcetype, observabilityPipelineSplunkHecDestination.sourcetype)
        && Objects.equals(this.type, observabilityPipelineSplunkHecDestination.type)
        && Objects.equals(
            this.additionalProperties,
            observabilityPipelineSplunkHecDestination.additionalProperties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        autoExtractTimestamp, encoding, id, index, inputs, sourcetype, type, additionalProperties);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ObservabilityPipelineSplunkHecDestination {\n");
    sb.append("    autoExtractTimestamp: ")
        .append(toIndentedString(autoExtractTimestamp))
        .append("\n");
    sb.append("    encoding: ").append(toIndentedString(encoding)).append("\n");
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    index: ").append(toIndentedString(index)).append("\n");
    sb.append("    inputs: ").append(toIndentedString(inputs)).append("\n");
    sb.append("    sourcetype: ").append(toIndentedString(sourcetype)).append("\n");
    sb.append("    type: ").append(toIndentedString(type)).append("\n");
    sb.append("    additionalProperties: ")
        .append(toIndentedString(additionalProperties))
        .append("\n");
    sb.append('}');
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.datadog.api.client.ModelEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/** Encoding format for log events. */
@JsonSerialize(
    using =
        ObservabilityPipelineSplunkHecDestinationEncoding
            .ObservabilityPipelineSplunkHecDestinationEncodingSerializer.class)
public class ObservabilityPipelineSplunkHecDestinationEncoding extends ModelEnum<String> {

  // Wire values recognized for this enum; any other value marks the model as unparsed.
  private static final Set<String> allowedValues =
      new HashSet<String>(Arrays.asList("json", "raw_message"));

  /** Wire value {@code "json"}. */
  public static final ObservabilityPipelineSplunkHecDestinationEncoding JSON =
      new ObservabilityPipelineSplunkHecDestinationEncoding("json");

  /** Wire value {@code "raw_message"}. */
  public static final ObservabilityPipelineSplunkHecDestinationEncoding RAW_MESSAGE =
      new ObservabilityPipelineSplunkHecDestinationEncoding("raw_message");

  ObservabilityPipelineSplunkHecDestinationEncoding(String value) {
    super(value, allowedValues);
  }

  /**
   * Factory used by Jackson when reading JSON; unknown values are wrapped rather than rejected.
   *
   * @param value the raw string read from the payload
   * @return an instance holding {@code value}
   */
  @JsonCreator
  public static ObservabilityPipelineSplunkHecDestinationEncoding fromValue(String value) {
    return new ObservabilityPipelineSplunkHecDestinationEncoding(value);
  }

  /** Serializer that writes the bare string value instead of the wrapper object. */
  public static class ObservabilityPipelineSplunkHecDestinationEncodingSerializer
      extends StdSerializer<ObservabilityPipelineSplunkHecDestinationEncoding> {
    public ObservabilityPipelineSplunkHecDestinationEncodingSerializer(
        Class<ObservabilityPipelineSplunkHecDestinationEncoding> t) {
      super(t);
    }

    public ObservabilityPipelineSplunkHecDestinationEncodingSerializer() {
      this(null);
    }

    @Override
    public void serialize(
        ObservabilityPipelineSplunkHecDestinationEncoding value,
        JsonGenerator jgen,
        SerializerProvider provider)
        throws IOException, JsonProcessingException {
      jgen.writeObject(value.value);
    }
  }
}
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.datadog.api.client.ModelEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/** The destination type. Always <code>splunk_hec</code>. */
@JsonSerialize(
    using =
        ObservabilityPipelineSplunkHecDestinationType
            .ObservabilityPipelineSplunkHecDestinationTypeSerializer.class)
public class ObservabilityPipelineSplunkHecDestinationType extends ModelEnum<String> {

  // Single-valued discriminator enum: only "splunk_hec" is a valid wire value.
  private static final Set<String> allowedValues =
      new HashSet<String>(Arrays.asList("splunk_hec"));

  /** Wire value {@code "splunk_hec"}. */
  public static final ObservabilityPipelineSplunkHecDestinationType SPLUNK_HEC =
      new ObservabilityPipelineSplunkHecDestinationType("splunk_hec");

  ObservabilityPipelineSplunkHecDestinationType(String value) {
    super(value, allowedValues);
  }

  /**
   * Factory used by Jackson when reading JSON; unknown values are wrapped rather than rejected.
   *
   * @param value the raw string read from the payload
   * @return an instance holding {@code value}
   */
  @JsonCreator
  public static ObservabilityPipelineSplunkHecDestinationType fromValue(String value) {
    return new ObservabilityPipelineSplunkHecDestinationType(value);
  }

  /** Serializer that writes the bare string value instead of the wrapper object. */
  public static class ObservabilityPipelineSplunkHecDestinationTypeSerializer
      extends StdSerializer<ObservabilityPipelineSplunkHecDestinationType> {
    public ObservabilityPipelineSplunkHecDestinationTypeSerializer(
        Class<ObservabilityPipelineSplunkHecDestinationType> t) {
      super(t);
    }

    public ObservabilityPipelineSplunkHecDestinationTypeSerializer() {
      this(null);
    }

    @Override
    public void serialize(
        ObservabilityPipelineSplunkHecDestinationType value,
        JsonGenerator jgen,
        SerializerProvider provider)
        throws IOException, JsonProcessingException {
      jgen.writeObject(value.value);
    }
  }
}
/*
 * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
 * This product includes software developed at Datadog (https://www.datadoghq.com/).
 * Copyright 2019-Present Datadog, Inc.
 */

package com.datadog.api.client.v2.model;

import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/** The <code>splunk_hec</code> source implements the Splunk HTTP Event Collector (HEC) API. */
@JsonPropertyOrder({
  ObservabilityPipelineSplunkHecSource.JSON_PROPERTY_ID,
  ObservabilityPipelineSplunkHecSource.JSON_PROPERTY_TLS,
  ObservabilityPipelineSplunkHecSource.JSON_PROPERTY_TYPE
})
@jakarta.annotation.Generated(
    value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator")
public class ObservabilityPipelineSplunkHecSource {
  // Set when a nested member failed to parse into its declared schema.
  @JsonIgnore public boolean unparsed = false;

  public static final String JSON_PROPERTY_ID = "id";
  private String id;

  public static final String JSON_PROPERTY_TLS = "tls";
  private ObservabilityPipelineTls tls;

  public static final String JSON_PROPERTY_TYPE = "type";
  private ObservabilityPipelineSplunkHecSourceType type =
      ObservabilityPipelineSplunkHecSourceType.SPLUNK_HEC;

  public ObservabilityPipelineSplunkHecSource() {}

  @JsonCreator
  public ObservabilityPipelineSplunkHecSource(
      @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id,
      @JsonProperty(required = true, value = JSON_PROPERTY_TYPE)
          ObservabilityPipelineSplunkHecSourceType type) {
    this.id = id;
    this.type = type;
    this.unparsed |= !type.isValid();
  }

  public ObservabilityPipelineSplunkHecSource id(String id) {
    this.id = id;
    return this;
  }

  /**
   * The unique identifier for this component. Used to reference this component in other parts of
   * the pipeline (e.g., as input to downstream components).
   *
   * @return id
   */
  @JsonProperty(JSON_PROPERTY_ID)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public String getId() {
    return id;
  }

  public void setId(String id) {
    this.id = id;
  }

  public ObservabilityPipelineSplunkHecSource tls(ObservabilityPipelineTls tls) {
    this.tls = tls;
    // Fix: `tls` is an optional (nullable) field; guard before dereferencing to avoid NPE,
    // consistent with setTls which already accepts null.
    if (tls != null) {
      this.unparsed |= tls.unparsed;
    }
    return this;
  }

  /**
   * Configuration for enabling TLS encryption.
   *
   * @return tls
   */
  @jakarta.annotation.Nullable
  @JsonProperty(JSON_PROPERTY_TLS)
  @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS)
  public ObservabilityPipelineTls getTls() {
    return tls;
  }

  public void setTls(ObservabilityPipelineTls tls) {
    this.tls = tls;
  }

  public ObservabilityPipelineSplunkHecSource type(ObservabilityPipelineSplunkHecSourceType type) {
    this.type = type;
    this.unparsed |= !type.isValid();
    return this;
  }

  /**
   * The source type. Always <code>splunk_hec</code>.
   *
   * @return type
   */
  @JsonProperty(JSON_PROPERTY_TYPE)
  @JsonInclude(value = JsonInclude.Include.ALWAYS)
  public ObservabilityPipelineSplunkHecSourceType getType() {
    return type;
  }

  public void setType(ObservabilityPipelineSplunkHecSourceType type) {
    if (!type.isValid()) {
      this.unparsed = true;
    }
    this.type = type;
  }

  /**
   * A container for additional, undeclared properties. This is a holder for any undeclared
   * properties as specified with the 'additionalProperties' keyword in the OAS document.
   */
  private Map<String, Object> additionalProperties;

  /**
   * Set the additional (undeclared) property with the specified name and value. If the property
   * does not already exist, create it otherwise replace it.
   *
   * @param key The arbitrary key to set
   * @param value The associated value
   * @return ObservabilityPipelineSplunkHecSource
   */
  @JsonAnySetter
  public ObservabilityPipelineSplunkHecSource putAdditionalProperty(String key, Object value) {
    if (this.additionalProperties == null) {
      this.additionalProperties = new HashMap<String, Object>();
    }
    this.additionalProperties.put(key, value);
    return this;
  }

  /**
   * Return the additional (undeclared) property.
   *
   * @return The additional properties
   */
  @JsonAnyGetter
  public Map<String, Object> getAdditionalProperties() {
    return additionalProperties;
  }

  /**
   * Return the additional (undeclared) property with the specified name.
   *
   * @param key The arbitrary key to get
   * @return The specific additional property for the given key
   */
  public Object getAdditionalProperty(String key) {
    if (this.additionalProperties == null) {
      return null;
    }
    return this.additionalProperties.get(key);
  }

  /** Return true if this ObservabilityPipelineSplunkHecSource object is equal to o. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ObservabilityPipelineSplunkHecSource observabilityPipelineSplunkHecSource =
        (ObservabilityPipelineSplunkHecSource) o;
    return Objects.equals(this.id, observabilityPipelineSplunkHecSource.id)
        && Objects.equals(this.tls, observabilityPipelineSplunkHecSource.tls)
        && Objects.equals(this.type, observabilityPipelineSplunkHecSource.type)
        && Objects.equals(
            this.additionalProperties, observabilityPipelineSplunkHecSource.additionalProperties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(id, tls, type, additionalProperties);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ObservabilityPipelineSplunkHecSource {\n");
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    tls: ").append(toIndentedString(tls)).append("\n");
    sb.append("    type: ").append(toIndentedString(type)).append("\n");
    sb.append("    additionalProperties: ")
        .append(toIndentedString(additionalProperties))
        .append("\n");
    sb.append('}');
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
*/ +@JsonSerialize( + using = + ObservabilityPipelineSplunkHecSourceType.ObservabilityPipelineSplunkHecSourceTypeSerializer + .class) +public class ObservabilityPipelineSplunkHecSourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("splunk_hec")); + + public static final ObservabilityPipelineSplunkHecSourceType SPLUNK_HEC = + new ObservabilityPipelineSplunkHecSourceType("splunk_hec"); + + ObservabilityPipelineSplunkHecSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSplunkHecSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSplunkHecSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSplunkHecSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSplunkHecSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSplunkHecSourceType fromValue(String value) { + return new ObservabilityPipelineSplunkHecSourceType(value); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSource.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSource.java new file mode 100644 index 00000000000..802a520ac56 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSource.java @@ -0,0 +1,214 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. 
+ */ + +package com.datadog.api.client.v2.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS + * is supported for secure transmission. + */ +@JsonPropertyOrder({ + ObservabilityPipelineSplunkTcpSource.JSON_PROPERTY_ID, + ObservabilityPipelineSplunkTcpSource.JSON_PROPERTY_TLS, + ObservabilityPipelineSplunkTcpSource.JSON_PROPERTY_TYPE +}) +@jakarta.annotation.Generated( + value = "https://github.com/DataDog/datadog-api-client-java/blob/master/.generator") +public class ObservabilityPipelineSplunkTcpSource { + @JsonIgnore public boolean unparsed = false; + public static final String JSON_PROPERTY_ID = "id"; + private String id; + + public static final String JSON_PROPERTY_TLS = "tls"; + private ObservabilityPipelineTls tls; + + public static final String JSON_PROPERTY_TYPE = "type"; + private ObservabilityPipelineSplunkTcpSourceType type = + ObservabilityPipelineSplunkTcpSourceType.SPLUNK_TCP; + + public ObservabilityPipelineSplunkTcpSource() {} + + @JsonCreator + public ObservabilityPipelineSplunkTcpSource( + @JsonProperty(required = true, value = JSON_PROPERTY_ID) String id, + @JsonProperty(required = true, value = JSON_PROPERTY_TYPE) + ObservabilityPipelineSplunkTcpSourceType type) { + this.id = id; + this.type = type; + this.unparsed |= !type.isValid(); + } + + public ObservabilityPipelineSplunkTcpSource id(String id) { + this.id = id; + return this; + } + + /** + * The unique identifier for this component. 
Used to reference this component in other parts of + * the pipeline (e.g., as input to downstream components). + * + * @return id + */ + @JsonProperty(JSON_PROPERTY_ID) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public ObservabilityPipelineSplunkTcpSource tls(ObservabilityPipelineTls tls) { + this.tls = tls; + this.unparsed |= tls.unparsed; + return this; + } + + /** + * Configuration for enabling TLS encryption. + * + * @return tls + */ + @jakarta.annotation.Nullable + @JsonProperty(JSON_PROPERTY_TLS) + @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) + public ObservabilityPipelineTls getTls() { + return tls; + } + + public void setTls(ObservabilityPipelineTls tls) { + this.tls = tls; + } + + public ObservabilityPipelineSplunkTcpSource type(ObservabilityPipelineSplunkTcpSourceType type) { + this.type = type; + this.unparsed |= !type.isValid(); + return this; + } + + /** + * The source type. Always splunk_tcp. + * + * @return type + */ + @JsonProperty(JSON_PROPERTY_TYPE) + @JsonInclude(value = JsonInclude.Include.ALWAYS) + public ObservabilityPipelineSplunkTcpSourceType getType() { + return type; + } + + public void setType(ObservabilityPipelineSplunkTcpSourceType type) { + if (!type.isValid()) { + this.unparsed = true; + } + this.type = type; + } + + /** + * A container for additional, undeclared properties. This is a holder for any undeclared + * properties as specified with the 'additionalProperties' keyword in the OAS document. + */ + private Map additionalProperties; + + /** + * Set the additional (undeclared) property with the specified name and value. If the property + * does not already exist, create it otherwise replace it. 
+ * + * @param key The arbitrary key to set + * @param value The associated value + * @return ObservabilityPipelineSplunkTcpSource + */ + @JsonAnySetter + public ObservabilityPipelineSplunkTcpSource putAdditionalProperty(String key, Object value) { + if (this.additionalProperties == null) { + this.additionalProperties = new HashMap(); + } + this.additionalProperties.put(key, value); + return this; + } + + /** + * Return the additional (undeclared) property. + * + * @return The additional properties + */ + @JsonAnyGetter + public Map getAdditionalProperties() { + return additionalProperties; + } + + /** + * Return the additional (undeclared) property with the specified name. + * + * @param key The arbitrary key to get + * @return The specific additional property for the given key + */ + public Object getAdditionalProperty(String key) { + if (this.additionalProperties == null) { + return null; + } + return this.additionalProperties.get(key); + } + + /** Return true if this ObservabilityPipelineSplunkTcpSource object is equal to o. 
*/ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ObservabilityPipelineSplunkTcpSource observabilityPipelineSplunkTcpSource = + (ObservabilityPipelineSplunkTcpSource) o; + return Objects.equals(this.id, observabilityPipelineSplunkTcpSource.id) + && Objects.equals(this.tls, observabilityPipelineSplunkTcpSource.tls) + && Objects.equals(this.type, observabilityPipelineSplunkTcpSource.type) + && Objects.equals( + this.additionalProperties, observabilityPipelineSplunkTcpSource.additionalProperties); + } + + @Override + public int hashCode() { + return Objects.hash(id, tls, type, additionalProperties); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("class ObservabilityPipelineSplunkTcpSource {\n"); + sb.append(" id: ").append(toIndentedString(id)).append("\n"); + sb.append(" tls: ").append(toIndentedString(tls)).append("\n"); + sb.append(" type: ").append(toIndentedString(type)).append("\n"); + sb.append(" additionalProperties: ") + .append(toIndentedString(additionalProperties)) + .append("\n"); + sb.append('}'); + return sb.toString(); + } + + /** + * Convert the given object to string with each line indented by 4 spaces (except the first line). + */ + private String toIndentedString(Object o) { + if (o == null) { + return "null"; + } + return o.toString().replace("\n", "\n "); + } +} diff --git a/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSourceType.java b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSourceType.java new file mode 100644 index 00000000000..ecd01f1deb8 --- /dev/null +++ b/src/main/java/com/datadog/api/client/v2/model/ObservabilityPipelineSplunkTcpSourceType.java @@ -0,0 +1,62 @@ +/* + * Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ * This product includes software developed at Datadog (https://www.datadoghq.com/). + * Copyright 2019-Present Datadog, Inc. + */ + +package com.datadog.api.client.v2.model; + +import com.datadog.api.client.ModelEnum; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +/** The source type. Always splunk_tcp. */ +@JsonSerialize( + using = + ObservabilityPipelineSplunkTcpSourceType.ObservabilityPipelineSplunkTcpSourceTypeSerializer + .class) +public class ObservabilityPipelineSplunkTcpSourceType extends ModelEnum { + + private static final Set allowedValues = new HashSet(Arrays.asList("splunk_tcp")); + + public static final ObservabilityPipelineSplunkTcpSourceType SPLUNK_TCP = + new ObservabilityPipelineSplunkTcpSourceType("splunk_tcp"); + + ObservabilityPipelineSplunkTcpSourceType(String value) { + super(value, allowedValues); + } + + public static class ObservabilityPipelineSplunkTcpSourceTypeSerializer + extends StdSerializer { + public ObservabilityPipelineSplunkTcpSourceTypeSerializer( + Class t) { + super(t); + } + + public ObservabilityPipelineSplunkTcpSourceTypeSerializer() { + this(null); + } + + @Override + public void serialize( + ObservabilityPipelineSplunkTcpSourceType value, + JsonGenerator jgen, + SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeObject(value.value); + } + } + + @JsonCreator + public static ObservabilityPipelineSplunkTcpSourceType fromValue(String value) { + return new ObservabilityPipelineSplunkTcpSourceType(value); + } +}