Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
286 changes: 281 additions & 5 deletions .generator/schemas/v2/openapi.yaml

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2026-02-20T14:42:05.988Z

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
2026-02-20T14:42:32.372Z

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# Validate an observability pipeline with destination secret key returns "OK" response

require "datadog_api_client"

api_instance = DatadogAPIClient::V2::ObservabilityPipelinesAPI.new

# Source: logs collected from the Datadog Agent.
agent_source = DatadogAPIClient::V2::ObservabilityPipelineDatadogAgentSource.new({
  id: "datadog-agent-source",
  type: DatadogAPIClient::V2::ObservabilityPipelineDatadogAgentSourceType::DATADOG_AGENT,
})

# Processor: keep only error-status events.
filter_processor = DatadogAPIClient::V2::ObservabilityPipelineFilterProcessor.new({
  enabled: true,
  id: "filter-processor",
  include: "status:error",
  type: DatadogAPIClient::V2::ObservabilityPipelineFilterProcessorType::FILTER,
})

# Processor group wiring the agent source into the filter processor.
processor_group = DatadogAPIClient::V2::ObservabilityPipelineConfigProcessorGroup.new({
  enabled: true,
  id: "my-processor-group",
  include: "service:my-service",
  inputs: [
    "datadog-agent-source",
  ],
  processors: [filter_processor],
})

# Destination: Sumo Logic, with the endpoint URL referenced via a secret key.
sumo_logic_destination = DatadogAPIClient::V2::ObservabilityPipelineSumoLogicDestination.new({
  id: "sumo-logic-destination",
  inputs: [
    "my-processor-group",
  ],
  type: DatadogAPIClient::V2::ObservabilityPipelineSumoLogicDestinationType::SUMO_LOGIC,
  endpoint_url_key: "SUMO_LOGIC_ENDPOINT_URL",
})

pipeline_config = DatadogAPIClient::V2::ObservabilityPipelineConfig.new({
  destinations: [sumo_logic_destination],
  processor_groups: [processor_group],
  sources: [agent_source],
})

body = DatadogAPIClient::V2::ObservabilityPipelineSpec.new({
  data: DatadogAPIClient::V2::ObservabilityPipelineSpecData.new({
    attributes: DatadogAPIClient::V2::ObservabilityPipelineDataAttributes.new({
      config: pipeline_config,
      name: "Pipeline with Secret Key",
    }),
    type: "pipelines",
  }),
})
p api_instance.validate_pipeline(body)
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
# Validate an observability pipeline with source secret key returns "OK" response

require "datadog_api_client"

api_instance = DatadogAPIClient::V2::ObservabilityPipelinesAPI.new

# Source: HTTP client scraping with bearer auth, token referenced via a secret key.
http_client_source = DatadogAPIClient::V2::ObservabilityPipelineHttpClientSource.new({
  id: "http-client-source",
  type: DatadogAPIClient::V2::ObservabilityPipelineHttpClientSourceType::HTTP_CLIENT,
  decoding: DatadogAPIClient::V2::ObservabilityPipelineDecoding::DECODE_BYTES,
  scrape_interval_secs: 15,
  scrape_timeout_secs: 5,
  auth_strategy: DatadogAPIClient::V2::ObservabilityPipelineHttpClientSourceAuthStrategy::BEARER,
  token_key: "HTTP_CLIENT_TOKEN",
})

# Processor: keep only error-status events.
filter_processor = DatadogAPIClient::V2::ObservabilityPipelineFilterProcessor.new({
  enabled: true,
  id: "filter-processor",
  include: "status:error",
  type: DatadogAPIClient::V2::ObservabilityPipelineFilterProcessorType::FILTER,
})

# Processor group wiring the HTTP client source into the filter processor.
processor_group = DatadogAPIClient::V2::ObservabilityPipelineConfigProcessorGroup.new({
  enabled: true,
  id: "my-processor-group",
  include: "service:my-service",
  inputs: [
    "http-client-source",
  ],
  processors: [filter_processor],
})

# Destination: standard Datadog Logs intake.
datadog_logs_destination = DatadogAPIClient::V2::ObservabilityPipelineDatadogLogsDestination.new({
  id: "datadog-logs-destination",
  inputs: [
    "my-processor-group",
  ],
  type: DatadogAPIClient::V2::ObservabilityPipelineDatadogLogsDestinationType::DATADOG_LOGS,
})

pipeline_config = DatadogAPIClient::V2::ObservabilityPipelineConfig.new({
  destinations: [datadog_logs_destination],
  processor_groups: [processor_group],
  sources: [http_client_source],
})

body = DatadogAPIClient::V2::ObservabilityPipelineSpec.new({
  data: DatadogAPIClient::V2::ObservabilityPipelineSpecData.new({
    attributes: DatadogAPIClient::V2::ObservabilityPipelineDataAttributes.new({
      config: pipeline_config,
      name: "Pipeline with Source Secret",
    }),
    type: "pipelines",
  }),
})
p api_instance.validate_pipeline(body)
16 changes: 16 additions & 0 deletions features/v2/observability_pipelines.feature
Original file line number Diff line number Diff line change
Expand Up @@ -190,3 +190,19 @@ Feature: Observability Pipelines
When the request is sent
Then the response status is 200 OK
And the response "errors" has length 0

@team:DataDog/observability-pipelines
Scenario: Validate an observability pipeline with destination secret key returns "OK" response
Given new "ValidatePipeline" request
And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "sumo-logic-destination", "inputs": ["my-processor-group"], "type": "sumo_logic", "endpoint_url_key": "SUMO_LOGIC_ENDPOINT_URL"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Pipeline with Secret Key"}, "type": "pipelines"}}
When the request is sent
Then the response status is 200 OK
And the response "errors" has length 0

@team:DataDog/observability-pipelines
Scenario: Validate an observability pipeline with source secret key returns "OK" response
Given new "ValidatePipeline" request
And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["http-client-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "http-client-source", "type": "http_client", "decoding": "bytes", "scrape_interval_secs": 15, "scrape_timeout_secs": 5, "auth_strategy": "bearer", "token_key": "HTTP_CLIENT_TOKEN"}]}, "name": "Pipeline with Source Secret"}, "type": "pipelines"}}
When the request is sent
Then the response status is 200 OK
And the response "errors" has length 0
1 change: 1 addition & 0 deletions lib/datadog_api_client/inflector.rb
Original file line number Diff line number Diff line change
Expand Up @@ -3497,6 +3497,7 @@ def overrides
"v2.observability_pipeline_disk_buffer_options" => "ObservabilityPipelineDiskBufferOptions",
"v2.observability_pipeline_elasticsearch_destination" => "ObservabilityPipelineElasticsearchDestination",
"v2.observability_pipeline_elasticsearch_destination_api_version" => "ObservabilityPipelineElasticsearchDestinationApiVersion",
"v2.observability_pipeline_elasticsearch_destination_auth" => "ObservabilityPipelineElasticsearchDestinationAuth",
"v2.observability_pipeline_elasticsearch_destination_data_stream" => "ObservabilityPipelineElasticsearchDestinationDataStream",
"v2.observability_pipeline_elasticsearch_destination_type" => "ObservabilityPipelineElasticsearchDestinationType",
"v2.observability_pipeline_enrichment_table_file" => "ObservabilityPipelineEnrichmentTableFile",
Expand Down
12 changes: 11 additions & 1 deletion lib/datadog_api_client/v2/models/azure_storage_destination.rb
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@ class AzureStorageDestination
# Configuration for buffer settings on destination components.
attr_accessor :buffer

# Name of the environment variable or secret that holds the Azure Storage connection string.
attr_accessor :connection_string_key

# The name of the Azure Blob Storage container to store logs in.
attr_reader :container_name

Expand All @@ -49,6 +52,7 @@ def self.attribute_map
{
:'blob_prefix' => :'blob_prefix',
:'buffer' => :'buffer',
:'connection_string_key' => :'connection_string_key',
:'container_name' => :'container_name',
:'id' => :'id',
:'inputs' => :'inputs',
Expand All @@ -62,6 +66,7 @@ def self.openapi_types
{
:'blob_prefix' => :'String',
:'buffer' => :'ObservabilityPipelineBufferOptions',
:'connection_string_key' => :'String',
:'container_name' => :'String',
:'id' => :'String',
:'inputs' => :'Array<String>',
Expand Down Expand Up @@ -95,6 +100,10 @@ def initialize(attributes = {})
self.buffer = attributes[:'buffer']
end

if attributes.key?(:'connection_string_key')
self.connection_string_key = attributes[:'connection_string_key']
end

if attributes.key?(:'container_name')
self.container_name = attributes[:'container_name']
end
Expand Down Expand Up @@ -193,6 +202,7 @@ def ==(o)
self.class == o.class &&
blob_prefix == o.blob_prefix &&
buffer == o.buffer &&
connection_string_key == o.connection_string_key &&
container_name == o.container_name &&
id == o.id &&
inputs == o.inputs &&
Expand All @@ -204,7 +214,7 @@ def ==(o)
# @return [Integer] Hash code
# @!visibility private
def hash
[blob_prefix, buffer, container_name, id, inputs, type, additional_properties].hash
[blob_prefix, buffer, connection_string_key, container_name, id, inputs, type, additional_properties].hash
end
end
end
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,12 @@ class MicrosoftSentinelDestination
# Azure AD client ID used for authentication.
attr_reader :client_id

# Name of the environment variable or secret that holds the Azure AD client secret.
attr_accessor :client_secret_key

# Name of the environment variable or secret that holds the Data Collection Endpoint (DCE) URI.
attr_accessor :dce_uri_key

# The immutable ID of the Data Collection Rule (DCR).
attr_reader :dcr_immutable_id

Expand All @@ -55,6 +61,8 @@ def self.attribute_map
{
:'buffer' => :'buffer',
:'client_id' => :'client_id',
:'client_secret_key' => :'client_secret_key',
:'dce_uri_key' => :'dce_uri_key',
:'dcr_immutable_id' => :'dcr_immutable_id',
:'id' => :'id',
:'inputs' => :'inputs',
Expand All @@ -70,6 +78,8 @@ def self.openapi_types
{
:'buffer' => :'ObservabilityPipelineBufferOptions',
:'client_id' => :'String',
:'client_secret_key' => :'String',
:'dce_uri_key' => :'String',
:'dcr_immutable_id' => :'String',
:'id' => :'String',
:'inputs' => :'Array<String>',
Expand Down Expand Up @@ -105,6 +115,14 @@ def initialize(attributes = {})
self.client_id = attributes[:'client_id']
end

if attributes.key?(:'client_secret_key')
self.client_secret_key = attributes[:'client_secret_key']
end

if attributes.key?(:'dce_uri_key')
self.dce_uri_key = attributes[:'dce_uri_key']
end

if attributes.key?(:'dcr_immutable_id')
self.dcr_immutable_id = attributes[:'dcr_immutable_id']
end
Expand Down Expand Up @@ -244,6 +262,8 @@ def ==(o)
self.class == o.class &&
buffer == o.buffer &&
client_id == o.client_id &&
client_secret_key == o.client_secret_key &&
dce_uri_key == o.dce_uri_key &&
dcr_immutable_id == o.dcr_immutable_id &&
id == o.id &&
inputs == o.inputs &&
Expand All @@ -257,7 +277,7 @@ def ==(o)
# @return [Integer] Hash code
# @!visibility private
def hash
[buffer, client_id, dcr_immutable_id, id, inputs, table, tenant_id, type, additional_properties].hash
[buffer, client_id, client_secret_key, dce_uri_key, dcr_immutable_id, id, inputs, table, tenant_id, type, additional_properties].hash
end
end
end
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@ module DatadogAPIClient::V2
class ObservabilityPipelineAmazonDataFirehoseSource
include BaseGenericModel

# Name of the environment variable or secret that holds the Firehose delivery stream address.
attr_accessor :address_key

# AWS authentication credentials used for accessing AWS services such as S3.
# If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
attr_accessor :auth
Expand All @@ -42,6 +45,7 @@ class ObservabilityPipelineAmazonDataFirehoseSource
# @!visibility private
def self.attribute_map
{
:'address_key' => :'address_key',
:'auth' => :'auth',
:'id' => :'id',
:'tls' => :'tls',
Expand All @@ -53,6 +57,7 @@ def self.attribute_map
# @!visibility private
def self.openapi_types
{
:'address_key' => :'String',
:'auth' => :'ObservabilityPipelineAwsAuth',
:'id' => :'String',
:'tls' => :'ObservabilityPipelineTls',
Expand All @@ -78,6 +83,10 @@ def initialize(attributes = {})
end
}

if attributes.key?(:'address_key')
self.address_key = attributes[:'address_key']
end

if attributes.key?(:'auth')
self.auth = attributes[:'auth']
end
Expand Down Expand Up @@ -150,6 +159,7 @@ def to_hash
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
address_key == o.address_key &&
auth == o.auth &&
id == o.id &&
tls == o.tls &&
Expand All @@ -161,7 +171,7 @@ def ==(o)
# @return [Integer] Hash code
# @!visibility private
def hash
[auth, id, tls, type, additional_properties].hash
[address_key, auth, id, tls, type, additional_properties].hash
end
end
end
Loading
Loading