diff --git a/.apigentools-info b/.apigentools-info
index 8d9254e41b36c..2a0fcd0be2b6c 100644
--- a/.apigentools-info
+++ b/.apigentools-info
@@ -4,13 +4,13 @@
   "spec_versions": {
     "v1": {
       "apigentools_version": "1.6.6",
-      "regenerated": "2025-07-18 16:54:27.438110",
-      "spec_repo_commit": "0f24b8e5"
+      "regenerated": "2025-07-18 21:24:06.486206",
+      "spec_repo_commit": "5b64e98b"
     },
     "v2": {
       "apigentools_version": "1.6.6",
-      "regenerated": "2025-07-18 16:54:36.732989",
-      "spec_repo_commit": "0f24b8e5"
+      "regenerated": "2025-07-18 21:24:15.563971",
+      "spec_repo_commit": "5b64e98b"
     }
   }
 }
\ No newline at end of file
diff --git a/content/en/api/v2/observability-pipelines/examples.json b/content/en/api/v2/observability-pipelines/examples.json
index 801e8debee9c1..5d5304eb1a2b1 100644
--- a/content/en/api/v2/observability-pipelines/examples.json
+++ b/content/en/api/v2/observability-pipelines/examples.json
@@ -62,7 +62,7 @@
       "totalCount": 42
     }
   },
-  "html": "
data [required] ([object]): The schema data.
  attributes [required] (object): Defines the pipeline’s name and its components (sources, processors, and destinations).
    config [required] (object): Specifies the pipeline's configuration, including its sources, processors, and destinations.
      destinations [required] ([<oneOf>]): A list of destination components where processed logs are sent.
        Option 1 (object): The datadog_logs destination forwards logs to Datadog Log Management.
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          type [required] (enum): The destination type. The value should always be datadog_logs. Allowed enum values: datadog_logs. Default: datadog_logs.
        Option 2 (object): The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.
          auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
            assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
            external_id (string): A unique identifier for cross-account role assumption.
            session_name (string): A session identifier used for logging and tracing the assumed role session.
          bucket [required] (string): S3 bucket name.
          id [required] (string): Unique identifier for the destination component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          key_prefix (string): Optional prefix for object keys.
          region [required] (string): AWS region of the S3 bucket.
          storage_class [required] (enum): S3 storage class. Allowed enum values: STANDARD,REDUCED_REDUNDANCY,INTELLIGENT_TIERING,STANDARD_IA,EXPRESS_ONEZONE,ONEZONE_IA,GLACIER,GLACIER_IR,DEEP_ARCHIVE.
          tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
            ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
            crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
            key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
          type [required] (enum): The destination type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3.
        Option 3 (object): The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields.
          acl [required] (enum): Access control list setting for objects written to the bucket. Allowed enum values: private,project-private,public-read,authenticated-read,bucket-owner-read,bucket-owner-full-control.
          auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
            credentials_file [required] (string): Path to the GCP service account key file.
          bucket [required] (string): Name of the GCS bucket.
          id [required] (string): Unique identifier for the destination component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          key_prefix (string): Optional prefix for object keys within the GCS bucket.
          metadata ([object]): Custom metadata to attach to each object uploaded to the GCS bucket.
            name [required] (string): The metadata key.
            value [required] (string): The metadata value.
          storage_class [required] (enum): Storage class used for objects stored in GCS. Allowed enum values: STANDARD,NEARLINE,COLDLINE,ARCHIVE.
          type [required] (enum): The destination type. Always google_cloud_storage. Allowed enum values: google_cloud_storage. Default: google_cloud_storage.
        Option 4 (object): The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).
          auto_extract_timestamp (boolean): If true, Splunk tries to extract timestamps from incoming log events. If false, Splunk assigns the time the event was received.
          encoding (enum): Encoding format for log events. Allowed enum values: json,raw_message.
          id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
          index (string): Optional name of the Splunk index where logs are written.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          sourcetype (string): The Splunk sourcetype to assign to log events.
          type [required] (enum): The destination type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec.
        Option 5 (object): The sumo_logic destination forwards logs to Sumo Logic.
          encoding (enum): The output encoding format. Allowed enum values: json,raw_message,logfmt.
          header_custom_fields ([object]): A list of custom headers to include in the request to Sumo Logic.
            name [required] (string): The header field name.
            value [required] (string): The header field value.
          header_host_name (string): Optional override for the host name header.
          header_source_category (string): Optional override for the source category header.
          header_source_name (string): Optional override for the source name header.
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          type [required] (enum): The destination type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic.
        Option 6 (object): The elasticsearch destination writes logs to an Elasticsearch cluster.
          api_version (enum): The Elasticsearch API version to use. Set to auto to auto-detect. Allowed enum values: auto,v6,v7,v8.
          bulk_index (string): The index to write logs to in Elasticsearch.
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          type [required] (enum): The destination type. The value should always be elasticsearch. Allowed enum values: elasticsearch. Default: elasticsearch.
        Option 7 (object): The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          keepalive (int64): Optional socket keepalive duration in milliseconds.
          tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
            ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
            crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
            key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
          type [required] (enum): The destination type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog.
        Option 8 (object): The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          keepalive (int64): Optional socket keepalive duration in milliseconds.
          tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
            ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
            crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
            key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
          type [required] (enum): The destination type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng.
        Option 9 (object): The azure_storage destination forwards logs to an Azure Blob Storage container.
          blob_prefix (string): Optional prefix for blobs written to the container.
          container_name [required] (string): The name of the Azure Blob Storage container to store logs in.
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          type [required] (enum): The destination type. The value should always be azure_storage. Allowed enum values: azure_storage. Default: azure_storage.
        Option 10 (object): The microsoft_sentinel destination forwards logs to Microsoft Sentinel.
          client_id [required] (string): Azure AD client ID used for authentication.
          dcr_immutable_id [required] (string): The immutable ID of the Data Collection Rule (DCR).
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          table [required] (string): The name of the Log Analytics table where logs are sent.
          tenant_id [required] (string): Azure AD tenant ID.
          type [required] (enum): The destination type. The value should always be microsoft_sentinel. Allowed enum values: microsoft_sentinel. Default: microsoft_sentinel.
        Option 11 (object): The google_chronicle destination sends logs to Google Chronicle.
          auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
            credentials_file [required] (string): Path to the GCP service account key file.
          customer_id [required] (string): The Google Chronicle customer ID.
          encoding (enum): The encoding format for the logs sent to Chronicle. Allowed enum values: json,raw_message.
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          log_type (string): The log type metadata associated with the Chronicle destination.
          type [required] (enum): The destination type. The value should always be google_chronicle. Allowed enum values: google_chronicle. Default: google_chronicle.
        Option 12 (object): The new_relic destination sends logs to the New Relic platform.
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          region [required] (enum): The New Relic region. Allowed enum values: us,eu.
          type [required] (enum): The destination type. The value should always be new_relic. Allowed enum values: new_relic. Default: new_relic.
        Option 13 (object): The sentinel_one destination sends logs to SentinelOne.
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          region [required] (enum): The SentinelOne region to send logs to. Allowed enum values: us,eu,ca,data_set_us.
          type [required] (enum): The destination type. The value should always be sentinel_one. Allowed enum values: sentinel_one. Default: sentinel_one.
        Option 14 (object): The opensearch destination writes logs to an OpenSearch cluster.
          bulk_index (string): The index to write logs to.
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          type [required] (enum): The destination type. The value should always be opensearch. Allowed enum values: opensearch. Default: opensearch.
        Option 15 (object): The amazon_opensearch destination writes logs to Amazon OpenSearch.
          auth [required] (object): Authentication settings for the Amazon OpenSearch destination. The strategy field determines whether basic or AWS-based authentication is used.
            assume_role (string): The ARN of the role to assume (used with the aws strategy).
            aws_region (string): The AWS region.
            external_id (string): External ID for the assumed role (used with the aws strategy).
            session_name (string): Session name for the assumed role (used with the aws strategy).
            strategy [required] (enum): The authentication strategy to use. Allowed enum values: basic,aws.
          bulk_index (string): The index to write logs to.
          id [required] (string): The unique identifier for this component.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          type [required] (enum): The destination type. The value should always be amazon_opensearch. Allowed enum values: amazon_opensearch. Default: amazon_opensearch.
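Taken together, each entry in the destinations array combines the shared id, inputs, and type fields with the option-specific ones. As a minimal sketch of what one such entry might look like as request JSON (the component IDs here are hypothetical), a datadog_logs destination fed by one upstream processor:

    {
      "id": "datadog-logs-destination",
      "type": "datadog_logs",
      "inputs": ["filter-processor"]
    }

Other options layer their own required fields (for example, bucket, region, and storage_class for amazon_s3) on top of this same core.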

      processors ([<oneOf>]): A list of processors that transform or enrich log data.
        Option 1 (object): The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.
          id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
          include [required] (string): A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          type [required] (enum): The processor type. The value should always be filter. Allowed enum values: filter. Default: filter.
        Option 2 (object): The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.
          field [required] (string): The name of the log field that contains a JSON string.
          id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          type [required] (enum): The processor type. The value should always be parse_json. Allowed enum values: parse_json. Default: parse_json.
        Option 3 (object): The quota processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.
          drop_events [required] (boolean): If set to true, logs that match the quota filter and are sent after the quota has been met are dropped; only logs that do not match the filter query continue through the pipeline.
          id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
          ignore_when_missing_partitions (boolean): If true, the processor skips quota checks when partition fields are missing from the logs.
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
            enforce [required] (enum): Unit for quota enforcement, in bytes for data size or events for count. Allowed enum values: bytes,events.
            limit [required] (int64): The limit for quota enforcement.
          name [required] (string): Name of the quota.
          overflow_action (enum): The action to take when the quota is exceeded: drop (drop the event), no_action (let the event pass through), or overflow_routing (route to an overflow destination). Allowed enum values: drop,no_action,overflow_routing.
          overrides ([object]): A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.
            fields [required] ([object]): A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.
              name [required] (string): The field name.
              value [required] (string): The field value.
            limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
              enforce [required] (enum): Unit for quota enforcement, in bytes for data size or events for count. Allowed enum values: bytes,events.
              limit [required] (int64): The limit for quota enforcement.
          partition_fields ([string]): A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.
          type [required] (enum): The processor type. The value should always be quota. Allowed enum values: quota. Default: quota.
        Option 4 (object): The add_fields processor adds static key-value fields to logs.
          fields [required] ([object]): A list of static fields (key-value pairs) that are added to each log event processed by this component.
            name [required] (string): The field name.
            value [required] (string): The field value.
          id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          type [required] (enum): The processor type. The value should always be add_fields. Allowed enum values: add_fields. Default: add_fields.
        Option 5 (object): The remove_fields processor deletes specified fields from logs.
          fields [required] ([string]): A list of field names to be removed from each log event.
          id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): The PipelineRemoveFieldsProcessor inputs.
          type [required] (enum): The processor type. The value should always be remove_fields. Allowed enum values: remove_fields. Default: remove_fields.
        Option 6 (object): The rename_fields processor changes field names.
          fields [required] ([object]): A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.
            destination [required] (string): The field name to assign the renamed value to.
            preserve_source [required] (boolean): Indicates whether the original field received from the source should be kept (true) or removed (false) after renaming.
            source [required] (string): The original field name in the log event that should be renamed.
          id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          type [required] (enum): The processor type. The value should always be rename_fields. Allowed enum values: rename_fields. Default: rename_fields.
        Option 7 (object): The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log fields.
          id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
          metrics [required] ([object]): Configuration for generating individual metrics.
            group_by ([string]): Optional fields used to group the metric series.
            include [required] (string): Datadog filter query to match logs for metric generation.
            metric_type [required] (enum): Type of metric to create. Allowed enum values: count,gauge,distribution.
            name [required] (string): Name of the custom metric to be created.
            value [required] (<oneOf>): Specifies how the value of the generated metric is computed.
              Option 1 (object): Strategy that increments a generated metric by one for each matching event.
                strategy [required] (enum): Increments the metric by 1 for each matching event. Allowed enum values: increment_by_one.
              Option 2 (object): Strategy that increments a generated metric based on the value of a log field.
                field [required] (string): Name of the log field containing the numeric value to increment the metric by.
                strategy [required] (enum): Uses a numeric field in the log event as the metric increment. Allowed enum values: increment_by_field.
          type [required] (enum): The processor type. Always generate_datadog_metrics. Allowed enum values: generate_datadog_metrics. Default: generate_datadog_metrics.
        Option 8 (object): The sample processor allows probabilistic sampling of logs at a fixed rate.
          id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          percentage (double): The percentage of logs to sample.
          rate (int64): Number of events to sample (1 in N).
          type [required] (enum): The processor type. The value should always be sample. Allowed enum values: sample. Default: sample.
        Option 9 (object): The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.
          disable_library_rules (boolean): If set to true, disables the default Grok rules provided by Datadog.
          id [required] (string): A unique identifier for this processor.
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          rules [required] ([object]): The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.
            match_rules [required] ([object]): A list of Grok parsing rules that define how to extract fields from the source field. Each rule must contain a name and a valid Grok pattern.
              name [required] (string): The name of the rule.
              rule [required] (string): The definition of the Grok rule.
            source [required] (string): The name of the field in the log event to apply the Grok rules to.
            support_rules [required] ([object]): A list of Grok helper rules that can be referenced by the parsing rules.
              name [required] (string): The name of the Grok helper rule.
              rule [required] (string): The definition of the Grok helper rule.
          type [required] (enum): The processor type. The value should always be parse_grok. Allowed enum values: parse_grok. Default: parse_grok.
        Option 10 (object): The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.
          id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          rules [required] ([object]): A list of rules for identifying and acting on sensitive data patterns.
            keyword_options (object): Configuration for keywords used to reinforce sensitive data pattern detection.
              keywords [required] ([string]): A list of keywords to match near the sensitive pattern.
              proximity [required] (int64): Maximum number of tokens between a keyword and a sensitive value match.
            name [required] (string): A name identifying the rule.
            on_match [required] (<oneOf>): Defines what action to take when sensitive data is matched.
              Option 1 (object): Configuration for completely redacting matched sensitive data.
                action [required] (enum): Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. Allowed enum values: redact.
                options [required] (object): Configuration for fully redacting sensitive data.
                  replace [required] (string): The ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions replace.
              Option 2 (object): Configuration for hashing matched sensitive values.
                action [required] (enum): Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. Allowed enum values: hash.
                options (object): The ObservabilityPipelineSensitiveDataScannerProcessorActionHash options.
              Option 3 (object): Configuration for partially redacting matched sensitive data.
                action [required] (enum): Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). Allowed enum values: partial_redact.
                options [required] (object): Controls how partial redaction is applied, including character count and direction.
                  characters [required] (int64): The ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions characters.
                  direction [required] (enum): Indicates whether to redact characters from the first or last part of the matched value. Allowed enum values: first,last.
            pattern [required] (<oneOf>): Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.
              Option 1 (object): Defines a custom regex-based pattern for identifying sensitive data in logs.
                options [required] (object): Options for defining a custom regex pattern.
                  rule [required] (string): A regular expression used to detect sensitive values. Must be a valid regex.
                type [required] (enum): Indicates a custom regular expression is used for matching. Allowed enum values: custom.
              Option 2 (object): Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.
                options [required] (object): Options for selecting a predefined library pattern and enabling keyword support.
                  id [required] (string): Identifier for a predefined pattern from the sensitive data scanner pattern library.
                  use_recommended_keywords (boolean): Whether to augment the pattern with recommended keywords (optional).
                type [required] (enum): Indicates that a predefined library pattern is used. Allowed enum values: library.
            scope [required] (<oneOf>): Determines which parts of the log the pattern-matching rule should be applied to.
              Option 1 (object): Includes only specific fields for sensitive data scanning.
                options [required] (object): Fields to which the scope rule applies.
                  fields [required] ([string]): The ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions fields.
                target [required] (enum): Applies the rule only to included fields. Allowed enum values: include.
              Option 2 (object): Excludes specific fields from sensitive data scanning.
                options [required] (object): Fields to which the scope rule applies.
                  fields [required] ([string]): The ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions fields.
                target [required] (enum): Excludes specific fields from processing. Allowed enum values: exclude.
              Option 3 (object): Applies scanning across all available fields.
                target [required] (enum): Applies the rule to all fields. Allowed enum values: all.
            tags [required] ([string]): Tags assigned to this rule for filtering and classification.
          type [required] (enum): The processor type. The value should always be sensitive_data_scanner. Allowed enum values: sensitive_data_scanner. Default: sensitive_data_scanner.
        Option 11 (object): The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.
          id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
          mappings [required] ([object]): A list of mapping rules to convert events to the OCSF format.
            include [required] (string): A Datadog search query used to select the logs that this mapping should apply to.
            mapping [required] (<oneOf>): Defines a single mapping rule for transforming logs into the OCSF schema.
              Option 1 (enum): Predefined library mappings for common log formats. Allowed enum values: CloudTrail Account Change,GCP Cloud Audit CreateBucket,GCP Cloud Audit CreateSink,GCP Cloud Audit SetIamPolicy,GCP Cloud Audit UpdateSink,Github Audit Log API Activity,Google Workspace Admin Audit addPrivilege,Microsoft 365 Defender Incident,Microsoft 365 Defender UserLoggedIn,Okta System Log Authentication,Palo Alto Networks Firewall Traffic.
          type [required] (enum): The processor type. The value should always be ocsf_mapper. Allowed enum values: ocsf_mapper. Default: ocsf_mapper.
        Option 12 (object): The add_env_vars processor adds environment variable values to log events.
          id [required] (string): The unique identifier for this component. Used to reference this processor in the pipeline.
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
          type [required] (enum): The processor type. The value should always be add_env_vars. Allowed enum values: add_env_vars. Default: add_env_vars.
          variables [required] ([object]): A list of environment variable mappings to apply to log fields.
            field [required] (string): The target field in the log event.
            name [required] (string): The name of the environment variable to read.
        Option 13 (object): The dedupe processor removes duplicate fields in log events.
          fields [required] ([string]): A list of log field paths to check for duplicates.
          id [required] (string): The unique identifier for this processor.
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
          mode [required] (enum): The deduplication mode to apply to the fields. Allowed enum values: match,ignore.
          type [required] (enum): The processor type. The value should always be dedupe. Allowed enum values: dedupe. Default: dedupe.
        Option 14 (object): The enrichment_table processor enriches logs using a static CSV file or GeoIP database.
          file (object): Defines a static enrichment table loaded from a CSV file.
            encoding [required] (object): File encoding format.
              delimiter [required] (string): The encoding delimiter.
              includes_headers [required] (boolean): The encoding includes_headers.
              type [required] (enum): Specifies the encoding format (e.g., CSV) used for enrichment tables. Allowed enum values: csv.
            key [required] ([object]): Key fields used to look up enrichment values.
              column [required] (string): The items column.
              comparison [required] (enum): Defines how to compare key fields for enrichment table lookups. Allowed enum values: equals.
              field [required] (string): The items field.
            path [required] (string): Path to the CSV file.
            schema [required] ([object]): Schema defining column names and their types.
              column [required] (string): The items column.
              type [required] (enum): Declares allowed data types for enrichment table columns. Allowed enum values: string,boolean,integer,float,date,timestamp.
          geoip (object): Uses a GeoIP database to enrich logs based on an IP field.
            key_field [required] (string): Path to the IP field in the log.
            locale [required] (string): Locale used to resolve geographical names.
            path [required] (string): Path to the GeoIP database file.
          id [required] (string): The unique identifier for this processor.
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
          target [required] (string): Path where enrichment results should be stored in the log.
          type [required] (enum): The processor type. The value should always be enrichment_table. Allowed enum values: enrichment_table. Default: enrichment_table.
        Option 15 (object): The reduce processor aggregates and merges logs based on matching keys and merge strategies.
          group_by [required] ([string]): A list of fields used to group log events for merging.
          id [required] (string): The unique identifier for this processor.
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
          merge_strategies [required] ([object]): List of merge strategies defining how values from grouped events should be combined.
            path [required] (string): The field path in the log event.
            strategy [required] (enum): The merge strategy to apply. Allowed enum values: discard,retain,sum,max,min,array,concat,concat_newline,concat_raw,shortest_array,longest_array,flat_unique.
          type [required] (enum): The processor type. The value should always be reduce. Allowed enum values: reduce. Default: reduce.
        Option 16 (object): The throttle processor limits the number of events that pass through over a given time window.
          group_by ([string]): Optional list of fields used to group events before the threshold has been reached.
          id [required] (string): The unique identifier for this processor.
          include [required] (string): A Datadog search query used to determine which logs this processor targets.
          inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
          threshold [required] (int64): The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.
          type [required] (enum): The processor type. The value should always be throttle. Allowed enum values: throttle. Default: throttle.
          window [required] (double): The time window in seconds over which the threshold applies.
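As with destinations, each processors entry pairs the shared id, include, inputs, and type fields with option-specific settings, and components chain together through their inputs lists. A minimal sketch of a filter processor feeding a quota processor; the component IDs and the filter query below are hypothetical:

    [
      {
        "id": "filter-processor",
        "type": "filter",
        "include": "service:my-service",
        "inputs": ["datadog-agent-source"]
      },
      {
        "id": "quota-processor",
        "type": "quota",
        "name": "daily-quota",
        "include": "*",
        "inputs": ["filter-processor"],
        "drop_events": true,
        "limit": { "enforce": "events", "limit": 1000000 }
      }
    ]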

sources [required]

\n
\n

[ <oneOf>]

\n

A list of configured data sources for the pipeline.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

The kafka source ingests data from Apache Kafka topics.

\n
\n
\n
\n
\n
\n

group_id [required]

\n
\n

string

\n

Consumer group ID used by the Kafka client.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

librdkafka_options

\n
\n

[object]

\n

Optional list of advanced Kafka client configuration options, defined as key-value pairs.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the librdkafka configuration option to set.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The value assigned to the specified librdkafka configuration option.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

sasl

\n
\n

object

\n

Specifies the SASL mechanism for authenticating with a Kafka cluster.

\n
\n
\n
\n
\n
\n

mechanism

\n
\n

enum

\n

SASL mechanism used for Kafka authentication. \nAllowed enum values: PLAIN,SCRAM-SHA-256,SCRAM-SHA-512

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

topics [required]

\n
\n

[string]

\n

A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be kafka. \nAllowed enum values: kafka

default: kafka

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

The datadog_agent source collects logs from the Datadog Agent.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be datadog_agent. \nAllowed enum values: datadog_agent

default: datadog_agent

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP.\nTLS is supported for secure transmission.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always splunk_tcp. \nAllowed enum values: splunk_tcp

default: splunk_tcp

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 4

\n
\n

object

\n

The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always splunk_hec. \nAllowed enum values: splunk_hec

default: splunk_hec

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 5

\n
\n

object

\n

The amazon_s3 source ingests logs from an Amazon S3 bucket.\nIt supports AWS authentication and TLS encryption.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

string

\n

AWS region where the S3 bucket resides.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always amazon_s3. \nAllowed enum values: amazon_s3

default: amazon_s3

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6 (object): The fluentd source ingests logs from a Fluentd-compatible service.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - tls (object): TLS settings for this component; same fields as the tls object above (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be fluentd. Allowed enum values: fluentd. default: fluentd.

Option 7 (object): The fluent_bit source ingests logs from Fluent Bit.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - tls (object): TLS settings for this component; same fields as the tls object above (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be fluent_bit. Allowed enum values: fluent_bit. default: fluent_bit.

Option 8 (object): The http_server source collects logs over HTTP POST from external services.
  - auth_strategy [required] (enum): HTTP authentication method. Allowed enum values: none, plain.
  - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog.
  - id [required] (string): Unique ID for the HTTP server source.
  - tls (object): TLS settings for this component; same fields as the tls object above (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be http_server. Allowed enum values: http_server. default: http_server.
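A minimal sketch of an http_server source follows, with the enum fields drawn from the allowed values above; the component ID and file paths are hypothetical.

```python
# Minimal sketch of an http_server source (hypothetical ID and paths;
# enum values chosen from the allowed sets listed above).
http_server_source = {
    "id": "http-intake",       # hypothetical component ID
    "type": "http_server",
    "auth_strategy": "plain",  # one of: none, plain
    "decoding": "json",        # one of: bytes, gelf, json, syslog
    # Optional TLS block; crt_file is required once "tls" is present.
    "tls": {
        "crt_file": "/etc/certs/server.crt",  # placeholder path
        "key_file": "/etc/certs/server.key",  # placeholder path
    },
}
```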

Option 9 (object): The sumo_logic source receives logs from Sumo Logic collectors.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - type [required] (enum): The source type. The value should always be sumo_logic. Allowed enum values: sumo_logic. default: sumo_logic.

Option 10 (object): The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp.
  - tls (object): TLS settings for this component; same fields as the tls object above (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be rsyslog. Allowed enum values: rsyslog. default: rsyslog.

Option 11 (object): The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp.
  - tls (object): TLS settings for this component; same fields as the tls object above (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be syslog_ng. Allowed enum values: syslog_ng. default: syslog_ng.

Option 12 (object): The amazon_data_firehose source ingests logs from AWS Data Firehose.
  - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system's default credentials are used (for example, the IAM role and environment variables).
    - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    - external_id (string): A unique identifier for cross-account role assumption.
    - session_name (string): A session identifier used for logging and tracing the assumed role session.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): TLS settings for this component; same fields as the tls object above (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be amazon_data_firehose. Allowed enum values: amazon_data_firehose. default: amazon_data_firehose.

Option 13 (object): The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.
  - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
    - credentials_file [required] (string): Path to the GCP service account key file.
  - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - project [required] (string): The GCP project ID that owns the Pub/Sub subscription.
  - subscription [required] (string): The Pub/Sub subscription name from which messages are consumed.
  - tls (object): TLS settings for this component; same fields as the tls object above (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be google_pubsub. Allowed enum values: google_pubsub. default: google_pubsub.
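A minimal sketch of a google_pubsub source with all required fields; the project, subscription, and key-file path are hypothetical placeholders.

```python
# Minimal sketch of a google_pubsub source (hypothetical project,
# subscription, and key-file path).
google_pubsub_source = {
    "id": "pubsub-source",  # hypothetical component ID
    "type": "google_pubsub",
    "project": "my-gcp-project",          # placeholder GCP project ID
    "subscription": "logs-subscription",  # placeholder Pub/Sub subscription name
    "decoding": "json",                   # one of: bytes, gelf, json, syslog
    "auth": {"credentials_file": "/var/secrets/gcp-key.json"},  # placeholder path
}
```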

Option 14 (object): The http_client source scrapes logs from HTTP endpoints at regular intervals.
  - auth_strategy (enum): Optional authentication strategy for HTTP requests. Allowed enum values: basic, bearer.
  - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - scrape_interval_secs (int64): The interval (in seconds) between HTTP scrape requests.
  - scrape_timeout_secs (int64): The timeout (in seconds) for each scrape request.
  - tls (object): TLS settings for this component; same fields as the tls object above (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be http_client. Allowed enum values: http_client. default: http_client.

Option 15 (object): The logstash source ingests logs from a Logstash forwarder.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): TLS settings for this component; same fields as the tls object above (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be logstash. Allowed enum values: logstash. default: logstash.

name [required] (string): Name of the pipeline.
id [required] (string): Unique identifier for the pipeline.
type [required] (string): The resource type identifier. For pipeline resources, this should always be set to pipelines. default: pipelines.
meta (object): Metadata about the response.
  - totalCount (int64): The total number of pipelines.
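Putting the schema together end to end, here is a sketch of a complete pipeline body with one source, one processor, and one destination, wired together by inputs. All IDs, the pipeline name, and the filter query are hypothetical; only fields documented above are used.

```python
import json

# Sketch of a complete pipeline body matching the schema above. The wiring
# rule is that each component lists its upstream component IDs in "inputs".
pipeline = {
    "data": {
        "type": "pipelines",   # resource type; always "pipelines"
        "id": "pipeline-123",  # placeholder pipeline identifier
        "attributes": {
            "name": "example-pipeline",  # hypothetical pipeline name
            "config": {
                "sources": [
                    {"id": "src", "type": "sumo_logic"},
                ],
                "processors": [
                    {"id": "keep-errors", "type": "filter",
                     "include": "status:error",  # hypothetical search query
                     "inputs": ["src"]},
                ],
                "destinations": [
                    {"id": "dd-logs", "type": "datadog_logs",
                     "inputs": ["keep-errors"]},
                ],
            },
        },
    },
}
print(json.dumps(pipeline, indent=2))
```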
" + "html": "
data [required] ([object]): The schema data.
  - attributes [required] (object): Defines the pipeline's name and its components (sources, processors, and destinations).
    - config [required] (object): Specifies the pipeline's configuration, including its sources, processors, and destinations.
      - destinations [required] ([<oneOf>]): A list of destination components where processed logs are sent.

Option 1 (object): The datadog_logs destination forwards logs to Datadog Log Management.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be datadog_logs. Allowed enum values: datadog_logs. default: datadog_logs.

Option 2 (object): The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.
  - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system's default credentials are used (for example, the IAM role and environment variables).
    - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    - external_id (string): A unique identifier for cross-account role assumption.
    - session_name (string): A session identifier used for logging and tracing the assumed role session.
  - bucket [required] (string): S3 bucket name.
  - id [required] (string): Unique identifier for the destination component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - key_prefix (string): Optional prefix for object keys.
  - region [required] (string): AWS region of the S3 bucket.
  - storage_class [required] (enum): S3 storage class. Allowed enum values: STANDARD, REDUCED_REDUNDANCY, INTELLIGENT_TIERING, STANDARD_IA, EXPRESS_ONEZONE, ONEZONE_IA, GLACIER, GLACIER_IR, DEEP_ARCHIVE.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server's TLS certificate.
    - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - type [required] (enum): The destination type. Always amazon_s3. Allowed enum values: amazon_s3. default: amazon_s3.

Option 3 (object): The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields.
  - acl [required] (enum): Access control list setting for objects written to the bucket. Allowed enum values: private, project-private, public-read, authenticated-read, bucket-owner-read, bucket-owner-full-control.
  - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
    - credentials_file [required] (string): Path to the GCP service account key file.
  - bucket [required] (string): Name of the GCS bucket.
  - id [required] (string): Unique identifier for the destination component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - key_prefix (string): Optional prefix for object keys within the GCS bucket.
  - metadata ([object]): Custom metadata to attach to each object uploaded to the GCS bucket.
    - name [required] (string): The metadata key.
    - value [required] (string): The metadata value.
  - storage_class [required] (enum): Storage class used for objects stored in GCS. Allowed enum values: STANDARD, NEARLINE, COLDLINE, ARCHIVE.
  - type [required] (enum): The destination type. Always google_cloud_storage. Allowed enum values: google_cloud_storage. default: google_cloud_storage.

Option 4 (object): The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).
  - auto_extract_timestamp (boolean): If true, Splunk tries to extract timestamps from incoming log events. If false, Splunk assigns the time the event was received.
  - encoding (enum): Encoding format for log events. Allowed enum values: json, raw_message.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - index (string): Optional name of the Splunk index where logs are written.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - sourcetype (string): The Splunk sourcetype to assign to log events.
  - type [required] (enum): The destination type. Always splunk_hec. Allowed enum values: splunk_hec. default: splunk_hec.
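A minimal sketch of a splunk_hec destination; the component ID, upstream inputs, index, and sourcetype are hypothetical placeholders.

```python
# Minimal sketch of a splunk_hec destination (hypothetical IDs and values).
splunk_hec_destination = {
    "id": "splunk-out",             # hypothetical component ID
    "type": "splunk_hec",
    "inputs": ["keep-errors"],      # upstream component IDs (hypothetical)
    "encoding": "json",             # one of: json, raw_message
    "index": "main",                # optional Splunk index (placeholder)
    "sourcetype": "pipeline_logs",  # optional sourcetype (placeholder)
    "auto_extract_timestamp": True, # let Splunk parse event timestamps
}
```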

Option 5 (object): The sumo_logic destination forwards logs to Sumo Logic.
  - encoding (enum): The output encoding format. Allowed enum values: json, raw_message, logfmt.
  - header_custom_fields ([object]): A list of custom headers to include in the request to Sumo Logic.
    - name [required] (string): The header field name.
    - value [required] (string): The header field value.
  - header_host_name (string): Optional override for the host name header.
  - header_source_category (string): Optional override for the source category header.
  - header_source_name (string): Optional override for the source name header.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be sumo_logic. Allowed enum values: sumo_logic. default: sumo_logic.

Option 6 (object): The elasticsearch destination writes logs to an Elasticsearch cluster.
  - api_version (enum): The Elasticsearch API version to use. Set to auto to auto-detect. Allowed enum values: auto, v6, v7, v8.
  - bulk_index (string): The index to write logs to in Elasticsearch.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be elasticsearch. Allowed enum values: elasticsearch. default: elasticsearch.

Option 7 (object): The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - keepalive (int64): Optional socket keepalive duration in milliseconds.
  - tls (object): TLS settings for this component; same fields as the tls object above (ca_file, crt_file [required], key_file).
  - type [required] (enum): The destination type. The value should always be rsyslog. Allowed enum values: rsyslog. default: rsyslog.

Option 8 (object): The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - keepalive (int64): Optional socket keepalive duration in milliseconds.
  - tls (object): TLS settings for this component; same fields as the tls object above (ca_file, crt_file [required], key_file).
  - type [required] (enum): The destination type. The value should always be syslog_ng. Allowed enum values: syslog_ng. default: syslog_ng.

Option 9 (object): The azure_storage destination forwards logs to an Azure Blob Storage container.
  - blob_prefix (string): Optional prefix for blobs written to the container.
  - container_name [required] (string): The name of the Azure Blob Storage container to store logs in.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be azure_storage. Allowed enum values: azure_storage. default: azure_storage.

Option 10 (object): The microsoft_sentinel destination forwards logs to Microsoft Sentinel.
  - client_id [required] (string): Azure AD client ID used for authentication.
  - dcr_immutable_id [required] (string): The immutable ID of the Data Collection Rule (DCR).
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - table [required] (string): The name of the Log Analytics table where logs are sent.
  - tenant_id [required] (string): Azure AD tenant ID.
  - type [required] (enum): The destination type. The value should always be microsoft_sentinel. Allowed enum values: microsoft_sentinel. default: microsoft_sentinel.

Option 11 (object): The google_chronicle destination sends logs to Google Chronicle.
  - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
    - credentials_file [required] (string): Path to the GCP service account key file.
  - customer_id [required] (string): The Google Chronicle customer ID.
  - encoding (enum): The encoding format for the logs sent to Chronicle. Allowed enum values: json, raw_message.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - log_type (string): The log type metadata associated with the Chronicle destination.
  - type [required] (enum): The destination type. The value should always be google_chronicle. Allowed enum values: google_chronicle. default: google_chronicle.

Option 12 (object): The new_relic destination sends logs to the New Relic platform.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - region [required] (enum): The New Relic region. Allowed enum values: us, eu.
  - type [required] (enum): The destination type. The value should always be new_relic. Allowed enum values: new_relic. default: new_relic.

Option 13 (object): The sentinel_one destination sends logs to SentinelOne.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - region [required] (enum): The SentinelOne region to send logs to. Allowed enum values: us, eu, ca, data_set_us.
  - type [required] (enum): The destination type. The value should always be sentinel_one. Allowed enum values: sentinel_one. default: sentinel_one.

Option 14 (object): The opensearch destination writes logs to an OpenSearch cluster.
  - bulk_index (string): The index to write logs to.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be opensearch. Allowed enum values: opensearch. default: opensearch.

Option 15 (object): The amazon_opensearch destination writes logs to Amazon OpenSearch.
  - auth [required] (object): Authentication settings for the Amazon OpenSearch destination. The strategy field determines whether basic or AWS-based authentication is used.
    - assume_role (string): The ARN of the role to assume (used with the aws strategy).
    - aws_region (string): AWS region.
    - external_id (string): External ID for the assumed role (used with the aws strategy).
    - session_name (string): Session name for the assumed role (used with the aws strategy).
    - strategy [required] (enum): The authentication strategy to use. Allowed enum values: basic, aws.
  - bulk_index (string): The index to write logs to.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be amazon_opensearch. Allowed enum values: amazon_opensearch. default: amazon_opensearch.
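A minimal sketch of an amazon_opensearch destination using the aws auth strategy; the role ARN, region, index, and IDs are hypothetical placeholders.

```python
# Minimal sketch of an amazon_opensearch destination with "aws" auth
# (hypothetical ARN, region, index, and component IDs).
amazon_opensearch_destination = {
    "id": "aos-out",             # hypothetical component ID
    "type": "amazon_opensearch",
    "inputs": ["keep-errors"],   # upstream component IDs (hypothetical)
    "bulk_index": "logs-index",  # optional target index (placeholder)
    "auth": {
        "strategy": "aws",       # one of: basic, aws
        "aws_region": "us-east-1",
        "assume_role": "arn:aws:iam::123456789012:role/opensearch-writer",  # placeholder
    },
}
```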

processors ([<oneOf>]): A list of processors that transform or enrich log data.

Option 1 (object): The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be filter. Allowed enum values: filter. default: filter.

Option 2 (object): The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.
  - field [required] (string): The name of the log field that contains a JSON string.
  - id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be parse_json. Allowed enum values: parse_json. default: parse_json.

Option 3 (object): The quota processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.
  - drop_events [required] (boolean): If set to true, logs that matched the quota filter and are sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - ignore_when_missing_partitions (boolean): If true, the processor skips quota checks when partition fields are missing from the logs.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
    - enforce [required] (enum): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events.
    - limit [required] (int64): The limit for quota enforcement.
  - name [required] (string): Name of the quota.
  - overflow_action (enum): The action to take when the quota is exceeded. Options: drop (drop the event), no_action (let the event pass through), overflow_routing (route to an overflow destination). Allowed enum values: drop, no_action, overflow_routing.
  - overrides ([object]): A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.
    - fields [required] ([object]): A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.
      - name [required] (string): The field name.
      - value [required] (string): The field value.
    - limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
      - enforce [required] (enum): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events.
      - limit [required] (int64): The limit for quota enforcement.
  - partition_fields ([string]): A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.
  - type [required] (enum): The processor type. The value should always be quota. Allowed enum values: quota. default: quota.
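A sketch of a quota processor combining the pieces above: an overall byte limit, per-service partitioning, and one override. All names, queries, and limits are hypothetical.

```python
# Sketch of a quota processor: 10 GB overall, partitioned per service,
# with a stricter event-count override for one service (all values hypothetical).
quota_processor = {
    "id": "daily-quota",   # hypothetical component ID
    "type": "quota",
    "include": "*",        # hypothetical query targeting all logs
    "inputs": ["src"],     # upstream component IDs (hypothetical)
    "name": "daily-ingest-quota",
    "drop_events": True,   # drop matching logs once the quota is met
    "limit": {"enforce": "bytes", "limit": 10_000_000_000},
    "partition_fields": ["service"],  # track the quota per service
    "overrides": [
        {
            "fields": [{"name": "service", "value": "noisy-service"}],
            "limit": {"enforce": "events", "limit": 1_000_000},
        },
    ],
}
```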

Option 4 (object): The add_fields processor adds static key-value fields to logs.
  - fields [required] ([object]): A list of static fields (key-value pairs) that are added to each log event processed by this component.
    - name [required] (string): The field name.
    - value [required] (string): The field value.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be add_fields. Allowed enum values: add_fields. default: add_fields.

Option 5 (object): The remove_fields processor deletes specified fields from logs.
  - fields [required] ([string]): A list of field names to be removed from each log event.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be remove_fields. Allowed enum values: remove_fields. default: remove_fields.

Option 6 (object): The rename_fields processor changes field names.
  - fields [required] ([object]): A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.
    - destination [required] (string): The field name to assign the renamed value to.
    - preserve_source [required] (boolean): Indicates whether the original field received from the source should be kept (true) or removed (false) after renaming.
    - source [required] (string): The original field name in the log event that should be renamed.
  - id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be rename_fields. Allowed enum values: rename_fields. default: rename_fields.

Option 7 (object): The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log fields.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - metrics [required] ([object]): Configuration for generating individual metrics.
    - group_by ([string]): Optional fields used to group the metric series.
    - include [required] (string): Datadog filter query to match logs for metric generation.
    - metric_type [required] (enum): Type of metric to create. Allowed enum values: count, gauge, distribution.
    - name [required] (string): Name of the custom metric to be created.
    - value [required] (<oneOf>): Specifies how the value of the generated metric is computed.
      - Option 1 (object): Strategy that increments a generated metric by one for each matching event.
        - strategy [required] (enum): Increments the metric by 1 for each matching event. Allowed enum values: increment_by_one.
      - Option 2 (object): Strategy that increments a generated metric based on the value of a log field.
        - field [required] (string): Name of the log field containing the numeric value to increment the metric by.
        - strategy [required] (enum): Uses a numeric field in the log event as the metric increment. Allowed enum values: increment_by_field.
  - type [required] (enum): The processor type. Always generate_datadog_metrics. Allowed enum values: generate_datadog_metrics. default: generate_datadog_metrics.
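A sketch of a generate_datadog_metrics processor with one count metric using the increment_by_one strategy; the metric name, query, and IDs are hypothetical.

```python
# Sketch of a generate_datadog_metrics processor: one count metric,
# incremented by one per matching event (names and queries hypothetical).
metrics_processor = {
    "id": "error-metrics",      # hypothetical component ID
    "type": "generate_datadog_metrics",
    "include": "status:error",  # hypothetical search query
    "inputs": ["src"],          # upstream component IDs (hypothetical)
    "metrics": [
        {
            "name": "pipeline.errors.count",  # placeholder metric name
            "metric_type": "count",           # one of: count, gauge, distribution
            "include": "status:error",
            "group_by": ["service"],          # optional grouping fields
            "value": {"strategy": "increment_by_one"},
        },
    ],
}
```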

Option 8 (object): The sample processor allows probabilistic sampling of logs at a fixed rate.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - percentage (double): The percentage of logs to sample.
  - rate (int64): Number of events to sample (1 in N).
  - type [required] (enum): The processor type. The value should always be sample. Allowed enum values: sample. default: sample.

Option 9 (object): The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.
  - disable_library_rules (boolean): If set to true, disables the default Grok rules provided by Datadog.
  - id [required] (string): A unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - rules [required] ([object]): The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.
    - match_rules [required] ([object]): A list of Grok parsing rules that define how to extract fields from the source field. Each rule must contain a name and a valid Grok pattern.
      - name [required] (string): The name of the rule.
      - rule [required] (string): The definition of the Grok rule.
    - source [required] (string): The name of the field in the log event to apply the Grok rules to.
    - support_rules [required] ([object]): A list of Grok helper rules that can be referenced by the parsing rules.
      - name [required] (string): The name of the Grok helper rule.
      - rule [required] (string): The definition of the Grok helper rule.
  - type [required] (enum): The processor type. The value should always be parse_grok. Allowed enum values: parse_grok. default: parse_grok.
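A sketch of a parse_grok processor with one match rule and one helper rule. The field names and Grok patterns here are hypothetical illustrations of the structure, not patterns taken from the Datadog library.

```python
# Sketch of a parse_grok processor (hypothetical field names and patterns).
parse_grok_processor = {
    "id": "grok-parser",  # hypothetical component ID
    "type": "parse_grok",
    "include": "*",       # hypothetical query targeting all logs
    "inputs": ["src"],    # upstream component IDs (hypothetical)
    "rules": [
        {
            "source": "message",  # log field the Grok rules apply to
            "match_rules": [
                # Hypothetical pattern; "ip" references the helper rule below.
                {"name": "access_line",
                 "rule": "%{ip:client_ip} %{word:method} %{notSpace:path}"},
            ],
            "support_rules": [
                # Hypothetical helper rule referenced by the match rule.
                {"name": "ip", "rule": "%{ipv4}"},
            ],
        },
    ],
}
```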

Option 10 (object): The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - rules [required] ([object]): A list of rules for identifying and acting on sensitive data patterns.
    - keyword_options (object): Configuration for keywords used to reinforce sensitive data pattern detection.
      - keywords [required] ([string]): A list of keywords to match near the sensitive pattern.
      - proximity [required] (int64): Maximum number of tokens between a keyword and a sensitive value match.
    - name [required] (string): A name identifying the rule.
    - on_match [required] (<oneOf>): Defines what action to take when sensitive data is matched.
      - Option 1 (object): Configuration for completely redacting matched sensitive data.
        - action [required] (enum): Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. Allowed enum values: redact.
        - options [required] (object): Configuration for fully redacting sensitive data.
          - replace [required] (string): The string that replaces the matched sensitive data.
      - Option 2 (object): Configuration for hashing matched sensitive values.
        - action [required] (enum): Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. Allowed enum values: hash.
        - options (object): Options for the hash action.
      - Option 3 (object): Configuration for partially redacting matched sensitive data.
        - action [required] (enum): Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). Allowed enum values: partial_redact.
        - options [required] (object): Controls how partial redaction is applied, including character count and direction.
          - characters [required] (int64): The number of characters to redact.
          - direction [required] (enum): Indicates whether to redact characters from the first or last part of the matched value. Allowed enum values: first, last.
    - pattern [required] (<oneOf>): Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.
      - Option 1 (object): Defines a custom regex-based pattern for identifying sensitive data in logs.
        - options [required] (object): Options for defining a custom regex pattern.
          - rule [required] (string): A regular expression used to detect sensitive values. Must be a valid regex.
        - type [required] (enum): Indicates a custom regular expression is used for matching. Allowed enum values: custom.
      - Option 2 (object): Specifies a pattern from Datadog's sensitive data detection library to match known sensitive data types.
        - options [required] (object): Options for selecting a predefined library pattern and enabling keyword support.
          - id [required] (string): Identifier for a predefined pattern from the sensitive data scanner pattern library.
          - use_recommended_keywords (boolean): Whether to augment the pattern with recommended keywords (optional).
        - type [required] (enum): Indicates that a predefined library pattern is used. Allowed enum values: library.
    - scope [required] (<oneOf>): Determines which parts of the log the pattern-matching rule should be applied to.
      - Option 1 (object): Includes only specific fields for sensitive data scanning.
        - options [required] (object): Fields to which the scope rule applies.
          - fields [required] ([string]): The fields to include.
        - target [required] (enum): Applies the rule only to included fields. Allowed enum values: include.
      - Option 2 (object): Excludes specific fields from sensitive data scanning.
        - options [required] (object): Fields to which the scope rule applies.
          - fields [required] ([string]): The fields to exclude.
        - target [required] (enum): Excludes specific fields from processing. Allowed enum values: exclude.
      - Option 3 (object): Applies scanning across all available fields.
        - target [required] (enum): Applies the rule to all fields. Allowed enum values: all.
    - tags [required] ([string]): Tags assigned to this rule for filtering and classification.
  - type [required] (enum): The processor type. The value should always be sensitive_data_scanner. Allowed enum values: sensitive_data_scanner. default: sensitive_data_scanner.
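A sketch of a sensitive_data_scanner rule tying together one choice from each oneOf above: a custom regex pattern, scanned across all fields, with partial redaction. The regex, tag, and IDs are hypothetical.

```python
# Sketch of a sensitive_data_scanner processor with one rule
# (hypothetical regex, tag, and component IDs).
sds_processor = {
    "id": "scrub-cards",  # hypothetical component ID
    "type": "sensitive_data_scanner",
    "include": "*",       # hypothetical query targeting all logs
    "inputs": ["src"],    # upstream component IDs (hypothetical)
    "rules": [
        {
            "name": "mask-card-numbers",
            "tags": ["sensitive:card"],  # hypothetical classification tag
            "pattern": {
                "type": "custom",
                "options": {"rule": r"\b\d{13,16}\b"},  # hypothetical regex
            },
            "scope": {"target": "all"},  # scan every field (scope Option 3)
            "on_match": {                # partial redaction (on_match Option 3)
                "action": "partial_redact",
                "options": {"characters": 12, "direction": "first"},
            },
        },
    ],
}
```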

Option 11 (object): The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - mappings [required] ([object]): A list of mapping rules to convert events to the OCSF format.
    - include [required] (string): A Datadog search query used to select the logs that this mapping should apply to.
    - mapping [required] (<oneOf>): Defines a single mapping rule for transforming logs into the OCSF schema.
      - Option 1 (enum): Predefined library mappings for common log formats. Allowed enum values: CloudTrail Account Change, GCP Cloud Audit CreateBucket, GCP Cloud Audit CreateSink, GCP Cloud Audit SetIamPolicy, GCP Cloud Audit UpdateSink, Github Audit Log API Activity, Google Workspace Admin Audit addPrivilege, Microsoft 365 Defender Incident, Microsoft 365 Defender UserLoggedIn, Okta System Log Authentication, Palo Alto Networks Firewall Traffic.
  - type [required] (enum): The processor type. The value should always be ocsf_mapper. Allowed enum values: ocsf_mapper. default: ocsf_mapper.

Option 12 (object): The add_env_vars processor adds environment variable values to log events.
  - id [required] (string): The unique identifier for this component. Used to reference this processor in the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - type [required] (enum): The processor type. The value should always be add_env_vars. Allowed enum values: add_env_vars. default: add_env_vars.
  - variables [required] ([object]): A list of environment variable mappings to apply to log fields.
    - field [required] (string): The target field in the log event.
    - name [required] (string): The name of the environment variable to read.

Option 13 (object): The dedupe processor removes duplicate fields in log events.
  - fields [required] ([string]): A list of log field paths to check for duplicates.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - mode [required] (enum): The deduplication mode to apply to the fields. Allowed enum values: match, ignore.
  - type [required] (enum): The processor type. The value should always be dedupe. Allowed enum values: dedupe. default: dedupe.

  - Option 14 — object. The enrichment_table processor enriches logs using a static CSV file or GeoIP database.
      - file (object): Defines a static enrichment table loaded from a CSV file.
          - encoding (object, required): File encoding format.
              - delimiter (string, required): The encoding delimiter.
              - includes_headers (boolean, required): Whether the file includes a header row.
              - type (enum, required): Specifies the encoding format (for example, CSV) used for enrichment tables. Allowed enum values: csv
          - key ([object], required): Key fields used to look up enrichment values.
              - column (string, required): The table column used for the lookup.
              - comparison (enum, required): Defines how to compare key fields for enrichment table lookups. Allowed enum values: equals
              - field (string, required): The log event field used for the lookup.
          - path (string, required): Path to the CSV file.
          - schema ([object], required): Schema defining column names and their types.
              - column (string, required): The column name.
              - type (enum, required): Declares allowed data types for enrichment table columns. Allowed enum values: string, boolean, integer, float, date, timestamp
      - geoip (object): Uses a GeoIP database to enrich logs based on an IP field.
          - key_field (string, required): Path to the IP field in the log.
          - locale (string, required): Locale used to resolve geographical names.
          - path (string, required): Path to the GeoIP database file.
      - id (string, required): The unique identifier for this processor.
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this processor.
      - target (string, required): Path where enrichment results should be stored in the log.
      - type (enum, required): The processor type. The value should always be enrichment_table. Allowed enum values: enrichment_table. Default: enrichment_table
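A hypothetical file-based enrichment_table entry, assuming the key mapping reads "match the log field host against the table column hostname" (all paths, columns, and IDs invented):

    {
      "id": "enrich-1",
      "type": "enrichment_table",
      "include": "*",
      "inputs": ["dedupe-1"],
      "target": "enriched",
      "file": {
        "path": "/etc/tables/owners.csv",
        "encoding": { "type": "csv", "delimiter": ",", "includes_headers": true },
        "key": [ { "column": "hostname", "comparison": "equals", "field": "host" } ],
        "schema": [
          { "column": "hostname", "type": "string" },
          { "column": "owner", "type": "string" }
        ]
      }
    }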

  - Option 15 — object. The reduce processor aggregates and merges logs based on matching keys and merge strategies.
      - group_by ([string], required): A list of fields used to group log events for merging.
      - id (string, required): The unique identifier for this processor.
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this processor.
      - merge_strategies ([object], required): List of merge strategies defining how values from grouped events should be combined.
          - path (string, required): The field path in the log event.
          - strategy (enum, required): The merge strategy to apply. Allowed enum values: discard, retain, sum, max, min, array, concat, concat_newline, concat_raw, shortest_array, longest_array, flat_unique
      - type (enum, required): The processor type. The value should always be reduce. Allowed enum values: reduce. Default: reduce
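A sketch of a reduce entry that merges grouped events (all values invented):

    {
      "id": "reduce-1",
      "type": "reduce",
      "include": "*",
      "inputs": ["enrich-1"],
      "group_by": ["host", "service"],
      "merge_strategies": [
        { "path": "message", "strategy": "concat_newline" },
        { "path": "count", "strategy": "sum" }
      ]
    }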

  - Option 16 — object. The throttle processor limits the number of events that pass through over a given time window.
      - group_by ([string]): Optional list of fields used to group events before applying the threshold.
      - id (string, required): The unique identifier for this processor.
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this processor.
      - threshold (int64, required): The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.
      - type (enum, required): The processor type. The value should always be throttle. Allowed enum values: throttle. Default: throttle
      - window (double, required): The time window in seconds over which the threshold applies.
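A sketch of a throttle entry allowing at most 1000 events per 60-second window per service (values invented):

    {
      "id": "throttle-1",
      "type": "throttle",
      "include": "*",
      "inputs": ["reduce-1"],
      "threshold": 1000,
      "window": 60.0,
      "group_by": ["service"]
    }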

  - Option 17 — object. The datadog_tags processor includes or excludes specific Datadog tags in your logs.
      - action (enum, required): The action to take on tags with matching keys. Allowed enum values: include, exclude
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - keys ([string], required): A list of tag keys.
      - mode (enum, required): The processing mode. Allowed enum values: filter
      - type (enum, required): The processor type. The value should always be datadog_tags. Allowed enum values: datadog_tags. Default: datadog_tags
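A sketch of a datadog_tags entry that keeps only the listed tag keys (values invented):

    {
      "id": "tags-1",
      "type": "datadog_tags",
      "include": "*",
      "inputs": ["throttle-1"],
      "action": "include",
      "mode": "filter",
      "keys": ["env", "team"]
    }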

sources ([<oneOf>], required): A list of configured data sources for the pipeline.
  - Option 1 — object. The kafka source ingests data from Apache Kafka topics.
      - group_id (string, required): Consumer group ID used by the Kafka client.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - librdkafka_options ([object]): Optional list of advanced Kafka client configuration options, defined as key-value pairs.
          - name (string, required): The name of the librdkafka configuration option to set.
          - value (string, required): The value assigned to the specified librdkafka configuration option.
      - sasl (object): Specifies the SASL mechanism for authenticating with a Kafka cluster.
          - mechanism (enum): SASL mechanism used for Kafka authentication. Allowed enum values: PLAIN, SCRAM-SHA-256, SCRAM-SHA-512
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
          - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server's TLS certificate.
          - crt_file (string, required): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
          - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
      - topics ([string], required): A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.
      - type (enum, required): The source type. The value should always be kafka. Allowed enum values: kafka. Default: kafka
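A hypothetical kafka source entry; the librdkafka option shown is just one example of a client setting, and all names and paths are invented:

    {
      "id": "kafka-source-1",
      "type": "kafka",
      "group_id": "pipeline-consumers",
      "topics": ["app-logs"],
      "sasl": { "mechanism": "SCRAM-SHA-256" },
      "librdkafka_options": [ { "name": "fetch.message.max.bytes", "value": "1048576" } ],
      "tls": {
        "ca_file": "/certs/ca.crt",
        "crt_file": "/certs/client.crt",
        "key_file": "/certs/client.key"
      }
    }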

  - Option 2 — object. The datadog_agent source collects logs from the Datadog Agent.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. The value should always be datadog_agent. Allowed enum values: datadog_agent. Default: datadog_agent

  - Option 3 — object. The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS is supported for secure transmission.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. Always splunk_tcp. Allowed enum values: splunk_tcp. Default: splunk_tcp

  - Option 4 — object. The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec

  - Option 5 — object. The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS authentication and TLS encryption.
      - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system's default credentials are used (for example, the IAM role and environment variables).
          - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
          - external_id (string): A unique identifier for cross-account role assumption.
          - session_name (string): A session identifier used for logging and tracing the assumed role session.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - region (string, required): AWS region where the S3 bucket resides.
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3
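A sketch of an amazon_s3 source entry using an assumed role (the ARN and IDs are invented):

    {
      "id": "s3-source-1",
      "type": "amazon_s3",
      "region": "us-east-1",
      "auth": {
        "assume_role": "arn:aws:iam::123456789012:role/log-reader",
        "external_id": "example-external-id",
        "session_name": "pipeline-session"
      }
    }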

  - Option 6 — object. The fluentd source ingests logs from a Fluentd-compatible service.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. The value should always be fluentd. Allowed enum values: fluentd. Default: fluentd

  - Option 7 — object. The fluent_bit source ingests logs from Fluent Bit.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. The value should always be fluent_bit. Allowed enum values: fluent_bit. Default: fluent_bit

  - Option 8 — object. The http_server source collects logs over HTTP POST from external services.
      - auth_strategy (enum, required): HTTP authentication method. Allowed enum values: none, plain
      - decoding (enum, required): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog
      - id (string, required): Unique ID for the HTTP server source.
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. The value should always be http_server. Allowed enum values: http_server. Default: http_server
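A sketch of an http_server source entry (all values invented):

    {
      "id": "http-server-1",
      "type": "http_server",
      "auth_strategy": "plain",
      "decoding": "json",
      "tls": { "crt_file": "/certs/server.crt" }
    }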

  - Option 9 — object. The sumo_logic source receives logs from Sumo Logic collectors.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - type (enum, required): The source type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic

  - Option 10 — object. The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - mode (enum, required): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog

  - Option 11 — object. The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - mode (enum, required): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng

  - Option 12 — object. The amazon_data_firehose source ingests logs from AWS Data Firehose.
      - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system's default credentials are used (for example, the IAM role and environment variables); contains the same assume_role, external_id, and session_name fields described for the amazon_s3 source.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. The value should always be amazon_data_firehose. Allowed enum values: amazon_data_firehose. Default: amazon_data_firehose

  - Option 13 — object. The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.
      - auth (object, required): GCP credentials used to authenticate with Google Cloud Storage.
          - credentials_file (string, required): Path to the GCP service account key file.
      - decoding (enum, required): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - project (string, required): The GCP project ID that owns the Pub/Sub subscription.
      - subscription (string, required): The Pub/Sub subscription name from which messages are consumed.
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. The value should always be google_pubsub. Allowed enum values: google_pubsub. Default: google_pubsub
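A sketch of a google_pubsub source entry (the project, subscription, and key path are invented):

    {
      "id": "pubsub-source-1",
      "type": "google_pubsub",
      "project": "my-gcp-project",
      "subscription": "log-ingest-sub",
      "decoding": "json",
      "auth": { "credentials_file": "/secrets/gcp-sa.json" }
    }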

  - Option 14 — object. The http_client source scrapes logs from HTTP endpoints at regular intervals.
      - auth_strategy (enum): Optional authentication strategy for HTTP requests. Allowed enum values: basic, bearer
      - decoding (enum, required): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - scrape_interval_secs (int64): The interval (in seconds) between HTTP scrape requests.
      - scrape_timeout_secs (int64): The timeout (in seconds) for each scrape request.
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. The value should always be http_client. Allowed enum values: http_client. Default: http_client
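A sketch of an http_client source entry scraping once a minute (values invented):

    {
      "id": "http-client-1",
      "type": "http_client",
      "decoding": "json",
      "auth_strategy": "bearer",
      "scrape_interval_secs": 60,
      "scrape_timeout_secs": 10
    }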

  - Option 15 — object. The logstash source ingests logs from a Logstash forwarder.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The source type. The value should always be logstash. Allowed enum values: logstash. Default: logstash

name (string, required): Name of the pipeline.
id (string, required): Unique identifier for the pipeline.
type (string, required): The resource type identifier. For pipeline resources, this should always be set to pipelines. Default: pipelines
meta (object): Metadata about the response.
  - totalCount (int64): The total number of pipelines.
" }, "400": { "json": { @@ -153,7 +153,7 @@ "type": "pipelines" } }, - "html": "
data (object, required): Contains the pipeline's ID, type, and configuration attributes.
attributes (object, required): Defines the pipeline's name and its components (sources, processors, and destinations).
config (object, required): Specifies the pipeline's configuration, including its sources, processors, and destinations.
destinations ([<oneOf>], required): A list of destination components where processed logs are sent.
  - Option 1 — object. The datadog_logs destination forwards logs to Datadog Log Management.
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - type (enum, required): The destination type. The value should always be datadog_logs. Allowed enum values: datadog_logs. Default: datadog_logs

  - Option 2 — object. The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.
      - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system's default credentials are used (for example, the IAM role and environment variables); contains the same assume_role, external_id, and session_name fields described for the amazon_s3 source.
      - bucket (string, required): S3 bucket name.
      - id (string, required): Unique identifier for the destination component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - key_prefix (string): Optional prefix for object keys.
      - region (string, required): AWS region of the S3 bucket.
      - storage_class (enum, required): S3 storage class. Allowed enum values: STANDARD, REDUCED_REDUNDANCY, INTELLIGENT_TIERING, STANDARD_IA, EXPRESS_ONEZONE, ONEZONE_IA, GLACIER, GLACIER_IR, DEEP_ARCHIVE
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The destination type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3
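A sketch of an amazon_s3 destination entry (the bucket and IDs are invented):

    {
      "id": "s3-archive-1",
      "type": "amazon_s3",
      "inputs": ["tags-1"],
      "bucket": "my-log-archive",
      "region": "us-east-1",
      "key_prefix": "dd-logs/",
      "storage_class": "STANDARD_IA"
    }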

  - Option 3 — object. The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields.
      - acl (enum, required): Access control list setting for objects written to the bucket. Allowed enum values: private, project-private, public-read, authenticated-read, bucket-owner-read, bucket-owner-full-control
      - auth (object, required): GCP credentials used to authenticate with Google Cloud Storage.
          - credentials_file (string, required): Path to the GCP service account key file.
      - bucket (string, required): Name of the GCS bucket.
      - id (string, required): Unique identifier for the destination component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - key_prefix (string): Optional prefix for object keys within the GCS bucket.
      - metadata ([object]): Custom metadata to attach to each object uploaded to the GCS bucket.
          - name (string, required): The metadata key.
          - value (string, required): The metadata value.
      - storage_class (enum, required): Storage class used for objects stored in GCS. Allowed enum values: STANDARD, NEARLINE, COLDLINE, ARCHIVE
      - type (enum, required): The destination type. Always google_cloud_storage. Allowed enum values: google_cloud_storage. Default: google_cloud_storage
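A sketch of a google_cloud_storage destination entry (the bucket, key path, and metadata are invented):

    {
      "id": "gcs-archive-1",
      "type": "google_cloud_storage",
      "inputs": ["tags-1"],
      "bucket": "my-gcs-log-bucket",
      "acl": "project-private",
      "storage_class": "NEARLINE",
      "auth": { "credentials_file": "/secrets/gcp-sa.json" },
      "metadata": [ { "name": "team", "value": "platform" } ]
    }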

  - Option 4 — object. The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).
      - auto_extract_timestamp (boolean): If true, Splunk tries to extract timestamps from incoming log events. If false, Splunk assigns the time the event was received.
      - encoding (enum): Encoding format for log events. Allowed enum values: json, raw_message
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - index (string): Optional name of the Splunk index where logs are written.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - sourcetype (string): The Splunk sourcetype to assign to log events.
      - type (enum, required): The destination type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec
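A sketch of a splunk_hec destination entry (the index and sourcetype are invented):

    {
      "id": "splunk-hec-1",
      "type": "splunk_hec",
      "inputs": ["tags-1"],
      "encoding": "json",
      "index": "main",
      "sourcetype": "datadog_forwarded",
      "auto_extract_timestamp": true
    }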

  - Option 5 — object. The sumo_logic destination forwards logs to Sumo Logic.
      - encoding (enum): The output encoding format. Allowed enum values: json, raw_message, logfmt
      - header_custom_fields ([object]): A list of custom headers to include in the request to Sumo Logic.
          - name (string, required): The header field name.
          - value (string, required): The header field value.
      - header_host_name (string): Optional override for the host name header.
      - header_source_category (string): Optional override for the source category header.
      - header_source_name (string): Optional override for the source name header.
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - type (enum, required): The destination type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic

  - Option 6 — object. The elasticsearch destination writes logs to an Elasticsearch cluster.
      - api_version (enum): The Elasticsearch API version to use. Set to auto to auto-detect. Allowed enum values: auto, v6, v7, v8
      - bulk_index (string): The index to write logs to in Elasticsearch.
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - type (enum, required): The destination type. The value should always be elasticsearch. Allowed enum values: elasticsearch. Default: elasticsearch

  - Option 7 — object. The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - keepalive (int64): Optional socket keepalive duration in milliseconds.
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The destination type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog

  - Option 8 — object. The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - keepalive (int64): Optional socket keepalive duration in milliseconds.
      - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services; contains the same ca_file, crt_file (required), and key_file fields described for the kafka source.
      - type (enum, required): The destination type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng

  - Option 9 — object. The azure_storage destination forwards logs to an Azure Blob Storage container.
      - blob_prefix (string): Optional prefix for blobs written to the container.
      - container_name (string, required): The name of the Azure Blob Storage container to store logs in.
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - type (enum, required): The destination type. The value should always be azure_storage. Allowed enum values: azure_storage. Default: azure_storage

  - Option 10 — object. The microsoft_sentinel destination forwards logs to Microsoft Sentinel.
      - client_id (string, required): Azure AD client ID used for authentication.
      - dcr_immutable_id (string, required): The immutable ID of the Data Collection Rule (DCR).
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - table (string, required): The name of the Log Analytics table where logs are sent.
      - tenant_id (string, required): Azure AD tenant ID.
      - type (enum, required): The destination type. The value should always be microsoft_sentinel. Allowed enum values: microsoft_sentinel. Default: microsoft_sentinel
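A sketch of a microsoft_sentinel destination entry; the GUIDs, DCR ID, and table name are placeholders, not real identifiers:

    {
      "id": "sentinel-1",
      "type": "microsoft_sentinel",
      "inputs": ["tags-1"],
      "client_id": "00000000-0000-0000-0000-000000000000",
      "tenant_id": "00000000-0000-0000-0000-000000000000",
      "dcr_immutable_id": "dcr-00000000000000000000000000000000",
      "table": "Custom-DatadogLogs"
    }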

  - Option 11 — object. The google_chronicle destination sends logs to Google Chronicle.
      - auth (object, required): GCP credentials used to authenticate with Google Cloud Storage.
          - credentials_file (string, required): Path to the GCP service account key file.
      - customer_id (string, required): The Google Chronicle customer ID.
      - encoding (enum): The encoding format for the logs sent to Chronicle. Allowed enum values: json, raw_message
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - log_type (string): The log type metadata associated with the Chronicle destination.
      - type (enum, required): The destination type. The value should always be google_chronicle. Allowed enum values: google_chronicle. Default: google_chronicle

  - Option 12 — object. The new_relic destination sends logs to the New Relic platform.
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - region (enum, required): The New Relic region. Allowed enum values: us, eu
      - type (enum, required): The destination type. The value should always be new_relic. Allowed enum values: new_relic. Default: new_relic

  - Option 13 — object. The sentinel_one destination sends logs to SentinelOne.
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - region (enum, required): The SentinelOne region to send logs to. Allowed enum values: us, eu, ca, data_set_us
      - type (enum, required): The destination type. The value should always be sentinel_one. Allowed enum values: sentinel_one. Default: sentinel_one

  - Option 14 — object. The opensearch destination writes logs to an OpenSearch cluster.
      - bulk_index (string): The index to write logs to.
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - type (enum, required): The destination type. The value should always be opensearch. Allowed enum values: opensearch. Default: opensearch

  - Option 15 — object. The amazon_opensearch destination writes logs to Amazon OpenSearch.
      - auth (object, required): Authentication settings for the Amazon OpenSearch destination. The strategy field determines whether basic or AWS-based authentication is used.
          - assume_role (string): The ARN of the role to assume (used with the aws strategy).
          - aws_region (string): AWS region.
          - external_id (string): External ID for the assumed role (used with the aws strategy).
          - session_name (string): Session name for the assumed role (used with the aws strategy).
          - strategy (enum, required): The authentication strategy to use. Allowed enum values: basic, aws
      - bulk_index (string): The index to write logs to.
      - id (string, required): The unique identifier for this component.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - type (enum, required): The destination type. The value should always be amazon_opensearch. Allowed enum values: amazon_opensearch. Default: amazon_opensearch
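A sketch of an amazon_opensearch destination entry using the aws auth strategy (the ARN, region, and index are invented):

    {
      "id": "aos-1",
      "type": "amazon_opensearch",
      "inputs": ["tags-1"],
      "bulk_index": "logs-datadog",
      "auth": {
        "strategy": "aws",
        "aws_region": "us-east-1",
        "assume_role": "arn:aws:iam::123456789012:role/opensearch-writer"
      }
    }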

processors ([<oneOf>]): A list of processors that transform or enrich log data.
  - Option 1 — object. The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - include (string, required): A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - type (enum, required): The processor type. The value should always be filter. Allowed enum values: filter. Default: filter
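A sketch of a filter entry that keeps only error and warning logs (the query and IDs are invented):

    {
      "id": "filter-1",
      "type": "filter",
      "include": "status:error OR status:warn",
      "inputs": ["kafka-source-1"]
    }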

  - Option 2 — object. The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.
      - field (string, required): The name of the log field that contains a JSON string.
      - id (string, required): A unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - type (enum, required): The processor type. The value should always be parse_json. Allowed enum values: parse_json. Default: parse_json

  - Option 3 — object. The quota processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.
      - drop_events (boolean, required): If set to true, logs that match the quota filter and are sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - ignore_when_missing_partitions (boolean): If true, the processor skips quota checks when partition fields are missing from the logs.
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - limit (object, required): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
          - enforce (enum, required): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events
          - limit (int64, required): The limit for quota enforcement.
      - name (string, required): Name of the quota.
      - overflow_action (enum): The action to take when the quota is exceeded. Options: drop (drop the event), no_action (let the event pass through), overflow_routing (route to an overflow destination). Allowed enum values: drop, no_action, overflow_routing
      - overrides ([object]): A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.
          - fields ([object], required): A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.
              - name (string, required): The field name.
              - value (string, required): The field value.
          - limit (object, required): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
              - enforce (enum, required): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events
              - limit (int64, required): The limit for quota enforcement.
      - partition_fields ([string]): A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.
      - type (enum, required): The processor type. The value should always be quota. Allowed enum values: quota. Default: quota
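A sketch of a quota entry enforcing a 100 GiB byte limit with a per-service override (all names and limits invented):

    {
      "id": "quota-1",
      "type": "quota",
      "name": "daily-ingest-quota",
      "include": "*",
      "inputs": ["filter-1"],
      "drop_events": false,
      "overflow_action": "overflow_routing",
      "limit": { "enforce": "bytes", "limit": 107374182400 },
      "partition_fields": ["service"],
      "overrides": [
        {
          "fields": [ { "name": "service", "value": "checkout" } ],
          "limit": { "enforce": "events", "limit": 5000000 }
        }
      ]
    }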

  - Option 4 — object. The add_fields processor adds static key-value fields to logs.
      - fields ([object], required): A list of static fields (key-value pairs) that are added to each log event processed by this component.
          - name (string, required): The field name.
          - value (string, required): The field value.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - type (enum, required): The processor type. The value should always be add_fields. Allowed enum values: add_fields. Default: add_fields

  - Option 5 — object. The remove_fields processor deletes specified fields from logs.
      - fields ([string], required): A list of field names to be removed from each log event.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - type (enum, required): The processor type. The value should always be remove_fields. Allowed enum values: remove_fields. Default: remove_fields

  - Option 6 — object. The rename_fields processor changes field names.
      - fields ([object], required): A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.
          - destination (string, required): The field name to assign the renamed value to.
          - preserve_source (boolean, required): Indicates whether the original field received from the source should be kept (true) or removed (false) after renaming.
          - source (string, required): The original field name in the log event that should be renamed.
      - id (string, required): A unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - type (enum, required): The processor type. The value should always be rename_fields. Allowed enum values: rename_fields. Default: rename_fields
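A sketch of a rename_fields entry (the field names are invented):

    {
      "id": "rename-1",
      "type": "rename_fields",
      "include": "*",
      "inputs": ["quota-1"],
      "fields": [
        { "source": "hostname", "destination": "host", "preserve_source": false }
      ]
    }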

  - Option 7 — object. The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log fields.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this processor.
      - metrics ([object], required): Configuration for generating individual metrics.
          - group_by ([string]): Optional fields used to group the metric series.
          - include (string, required): Datadog filter query to match logs for metric generation.
          - metric_type (enum, required): Type of metric to create. Allowed enum values: count, gauge, distribution
          - name (string, required): Name of the custom metric to be created.
          - value (<oneOf>, required): Specifies how the value of the generated metric is computed.
              - Option 1 — object. Strategy that increments a generated metric by one for each matching event.
                  - strategy (enum, required): Increments the metric by 1 for each matching event. Allowed enum values: increment_by_one
              - Option 2 — object. Strategy that increments a generated metric based on the value of a log field.
                  - field (string, required): Name of the log field containing the numeric value to increment the metric by.
                  - strategy (enum, required): Uses a numeric field in the log event as the metric increment. Allowed enum values: increment_by_field
      - type (enum, required): The processor type. Always generate_datadog_metrics. Allowed enum values: generate_datadog_metrics. Default: generate_datadog_metrics
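A sketch of a generate_datadog_metrics entry showing both value strategies (metric names and field paths invented):

    {
      "id": "metrics-1",
      "type": "generate_datadog_metrics",
      "include": "*",
      "inputs": ["rename-1"],
      "metrics": [
        {
          "name": "pipeline.error_logs",
          "metric_type": "count",
          "include": "status:error",
          "group_by": ["service"],
          "value": { "strategy": "increment_by_one" }
        },
        {
          "name": "pipeline.bytes_read",
          "metric_type": "distribution",
          "include": "*",
          "value": { "strategy": "increment_by_field", "field": "network.bytes_read" }
        }
      ]
    }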

  - Option 8 — object. The sample processor allows probabilistic sampling of logs at a fixed rate.
      - id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - percentage (double): The percentage of logs to sample.
      - rate (int64): Number of events to sample (1 in N).
      - type (enum, required): The processor type. The value should always be sample. Allowed enum values: sample. Default: sample

  - Option 9 — object. The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.
      - disable_library_rules (boolean): If set to true, disables the default Grok rules provided by Datadog.
      - id (string, required): A unique identifier for this processor.
      - include (string, required): A Datadog search query used to determine which logs this processor targets.
      - inputs ([string], required): A list of component IDs whose output is used as the input for this component.
      - rules ([object], required): The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.
          - match_rules ([object], required): A list of Grok parsing rules that define how to extract fields from the source field. Each rule must contain a name and a valid Grok pattern.
              - name (string, required): The name of the rule.
              - rule (string, required): The definition of the Grok rule.
          - source (string, required): The name of the field in the log event to apply the Grok rules to.
          - support_rules ([object], required): A list of Grok helper rules that can be referenced by the parsing rules.
              - name (string, required): The name of the Grok helper rule.
              - rule (string, required): The definition of the Grok helper rule.
      - type (enum, required): The processor type. The value should always be parse_grok. Allowed enum values: parse_grok. Default: parse_grok
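A sketch of a parse_grok entry; the Grok pattern is purely illustrative and may need adjusting to Datadog's matcher syntax:

    {
      "id": "grok-1",
      "type": "parse_grok",
      "include": "source:nginx",
      "inputs": ["metrics-1"],
      "disable_library_rules": false,
      "rules": [
        {
          "source": "message",
          "match_rules": [
            {
              "name": "access_line",
              "rule": "%{ip:client_ip} %{word:method} %{notSpace:path} %{number:status_code}"
            }
          ],
          "support_rules": []
        }
      ]
    }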

Option 10 (object): The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - rules [required] ([object]): A list of rules for identifying and acting on sensitive data patterns.
      - keyword_options (object): Configuration for keywords used to reinforce sensitive data pattern detection.
          - keywords [required] ([string]): A list of keywords to match near the sensitive pattern.
          - proximity [required] (int64): Maximum number of tokens between a keyword and a sensitive value match.
      - name [required] (string): A name identifying the rule.
      - on_match [required] (<oneOf>): Defines what action to take when sensitive data is matched.
          - Option 1 (object): Configuration for completely redacting matched sensitive data.
              - action [required] (enum): Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. Allowed enum values: redact
              - options [required] (object): Configuration for fully redacting sensitive data.
                  - replace [required] (string): The replacement string for redacted values.
          - Option 2 (object): Configuration for hashing matched sensitive values.
              - action [required] (enum): Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. Allowed enum values: hash
              - options (object): Options for the hash action.
          - Option 3 (object): Configuration for partially redacting matched sensitive data.
              - action [required] (enum): Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). Allowed enum values: partial_redact
              - options [required] (object): Controls how partial redaction is applied, including character count and direction.
                  - characters [required] (int64): The number of characters the partial redaction applies to.
                  - direction [required] (enum): Indicates whether to redact characters from the first or last part of the matched value. Allowed enum values: first,last
      - pattern [required] (<oneOf>): Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.
          - Option 1 (object): Defines a custom regex-based pattern for identifying sensitive data in logs.
              - options [required] (object): Options for defining a custom regex pattern.
                  - rule [required] (string): A regular expression used to detect sensitive values. Must be a valid regex.
              - type [required] (enum): Indicates a custom regular expression is used for matching. Allowed enum values: custom
          - Option 2 (object): Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.
              - options [required] (object): Options for selecting a predefined library pattern and enabling keyword support.
                  - id [required] (string): Identifier for a predefined pattern from the sensitive data scanner pattern library.
                  - use_recommended_keywords (boolean): Whether to augment the pattern with recommended keywords (optional).
              - type [required] (enum): Indicates that a predefined library pattern is used. Allowed enum values: library
      - scope [required] (<oneOf>): Determines which parts of the log the pattern-matching rule should be applied to.
          - Option 1 (object): Includes only specific fields for sensitive data scanning.
              - options [required] (object): Fields to which the scope rule applies.
                  - fields [required] ([string]): The list of fields in scope.
              - target [required] (enum): Applies the rule only to included fields. Allowed enum values: include
          - Option 2 (object): Excludes specific fields from sensitive data scanning.
              - options [required] (object): Fields to which the scope rule applies.
                  - fields [required] ([string]): The list of fields to exclude.
              - target [required] (enum): Excludes specific fields from processing. Allowed enum values: exclude
          - Option 3 (object): Applies scanning across all available fields.
              - target [required] (enum): Applies the rule to all fields. Allowed enum values: all
      - tags [required] ([string]): Tags assigned to this rule for filtering and classification.
  - type [required] (enum): The processor type. The value should always be sensitive_data_scanner. Allowed enum values: sensitive_data_scanner. default: sensitive_data_scanner
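A hypothetical sensitive_data_scanner entry combining a custom regex pattern, keyword reinforcement, a field-scoped rule, and partial redaction. Every concrete value below (IDs, regex, keywords, field names) is a placeholder chosen for illustration:

  {
    "id": "scrub-cards",
    "type": "sensitive_data_scanner",
    "include": "*",
    "inputs": ["grok-nginx"],
    "rules": [
      {
        "name": "mask-credit-cards",
        "tags": ["pii:credit_card"],
        "keyword_options": { "keywords": ["card", "cc"], "proximity": 10 },
        "pattern": { "type": "custom", "options": { "rule": "\\d{13,16}" } },
        "scope": { "target": "include", "options": { "fields": ["message"] } },
        "on_match": {
          "action": "partial_redact",
          "options": { "characters": 12, "direction": "first" }
        }
      }
    ]
  }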
Option 11 (object): The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - mappings [required] ([object]): A list of mapping rules to convert events to the OCSF format.
      - include [required] (string): A Datadog search query used to select the logs that this mapping should apply to.
      - mapping [required] (<oneOf>): Defines a single mapping rule for transforming logs into the OCSF schema.
          - Option 1 (enum): Predefined library mappings for common log formats. Allowed enum values: CloudTrail Account Change, GCP Cloud Audit CreateBucket, GCP Cloud Audit CreateSink, GCP Cloud Audit SetIamPolicy, GCP Cloud Audit UpdateSink, Github Audit Log API Activity, Google Workspace Admin Audit addPrivilege, Microsoft 365 Defender Incident, Microsoft 365 Defender UserLoggedIn, Okta System Log Authentication, Palo Alto Networks Firewall Traffic
  - type [required] (enum): The processor type. The value should always be ocsf_mapper. Allowed enum values: ocsf_mapper. default: ocsf_mapper
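A hypothetical ocsf_mapper entry. The mapping value is one of the library mappings listed above; the ID, queries, and input are placeholders:

  {
    "id": "ocsf-okta",
    "type": "ocsf_mapper",
    "include": "*",
    "inputs": ["scrub-cards"],
    "mappings": [
      {
        "include": "source:okta",
        "mapping": "Okta System Log Authentication"
      }
    ]
  }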
Option 12 (object): The add_env_vars processor adds environment variable values to log events.
  - id [required] (string): The unique identifier for this component. Used to reference this processor in the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - type [required] (enum): The processor type. The value should always be add_env_vars. Allowed enum values: add_env_vars. default: add_env_vars
  - variables [required] ([object]): A list of environment variable mappings to apply to log fields.
      - field [required] (string): The target field in the log event.
      - name [required] (string): The name of the environment variable to read.
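A minimal hypothetical add_env_vars entry; the target field path and the environment variable name are illustrative:

  {
    "id": "stamp-region",
    "type": "add_env_vars",
    "include": "*",
    "inputs": ["ocsf-okta"],
    "variables": [
      { "field": "deployment.region", "name": "AWS_REGION" }
    ]
  }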
Option 13 (object): The dedupe processor removes duplicate fields in log events.
  - fields [required] ([string]): A list of log field paths to check for duplicates.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - mode [required] (enum): The deduplication mode to apply to the fields. Allowed enum values: match,ignore
  - type [required] (enum): The processor type. The value should always be dedupe. Allowed enum values: dedupe. default: dedupe
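A minimal hypothetical dedupe entry. The field paths are placeholders, and the mode value is picked arbitrarily from the two allowed values since the schema above does not spell out their semantics:

  {
    "id": "dedupe-host-tags",
    "type": "dedupe",
    "include": "*",
    "inputs": ["stamp-region"],
    "fields": ["host", "tags"],
    "mode": "match"
  }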
Option 14 (object): The enrichment_table processor enriches logs using a static CSV file or GeoIP database.
  - file (object): Defines a static enrichment table loaded from a CSV file.
      - encoding [required] (object): File encoding format.
          - delimiter [required] (string): The delimiter used in the CSV file.
          - includes_headers [required] (boolean): Whether the CSV file includes a header row.
          - type [required] (enum): Specifies the encoding format (e.g., CSV) used for enrichment tables. Allowed enum values: csv
      - key [required] ([object]): Key fields used to look up enrichment values.
          - column [required] (string): The enrichment table column to compare against.
          - comparison [required] (enum): Defines how to compare key fields for enrichment table lookups. Allowed enum values: equals
          - field [required] (string): The log field whose value is used for the lookup.
      - path [required] (string): Path to the CSV file.
      - schema [required] ([object]): Schema defining column names and their types.
          - column [required] (string): The column name.
          - type [required] (enum): Declares allowed data types for enrichment table columns. Allowed enum values: string,boolean,integer,float,date,timestamp
  - geoip (object): Uses a GeoIP database to enrich logs based on an IP field.
      - key_field [required] (string): Path to the IP field in the log.
      - locale [required] (string): Locale used to resolve geographical names.
      - path [required] (string): Path to the GeoIP database file.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - target [required] (string): Path where enrichment results should be stored in the log.
  - type [required] (enum): The processor type. The value should always be enrichment_table. Allowed enum values: enrichment_table. default: enrichment_table
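A hypothetical enrichment_table entry using the CSV variant (the geoip variant would replace the file block). The file path, column names, and lookup field are placeholders:

  {
    "id": "enrich-service-owner",
    "type": "enrichment_table",
    "include": "*",
    "inputs": ["dedupe-host-tags"],
    "target": "owner_info",
    "file": {
      "path": "/etc/pipeline/owners.csv",
      "encoding": { "type": "csv", "delimiter": ",", "includes_headers": true },
      "key": [
        { "column": "service", "comparison": "equals", "field": "service" }
      ],
      "schema": [
        { "column": "service", "type": "string" },
        { "column": "owner", "type": "string" }
      ]
    }
  }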
Option 15 (object): The reduce processor aggregates and merges logs based on matching keys and merge strategies.
  - group_by [required] ([string]): A list of fields used to group log events for merging.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - merge_strategies [required] ([object]): List of merge strategies defining how values from grouped events should be combined.
      - path [required] (string): The field path in the log event.
      - strategy [required] (enum): The merge strategy to apply. Allowed enum values: discard,retain,sum,max,min,array,concat,concat_newline,concat_raw,shortest_array,longest_array,flat_unique
  - type [required] (enum): The processor type. The value should always be reduce. Allowed enum values: reduce. default: reduce
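A hypothetical reduce entry that groups events by a request ID and combines two fields with different strategies; all field paths are placeholders:

  {
    "id": "reduce-retries",
    "type": "reduce",
    "include": "*",
    "inputs": ["enrich-service-owner"],
    "group_by": ["request_id"],
    "merge_strategies": [
      { "path": "retry_count", "strategy": "sum" },
      { "path": "message", "strategy": "concat_newline" }
    ]
  }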
Option 16 (object): The throttle processor limits the number of events that pass through over a given time window.
  - group_by ([string]): Optional list of fields used to group events before applying the threshold.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - threshold [required] (int64): The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.
  - type [required] (enum): The processor type. The value should always be throttle. Allowed enum values: throttle. default: throttle
  - window [required] (double): The time window in seconds over which the threshold applies.
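A hypothetical throttle entry allowing at most 1000 events per host per minute; the query, grouping field, and limits are placeholders:

  {
    "id": "throttle-noisy-service",
    "type": "throttle",
    "include": "service:chatty",
    "inputs": ["reduce-retries"],
    "group_by": ["host"],
    "threshold": 1000,
    "window": 60.0
  }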
sources [required] ([ <oneOf>]): A list of configured data sources for the pipeline.
Option 1 (object): The kafka source ingests data from Apache Kafka topics.
  - group_id [required] (string): Consumer group ID used by the Kafka client.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - librdkafka_options ([object]): Optional list of advanced Kafka client configuration options, defined as key-value pairs.
      - name [required] (string): The name of the librdkafka configuration option to set.
      - value [required] (string): The value assigned to the specified librdkafka configuration option.
  - sasl (object): Specifies the SASL mechanism for authenticating with a Kafka cluster.
      - mechanism (enum): SASL mechanism used for Kafka authentication. Allowed enum values: PLAIN,SCRAM-SHA-256,SCRAM-SHA-512
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - topics [required] ([string]): A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.
  - type [required] (enum): The source type. The value should always be kafka. Allowed enum values: kafka. default: kafka
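A hypothetical kafka source entry. The group ID, topic names, and certificate paths are placeholders; fetch.message.max.bytes is a standard librdkafka option shown purely as an example of the key-value passthrough:

  {
    "id": "kafka-app-logs",
    "type": "kafka",
    "group_id": "op-pipeline-consumers",
    "topics": ["app-logs", "audit-logs"],
    "sasl": { "mechanism": "SCRAM-SHA-256" },
    "librdkafka_options": [
      { "name": "fetch.message.max.bytes", "value": "1048576" }
    ],
    "tls": {
      "ca_file": "/etc/pipeline/certs/ca.crt",
      "crt_file": "/etc/pipeline/certs/client.crt",
      "key_file": "/etc/pipeline/certs/client.key"
    }
  }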
Option 2 (object): The datadog_agent source collects logs from the Datadog Agent.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Same fields as the kafka source above: ca_file, crt_file [required], key_file.
  - type [required] (enum): The source type. The value should always be datadog_agent. Allowed enum values: datadog_agent. default: datadog_agent

Option 3 (object): The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS is supported for secure transmission.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. Always splunk_tcp. Allowed enum values: splunk_tcp. default: splunk_tcp

Option 4 (object): The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. Always splunk_hec. Allowed enum values: splunk_hec. default: splunk_hec
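Since nearly every source accepts the same optional tls block, one hypothetical example may help. Here a datadog_agent source is configured with TLS; the paths are placeholders, and note that crt_file becomes required once the tls object is present:

  {
    "id": "datadog-agent-source",
    "type": "datadog_agent",
    "tls": {
      "crt_file": "/etc/pipeline/certs/server.crt",
      "key_file": "/etc/pipeline/certs/server.key"
    }
  }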
Option 5 (object): The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS authentication and TLS encryption.
  - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
      - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
      - external_id (string): A unique identifier for cross-account role assumption.
      - session_name (string): A session identifier used for logging and tracing the assumed role session.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - region [required] (string): AWS region where the S3 bucket resides.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. Always amazon_s3. Allowed enum values: amazon_s3. default: amazon_s3
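A hypothetical amazon_s3 source entry using cross-account role assumption; the account ID, role, and identifiers are placeholders:

  {
    "id": "s3-archive-source",
    "type": "amazon_s3",
    "region": "us-east-1",
    "auth": {
      "assume_role": "arn:aws:iam::123456789012:role/pipeline-reader",
      "external_id": "op-pipelines",
      "session_name": "op-s3-ingest"
    }
  }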
Option 6 (object): The fluentd source ingests logs from a Fluentd-compatible service.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be fluentd. Allowed enum values: fluentd. default: fluentd

Option 7 (object): The fluent_bit source ingests logs from Fluent Bit.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be fluent_bit. Allowed enum values: fluent_bit. default: fluent_bit
Option 8 (object): The http_server source collects logs over HTTP POST from external services.
  - auth_strategy [required] (enum): HTTP authentication method. Allowed enum values: none,plain
  - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
  - id [required] (string): Unique ID for the HTTP server source.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be http_server. Allowed enum values: http_server. default: http_server
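A minimal hypothetical http_server source entry accepting JSON payloads; only fields documented above are shown:

  {
    "id": "http-intake",
    "type": "http_server",
    "auth_strategy": "plain",
    "decoding": "json"
  }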
Option 9 (object): The sumo_logic source receives logs from Sumo Logic collectors.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - type [required] (enum): The source type. The value should always be sumo_logic. Allowed enum values: sumo_logic. default: sumo_logic

Option 10 (object): The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp,udp
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be rsyslog. Allowed enum values: rsyslog. default: rsyslog

Option 11 (object): The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp,udp
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be syslog_ng. Allowed enum values: syslog_ng. default: syslog_ng
Option 12 (object): The amazon_data_firehose source ingests logs from AWS Data Firehose.
  - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
      - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
      - external_id (string): A unique identifier for cross-account role assumption.
      - session_name (string): A session identifier used for logging and tracing the assumed role session.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be amazon_data_firehose. Allowed enum values: amazon_data_firehose. default: amazon_data_firehose
Option 13 (object): The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.
  - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
      - credentials_file [required] (string): Path to the GCP service account key file.
  - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - project [required] (string): The GCP project ID that owns the Pub/Sub subscription.
  - subscription [required] (string): The Pub/Sub subscription name from which messages are consumed.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be google_pubsub. Allowed enum values: google_pubsub. default: google_pubsub
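A hypothetical google_pubsub source entry; the project, subscription, and key-file path are placeholders:

  {
    "id": "pubsub-audit",
    "type": "google_pubsub",
    "project": "my-gcp-project",
    "subscription": "audit-logs-sub",
    "decoding": "json",
    "auth": { "credentials_file": "/etc/pipeline/gcp/service-account.json" }
  }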
Option 14 (object): The http_client source scrapes logs from HTTP endpoints at regular intervals.
  - auth_strategy (enum): Optional authentication strategy for HTTP requests. Allowed enum values: basic,bearer
  - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - scrape_interval_secs (int64): The interval (in seconds) between HTTP scrape requests.
  - scrape_timeout_secs (int64): The timeout (in seconds) for each scrape request.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be http_client. Allowed enum values: http_client. default: http_client
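A hypothetical http_client source entry polling every 30 seconds with bearer authentication; only the fields documented above are shown, and all values are illustrative:

  {
    "id": "http-poller",
    "type": "http_client",
    "decoding": "json",
    "auth_strategy": "bearer",
    "scrape_interval_secs": 30,
    "scrape_timeout_secs": 10
  }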
Option 15 (object): The logstash source ingests logs from a Logstash forwarder.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The source type. The value should always be logstash. Allowed enum values: logstash. default: logstash
  - name [required] (string): Name of the pipeline.
- id [required] (string): Unique identifier for the pipeline.
- type [required] (string): The resource type identifier. For pipeline resources, this should always be set to pipelines. default: pipelines
"
+    "html": "
- data [required] (object): Contains the pipeline’s ID, type, and configuration attributes.
  - attributes [required] (object): Defines the pipeline’s name and its components (sources, processors, and destinations).
    - config [required] (object): Specifies the pipeline's configuration, including its sources, processors, and destinations.
      - destinations [required] ([ <oneOf>]): A list of destination components where processed logs are sent.
Option 1 (object): The datadog_logs destination forwards logs to Datadog Log Management.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be datadog_logs. Allowed enum values: datadog_logs. default: datadog_logs
Option 2 (object): The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.
  - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
      - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
      - external_id (string): A unique identifier for cross-account role assumption.
      - session_name (string): A session identifier used for logging and tracing the assumed role session.
  - bucket [required] (string): S3 bucket name.
  - id [required] (string): Unique identifier for the destination component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - key_prefix (string): Optional prefix for object keys.
  - region [required] (string): AWS region of the S3 bucket.
  - storage_class [required] (enum): S3 storage class. Allowed enum values: STANDARD,REDUCED_REDUNDANCY,INTELLIGENT_TIERING,STANDARD_IA,EXPRESS_ONEZONE,ONEZONE_IA,GLACIER,GLACIER_IR,DEEP_ARCHIVE
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - type [required] (enum): The destination type. Always amazon_s3. Allowed enum values: amazon_s3. default: amazon_s3
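A hypothetical amazon_s3 destination entry archiving to an infrequent-access storage class; the bucket name, prefix, and input ID are placeholders:

  {
    "id": "s3-archive",
    "type": "amazon_s3",
    "inputs": ["throttle-noisy-service"],
    "bucket": "acme-log-archive",
    "region": "us-east-1",
    "key_prefix": "op-logs/",
    "storage_class": "GLACIER_IR"
  }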
Option 3 (object): The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields.
  - acl [required] (enum): Access control list setting for objects written to the bucket. Allowed enum values: private,project-private,public-read,authenticated-read,bucket-owner-read,bucket-owner-full-control
  - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
      - credentials_file [required] (string): Path to the GCP service account key file.
  - bucket [required] (string): Name of the GCS bucket.
  - id [required] (string): Unique identifier for the destination component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - key_prefix (string): Optional prefix for object keys within the GCS bucket.
  - metadata ([object]): Custom metadata to attach to each object uploaded to the GCS bucket.
      - name [required] (string): The metadata key.
      - value [required] (string): The metadata value.
  - storage_class [required] (enum): Storage class used for objects stored in GCS. Allowed enum values: STANDARD,NEARLINE,COLDLINE,ARCHIVE
  - type [required] (enum): The destination type. Always google_cloud_storage. Allowed enum values: google_cloud_storage. default: google_cloud_storage
Option 4 (object): The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).
  - auto_extract_timestamp (boolean): If true, Splunk tries to extract timestamps from incoming log events. If false, Splunk assigns the time the event was received.
  - encoding (enum): Encoding format for log events. Allowed enum values: json,raw_message
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - index (string): Optional name of the Splunk index where logs are written.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - sourcetype (string): The Splunk sourcetype to assign to log events.
  - type [required] (enum): The destination type. Always splunk_hec. Allowed enum values: splunk_hec. default: splunk_hec
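A hypothetical splunk_hec destination entry; the index and sourcetype names are placeholders:

  {
    "id": "splunk-out",
    "type": "splunk_hec",
    "inputs": ["throttle-noisy-service"],
    "encoding": "json",
    "index": "pipeline_logs",
    "sourcetype": "op:json",
    "auto_extract_timestamp": true
  }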
Option 5 (object): The sumo_logic destination forwards logs to Sumo Logic.
  - encoding (enum): The output encoding format. Allowed enum values: json,raw_message,logfmt
  - header_custom_fields ([object]): A list of custom headers to include in the request to Sumo Logic.
      - name [required] (string): The header field name.
      - value [required] (string): The header field value.
  - header_host_name (string): Optional override for the host name header.
  - header_source_category (string): Optional override for the source category header.
  - header_source_name (string): Optional override for the source name header.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be sumo_logic. Allowed enum values: sumo_logic. default: sumo_logic
Option 6 (object): The elasticsearch destination writes logs to an Elasticsearch cluster.
  - api_version (enum): The Elasticsearch API version to use. Set to auto to auto-detect. Allowed enum values: auto,v6,v7,v8
  - bulk_index (string): The index to write logs to in Elasticsearch.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be elasticsearch. Allowed enum values: elasticsearch. default: elasticsearch
Option 7 (object): The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - keepalive (int64): Optional socket keepalive duration in milliseconds.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Same fields as the amazon_s3 destination above: ca_file, crt_file [required], key_file.
  - type [required] (enum): The destination type. The value should always be rsyslog. Allowed enum values: rsyslog. default: rsyslog

Option 8 (object): The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - keepalive (int64): Optional socket keepalive duration in milliseconds.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file).
  - type [required] (enum): The destination type. The value should always be syslog_ng. Allowed enum values: syslog_ng. default: syslog_ng
Option 9 (object): The azure_storage destination forwards logs to an Azure Blob Storage container.
  - blob_prefix (string): Optional prefix for blobs written to the container.
  - container_name [required] (string): The name of the Azure Blob Storage container to store logs in.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be azure_storage. Allowed enum values: azure_storage. default: azure_storage
Option 10 (object): The microsoft_sentinel destination forwards logs to Microsoft Sentinel.
  - client_id [required] (string): Azure AD client ID used for authentication.
  - dcr_immutable_id [required] (string): The immutable ID of the Data Collection Rule (DCR).
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - table [required] (string): The name of the Log Analytics table where logs are sent.
  - tenant_id [required] (string): Azure AD tenant ID.
  - type [required] (enum): The destination type. The value should always be microsoft_sentinel. Allowed enum values: microsoft_sentinel. default: microsoft_sentinel
Option 11 (object): The google_chronicle destination sends logs to Google Chronicle.
  - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
      - credentials_file [required] (string): Path to the GCP service account key file.
  - customer_id [required] (string): The Google Chronicle customer ID.
  - encoding (enum): The encoding format for the logs sent to Chronicle. Allowed enum values: json,raw_message
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - log_type (string): The log type metadata associated with the Chronicle destination.
  - type [required] (enum): The destination type. The value should always be google_chronicle. Allowed enum values: google_chronicle. default: google_chronicle
Option 12 (object): The new_relic destination sends logs to the New Relic platform.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - region [required] (enum): The New Relic region. Allowed enum values: us,eu
  - type [required] (enum): The destination type. The value should always be new_relic. Allowed enum values: new_relic. default: new_relic

Option 13 (object): The sentinel_one destination sends logs to SentinelOne.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - region [required] (enum): The SentinelOne region to send logs to. Allowed enum values: us,eu,ca,data_set_us
  - type [required] (enum): The destination type. The value should always be sentinel_one. Allowed enum values: sentinel_one. default: sentinel_one
Option 14 (object): The opensearch destination writes logs to an OpenSearch cluster.
  - bulk_index (string): The index to write logs to.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be opensearch. Allowed enum values: opensearch. default: opensearch
Option 15 (object): The amazon_opensearch destination writes logs to Amazon OpenSearch.
  - auth [required] (object): Authentication settings for the Amazon OpenSearch destination. The strategy field determines whether basic or AWS-based authentication is used.
      - assume_role (string): The ARN of the role to assume (used with the aws strategy).
      - aws_region (string): The AWS region.
      - external_id (string): External ID for the assumed role (used with the aws strategy).
      - session_name (string): Session name for the assumed role (used with the aws strategy).
      - strategy [required] (enum): The authentication strategy to use. Allowed enum values: basic,aws
  - bulk_index (string): The index to write logs to.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be amazon_opensearch. Allowed enum values: amazon_opensearch. default: amazon_opensearch
processors ([ <oneOf>]): A list of processors that transform or enrich log data.
Option 1 (object): The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be filter. Allowed enum values: filter. default: filter
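A minimal hypothetical filter entry; the query is a placeholder meant to pass everything except debug-level logs, assuming standard Datadog search syntax:

  {
    "id": "drop-debug",
    "type": "filter",
    "include": "-status:debug",
    "inputs": ["datadog-agent-source"]
  }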
Option 2 (object): The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.
  - field [required] (string): The name of the log field that contains a JSON string.
  - id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be parse_json. Allowed enum values: parse_json. default: parse_json
  Option 3 (object): The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.
    drop_events [required] (boolean): If set to true, logs that matched the quota filter and are sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    ignore_when_missing_partitions (boolean): If true, the processor skips quota checks when partition fields are missing from the logs.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
      enforce [required] (enum): Unit for quota enforcement, in bytes for data size or events for count. Allowed enum values: bytes,events
      limit [required] (int64): The limit for quota enforcement.
    name [required] (string): Name of the quota.
    overflow_action (enum): The action to take when the quota is exceeded. Options:
      - drop: Drop the event.
      - no_action: Let the event pass through.
      - overflow_routing: Route to an overflow destination.
      Allowed enum values: drop,no_action,overflow_routing
    overrides ([object]): A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.
      fields [required] ([object]): A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.
        name [required] (string): The field name.
        value [required] (string): The field value.
      limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
        enforce [required] (enum): Unit for quota enforcement, in bytes for data size or events for count. Allowed enum values: bytes,events
        limit [required] (int64): The limit for quota enforcement.
    partition_fields ([string]): A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.
    type [required] (enum): The processor type. The value should always be quota. Allowed enum values: quota. default: quota
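Combining these fields, a quota processor with a per-service partition and one override could be sketched as follows; the quota name, limits, and field values are illustrative assumptions:

```json
{
  "id": "daily-quota",
  "type": "quota",
  "name": "service-quota",
  "include": "*",
  "inputs": ["parse-payload"],
  "drop_events": true,
  "limit": { "enforce": "bytes", "limit": 10000000000 },
  "partition_fields": ["service"],
  "overrides": [
    {
      "fields": [{ "name": "service", "value": "checkout" }],
      "limit": { "enforce": "events", "limit": 500000 }
    }
  ]
}
```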

  Option 4 (object): The add_fields processor adds static key-value fields to logs.
    fields [required] ([object]): A list of static fields (key-value pairs) that is added to each log event processed by this component.
      name [required] (string): The field name.
      value [required] (string): The field value.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    type [required] (enum): The processor type. The value should always be add_fields. Allowed enum values: add_fields. default: add_fields

  Option 5 (object): The remove_fields processor deletes specified fields from logs.
    fields [required] ([string]): A list of field names to be removed from each log event.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    type [required] (enum): The processor type. The value should always be remove_fields. Allowed enum values: remove_fields. default: remove_fields

  Option 6 (object): The rename_fields processor changes field names.
    fields [required] ([object]): A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.
      destination [required] (string): The field name to assign the renamed value to.
      preserve_source [required] (boolean): Indicates whether the original field received from the source should be kept (true) or removed (false) after renaming.
      source [required] (string): The original field name in the log event that should be renamed.
    id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    type [required] (enum): The processor type. The value should always be rename_fields. Allowed enum values: rename_fields. default: rename_fields

  Option 7 (object): The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log fields.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    metrics [required] ([object]): Configuration for generating individual metrics.
      group_by ([string]): Optional fields used to group the metric series.
      include [required] (string): Datadog filter query to match logs for metric generation.
      metric_type [required] (enum): Type of metric to create. Allowed enum values: count,gauge,distribution
      name [required] (string): Name of the custom metric to be created.
      value [required] (<oneOf>): Specifies how the value of the generated metric is computed.
        Option 1 (object): Strategy that increments a generated metric by one for each matching event.
          strategy [required] (enum): Increments the metric by 1 for each matching event. Allowed enum values: increment_by_one
        Option 2 (object): Strategy that increments a generated metric based on the value of a log field.
          field [required] (string): Name of the log field containing the numeric value to increment the metric by.
          strategy [required] (enum): Uses a numeric field in the log event as the metric increment. Allowed enum values: increment_by_field
    type [required] (enum): The processor type. Always generate_datadog_metrics. Allowed enum values: generate_datadog_metrics. default: generate_datadog_metrics
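A count metric using the increment_by_one strategy could be configured as in this sketch; the metric name, query, and grouping field are illustrative:

```json
{
  "id": "error-count-metric",
  "type": "generate_datadog_metrics",
  "include": "*",
  "inputs": ["daily-quota"],
  "metrics": [
    {
      "name": "logs.error.count",
      "metric_type": "count",
      "include": "status:error",
      "group_by": ["service"],
      "value": { "strategy": "increment_by_one" }
    }
  ]
}
```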

  Option 8 (object): The sample processor allows probabilistic sampling of logs at a fixed rate.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    percentage (double): The percentage of logs to sample.
    rate (int64): Number of events to sample (1 in N).
    type [required] (enum): The processor type. The value should always be sample. Allowed enum values: sample. default: sample

  Option 9 (object): The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.
    disable_library_rules (boolean): If set to true, disables the default Grok rules provided by Datadog.
    id [required] (string): A unique identifier for this processor.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    rules [required] ([object]): The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.
      match_rules [required] ([object]): A list of Grok parsing rules that define how to extract fields from the source field. Each rule must contain a name and a valid Grok pattern.
        name [required] (string): The name of the rule.
        rule [required] (string): The definition of the Grok rule.
      source [required] (string): The name of the field in the log event to apply the Grok rules to.
      support_rules [required] ([object]): A list of Grok helper rules that can be referenced by the parsing rules.
        name [required] (string): The name of the Grok helper rule.
        rule [required] (string): The definition of the Grok helper rule.
    type [required] (enum): The processor type. The value should always be parse_grok. Allowed enum values: parse_grok. default: parse_grok
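A parse_grok entry applying one match rule to the message field might look like this sketch; the rule name and the Grok pattern itself are hypothetical and only meant to show where each value goes:

```json
{
  "id": "grok-apache",
  "type": "parse_grok",
  "include": "source:apache",
  "inputs": ["error-count-metric"],
  "disable_library_rules": false,
  "rules": [
    {
      "source": "message",
      "match_rules": [
        { "name": "access", "rule": "%{ip:client} %{word:method} %{notSpace:path}" }
      ],
      "support_rules": []
    }
  ]
}
```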

  Option 10 (object): The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    rules [required] ([object]): A list of rules for identifying and acting on sensitive data patterns.
      keyword_options (object): Configuration for keywords used to reinforce sensitive data pattern detection.
        keywords [required] ([string]): A list of keywords to match near the sensitive pattern.
        proximity [required] (int64): Maximum number of tokens between a keyword and a sensitive value match.
      name [required] (string): A name identifying the rule.
      on_match [required] (<oneOf>): Defines what action to take when sensitive data is matched.
        Option 1 (object): Configuration for completely redacting matched sensitive data.
          action [required] (enum): Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. Allowed enum values: redact
          options [required] (object): Configuration for fully redacting sensitive data.
            replace [required] (string): The replacement string used in place of the matched sensitive data.
        Option 2 (object): Configuration for hashing matched sensitive values.
          action [required] (enum): Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. Allowed enum values: hash
          options (object): Options for the hash action.
        Option 3 (object): Configuration for partially redacting matched sensitive data.
          action [required] (enum): Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). Allowed enum values: partial_redact
          options [required] (object): Controls how partial redaction is applied, including character count and direction.
            characters [required] (int64): The number of characters used for the partial redaction.
            direction [required] (enum): Indicates whether to redact characters from the first or last part of the matched value. Allowed enum values: first,last
      pattern [required] (<oneOf>): Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.
        Option 1 (object): Defines a custom regex-based pattern for identifying sensitive data in logs.
          options [required] (object): Options for defining a custom regex pattern.
            rule [required] (string): A regular expression used to detect sensitive values. Must be a valid regex.
          type [required] (enum): Indicates a custom regular expression is used for matching. Allowed enum values: custom
        Option 2 (object): Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.
          options [required] (object): Options for selecting a predefined library pattern and enabling keyword support.
            id [required] (string): Identifier for a predefined pattern from the sensitive data scanner pattern library.
            use_recommended_keywords (boolean): Whether to augment the pattern with recommended keywords (optional).
          type [required] (enum): Indicates that a predefined library pattern is used. Allowed enum values: library
      scope [required] (<oneOf>): Determines which parts of the log the pattern-matching rule should be applied to.
        Option 1 (object): Includes only specific fields for sensitive data scanning.
          options [required] (object): Fields to which the scope rule applies.
            fields [required] ([string]): The list of fields the scope rule applies to.
          target [required] (enum): Applies the rule only to included fields. Allowed enum values: include
        Option 2 (object): Excludes specific fields from sensitive data scanning.
          options [required] (object): Fields to which the scope rule applies.
            fields [required] ([string]): The list of fields the scope rule applies to.
          target [required] (enum): Excludes specific fields from processing. Allowed enum values: exclude
        Option 3 (object): Applies scanning across all available fields.
          target [required] (enum): Applies the rule to all fields. Allowed enum values: all
      tags [required] ([string]): Tags assigned to this rule for filtering and classification.
    type [required] (enum): The processor type. The value should always be sensitive_data_scanner. Allowed enum values: sensitive_data_scanner. default: sensitive_data_scanner
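Tying the rule parts together, a single custom-regex rule that redacts matches across all fields could be sketched like this; the rule name, tag, regex, and replacement string are illustrative assumptions:

```json
{
  "id": "scrub-emails",
  "type": "sensitive_data_scanner",
  "include": "*",
  "inputs": ["grok-apache"],
  "rules": [
    {
      "name": "redact-email",
      "tags": ["sensitive:email"],
      "pattern": {
        "type": "custom",
        "options": { "rule": "[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+" }
      },
      "scope": { "target": "all" },
      "on_match": {
        "action": "redact",
        "options": { "replace": "[REDACTED]" }
      }
    }
  ]
}
```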

  Option 11 (object): The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    mappings [required] ([object]): A list of mapping rules to convert events to the OCSF format.
      include [required] (string): A Datadog search query used to select the logs that this mapping should apply to.
      mapping [required] (<oneOf>): Defines a single mapping rule for transforming logs into the OCSF schema.
        Option 1 (enum): Predefined library mappings for common log formats. Allowed enum values: CloudTrail Account Change,GCP Cloud Audit CreateBucket,GCP Cloud Audit CreateSink,GCP Cloud Audit SetIamPolicy,GCP Cloud Audit UpdateSink,Github Audit Log API Activity,Google Workspace Admin Audit addPrivilege,Microsoft 365 Defender Incident,Microsoft 365 Defender UserLoggedIn,Okta System Log Authentication,Palo Alto Networks Firewall Traffic
    type [required] (enum): The processor type. The value should always be ocsf_mapper. Allowed enum values: ocsf_mapper. default: ocsf_mapper

  Option 12 (object): The add_env_vars processor adds environment variable values to log events.
    id [required] (string): The unique identifier for this component. Used to reference this processor in the pipeline.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    type [required] (enum): The processor type. The value should always be add_env_vars. Allowed enum values: add_env_vars. default: add_env_vars
    variables [required] ([object]): A list of environment variable mappings to apply to log fields.
      field [required] (string): The target field in the log event.
      name [required] (string): The name of the environment variable to read.

  Option 13 (object): The dedupe processor removes duplicate fields in log events.
    fields [required] ([string]): A list of log field paths to check for duplicates.
    id [required] (string): The unique identifier for this processor.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    mode [required] (enum): The deduplication mode to apply to the fields. Allowed enum values: match,ignore
    type [required] (enum): The processor type. The value should always be dedupe. Allowed enum values: dedupe. default: dedupe

  Option 14 (object): The enrichment_table processor enriches logs using a static CSV file or GeoIP database.
    file (object): Defines a static enrichment table loaded from a CSV file.
      encoding [required] (object): File encoding format.
        delimiter [required] (string): The encoding delimiter.
        includes_headers [required] (boolean): Whether the file includes a header row.
        type [required] (enum): Specifies the encoding format (e.g., CSV) used for enrichment tables. Allowed enum values: csv
      key [required] ([object]): Key fields used to look up enrichment values.
        column [required] (string): The enrichment table column to compare against.
        comparison [required] (enum): Defines how to compare key fields for enrichment table lookups. Allowed enum values: equals
        field [required] (string): The log event field used for the lookup.
      path [required] (string): Path to the CSV file.
      schema [required] ([object]): Schema defining column names and their types.
        column [required] (string): The column name.
        type [required] (enum): Declares allowed data types for enrichment table columns. Allowed enum values: string,boolean,integer,float,date,timestamp
    geoip (object): Uses a GeoIP database to enrich logs based on an IP field.
      key_field [required] (string): Path to the IP field in the log.
      locale [required] (string): Locale used to resolve geographical names.
      path [required] (string): Path to the GeoIP database file.
    id [required] (string): The unique identifier for this processor.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    target [required] (string): Path where enrichment results should be stored in the log.
    type [required] (enum): The processor type. The value should always be enrichment_table. Allowed enum values: enrichment_table. default: enrichment_table
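A CSV-backed enrichment table that keys on a service field could be sketched as follows; the file path, column names, and target field are hypothetical:

```json
{
  "id": "enrich-owners",
  "type": "enrichment_table",
  "include": "*",
  "inputs": ["scrub-emails"],
  "target": "owner_info",
  "file": {
    "path": "/etc/enrichment/owners.csv",
    "encoding": { "type": "csv", "delimiter": ",", "includes_headers": true },
    "key": [{ "column": "service", "comparison": "equals", "field": "service" }],
    "schema": [
      { "column": "service", "type": "string" },
      { "column": "owner", "type": "string" }
    ]
  }
}
```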

  Option 15 (object): The reduce processor aggregates and merges logs based on matching keys and merge strategies.
    group_by [required] ([string]): A list of fields used to group log events for merging.
    id [required] (string): The unique identifier for this processor.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    merge_strategies [required] ([object]): List of merge strategies defining how values from grouped events should be combined.
      path [required] (string): The field path in the log event.
      strategy [required] (enum): The merge strategy to apply. Allowed enum values: discard,retain,sum,max,min,array,concat,concat_newline,concat_raw,shortest_array,longest_array,flat_unique
    type [required] (enum): The processor type. The value should always be reduce. Allowed enum values: reduce. default: reduce

  Option 16 (object): The throttle processor limits the number of events that pass through over a given time window.
    group_by ([string]): Optional list of fields used to group events before applying the threshold.
    id [required] (string): The unique identifier for this processor.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    threshold [required] (int64): The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.
    type [required] (enum): The processor type. The value should always be throttle. Allowed enum values: throttle. default: throttle
    window [required] (double): The time window in seconds over which the threshold applies.
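For instance, a throttle processor allowing 1000 events per 60-second window per service could be sketched like this (illustrative values):

```json
{
  "id": "rate-limit",
  "type": "throttle",
  "include": "*",
  "inputs": ["enrich-owners"],
  "threshold": 1000,
  "window": 60.0,
  "group_by": ["service"]
}
```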

  Option 17 (object): The datadog_tags processor includes or excludes specific Datadog tags in your logs.
    action [required] (enum): The action to take on tags with matching keys. Allowed enum values: include,exclude
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    keys [required] ([string]): A list of tag keys.
    mode [required] (enum): The processing mode. Allowed enum values: filter
    type [required] (enum): The processor type. The value should always be datadog_tags. Allowed enum values: datadog_tags. default: datadog_tags

sources [required] ([<oneOf>]): A list of configured data sources for the pipeline.
  Option 1 (object): The kafka source ingests data from Apache Kafka topics.
    group_id [required] (string): Consumer group ID used by the Kafka client.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    librdkafka_options ([object]): Optional list of advanced Kafka client configuration options, defined as key-value pairs.
      name [required] (string): The name of the librdkafka configuration option to set.
      value [required] (string): The value assigned to the specified librdkafka configuration option.
    sasl (object): Specifies the SASL mechanism for authenticating with a Kafka cluster.
      mechanism (enum): SASL mechanism used for Kafka authentication. Allowed enum values: PLAIN,SCRAM-SHA-256,SCRAM-SHA-512
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    topics [required] ([string]): A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.
    type [required] (enum): The source type. The value should always be kafka. Allowed enum values: kafka. default: kafka
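A kafka source entry could be sketched as below; the group ID, topic, certificate paths, and the librdkafka option value are illustrative (fetch.message.max.bytes is a standard librdkafka setting, but the value shown is an assumption):

```json
{
  "id": "kafka-source",
  "type": "kafka",
  "group_id": "observability-pipelines",
  "topics": ["app-logs"],
  "sasl": { "mechanism": "SCRAM-SHA-256" },
  "librdkafka_options": [
    { "name": "fetch.message.max.bytes", "value": "1048576" }
  ],
  "tls": {
    "ca_file": "/etc/certs/ca.crt",
    "crt_file": "/etc/certs/client.crt",
    "key_file": "/etc/certs/client.key"
  }
}
```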

  Option 2 (object): The datadog_agent source collects logs from the Datadog Agent.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be datadog_agent. Allowed enum values: datadog_agent. default: datadog_agent
  Option 3 (object): The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS is supported for secure transmission.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. Always splunk_tcp. Allowed enum values: splunk_tcp. default: splunk_tcp
  Option 4 (object): The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. Always splunk_hec. Allowed enum values: splunk_hec. default: splunk_hec
  Option 5 (object): The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS authentication and TLS encryption.
    auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
      assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
      external_id (string): A unique identifier for cross-account role assumption.
      session_name (string): A session identifier used for logging and tracing the assumed role session.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    region [required] (string): AWS region where the S3 bucket resides.
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. Always amazon_s3. Allowed enum values: amazon_s3. default: amazon_s3

  Option 6 (object): The fluentd source ingests logs from a Fluentd-compatible service.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be fluentd. Allowed enum values: fluentd. default: fluentd
  Option 7 (object): The fluent_bit source ingests logs from Fluent Bit.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be fluent_bit. Allowed enum values: fluent_bit. default: fluent_bit

  Option 8 (object): The http_server source collects logs over HTTP POST from external services.
    auth_strategy [required] (enum): HTTP authentication method. Allowed enum values: none,plain
    decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
    id [required] (string): Unique ID for the HTTP server source.
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be http_server. Allowed enum values: http_server. default: http_server
  Option 9 (object): The sumo_logic source receives logs from Sumo Logic collectors.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    type [required] (enum): The source type. The value should always be sumo_logic. Allowed enum values: sumo_logic. default: sumo_logic

  Option 10 (object): The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp,udp
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be rsyslog. Allowed enum values: rsyslog. default: rsyslog
  Option 11 (object): The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp,udp
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be syslog_ng. Allowed enum values: syslog_ng. default: syslog_ng

  Option 12 (object): The amazon_data_firehose source ingests logs from AWS Data Firehose.
    auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
      assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
      external_id (string): A unique identifier for cross-account role assumption.
      session_name (string): A session identifier used for logging and tracing the assumed role session.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be amazon_data_firehose. Allowed enum values: amazon_data_firehose. default: amazon_data_firehose
  Option 13 (object): The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.
    auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
      credentials_file [required] (string): Path to the GCP service account key file.
    decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    project [required] (string): The GCP project ID that owns the Pub/Sub subscription.
    subscription [required] (string): The Pub/Sub subscription name from which messages are consumed.
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be google_pubsub. Allowed enum values: google_pubsub. default: google_pubsub
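A google_pubsub source entry could be sketched as follows; the project, subscription, and key-file path are hypothetical placeholders:

```json
{
  "id": "pubsub-source",
  "type": "google_pubsub",
  "project": "my-gcp-project",
  "subscription": "logs-subscription",
  "decoding": "json",
  "auth": { "credentials_file": "/etc/gcp/service-account.json" }
}
```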

  Option 14 (object): The http_client source scrapes logs from HTTP endpoints at regular intervals.
    auth_strategy (enum): Optional authentication strategy for HTTP requests. Allowed enum values: basic,bearer
    decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    scrape_interval_secs (int64): The interval (in seconds) between HTTP scrape requests.
    scrape_timeout_secs (int64): The timeout (in seconds) for each scrape request.
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be http_client. Allowed enum values: http_client. default: http_client
  Option 15 (object): The logstash source ingests logs from a Logstash forwarder.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be logstash. Allowed enum values: logstash. default: logstash

name [required] (string): Name of the pipeline.
id [required] (string): Unique identifier for the pipeline.
type [required] (string): The resource type identifier. For pipeline resources, this should always be set to pipelines. default: pipelines
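Putting the pieces together, a complete request body nests these objects as shown in the sketch below; the component IDs, pipeline name, and filter query are illustrative values, not an excerpt from the examples file:

```json
{
  "data": {
    "type": "pipelines",
    "attributes": {
      "name": "main-pipeline",
      "config": {
        "sources": [
          { "id": "datadog-agent-source", "type": "datadog_agent" }
        ],
        "processors": [
          {
            "id": "filter-errors",
            "type": "filter",
            "include": "status:error",
            "inputs": ["datadog-agent-source"]
          }
        ],
        "destinations": [
          {
            "id": "datadog-logs-destination",
            "type": "datadog_logs",
            "inputs": ["filter-errors"]
          }
        ]
      }
    }
  }
}
```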
" }, "400": { "json": { @@ -269,7 +269,7 @@ "type": "pipelines" } }, - "html": "
\n
\n
\n
\n

data [required]

\n
\n

object

\n

Contains the the pipeline configuration.

\n
\n
\n
\n
\n
\n

attributes [required]

\n
\n

object

\n

Defines the pipeline’s name and its components (sources, processors, and destinations).

\n
\n
\n
\n
\n
\n

config [required]

\n
\n

object

\n

Specifies the pipeline's configuration, including its sources, processors, and destinations.

\n
\n
\n
\n
\n
\n

destinations [required]

\n
\n

[ <oneOf>]

\n

A list of destination components where processed logs are sent.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

The datadog_logs destination forwards logs to Datadog Log Management.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be datadog_logs. \nAllowed enum values: datadog_logs

default: datadog_logs

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bucket [required]

\n
\n

string

\n

S3 bucket name.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique identifier for the destination component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

key_prefix

\n
\n

string

\n

Optional prefix for object keys.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

string

\n

AWS region of the S3 bucket.

\n
\n \n
\n
\n
\n
\n
\n

storage_class [required]

\n
\n

enum

\n

S3 storage class. \nAllowed enum values: STANDARD,REDUCED_REDUNDANCY,INTELLIGENT_TIERING,STANDARD_IA,EXPRESS_ONEZONE,ONEZONE_IA,GLACIER,GLACIER_IR,DEEP_ARCHIVE

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always amazon_s3. \nAllowed enum values: amazon_s3

default: amazon_s3

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket.\nIt requires a bucket name, GCP authentication, and metadata fields.

\n
\n
\n
\n
\n
\n

acl [required]

\n
\n

enum

\n

Access control list setting for objects written to the bucket. \nAllowed enum values: private,project-private,public-read,authenticated-read,bucket-owner-read,bucket-owner-full-control

\n
\n \n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

GCP credentials used to authenticate with Google Cloud Storage.

\n
\n
\n
\n
\n
\n

credentials_file [required]

\n
\n

string

\n

Path to the GCP service account key file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bucket [required]

\n
\n

string

\n

Name of the GCS bucket.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique identifier for the destination component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

key_prefix

\n
\n

string

\n

Optional prefix for object keys within the GCS bucket.

\n
\n \n
\n
\n
\n
\n
\n

metadata

\n
\n

[object]

\n

Custom metadata to attach to each object uploaded to the GCS bucket.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The metadata key.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The metadata value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

storage_class [required]

\n
\n

enum

\n

Storage class used for objects stored in GCS. \nAllowed enum values: STANDARD,NEARLINE,COLDLINE,ARCHIVE

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always google_cloud_storage. \nAllowed enum values: google_cloud_storage

default: google_cloud_storage

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 4

\n
\n

object

\n

The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).

\n
\n
\n
\n
\n
\n

auto_extract_timestamp

\n
\n

boolean

\n

If true, Splunk tries to extract timestamps from incoming log events.\nIf false, Splunk assigns the time the event was received.

\n
\n \n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

Encoding format for log events. \nAllowed enum values: json,raw_message

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

index

\n
\n

string

\n

Optional name of the Splunk index where logs are written.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

sourcetype

\n
\n

string

\n

The Splunk sourcetype to assign to log events.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always splunk_hec. \nAllowed enum values: splunk_hec

default: splunk_hec

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 5

\n
\n

object

\n

The sumo_logic destination forwards logs to Sumo Logic.

\n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

The output encoding format. \nAllowed enum values: json,raw_message,logfmt

\n
\n \n
\n
\n
\n
\n
\n

header_custom_fields

\n
\n

[object]

\n

A list of custom headers to include in the request to Sumo Logic.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The header field name.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The header field value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

header_host_name

\n
\n

string

\n

Optional override for the host name header.

\n
\n \n
\n
\n
\n
\n
\n

header_source_category

\n
\n

string

\n

Optional override for the source category header.

\n
\n \n
\n
\n
\n
\n
\n

header_source_name

\n
\n

string

\n

Optional override for the source name header.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be sumo_logic. \nAllowed enum values: sumo_logic

default: sumo_logic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6 (object): The elasticsearch destination writes logs to an Elasticsearch cluster.

  api_version (enum): The Elasticsearch API version to use. Set to auto to auto-detect. Allowed enum values: auto, v6, v7, v8.
  bulk_index (string): The index to write logs to in Elasticsearch.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be elasticsearch. Allowed enum values: elasticsearch. Default: elasticsearch.

Option 7 (object): The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.

  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  keepalive (int64): Optional socket keepalive duration in milliseconds.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The destination type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog.
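As an illustration of the TLS sub-object, here is a minimal rsyslog destination sketch; all IDs, paths, and the keepalive value are placeholders:

    {
      "id": "rsyslog-destination",
      "type": "rsyslog",
      "inputs": ["filter-processor"],
      "keepalive": 60000,
      "tls": {
        "crt_file": "/etc/certs/client.crt",
        "key_file": "/etc/certs/client.key",
        "ca_file": "/etc/certs/ca.crt"
      }
    }

The same tls shape (required crt_file, optional key_file and ca_file) recurs on most components below.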

Option 8 (object): The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.

  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  keepalive (int64): Optional socket keepalive duration in milliseconds.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The destination type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng.

Option 9 (object): The azure_storage destination forwards logs to an Azure Blob Storage container.

  blob_prefix (string): Optional prefix for blobs written to the container.
  container_name [required] (string): The name of the Azure Blob Storage container to store logs in.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be azure_storage. Allowed enum values: azure_storage. Default: azure_storage.

Option 10 (object): The microsoft_sentinel destination forwards logs to Microsoft Sentinel.

  client_id [required] (string): Azure AD client ID used for authentication.
  dcr_immutable_id [required] (string): The immutable ID of the Data Collection Rule (DCR).
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  table [required] (string): The name of the Log Analytics table where logs are sent.
  tenant_id [required] (string): Azure AD tenant ID.
  type [required] (enum): The destination type. The value should always be microsoft_sentinel. Allowed enum values: microsoft_sentinel. Default: microsoft_sentinel.
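A minimal microsoft_sentinel sketch using only the fields above; the GUIDs, DCR ID, and table name are placeholders, not real resources:

    {
      "id": "sentinel-destination",
      "type": "microsoft_sentinel",
      "inputs": ["ocsf-mapper-processor"],
      "client_id": "00000000-0000-0000-0000-000000000000",
      "tenant_id": "00000000-0000-0000-0000-000000000000",
      "dcr_immutable_id": "dcr-000000000000000000000000000000",
      "table": "Custom-OPLogs"
    }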

Option 11 (object): The google_chronicle destination sends logs to Google Chronicle.

  auth [required] (object): GCP credentials used to authenticate with Google Cloud services.
    credentials_file [required] (string): Path to the GCP service account key file.
  customer_id [required] (string): The Google Chronicle customer ID.
  encoding (enum): The encoding format for the logs sent to Chronicle. Allowed enum values: json, raw_message.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  log_type (string): The log type metadata associated with the Chronicle destination.
  type [required] (enum): The destination type. The value should always be google_chronicle. Allowed enum values: google_chronicle. Default: google_chronicle.

Option 12 (object): The new_relic destination sends logs to the New Relic platform.

  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  region [required] (enum): The New Relic region. Allowed enum values: us, eu.
  type [required] (enum): The destination type. The value should always be new_relic. Allowed enum values: new_relic. Default: new_relic.

Option 13 (object): The sentinel_one destination sends logs to SentinelOne.

  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  region [required] (enum): The SentinelOne region to send logs to. Allowed enum values: us, eu, ca, data_set_us.
  type [required] (enum): The destination type. The value should always be sentinel_one. Allowed enum values: sentinel_one. Default: sentinel_one.

Option 14 (object): The opensearch destination writes logs to an OpenSearch cluster.

  bulk_index (string): The index to write logs to.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be opensearch. Allowed enum values: opensearch. Default: opensearch.

Option 15 (object): The amazon_opensearch destination writes logs to Amazon OpenSearch.

  auth [required] (object): Authentication settings for the Amazon OpenSearch destination. The strategy field determines whether basic or AWS-based authentication is used.
    assume_role (string): The ARN of the role to assume (used with the aws strategy).
    aws_region (string): AWS region.
    external_id (string): External ID for the assumed role (used with the aws strategy).
    session_name (string): Session name for the assumed role (used with the aws strategy).
    strategy [required] (enum): The authentication strategy to use. Allowed enum values: basic, aws.
  bulk_index (string): The index to write logs to.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be amazon_opensearch. Allowed enum values: amazon_opensearch. Default: amazon_opensearch.
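A hedged sketch of the aws auth strategy on this destination; the role ARN, region, external ID, and session name are placeholders:

    {
      "id": "amazon-opensearch-destination",
      "type": "amazon_opensearch",
      "inputs": ["filter-processor"],
      "bulk_index": "pipeline-logs",
      "auth": {
        "strategy": "aws",
        "aws_region": "us-east-1",
        "assume_role": "arn:aws:iam::123456789012:role/opensearch-writer",
        "external_id": "example-external-id",
        "session_name": "op-pipeline-session"
      }
    }

With "strategy": "basic" instead, the AWS-specific keys would not apply.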

processors ([ <oneOf>]): A list of processors that transform or enrich log data.

Option 1 (object): The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The processor type. The value should always be filter. Allowed enum values: filter. Default: filter.
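Since the filter processor is the smallest complete processor, a sketch is short; the query and IDs are illustrative only:

    {
      "id": "filter-processor",
      "type": "filter",
      "include": "service:payments status:error",
      "inputs": ["datadog-agent-source"]
    }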

Option 2 (object): The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.

  field [required] (string): The name of the log field that contains a JSON string.
  id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The processor type. The value should always be parse_json. Allowed enum values: parse_json. Default: parse_json.

Option 3 (object): The quota processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.

  drop_events [required] (boolean): If set to true, logs that matched the quota filter and are sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  ignore_when_missing_partitions (boolean): If true, the processor skips quota checks when partition fields are missing from the logs.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
    enforce [required] (enum): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events.
    limit [required] (int64): The limit for quota enforcement.
  name [required] (string): Name of the quota.
  overflow_action (enum): The action to take when the quota is exceeded. Options:
    - drop: Drop the event.
    - no_action: Let the event pass through.
    - overflow_routing: Route to an overflow destination.
    Allowed enum values: drop, no_action, overflow_routing.
  overrides ([object]): A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.
    fields [required] ([object]): A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.
      name [required] (string): The field name.
      value [required] (string): The field value.
    limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
      enforce [required] (enum): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events.
      limit [required] (int64): The limit for quota enforcement.
  partition_fields ([string]): A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.
  type [required] (enum): The processor type. The value should always be quota. Allowed enum values: quota. Default: quota.
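To show how limit, partition_fields, and overrides fit together, here is a hedged sketch of a 10 GB daily byte quota tracked per service, with a higher event-based override for one hypothetical service; every name and number is a placeholder:

    {
      "id": "quota-processor",
      "type": "quota",
      "include": "*",
      "inputs": ["filter-processor"],
      "name": "daily-ingest-quota",
      "drop_events": true,
      "limit": { "enforce": "bytes", "limit": 10000000000 },
      "partition_fields": ["service"],
      "overrides": [
        {
          "fields": [ { "name": "service", "value": "checkout" } ],
          "limit": { "enforce": "events", "limit": 5000000 }
        }
      ]
    }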

Option 4 (object): The add_fields processor adds static key-value fields to logs.

  fields [required] ([object]): A list of static fields (key-value pairs) that are added to each log event processed by this component.
    name [required] (string): The field name.
    value [required] (string): The field value.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The processor type. The value should always be add_fields. Allowed enum values: add_fields. Default: add_fields.

Option 5 (object): The remove_fields processor deletes specified fields from logs.

  fields [required] ([string]): A list of field names to be removed from each log event.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The processor type. The value should always be remove_fields. Allowed enum values: remove_fields. Default: remove_fields.

Option 6 (object): The rename_fields processor changes field names.

  fields [required] ([object]): A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.
    destination [required] (string): The field name to assign the renamed value to.
    preserve_source [required] (boolean): Indicates whether the original field, as received from the source, should be kept (true) or removed (false) after renaming.
    source [required] (string): The original field name in the log event that should be renamed.
  id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The processor type. The value should always be rename_fields. Allowed enum values: rename_fields. Default: rename_fields.

Option 7 (object): The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log fields.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  metrics [required] ([object]): Configuration for generating individual metrics.
    group_by ([string]): Optional fields used to group the metric series.
    include [required] (string): Datadog filter query to match logs for metric generation.
    metric_type [required] (enum): Type of metric to create. Allowed enum values: count, gauge, distribution.
    name [required] (string): Name of the custom metric to be created.
    value [required] ( <oneOf>): Specifies how the value of the generated metric is computed.
      Option 1 (object): Strategy that increments a generated metric by one for each matching event.
        strategy [required] (enum): Increments the metric by 1 for each matching event. Allowed enum values: increment_by_one.
      Option 2 (object): Strategy that increments a generated metric based on the value of a log field.
        field [required] (string): Name of the log field containing the numeric value to increment the metric by.
        strategy [required] (enum): Uses a numeric field in the log event as the metric increment. Allowed enum values: increment_by_field.
  type [required] (enum): The processor type. Always generate_datadog_metrics. Allowed enum values: generate_datadog_metrics. Default: generate_datadog_metrics.
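A hedged sketch showing both value strategies; the metric names, queries, and the network.bytes_written field are hypothetical, not values taken from this document:

    {
      "id": "metrics-processor",
      "type": "generate_datadog_metrics",
      "include": "*",
      "inputs": ["quota-processor"],
      "metrics": [
        {
          "name": "pipeline.error_logs",
          "include": "status:error",
          "metric_type": "count",
          "group_by": ["service"],
          "value": { "strategy": "increment_by_one" }
        },
        {
          "name": "pipeline.response_bytes",
          "include": "service:web",
          "metric_type": "distribution",
          "value": { "strategy": "increment_by_field", "field": "network.bytes_written" }
        }
      ]
    }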

Option 8 (object): The sample processor allows probabilistic sampling of logs at a fixed rate.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  percentage (double): The percentage of logs to sample.
  rate (int64): Number of events to sample (1 in N).
  type [required] (enum): The processor type. The value should always be sample. Allowed enum values: sample. Default: sample.

Option 9 (object): The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.

  disable_library_rules (boolean): If set to true, disables the default Grok rules provided by Datadog.
  id [required] (string): A unique identifier for this processor.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  rules [required] ([object]): The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.
    match_rules [required] ([object]): A list of Grok parsing rules that define how to extract fields from the source field. Each rule must contain a name and a valid Grok pattern.
      name [required] (string): The name of the rule.
      rule [required] (string): The definition of the Grok rule.
    source [required] (string): The name of the field in the log event to apply the Grok rules to.
    support_rules [required] ([object]): A list of Grok helper rules that can be referenced by the parsing rules.
      name [required] (string): The name of the Grok helper rule.
      rule [required] (string): The definition of the Grok helper rule.
  type [required] (enum): The processor type. The value should always be parse_grok. Allowed enum values: parse_grok. Default: parse_grok.
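A structural sketch of the rules array; the Grok pattern itself is a made-up illustration of the syntax, not a pattern from the Datadog library, and all names are placeholders:

    {
      "id": "grok-processor",
      "type": "parse_grok",
      "include": "source:nginx",
      "inputs": ["datadog-agent-source"],
      "disable_library_rules": false,
      "rules": [
        {
          "source": "message",
          "match_rules": [
            {
              "name": "access_log",
              "rule": "%{notSpace:network.client.ip} %{word:http.method} %{notSpace:http.url}"
            }
          ],
          "support_rules": []
        }
      ]
    }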

Option 10 (object): The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  rules [required] ([object]): A list of rules for identifying and acting on sensitive data patterns.
    keyword_options (object): Configuration for keywords used to reinforce sensitive data pattern detection.
      keywords [required] ([string]): A list of keywords to match near the sensitive pattern.
      proximity [required] (int64): Maximum number of tokens between a keyword and a sensitive value match.
    name [required] (string): A name identifying the rule.
    on_match [required] ( <oneOf>): Defines what action to take when sensitive data is matched.
      Option 1 (object): Configuration for completely redacting matched sensitive data.
        action [required] (enum): Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. Allowed enum values: redact.
        options [required] (object): Configuration for fully redacting sensitive data.
          replace [required] (string): The replacement string substituted for redacted values.
      Option 2 (object): Configuration for hashing matched sensitive values.
        action [required] (enum): Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. Allowed enum values: hash.
        options (object): Options for the hash action.
      Option 3 (object): Configuration for partially redacting matched sensitive data.
        action [required] (enum): Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show the last 4 digits of a credit card). Allowed enum values: partial_redact.
        options [required] (object): Controls how partial redaction is applied, including character count and direction.
          characters [required] (int64): The number of characters the partial redaction applies to.
          direction [required] (enum): Indicates whether to redact characters from the first or last part of the matched value. Allowed enum values: first, last.
    pattern [required] ( <oneOf>): Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.
      Option 1 (object): Defines a custom regex-based pattern for identifying sensitive data in logs.
        options [required] (object): Options for defining a custom regex pattern.
          rule [required] (string): A regular expression used to detect sensitive values. Must be a valid regex.
        type [required] (enum): Indicates a custom regular expression is used for matching. Allowed enum values: custom.
      Option 2 (object): Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.
        options [required] (object): Options for selecting a predefined library pattern and enabling keyword support.
          id [required] (string): Identifier for a predefined pattern from the sensitive data scanner pattern library.
          use_recommended_keywords (boolean): Whether to augment the pattern with recommended keywords (optional).
        type [required] (enum): Indicates that a predefined library pattern is used. Allowed enum values: library.
    scope [required] ( <oneOf>): Determines which parts of the log the pattern-matching rule should be applied to.
      Option 1 (object): Includes only specific fields for sensitive data scanning.
        options [required] (object): Fields to which the scope rule applies.
          fields [required] ([string]): The list of fields in scope.
        target [required] (enum): Applies the rule only to included fields. Allowed enum values: include.
      Option 2 (object): Excludes specific fields from sensitive data scanning.
        options [required] (object): Fields to which the scope rule applies.
          fields [required] ([string]): The list of fields excluded from scanning.
        target [required] (enum): Excludes specific fields from processing. Allowed enum values: exclude.
      Option 3 (object): Applies scanning across all available fields.
        target [required] (enum): Applies the rule to all fields. Allowed enum values: all.
    tags [required] ([string]): Tags assigned to this rule for filtering and classification.
  type [required] (enum): The processor type. The value should always be sensitive_data_scanner. Allowed enum values: sensitive_data_scanner. Default: sensitive_data_scanner.
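Because a rule combines four sub-objects (pattern, scope, on_match, and optional keyword_options), a sketch helps show how they nest. This hedged example uses a custom regex with partial redaction; the regex, proximity, and field names are illustrative placeholders only:

    {
      "id": "sds-processor",
      "type": "sensitive_data_scanner",
      "include": "*",
      "inputs": ["grok-processor"],
      "rules": [
        {
          "name": "mask-credit-cards",
          "tags": ["sensitive:cc"],
          "pattern": {
            "type": "custom",
            "options": { "rule": "\\b(?:\\d[ -]*?){13,16}\\b" }
          },
          "scope": {
            "target": "include",
            "options": { "fields": ["message"] }
          },
          "on_match": {
            "action": "partial_redact",
            "options": { "characters": 12, "direction": "first" }
          },
          "keyword_options": {
            "keywords": ["card", "cc"],
            "proximity": 10
          }
        }
      ]
    }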

Option 11 (object): The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  mappings [required] ([object]): A list of mapping rules to convert events to the OCSF format.
    include [required] (string): A Datadog search query used to select the logs that this mapping should apply to.
    mapping [required] ( <oneOf>): Defines a single mapping rule for transforming logs into the OCSF schema.
      Option 1 (enum): Predefined library mappings for common log formats. Allowed enum values: CloudTrail Account Change, GCP Cloud Audit CreateBucket, GCP Cloud Audit CreateSink, GCP Cloud Audit SetIamPolicy, GCP Cloud Audit UpdateSink, Github Audit Log API Activity, Google Workspace Admin Audit addPrivilege, Microsoft 365 Defender Incident, Microsoft 365 Defender UserLoggedIn, Okta System Log Authentication, Palo Alto Networks Firewall Traffic.
  type [required] (enum): The processor type. The value should always be ocsf_mapper. Allowed enum values: ocsf_mapper. Default: ocsf_mapper.

Option 12 (object): The add_env_vars processor adds environment variable values to log events.

  id [required] (string): The unique identifier for this component. Used to reference this processor in the pipeline.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  type [required] (enum): The processor type. The value should always be add_env_vars. Allowed enum values: add_env_vars. Default: add_env_vars.
  variables [required] ([object]): A list of environment variable mappings to apply to log fields.
    field [required] (string): The target field in the log event.
    name [required] (string): The name of the environment variable to read.

Option 13 (object): The dedupe processor removes duplicate fields in log events.

  fields [required] ([string]): A list of log field paths to check for duplicates.
  id [required] (string): The unique identifier for this processor.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  mode [required] (enum): The deduplication mode to apply to the fields. Allowed enum values: match, ignore.
  type [required] (enum): The processor type. The value should always be dedupe. Allowed enum values: dedupe. Default: dedupe.

Option 14 (object): The enrichment_table processor enriches logs using a static CSV file or GeoIP database.

  file (object): Defines a static enrichment table loaded from a CSV file.
    encoding [required] (object): File encoding format.
      delimiter [required] (string): The delimiter used between fields in the file.
      includes_headers [required] (boolean): Whether the file includes a header row.
      type [required] (enum): Specifies the encoding format (e.g., CSV) used for enrichment tables. Allowed enum values: csv.
    key [required] ([object]): Key fields used to look up enrichment values.
      column [required] (string): The enrichment table column to compare against.
      comparison [required] (enum): Defines how to compare key fields for enrichment table lookups. Allowed enum values: equals.
      field [required] (string): The log event field whose value is used for the lookup.
    path [required] (string): Path to the CSV file.
    schema [required] ([object]): Schema defining column names and their types.
      column [required] (string): The column name.
      type [required] (enum): Declares allowed data types for enrichment table columns. Allowed enum values: string, boolean, integer, float, date, timestamp.
  geoip (object): Uses a GeoIP database to enrich logs based on an IP field.
    key_field [required] (string): Path to the IP field in the log.
    locale [required] (string): Locale used to resolve geographical names.
    path [required] (string): Path to the GeoIP database file.
  id [required] (string): The unique identifier for this processor.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  target [required] (string): Path where enrichment results should be stored in the log.
  type [required] (enum): The processor type. The value should always be enrichment_table. Allowed enum values: enrichment_table. Default: enrichment_table.
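A hedged sketch of the CSV variant; the file path, column names, and lookup field are hypothetical placeholders (the geoip variant would replace the file object with a geoip object):

    {
      "id": "enrichment-processor",
      "type": "enrichment_table",
      "include": "*",
      "inputs": ["sds-processor"],
      "target": "enrichment.service_owner",
      "file": {
        "path": "/etc/pipeline/service_owners.csv",
        "encoding": { "type": "csv", "delimiter": ",", "includes_headers": true },
        "key": [
          { "column": "service_name", "comparison": "equals", "field": "service" }
        ],
        "schema": [
          { "column": "service_name", "type": "string" },
          { "column": "owner", "type": "string" }
        ]
      }
    }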

Option 15 (object): The reduce processor aggregates and merges logs based on matching keys and merge strategies.

  group_by [required] ([string]): A list of fields used to group log events for merging.
  id [required] (string): The unique identifier for this processor.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  merge_strategies [required] ([object]): List of merge strategies defining how values from grouped events should be combined.
    path [required] (string): The field path in the log event.
    strategy [required] (enum): The merge strategy to apply. Allowed enum values: discard, retain, sum, max, min, array, concat, concat_newline, concat_raw, shortest_array, longest_array, flat_unique.
  type [required] (enum): The processor type. The value should always be reduce. Allowed enum values: reduce. Default: reduce.

Option 16 (object): The throttle processor limits the number of events that pass through over a given time window.

  group_by ([string]): Optional list of fields used to group events for rate limiting.
  id [required] (string): The unique identifier for this processor.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  threshold [required] (int64): The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.
  type [required] (enum): The processor type. The value should always be throttle. Allowed enum values: throttle. Default: throttle.
  window [required] (double): The time window in seconds over which the threshold applies.
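A minimal sketch allowing at most 1000 events per 60-second window per service; all values are illustrative:

    {
      "id": "throttle-processor",
      "type": "throttle",
      "include": "*",
      "inputs": ["enrichment-processor"],
      "threshold": 1000,
      "window": 60.0,
      "group_by": ["service"]
    }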

sources [required] ([ <oneOf>]): A list of configured data sources for the pipeline.

Option 1 (object): The kafka source ingests data from Apache Kafka topics.

  group_id [required] (string): Consumer group ID used by the Kafka client.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  librdkafka_options ([object]): Optional list of advanced Kafka client configuration options, defined as key-value pairs.
    name [required] (string): The name of the librdkafka configuration option to set.
    value [required] (string): The value assigned to the specified librdkafka configuration option.
  sasl (object): Specifies the SASL mechanism for authenticating with a Kafka cluster.
    mechanism (enum): SASL mechanism used for Kafka authentication. Allowed enum values: PLAIN, SCRAM-SHA-256, SCRAM-SHA-512.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  topics [required] ([string]): A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.
  type [required] (enum): The source type. The value should always be kafka. Allowed enum values: kafka. Default: kafka.
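A hedged sketch of a kafka source; topic names, group ID, and certificate paths are placeholders, and the librdkafka option shown (fetch.message.max.bytes) is one example of a standard librdkafka setting:

    {
      "id": "kafka-source",
      "type": "kafka",
      "group_id": "op-pipeline-consumer",
      "topics": ["app-logs", "audit-logs"],
      "sasl": { "mechanism": "SCRAM-SHA-256" },
      "librdkafka_options": [
        { "name": "fetch.message.max.bytes", "value": "1048576" }
      ],
      "tls": {
        "crt_file": "/etc/certs/kafka-client.crt",
        "key_file": "/etc/certs/kafka-client.key",
        "ca_file": "/etc/certs/ca.crt"
      }
    }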

Option 2 (object): The datadog_agent source collects logs from the Datadog Agent.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be datadog_agent. Allowed enum values: datadog_agent. Default: datadog_agent.

Option 3 (object): The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS is supported for secure transmission.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. Always splunk_tcp. Allowed enum values: splunk_tcp. Default: splunk_tcp.

Option 4 (object): The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec.

Option 5 (object): The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS authentication and TLS encryption.

  auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
    assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    external_id (string): A unique identifier for cross-account role assumption.
    session_name (string): A session identifier used for logging and tracing the assumed role session.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  region [required] (string): AWS region where the S3 bucket resides.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3.

Option 6 (object): The fluentd source ingests logs from a Fluentd-compatible service.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be fluentd. Allowed enum values: fluentd. Default: fluentd.

Option 7 (object): The fluent_bit source ingests logs from Fluent Bit.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be fluent_bit. Allowed enum values: fluent_bit. Default: fluent_bit.

Option 8 (object): The http_server source collects logs over HTTP POST from external services.

  auth_strategy [required] (enum): HTTP authentication method. Allowed enum values: none, plain.
  decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog.
  id [required] (string): Unique ID for the HTTP server source.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be http_server. Allowed enum values: http_server. Default: http_server.
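A minimal http_server source sketch; the certificate paths are placeholders:

    {
      "id": "http-server-source",
      "type": "http_server",
      "auth_strategy": "plain",
      "decoding": "json",
      "tls": {
        "crt_file": "/etc/certs/server.crt",
        "key_file": "/etc/certs/server.key",
        "ca_file": "/etc/certs/ca.crt"
      }
    }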

Option 9 (object): The sumo_logic source receives logs from Sumo Logic collectors.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  type [required] (enum): The source type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic.

Option 10 (object): The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog.

Option 11 (object): The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.

  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng.

Option 12 (object): The amazon_data_firehose source ingests logs from AWS Data Firehose.

  auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
    assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    external_id (string): A unique identifier for cross-account role assumption.
    session_name (string): A session identifier used for logging and tracing the assumed role session.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be amazon_data_firehose. Allowed enum values: amazon_data_firehose. Default: amazon_data_firehose.

Option 13 (object): The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.

  auth [required] (object): GCP credentials used to authenticate with Google Cloud services.
    credentials_file [required] (string): Path to the GCP service account key file.
  decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  project [required] (string): The GCP project ID that owns the Pub/Sub subscription.
  subscription [required] (string): The Pub/Sub subscription name from which messages are consumed.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be google_pubsub. Allowed enum values: google_pubsub. Default: google_pubsub.

Option 14

\n
\n

object

\n

The http_client source scrapes logs from HTTP endpoints at regular intervals.

\n
\n
\n
\n
\n
\n

auth_strategy

\n
\n

enum

\n

Optional authentication strategy for HTTP requests. \nAllowed enum values: basic,bearer

\n
\n \n
\n
\n
\n
\n
\n

decoding [required]

\n
\n

enum

\n

The decoding format used to interpret incoming logs. \nAllowed enum values: bytes,gelf,json,syslog

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

scrape_interval_secs

\n
\n

int64

\n

The interval (in seconds) between HTTP scrape requests.

\n
\n \n
\n
\n
\n
\n
\n

scrape_timeout_secs

\n
\n

int64

\n

The timeout (in seconds) for each scrape request.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be http_client. \nAllowed enum values: http_client

default: http_client

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 15

\n
\n

object

\n

The logstash source ingests logs from a Logstash forwarder.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be logstash. \nAllowed enum values: logstash

default: logstash

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

Name of the pipeline.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

string

\n

The resource type identifier. For pipeline resources, this should always be set to pipelines.

default: pipelines

\n
\n \n
\n
\n
\n
" + "html": "
data (object, required): Contains the pipeline configuration.
attributes (object, required): Defines the pipeline’s name and its components (sources, processors, and destinations).
config (object, required): Specifies the pipeline's configuration, including its sources, processors, and destinations.
destinations ([<oneOf>], required): A list of destination components where processed logs are sent.

Option 1 (object): The datadog_logs destination forwards logs to Datadog Log Management.
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- type (enum, required): The destination type. The value should always be datadog_logs. Allowed enum values: datadog_logs. Default: datadog_logs.
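To make the structure concrete, here is a minimal sketch of an Option 1 destination entry. All IDs are hypothetical placeholders, and the inputs entry assumes a source component with ID "datadog-agent-source" is defined elsewhere in the same config.

    {
      "id": "datadog-logs-destination",
      "inputs": ["datadog-agent-source"],
      "type": "datadog_logs"
    }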

Option 2 (object): The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.
- auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
  - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
  - external_id (string): A unique identifier for cross-account role assumption.
  - session_name (string): A session identifier used for logging and tracing the assumed role session.
- bucket (string, required): S3 bucket name.
- id (string, required): Unique identifier for the destination component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- key_prefix (string): Optional prefix for object keys.
- region (string, required): AWS region of the S3 bucket.
- storage_class (enum, required): S3 storage class. Allowed enum values: STANDARD, REDUCED_REDUNDANCY, INTELLIGENT_TIERING, STANDARD_IA, EXPRESS_ONEZONE, ONEZONE_IA, GLACIER, GLACIER_IR, DEEP_ARCHIVE.
- tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
  - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
  - crt_file (string, required): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
  - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
- type (enum, required): The destination type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3.
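A hypothetical sketch of an Option 2 destination follows; the bucket, role ARN, and IDs are placeholders, and auth may be omitted entirely to fall back on the system’s default credentials.

    {
      "id": "s3-archive-destination",
      "inputs": ["datadog-agent-source"],
      "type": "amazon_s3",
      "bucket": "example-log-archive",
      "region": "us-east-1",
      "storage_class": "STANDARD",
      "key_prefix": "datadog/logs/",
      "auth": {
        "assume_role": "arn:aws:iam::123456789012:role/example-archiver",
        "external_id": "example-external-id",
        "session_name": "pipeline-archive-session"
      }
    }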

Option 3 (object): The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields.
- acl (enum, required): Access control list setting for objects written to the bucket. Allowed enum values: private, project-private, public-read, authenticated-read, bucket-owner-read, bucket-owner-full-control.
- auth (object, required): GCP credentials used to authenticate with Google Cloud Storage.
  - credentials_file (string, required): Path to the GCP service account key file.
- bucket (string, required): Name of the GCS bucket.
- id (string, required): Unique identifier for the destination component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- key_prefix (string): Optional prefix for object keys within the GCS bucket.
- metadata ([object]): Custom metadata to attach to each object uploaded to the GCS bucket.
  - name (string, required): The metadata key.
  - value (string, required): The metadata value.
- storage_class (enum, required): Storage class used for objects stored in GCS. Allowed enum values: STANDARD, NEARLINE, COLDLINE, ARCHIVE.
- type (enum, required): The destination type. Always google_cloud_storage. Allowed enum values: google_cloud_storage. Default: google_cloud_storage.

Option 4 (object): The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).
- auto_extract_timestamp (boolean): If true, Splunk tries to extract timestamps from incoming log events. If false, Splunk assigns the time the event was received.
- encoding (enum): Encoding format for log events. Allowed enum values: json, raw_message.
- id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
- index (string): Optional name of the Splunk index where logs are written.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- sourcetype (string): The Splunk sourcetype to assign to log events.
- type (enum, required): The destination type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec.

Option 5 (object): The sumo_logic destination forwards logs to Sumo Logic.
- encoding (enum): The output encoding format. Allowed enum values: json, raw_message, logfmt.
- header_custom_fields ([object]): A list of custom headers to include in the request to Sumo Logic.
  - name (string, required): The header field name.
  - value (string, required): The header field value.
- header_host_name (string): Optional override for the host name header.
- header_source_category (string): Optional override for the source category header.
- header_source_name (string): Optional override for the source name header.
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- type (enum, required): The destination type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic.

Option 6 (object): The elasticsearch destination writes logs to an Elasticsearch cluster.
- api_version (enum): The Elasticsearch API version to use. Set to auto to auto-detect. Allowed enum values: auto, v6, v7, v8.
- bulk_index (string): The index to write logs to in Elasticsearch.
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- type (enum, required): The destination type. The value should always be elasticsearch. Allowed enum values: elasticsearch. Default: elasticsearch.

Option 7 (object): The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- keepalive (int64): Optional socket keepalive duration in milliseconds.
- tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (same fields as the tls object in Option 2).
- type (enum, required): The destination type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog.

Option 8 (object): The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- keepalive (int64): Optional socket keepalive duration in milliseconds.
- tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (same fields as the tls object in Option 2).
- type (enum, required): The destination type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng.

Option 9 (object): The azure_storage destination forwards logs to an Azure Blob Storage container.
- blob_prefix (string): Optional prefix for blobs written to the container.
- container_name (string, required): The name of the Azure Blob Storage container to store logs in.
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- type (enum, required): The destination type. The value should always be azure_storage. Allowed enum values: azure_storage. Default: azure_storage.

Option 10 (object): The microsoft_sentinel destination forwards logs to Microsoft Sentinel.
- client_id (string, required): Azure AD client ID used for authentication.
- dcr_immutable_id (string, required): The immutable ID of the Data Collection Rule (DCR).
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- table (string, required): The name of the Log Analytics table where logs are sent.
- tenant_id (string, required): Azure AD tenant ID.
- type (enum, required): The destination type. The value should always be microsoft_sentinel. Allowed enum values: microsoft_sentinel. Default: microsoft_sentinel.

Option 11 (object): The google_chronicle destination sends logs to Google Chronicle.
- auth (object, required): GCP credentials used to authenticate with Google Cloud Storage.
  - credentials_file (string, required): Path to the GCP service account key file.
- customer_id (string, required): The Google Chronicle customer ID.
- encoding (enum): The encoding format for the logs sent to Chronicle. Allowed enum values: json, raw_message.
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- log_type (string): The log type metadata associated with the Chronicle destination.
- type (enum, required): The destination type. The value should always be google_chronicle. Allowed enum values: google_chronicle. Default: google_chronicle.

Option 12 (object): The new_relic destination sends logs to the New Relic platform.
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- region (enum, required): The New Relic region. Allowed enum values: us, eu.
- type (enum, required): The destination type. The value should always be new_relic. Allowed enum values: new_relic. Default: new_relic.

Option 13 (object): The sentinel_one destination sends logs to SentinelOne.
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- region (enum, required): The SentinelOne region to send logs to. Allowed enum values: us, eu, ca, data_set_us.
- type (enum, required): The destination type. The value should always be sentinel_one. Allowed enum values: sentinel_one. Default: sentinel_one.

Option 14 (object): The opensearch destination writes logs to an OpenSearch cluster.
- bulk_index (string): The index to write logs to.
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- type (enum, required): The destination type. The value should always be opensearch. Allowed enum values: opensearch. Default: opensearch.

Option 15 (object): The amazon_opensearch destination writes logs to Amazon OpenSearch.
- auth (object, required): Authentication settings for the Amazon OpenSearch destination. The strategy field determines whether basic or AWS-based authentication is used.
  - assume_role (string): The ARN of the role to assume (used with the aws strategy).
  - aws_region (string): AWS region.
  - external_id (string): External ID for the assumed role (used with the aws strategy).
  - session_name (string): Session name for the assumed role (used with the aws strategy).
  - strategy (enum, required): The authentication strategy to use. Allowed enum values: basic, aws.
- bulk_index (string): The index to write logs to.
- id (string, required): The unique identifier for this component.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- type (enum, required): The destination type. The value should always be amazon_opensearch. Allowed enum values: amazon_opensearch. Default: amazon_opensearch.

processors ([<oneOf>]): A list of processors that transform or enrich log data.

Option 1 (object): The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.
- id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
- include (string, required): A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- type (enum, required): The processor type. The value should always be filter. Allowed enum values: filter. Default: filter.
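A minimal sketch of a filter processor entry follows; the include query is an illustrative Datadog search query, and the IDs are placeholders that assume a source with ID "datadog-agent-source".

    {
      "id": "filter-errors",
      "type": "filter",
      "include": "status:error",
      "inputs": ["datadog-agent-source"]
    }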

Option 2 (object): The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.
- field (string, required): The name of the log field that contains a JSON string.
- id (string, required): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- type (enum, required): The processor type. The value should always be parse_json. Allowed enum values: parse_json. Default: parse_json.

Option 3 (object): The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.
- drop_events (boolean, required): If set to true, logs that matched the quota filter and are sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline.
- id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
- ignore_when_missing_partitions (boolean): If true, the processor skips quota checks when partition fields are missing from the logs.
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- limit (object, required): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
  - enforce (enum, required): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events.
  - limit (int64, required): The limit for quota enforcement.
- name (string, required): Name of the quota.
- overflow_action (enum): The action to take when the quota is exceeded: drop (drop the event), no_action (let the event pass through), or overflow_routing (route to an overflow destination). Allowed enum values: drop, no_action, overflow_routing.
- overrides ([object]): A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.
  - fields ([object], required): A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.
    - name (string, required): The field name.
    - value (string, required): The field value.
  - limit (object, required): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
    - enforce (enum, required): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events.
    - limit (int64, required): The limit for quota enforcement.
- partition_fields ([string]): A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.
- type (enum, required): The processor type. The value should always be quota. Allowed enum values: quota. Default: quota.
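The following sketch shows a hypothetical quota entry that enforces a 10 GiB byte limit per service, with a stricter event-based override for one service; all names, IDs, queries, and limits are placeholders.

    {
      "id": "daily-quota",
      "type": "quota",
      "name": "per-service-daily-quota",
      "include": "*",
      "inputs": ["filter-errors"],
      "drop_events": true,
      "partition_fields": ["service"],
      "limit": { "enforce": "bytes", "limit": 10737418240 },
      "overrides": [
        {
          "fields": [{ "name": "service", "value": "payments" }],
          "limit": { "enforce": "events", "limit": 1000000 }
        }
      ]
    }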

Option 4 (object): The add_fields processor adds static key-value fields to logs.
- fields ([object], required): A list of static fields (key-value pairs) that are added to each log event processed by this component.
  - name (string, required): The field name.
  - value (string, required): The field value.
- id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- type (enum, required): The processor type. The value should always be add_fields. Allowed enum values: add_fields. Default: add_fields.

Option 5 (object): The remove_fields processor deletes specified fields from logs.
- fields ([string], required): A list of field names to be removed from each log event.
- id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- type (enum, required): The processor type. The value should always be remove_fields. Allowed enum values: remove_fields. Default: remove_fields.

Option 6 (object): The rename_fields processor changes field names.
- fields ([object], required): A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.
  - destination (string, required): The field name to assign the renamed value to.
  - preserve_source (boolean, required): Indicates whether the original field received from the source should be kept (true) or removed (false) after renaming.
  - source (string, required): The original field name in the log event that should be renamed.
- id (string, required): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- type (enum, required): The processor type. The value should always be rename_fields. Allowed enum values: rename_fields. Default: rename_fields.

Option 7 (object): The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log fields.
- id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this processor.
- metrics ([object], required): Configuration for generating individual metrics.
  - group_by ([string]): Optional fields used to group the metric series.
  - include (string, required): Datadog filter query to match logs for metric generation.
  - metric_type (enum, required): Type of metric to create. Allowed enum values: count, gauge, distribution.
  - name (string, required): Name of the custom metric to be created.
  - value (<oneOf>, required): Specifies how the value of the generated metric is computed.
    - Option 1 (object): Strategy that increments a generated metric by one for each matching event.
      - strategy (enum, required): Increments the metric by 1 for each matching event. Allowed enum values: increment_by_one.
    - Option 2 (object): Strategy that increments a generated metric based on the value of a log field.
      - field (string, required): Name of the log field containing the numeric value to increment the metric by.
      - strategy (enum, required): Uses a numeric field in the log event as the metric increment. Allowed enum values: increment_by_field.
- type (enum, required): The processor type. Always generate_datadog_metrics. Allowed enum values: generate_datadog_metrics. Default: generate_datadog_metrics.
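A sketch of a generate_datadog_metrics entry that emits a distribution from a numeric log field; the metric name, queries, and field names are illustrative placeholders.

    {
      "id": "request-metrics",
      "type": "generate_datadog_metrics",
      "include": "*",
      "inputs": ["daily-quota"],
      "metrics": [
        {
          "name": "example.request.duration",
          "metric_type": "distribution",
          "include": "service:web-app",
          "group_by": ["status"],
          "value": { "strategy": "increment_by_field", "field": "duration_ms" }
        }
      ]
    }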

Option 8 (object): The sample processor allows probabilistic sampling of logs at a fixed rate.
- id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- percentage (double): The percentage of logs to sample.
- rate (int64): Number of events to sample (1 in N).
- type (enum, required): The processor type. The value should always be sample. Allowed enum values: sample. Default: sample.
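The schema lists both percentage and rate as optional, so presumably you set one of the two per processor. A sketch using percentage (all values placeholders); the equivalent 1-in-N form would set "rate": 10 instead.

    {
      "id": "sample-info-logs",
      "type": "sample",
      "include": "status:info",
      "inputs": ["request-metrics"],
      "percentage": 10.0
    }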

Option 9 (object): The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.
- disable_library_rules (boolean): If set to true, disables the default Grok rules provided by Datadog.
- id (string, required): A unique identifier for this processor.
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- rules ([object], required): The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.
  - match_rules ([object], required): A list of Grok parsing rules that define how to extract fields from the source field. Each rule must contain a name and a valid Grok pattern.
    - name (string, required): The name of the rule.
    - rule (string, required): The definition of the Grok rule.
  - source (string, required): The name of the field in the log event to apply the Grok rules to.
  - support_rules ([object], required): A list of Grok helper rules that can be referenced by the parsing rules.
    - name (string, required): The name of the Grok helper rule.
    - rule (string, required): The definition of the Grok helper rule.
- type (enum, required): The processor type. The value should always be parse_grok. Allowed enum values: parse_grok. Default: parse_grok.
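A hypothetical parse_grok entry; the Grok expression below is illustrative only and assumes Datadog-style Grok matcher names, and all IDs and queries are placeholders.

    {
      "id": "grok-access-logs",
      "type": "parse_grok",
      "include": "source:nginx",
      "inputs": ["sample-info-logs"],
      "disable_library_rules": false,
      "rules": [
        {
          "source": "message",
          "match_rules": [
            {
              "name": "access_line",
              "rule": "%{ip:client_ip} %{word:method} %{notSpace:path} %{integer:status}"
            }
          ],
          "support_rules": []
        }
      ]
    }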

Option 10 (object): The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.
- id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- rules ([object], required): A list of rules for identifying and acting on sensitive data patterns.
  - keyword_options (object): Configuration for keywords used to reinforce sensitive data pattern detection.
    - keywords ([string], required): A list of keywords to match near the sensitive pattern.
    - proximity (int64, required): Maximum number of tokens between a keyword and a sensitive value match.
  - name (string, required): A name identifying the rule.
  - on_match (<oneOf>, required): Defines what action to take when sensitive data is matched.
    - Option 1 (object): Configuration for completely redacting matched sensitive data.
      - action (enum, required): Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. Allowed enum values: redact.
      - options (object, required): Configuration for fully redacting sensitive data.
        - replace (string, required): The replacement string used in place of the matched sensitive data.
    - Option 2 (object): Configuration for hashing matched sensitive values.
      - action (enum, required): Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. Allowed enum values: hash.
      - options (object): Options for the hash action.
    - Option 3 (object): Configuration for partially redacting matched sensitive data.
      - action (enum, required): Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (for example, show the last 4 digits of a credit card). Allowed enum values: partial_redact.
      - options (object, required): Controls how partial redaction is applied, including character count and direction.
        - characters (int64, required): The number of characters to redact.
        - direction (enum, required): Indicates whether to redact characters from the first or last part of the matched value. Allowed enum values: first, last.
  - pattern (<oneOf>, required): Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.
    - Option 1 (object): Defines a custom regex-based pattern for identifying sensitive data in logs.
      - options (object, required): Options for defining a custom regex pattern.
        - rule (string, required): A regular expression used to detect sensitive values. Must be a valid regex.
      - type (enum, required): Indicates a custom regular expression is used for matching. Allowed enum values: custom.
    - Option 2 (object): Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.
      - options (object, required): Options for selecting a predefined library pattern and enabling keyword support.
        - id (string, required): Identifier for a predefined pattern from the sensitive data scanner pattern library.
        - use_recommended_keywords (boolean): Whether to augment the pattern with recommended keywords (optional).
      - type (enum, required): Indicates that a predefined library pattern is used. Allowed enum values: library.
  - scope (<oneOf>, required): Determines which parts of the log the pattern-matching rule should be applied to.
    - Option 1 (object): Includes only specific fields for sensitive data scanning.
      - options (object, required): Fields to which the scope rule applies.
        - fields ([string], required): The list of fields the scope rule applies to.
      - target (enum, required): Applies the rule only to included fields. Allowed enum values: include.
    - Option 2 (object): Excludes specific fields from sensitive data scanning.
      - options (object, required): Fields to which the scope rule applies.
        - fields ([string], required): The list of fields the scope rule applies to.
      - target (enum, required): Excludes specific fields from processing. Allowed enum values: exclude.
    - Option 3 (object): Applies scanning across all available fields.
      - target (enum, required): Applies the rule to all fields. Allowed enum values: all.
  - tags ([string], required): Tags assigned to this rule for filtering and classification.
- type (enum, required): The processor type. The value should always be sensitive_data_scanner. Allowed enum values: sensitive_data_scanner. Default: sensitive_data_scanner.
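A sketch of a sensitive_data_scanner entry using a custom regex pattern and partial redaction; the regex, field names, and character count are placeholders, not a vetted card-number matcher.

    {
      "id": "scrub-card-numbers",
      "type": "sensitive_data_scanner",
      "include": "*",
      "inputs": ["grok-access-logs"],
      "rules": [
        {
          "name": "mask-card-like-numbers",
          "tags": ["sensitive:card"],
          "pattern": { "type": "custom", "options": { "rule": "\\b\\d{13,16}\\b" } },
          "scope": { "target": "include", "options": { "fields": ["message"] } },
          "on_match": {
            "action": "partial_redact",
            "options": { "characters": 12, "direction": "first" }
          }
        }
      ]
    }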

Option 11 (object): The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.
- id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this processor.
- mappings ([object], required): A list of mapping rules to convert events to the OCSF format.
  - include (string, required): A Datadog search query used to select the logs that this mapping should apply to.
  - mapping (<oneOf>, required): Defines a single mapping rule for transforming logs into the OCSF schema.
    - Option 1 (enum): Predefined library mappings for common log formats. Allowed enum values: CloudTrail Account Change, GCP Cloud Audit CreateBucket, GCP Cloud Audit CreateSink, GCP Cloud Audit SetIamPolicy, GCP Cloud Audit UpdateSink, Github Audit Log API Activity, Google Workspace Admin Audit addPrivilege, Microsoft 365 Defender Incident, Microsoft 365 Defender UserLoggedIn, Okta System Log Authentication, Palo Alto Networks Firewall Traffic.
- type (enum, required): The processor type. The value should always be ocsf_mapper. Allowed enum values: ocsf_mapper. Default: ocsf_mapper.

Option 12 (object): The add_env_vars processor adds environment variable values to log events.
- id (string, required): The unique identifier for this component. Used to reference this processor in the pipeline.
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this processor.
- type (enum, required): The processor type. The value should always be add_env_vars. Allowed enum values: add_env_vars. Default: add_env_vars.
- variables ([object], required): A list of environment variable mappings to apply to log fields.
  - field (string, required): The target field in the log event.
  - name (string, required): The name of the environment variable to read.

Option 13 (object): The dedupe processor removes duplicate fields in log events.
- fields ([string], required): A list of log field paths to check for duplicates.
- id (string, required): The unique identifier for this processor.
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this processor.
- mode (enum, required): The deduplication mode to apply to the fields. Allowed enum values: match, ignore.
- type (enum, required): The processor type. The value should always be dedupe. Allowed enum values: dedupe. Default: dedupe.

Option 14 (object): The enrichment_table processor enriches logs using a static CSV file or GeoIP database.
- file (object): Defines a static enrichment table loaded from a CSV file.
  - encoding (object, required): File encoding format.
    - delimiter (string, required): The encoding delimiter.
    - includes_headers (boolean, required): Whether the file includes a header row.
    - type (enum, required): Specifies the encoding format (for example, CSV) used for enrichment tables. Allowed enum values: csv.
  - key ([object], required): Key fields used to look up enrichment values.
    - column (string, required): The enrichment table column to compare against.
    - comparison (enum, required): Defines how to compare key fields for enrichment table lookups. Allowed enum values: equals.
    - field (string, required): The log field whose value is used for the lookup.
  - path (string, required): Path to the CSV file.
  - schema ([object], required): Schema defining column names and their types.
    - column (string, required): The column name.
    - type (enum, required): Declares allowed data types for enrichment table columns. Allowed enum values: string, boolean, integer, float, date, timestamp.
- geoip (object): Uses a GeoIP database to enrich logs based on an IP field.
  - key_field (string, required): Path to the IP field in the log.
  - locale (string, required): Locale used to resolve geographical names.
  - path (string, required): Path to the GeoIP database file.
- id (string, required): The unique identifier for this processor.
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this processor.
- target (string, required): Path where enrichment results should be stored in the log.
- type (enum, required): The processor type. The value should always be enrichment_table. Allowed enum values: enrichment_table. Default: enrichment_table.
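A hypothetical enrichment_table entry that joins a CSV of service owners onto each log; the file path, column names, and log fields are placeholders.

    {
      "id": "enrich-owner",
      "type": "enrichment_table",
      "include": "*",
      "inputs": ["scrub-card-numbers"],
      "target": "owner_info",
      "file": {
        "path": "/etc/pipeline/service-owners.csv",
        "encoding": { "type": "csv", "delimiter": ",", "includes_headers": true },
        "key": [
          { "column": "service_name", "comparison": "equals", "field": "service" }
        ],
        "schema": [
          { "column": "service_name", "type": "string" },
          { "column": "team", "type": "string" }
        ]
      }
    }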

Option 15 (object): The reduce processor aggregates and merges logs based on matching keys and merge strategies.
- group_by ([string], required): A list of fields used to group log events for merging.
- id (string, required): The unique identifier for this processor.
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this processor.
- merge_strategies ([object], required): List of merge strategies defining how values from grouped events should be combined.
  - path (string, required): The field path in the log event.
  - strategy (enum, required): The merge strategy to apply. Allowed enum values: discard, retain, sum, max, min, array, concat, concat_newline, concat_raw, shortest_array, longest_array, flat_unique.
- type (enum, required): The processor type. The value should always be reduce. Allowed enum values: reduce. Default: reduce.
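A sketch of a reduce entry that collapses events sharing a request ID, summing one field and concatenating another; the grouping key and paths are placeholders.

    {
      "id": "merge-retries",
      "type": "reduce",
      "include": "*",
      "inputs": ["enrich-owner"],
      "group_by": ["request_id"],
      "merge_strategies": [
        { "path": "retry_count", "strategy": "sum" },
        { "path": "message", "strategy": "concat_newline" }
      ]
    }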

Option 16 (object): The throttle processor limits the number of events that pass through over a given time window.
- group_by ([string]): Optional list of fields used to group events before the threshold has been reached.
- id (string, required): The unique identifier for this processor.
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this processor.
- threshold (int64, required): The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.
- type (enum, required): The processor type. The value should always be throttle. Allowed enum values: throttle. Default: throttle.
- window (double, required): The time window in seconds over which the threshold applies.
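A sketch of a throttle entry allowing at most 1000 events per host in each 60-second window; the threshold, window, and grouping field are placeholders.

    {
      "id": "throttle-noisy-hosts",
      "type": "throttle",
      "include": "*",
      "inputs": ["merge-retries"],
      "threshold": 1000,
      "window": 60.0,
      "group_by": ["host"]
    }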

Option 17 (object): The datadog_tags processor includes or excludes specific Datadog tags in your logs.
- action (enum, required): The action to take on tags with matching keys. Allowed enum values: include, exclude.
- id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
- include (string, required): A Datadog search query used to determine which logs this processor targets.
- inputs ([string], required): A list of component IDs whose output is used as the input for this component.
- keys ([string], required): A list of tag keys.
- mode (enum, required): The processing mode. Allowed enum values: filter.
- type (enum, required): The processor type. The value should always be datadog_tags. Allowed enum values: datadog_tags. Default: datadog_tags.

sources ([<oneOf>], required): A list of configured data sources for the pipeline.

Option 1 (object): The kafka source ingests data from Apache Kafka topics.
- group_id (string, required): Consumer group ID used by the Kafka client.
- id (string, required): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
- librdkafka_options ([object]): Optional list of advanced Kafka client configuration options, defined as key-value pairs.
  - name (string, required): The name of the librdkafka configuration option to set.
  - value (string, required): The value assigned to the specified librdkafka configuration option.
- sasl (object): Specifies the SASL mechanism for authenticating with a Kafka cluster.
  - mechanism (enum): SASL mechanism used for Kafka authentication. Allowed enum values: PLAIN, SCRAM-SHA-256, SCRAM-SHA-512.
- tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
  - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
  - crt_file (string, required): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
  - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
- topics ([string], required): A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.
- type (enum, required): The source type. The value should always be kafka. Allowed enum values: kafka. Default: kafka.
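A sketch of a kafka source entry; the topics, group ID, and option values are placeholders (fetch.message.max.bytes is a standard librdkafka setting, used here purely as an example).

    {
      "id": "kafka-source",
      "type": "kafka",
      "group_id": "observability-pipelines-consumer",
      "topics": ["app-logs", "audit-logs"],
      "sasl": { "mechanism": "SCRAM-SHA-256" },
      "librdkafka_options": [
        { "name": "fetch.message.max.bytes", "value": "1048576" }
      ]
    }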

Option 2

\n
\n

object

\n

The datadog_agent source collects logs from the Datadog Agent.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be datadog_agent. \nAllowed enum values: datadog_agent

default: datadog_agent

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP.\nTLS is supported for secure transmission.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always splunk_tcp. \nAllowed enum values: splunk_tcp

default: splunk_tcp

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 4

\n
\n

object

\n

The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always splunk_hec. \nAllowed enum values: splunk_hec

default: splunk_hec

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 5

\n
\n

object

\n

The amazon_s3 source ingests logs from an Amazon S3 bucket.\nIt supports AWS authentication and TLS encryption.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

string

\n

AWS region where the S3 bucket resides.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always amazon_s3. \nAllowed enum values: amazon_s3

default: amazon_s3

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
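An illustrative amazon_s3 source using an assumed role; the ARN, external ID, and session name are placeholders. If the auth block were omitted, the system's default credentials would be used instead.

```json
{
  "id": "s3-source-1",
  "type": "amazon_s3",
  "region": "us-east-1",
  "auth": {
    "assume_role": "arn:aws:iam::123456789012:role/pipeline-reader",
    "external_id": "example-external-id",
    "session_name": "pipeline-session"
  }
}
```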

Option 6

\n
\n

object

\n

The fluentd source ingests logs from a Fluentd-compatible service.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be fluentd. \nAllowed enum values: fluentd

default: fluentd

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 7

\n
\n

object

\n

The fluent_bit source ingests logs from Fluent Bit.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be fluent_bit. \nAllowed enum values: fluent_bit

default: fluent_bit

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 8

\n
\n

object

\n

The http_server source collects logs over HTTP POST from external services.

\n
\n
\n
\n
\n
\n

auth_strategy [required]

\n
\n

enum

\n

HTTP authentication method. \nAllowed enum values: none,plain

\n
\n \n
\n
\n
\n
\n
\n

decoding [required]

\n
\n

enum

\n

The decoding format used to interpret incoming logs. \nAllowed enum values: bytes,gelf,json,syslog

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique ID for the HTTP server source.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be http_server. \nAllowed enum values: http_server

default: http_server

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
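A minimal http_server source sketch; auth_strategy and decoding take the enum values listed above, and the ID is hypothetical.

```json
{
  "id": "http-server-1",
  "type": "http_server",
  "auth_strategy": "plain",
  "decoding": "json"
}
```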

Option 9

\n
\n

object

\n

The sumo_logic source receives logs from Sumo Logic collectors.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be sumo_logic. \nAllowed enum values: sumo_logic

default: sumo_logic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 10

\n
\n

object

\n

The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

Protocol used by the syslog source to receive messages. \nAllowed enum values: tcp,udp

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be rsyslog. \nAllowed enum values: rsyslog

default: rsyslog

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
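A minimal rsyslog source sketch listening over TCP; the ID is hypothetical, and the optional tls block is omitted.

```json
{
  "id": "rsyslog-source-1",
  "type": "rsyslog",
  "mode": "tcp"
}
```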

Option 11

\n
\n

object

\n

The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

Protocol used by the syslog source to receive messages. \nAllowed enum values: tcp,udp

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be syslog_ng. \nAllowed enum values: syslog_ng

default: syslog_ng

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 12

\n
\n

object

\n

The amazon_data_firehose source ingests logs from AWS Data Firehose.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be amazon_data_firehose. \nAllowed enum values: amazon_data_firehose

default: amazon_data_firehose

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 13

\n
\n

object

\n

The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

GCP credentials used to authenticate with Google Cloud Pub/Sub.

\n
\n
\n
\n
\n
\n

credentials_file [required]

\n
\n

string

\n

Path to the GCP service account key file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

decoding [required]

\n
\n

enum

\n

The decoding format used to interpret incoming logs. \nAllowed enum values: bytes,gelf,json,syslog

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

project [required]

\n
\n

string

\n

The GCP project ID that owns the Pub/Sub subscription.

\n
\n \n
\n
\n
\n
\n
\n

subscription [required]

\n
\n

string

\n

The Pub/Sub subscription name from which messages are consumed.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be google_pubsub. \nAllowed enum values: google_pubsub

default: google_pubsub

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
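An illustrative google_pubsub source; the project, subscription, and credentials path are placeholders.

```json
{
  "id": "pubsub-source-1",
  "type": "google_pubsub",
  "project": "example-gcp-project",
  "subscription": "example-log-subscription",
  "decoding": "json",
  "auth": {
    "credentials_file": "/secrets/gcp-service-account.json"
  }
}
```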

Option 14

\n
\n

object

\n

The http_client source scrapes logs from HTTP endpoints at regular intervals.

\n
\n
\n
\n
\n
\n

auth_strategy

\n
\n

enum

\n

Optional authentication strategy for HTTP requests. \nAllowed enum values: basic,bearer

\n
\n \n
\n
\n
\n
\n
\n

decoding [required]

\n
\n

enum

\n

The decoding format used to interpret incoming logs. \nAllowed enum values: bytes,gelf,json,syslog

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

scrape_interval_secs

\n
\n

int64

\n

The interval (in seconds) between HTTP scrape requests.

\n
\n \n
\n
\n
\n
\n
\n

scrape_timeout_secs

\n
\n

int64

\n

The timeout (in seconds) for each scrape request.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be http_client. \nAllowed enum values: http_client

default: http_client

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
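A sketch of an http_client source scraping every 60 seconds with a 10-second timeout; the ID and interval values are illustrative only.

```json
{
  "id": "http-client-1",
  "type": "http_client",
  "decoding": "json",
  "scrape_interval_secs": 60,
  "scrape_timeout_secs": 10
}
```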

Option 15

\n
\n

object

\n

The logstash source ingests logs from a Logstash forwarder.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be logstash. \nAllowed enum values: logstash

default: logstash

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

Name of the pipeline.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

string

\n

The resource type identifier. For pipeline resources, this should always be set to pipelines.

default: pipelines

\n
\n \n
\n
\n
\n
" } }, "ValidatePipeline": { @@ -395,7 +395,7 @@ "type": "pipelines" } }, - "html": "
\n
\n
\n
\n

data [required]

\n
\n

object

\n

Contains the pipeline configuration.

\n
\n
\n
\n
\n
\n

attributes [required]

\n
\n

object

\n

Defines the pipeline’s name and its components (sources, processors, and destinations).

\n
\n
\n
\n
\n
\n

config [required]

\n
\n

object

\n

Specifies the pipeline's configuration, including its sources, processors, and destinations.

\n
\n
\n
\n
\n
\n

destinations [required]

\n
\n

[ <oneOf>]

\n

A list of destination components where processed logs are sent.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

The datadog_logs destination forwards logs to Datadog Log Management.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be datadog_logs. \nAllowed enum values: datadog_logs

default: datadog_logs

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bucket [required]

\n
\n

string

\n

S3 bucket name.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique identifier for the destination component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

key_prefix

\n
\n

string

\n

Optional prefix for object keys.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

string

\n

AWS region of the S3 bucket.

\n
\n \n
\n
\n
\n
\n
\n

storage_class [required]

\n
\n

enum

\n

S3 storage class. \nAllowed enum values: STANDARD,REDUCED_REDUNDANCY,INTELLIGENT_TIERING,STANDARD_IA,EXPRESS_ONEZONE,ONEZONE_IA,GLACIER,GLACIER_IR,DEEP_ARCHIVE

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always amazon_s3. \nAllowed enum values: amazon_s3

default: amazon_s3

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
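An illustrative amazon_s3 destination entry for config.destinations; the bucket, prefix, and input ID are hypothetical. Note that inputs must reference the IDs of upstream components in the same pipeline.

```json
{
  "id": "s3-archive-1",
  "type": "amazon_s3",
  "inputs": ["filter-1"],
  "bucket": "example-log-archive",
  "region": "us-east-1",
  "key_prefix": "pipeline/",
  "storage_class": "STANDARD"
}
```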

Option 3

\n
\n

object

\n

The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket.\nIt requires a bucket name, GCP authentication, and metadata fields.

\n
\n
\n
\n
\n
\n

acl [required]

\n
\n

enum

\n

Access control list setting for objects written to the bucket. \nAllowed enum values: private,project-private,public-read,authenticated-read,bucket-owner-read,bucket-owner-full-control

\n
\n \n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

GCP credentials used to authenticate with Google Cloud Storage.

\n
\n
\n
\n
\n
\n

credentials_file [required]

\n
\n

string

\n

Path to the GCP service account key file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bucket [required]

\n
\n

string

\n

Name of the GCS bucket.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique identifier for the destination component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

key_prefix

\n
\n

string

\n

Optional prefix for object keys within the GCS bucket.

\n
\n \n
\n
\n
\n
\n
\n

metadata

\n
\n

[object]

\n

Custom metadata to attach to each object uploaded to the GCS bucket.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The metadata key.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The metadata value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

storage_class [required]

\n
\n

enum

\n

Storage class used for objects stored in GCS. \nAllowed enum values: STANDARD,NEARLINE,COLDLINE,ARCHIVE

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always google_cloud_storage. \nAllowed enum values: google_cloud_storage

default: google_cloud_storage

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 4

\n
\n

object

\n

The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).

\n
\n
\n
\n
\n
\n

auto_extract_timestamp

\n
\n

boolean

\n

If true, Splunk tries to extract timestamps from incoming log events.\nIf false, Splunk assigns the time the event was received.

\n
\n \n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

Encoding format for log events. \nAllowed enum values: json,raw_message

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

index

\n
\n

string

\n

Optional name of the Splunk index where logs are written.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

sourcetype

\n
\n

string

\n

The Splunk sourcetype to assign to log events.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always splunk_hec. \nAllowed enum values: splunk_hec

default: splunk_hec

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
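A sketch of a splunk_hec destination; the index, sourcetype, and input ID are placeholders, and the optional fields shown here may be omitted.

```json
{
  "id": "splunk-out-1",
  "type": "splunk_hec",
  "inputs": ["filter-1"],
  "encoding": "json",
  "index": "main",
  "sourcetype": "pipeline_logs",
  "auto_extract_timestamp": true
}
```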

Option 5

\n
\n

object

\n

The sumo_logic destination forwards logs to Sumo Logic.

\n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

The output encoding format. \nAllowed enum values: json,raw_message,logfmt

\n
\n \n
\n
\n
\n
\n
\n

header_custom_fields

\n
\n

[object]

\n

A list of custom headers to include in the request to Sumo Logic.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The header field name.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The header field value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

header_host_name

\n
\n

string

\n

Optional override for the host name header.

\n
\n \n
\n
\n
\n
\n
\n

header_source_category

\n
\n

string

\n

Optional override for the source category header.

\n
\n \n
\n
\n
\n
\n
\n

header_source_name

\n
\n

string

\n

Optional override for the source name header.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be sumo_logic. \nAllowed enum values: sumo_logic

default: sumo_logic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6

\n
\n

object

\n

The elasticsearch destination writes logs to an Elasticsearch cluster.

\n
\n
\n
\n
\n
\n

api_version

\n
\n

enum

\n

The Elasticsearch API version to use. Set to auto to auto-detect. \nAllowed enum values: auto,v6,v7,v8

\n
\n \n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to in Elasticsearch.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be elasticsearch. \nAllowed enum values: elasticsearch

default: elasticsearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 7

\n
\n

object

\n

The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

keepalive

\n
\n

int64

\n

Optional socket keepalive duration in milliseconds.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be rsyslog. \nAllowed enum values: rsyslog

default: rsyslog

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 8

\n
\n

object

\n

The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

keepalive

\n
\n

int64

\n

Optional socket keepalive duration in milliseconds.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be syslog_ng. \nAllowed enum values: syslog_ng

default: syslog_ng

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 9

\n
\n

object

\n

The azure_storage destination forwards logs to an Azure Blob Storage container.

\n
\n
\n
\n
\n
\n

blob_prefix

\n
\n

string

\n

Optional prefix for blobs written to the container.

\n
\n \n
\n
\n
\n
\n
\n

container_name [required]

\n
\n

string

\n

The name of the Azure Blob Storage container to store logs in.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be azure_storage. \nAllowed enum values: azure_storage

default: azure_storage

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 10

\n
\n

object

\n

The microsoft_sentinel destination forwards logs to Microsoft Sentinel.

\n
\n
\n
\n
\n
\n

client_id [required]

\n
\n

string

\n

Azure AD client ID used for authentication.

\n
\n \n
\n
\n
\n
\n
\n

dcr_immutable_id [required]

\n
\n

string

\n

The immutable ID of the Data Collection Rule (DCR).

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

table [required]

\n
\n

string

\n

The name of the Log Analytics table where logs are sent.

\n
\n \n
\n
\n
\n
\n
\n

tenant_id [required]

\n
\n

string

\n

Azure AD tenant ID.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be microsoft_sentinel. \nAllowed enum values: microsoft_sentinel

default: microsoft_sentinel

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 11

\n
\n

object

\n

The google_chronicle destination sends logs to Google Chronicle.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

GCP credentials used to authenticate with Google Chronicle.

\n
\n
\n
\n
\n
\n

credentials_file [required]

\n
\n

string

\n

Path to the GCP service account key file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

customer_id [required]

\n
\n

string

\n

The Google Chronicle customer ID.

\n
\n \n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

The encoding format for the logs sent to Chronicle. \nAllowed enum values: json,raw_message

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

log_type

\n
\n

string

\n

The log type metadata associated with the Chronicle destination.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be google_chronicle. \nAllowed enum values: google_chronicle

default: google_chronicle

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 12

\n
\n

object

\n

The new_relic destination sends logs to the New Relic platform.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

enum

\n

The New Relic region. \nAllowed enum values: us,eu

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be new_relic. \nAllowed enum values: new_relic

default: new_relic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 13

\n
\n

object

\n

The sentinel_one destination sends logs to SentinelOne.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

enum

\n

The SentinelOne region to send logs to. \nAllowed enum values: us,eu,ca,data_set_us

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be sentinel_one. \nAllowed enum values: sentinel_one

default: sentinel_one

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 14

\n
\n

object

\n

The opensearch destination writes logs to an OpenSearch cluster.

\n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be opensearch. \nAllowed enum values: opensearch

default: opensearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 15

\n
\n

object

\n

The amazon_opensearch destination writes logs to Amazon OpenSearch.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

Authentication settings for the Amazon OpenSearch destination.\nThe strategy field determines whether basic or AWS-based authentication is used.

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The ARN of the role to assume (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

aws_region

\n
\n

string

\n

The AWS region (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

External ID for the assumed role (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

Session name for the assumed role (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

The authentication strategy to use. \nAllowed enum values: basic,aws

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be amazon_opensearch. \nAllowed enum values: amazon_opensearch

default: amazon_opensearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

processors

\n
\n

[ <oneOf>]

\n

A list of processors that transform or enrich log data.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be filter. \nAllowed enum values: filter

default: filter

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
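A minimal filter processor sketch that keeps only error logs; the query and component IDs are illustrative.

```json
{
  "id": "filter-errors",
  "type": "filter",
  "include": "status:error",
  "inputs": ["agent-source-1"]
}
```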

Option 2

\n
\n

object

\n

The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.

\n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

The name of the log field that contains a JSON string.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be parse_json. \nAllowed enum values: parse_json

default: parse_json

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
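An illustrative parse_json processor that expands a JSON string stored in the message field; the IDs are hypothetical.

```json
{
  "id": "parse-message",
  "type": "parse_json",
  "field": "message",
  "include": "*",
  "inputs": ["filter-errors"]
}
```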

Option 3

\n
\n

object

\n

The quota processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.

\n
\n
\n
\n
\n
\n

drop_events [required]

\n
\n

boolean

\n

If set to true, logs that match the quota filter and are sent after the quota has been met are dropped; only logs that do not match the filter query continue through the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

ignore_when_missing_partitions

\n
\n

boolean

\n

If true, the processor skips quota checks when partition fields are missing from the logs.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

limit [required]

\n
\n

object

\n

The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.

\n
\n
\n
\n
\n
\n

enforce [required]

\n
\n

enum

\n

Unit for quota enforcement in bytes for data size or events for count. \nAllowed enum values: bytes,events

\n
\n \n
\n
\n
\n
\n
\n

limit [required]

\n
\n

int64

\n

The limit for quota enforcement.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

Name of the quota.

\n
\n \n
\n
\n
\n
\n
\n

overflow_action

\n
\n

enum

\n

The action to take when the quota is exceeded. Options:

  • drop: Drop the event.
  • no_action: Let the event pass through.
  • overflow_routing: Route to an overflow destination.

Allowed enum values: drop,no_action,overflow_routing
\n
\n \n
\n
\n
\n
\n
\n

overrides

\n
\n

[object]

\n

A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[object]

\n

A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The field name.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The field value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

limit [required]

\n
\n

object

\n

The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.

\n
\n
\n
\n
\n
\n

enforce [required]

\n
\n

enum

\n

Unit for quota enforcement in bytes for data size or events for count. \nAllowed enum values: bytes,events

\n
\n \n
\n
\n
\n
\n
\n

limit [required]

\n
\n

int64

\n

The limit for quota enforcement.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

partition_fields

\n
\n

[string]

\n

A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be quota. \nAllowed enum values: quota

default: quota

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
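A sketch of a quota processor enforcing a 10 GB daily byte quota per service; the name, IDs, and limit are placeholders. Because partition_fields is set, the quota is tracked independently for each distinct service value.

```json
{
  "id": "daily-quota",
  "type": "quota",
  "name": "per-service-daily-quota",
  "include": "*",
  "inputs": ["parse-message"],
  "drop_events": true,
  "limit": { "enforce": "bytes", "limit": 10000000000 },
  "partition_fields": ["service"],
  "overflow_action": "drop"
}
```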

Option 4

\n
\n

object

\n

The add_fields processor adds static key-value fields to logs.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[object]

\n

A list of static fields (key-value pairs) that are added to each log event processed by this component.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The field name.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The field value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be add_fields. \nAllowed enum values: add_fields

default: add_fields

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 5

\n
\n

object

\n

The remove_fields processor deletes specified fields from logs.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

A list of field names to be removed from each log event.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be remove_fields. \nAllowed enum values: remove_fields

default: remove_fields

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6

\n
\n

object

\n

The rename_fields processor changes field names.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[object]

\n

A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.

\n
\n
\n
\n
\n
\n

destination [required]

\n
\n

string

\n

The field name to assign the renamed value to.

\n
\n \n
\n
\n
\n
\n
\n

preserve_source [required]

\n
\n

boolean

\n

Indicates whether the original field, as received from the source, should be kept (true) or removed (false) after renaming.

\n
\n \n
\n
\n
\n
\n
\n

source [required]

\n
\n

string

\n

The original field name in the log event that should be renamed.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be rename_fields. \nAllowed enum values: rename_fields

default: rename_fields

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 7

\n
\n

object

\n

The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog.\nMetrics can be counters, gauges, or distributions and optionally grouped by log fields.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

metrics [required]

\n
\n

[object]

\n

Configuration for generating individual metrics.

\n
\n
\n
\n
\n
\n

group_by

\n
\n

[string]

\n

Optional fields used to group the metric series.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

Datadog filter query to match logs for metric generation.

\n
\n \n
\n
\n
\n
\n
\n

metric_type [required]

\n
\n

enum

\n

Type of metric to create. \nAllowed enum values: count,gauge,distribution

\n
\n \n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

Name of the custom metric to be created.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

 <oneOf>

\n

Specifies how the value of the generated metric is computed.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Strategy that increments a generated metric by one for each matching event.

\n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

Increments the metric by 1 for each matching event. \nAllowed enum values: increment_by_one

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Strategy that increments a generated metric based on the value of a log field.

\n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

Name of the log field containing the numeric value to increment the metric by.

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

Uses a numeric field in the log event as the metric increment. \nAllowed enum values: increment_by_field

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. Always generate_datadog_metrics. \nAllowed enum values: generate_datadog_metrics

default: generate_datadog_metrics

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
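An illustrative generate_datadog_metrics processor that counts error logs per service using the increment_by_one strategy; the metric name and IDs are hypothetical.

```json
{
  "id": "error-metrics",
  "type": "generate_datadog_metrics",
  "include": "*",
  "inputs": ["daily-quota"],
  "metrics": [
    {
      "name": "logs.errors.count",
      "metric_type": "count",
      "include": "status:error",
      "group_by": ["service"],
      "value": { "strategy": "increment_by_one" }
    }
  ]
}
```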

Option 8

\n
\n

object

\n

The sample processor allows probabilistic sampling of logs at a fixed rate.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

percentage

\n
\n

double

\n

The percentage of logs to sample.

\n
\n \n
\n
\n
\n
\n
\n

rate

\n
\n

int64

\n

Number of events to sample (1 in N).

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be sample. \nAllowed enum values: sample

default: sample

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 9

\n
\n

object

\n

The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.

\n
\n
\n
\n
\n
\n

disable_library_rules

\n
\n

boolean

\n

If set to true, disables the default Grok rules provided by Datadog.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

A unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

rules [required]

\n
\n

[object]

\n

The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.

\n
\n
\n
\n
\n
\n

match_rules [required]

\n
\n

[object]

\n

A list of Grok parsing rules that define how to extract fields from the source field.\nEach rule must contain a name and a valid Grok pattern.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the rule.

\n
\n \n
\n
\n
\n
\n
\n

rule [required]

\n
\n

string

\n

The definition of the Grok rule.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

source [required]

\n
\n

string

\n

The name of the field in the log event to apply the Grok rules to.

\n
\n \n
\n
\n
\n
\n
\n

support_rules [required]

\n
\n

[object]

\n

A list of Grok helper rules that can be referenced by the parsing rules.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the Grok helper rule.

\n
\n \n
\n
\n
\n
\n
\n

rule [required]

\n
\n

string

\n

The definition of the Grok helper rule.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be parse_grok. \nAllowed enum values: parse_grok

default: parse_grok

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
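A sketch of a parse_grok processor with one match rule and one helper rule; the rule names and Grok patterns are illustrative and are not patterns from Datadog's library. The helper rule is referenced by name from the match rule.

```json
{
  "id": "grok-parser",
  "type": "parse_grok",
  "include": "*",
  "inputs": ["agent-source-1"],
  "disable_library_rules": false,
  "rules": [
    {
      "source": "message",
      "match_rules": [
        { "name": "access_line", "rule": "%{ipOrHost:network.client.ip} %{word:http.method} %{notSpace:http.url}" }
      ],
      "support_rules": [
        { "name": "ipOrHost", "rule": "(%{ip}|%{hostname})" }
      ]
    }
  ]
}
```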

Option 10

\n
\n

object

\n

The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

rules [required]

\n
\n

[object]

\n

A list of rules for identifying and acting on sensitive data patterns.

\n
\n
\n
\n
\n
\n

keyword_options

\n
\n

object

\n

Configuration for keywords used to reinforce sensitive data pattern detection.

\n
\n
\n
\n
\n
\n

keywords [required]

\n
\n

[string]

\n

A list of keywords to match near the sensitive pattern.

\n
\n \n
\n
\n
\n
\n
\n

proximity [required]

\n
\n

int64

\n

Maximum number of tokens between a keyword and a sensitive value match.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

A name identifying the rule.

\n
\n \n
\n
\n
\n
\n
\n

on_match [required]

\n
\n

 <oneOf>

\n

Defines what action to take when sensitive data is matched.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Configuration for completely redacting matched sensitive data.

\n
\n
\n
\n
\n
\n

action [required]

\n
\n

enum

\n

Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. \nAllowed enum values: redact

\n
\n \n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Configuration for fully redacting sensitive data.

\n
\n
\n
\n
\n
\n

replace [required]

\n
\n

string

\n

The replacement string that is substituted for matched sensitive data.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Configuration for hashing matched sensitive values.

\n
\n
\n
\n
\n
\n

action [required]

\n
\n

enum

\n

Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. \nAllowed enum values: hash

\n
\n \n
\n
\n
\n
\n
\n

options

\n
\n

object

\n

Options for the hash action.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

Configuration for partially redacting matched sensitive data.

\n
\n
\n
\n
\n
\n

action [required]

\n
\n

enum

\n

Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). \nAllowed enum values: partial_redact

\n
\n \n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Controls how partial redaction is applied, including character count and direction.

\n
\n
\n
\n
\n
\n

characters [required]

\n
\n

int64

\n

The number of characters to redact.

\n
\n \n
\n
\n
\n
\n
\n

direction [required]

\n
\n

enum

\n

Indicates whether to redact characters from the first or last part of the matched value. \nAllowed enum values: first,last

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n

pattern [required]

\n
\n

 <oneOf>

\n

Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Defines a custom regex-based pattern for identifying sensitive data in logs.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Options for defining a custom regex pattern.

\n
\n
\n
\n
\n
\n

rule [required]

\n
\n

string

\n

A regular expression used to detect sensitive values. Must be a valid regex.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Indicates a custom regular expression is used for matching. \nAllowed enum values: custom

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Options for selecting a predefined library pattern and enabling keyword support.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Identifier for a predefined pattern from the sensitive data scanner pattern library.

\n
\n \n
\n
\n
\n
\n
\n

use_recommended_keywords

\n
\n

boolean

\n

Whether to augment the pattern with recommended keywords (optional).

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Indicates that a predefined library pattern is used. \nAllowed enum values: library

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

scope [required]

\n
\n

 <oneOf>

\n

Determines which parts of the log the pattern-matching rule should be applied to.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Includes only specific fields for sensitive data scanning.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Fields to which the scope rule applies.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

The list of fields to which the scope rule applies.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

target [required]

\n
\n

enum

\n

Applies the rule only to included fields. \nAllowed enum values: include

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Excludes specific fields from sensitive data scanning.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Fields to which the scope rule applies.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

The list of fields to which the scope rule applies.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

target [required]

\n
\n

enum

\n

Excludes specific fields from processing. \nAllowed enum values: exclude

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

Applies scanning across all available fields.

\n
\n
\n
\n
\n
\n

target [required]

\n
\n

enum

\n

Applies the rule to all fields. \nAllowed enum values: all

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

tags [required]

\n
\n

[string]

\n

Tags assigned to this rule for filtering and classification.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be sensitive_data_scanner. \nAllowed enum values: sensitive_data_scanner

default: sensitive_data_scanner

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
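An illustrative sensitive_data_scanner processor with a single custom-regex rule that fully redacts matches across all fields; the rule name, regex, keywords, and replacement string are hypothetical.

```json
{
  "id": "sds-1",
  "type": "sensitive_data_scanner",
  "include": "*",
  "inputs": ["grok-parser"],
  "rules": [
    {
      "name": "redact-api-keys",
      "tags": ["sensitive:api-key"],
      "pattern": {
        "type": "custom",
        "options": { "rule": "sk-[A-Za-z0-9]{32}" }
      },
      "scope": { "target": "all" },
      "on_match": {
        "action": "redact",
        "options": { "replace": "[REDACTED]" }
      },
      "keyword_options": { "keywords": ["api_key", "secret"], "proximity": 10 }
    }
  ]
}
```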

Option 11

\n
\n

object

\n

The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

mappings [required]

\n
\n

[object]

\n

A list of mapping rules to convert events to the OCSF format.

\n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to select the logs that this mapping should apply to.

\n
\n \n
\n
\n
\n
\n
\n

mapping [required]

\n
\n

 <oneOf>

\n

Defines a single mapping rule for transforming logs into the OCSF schema.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

enum

\n

Predefined library mappings for common log formats. \nAllowed enum values: CloudTrail Account Change,GCP Cloud Audit CreateBucket,GCP Cloud Audit CreateSink,GCP Cloud Audit SetIamPolicy,GCP Cloud Audit UpdateSink,Github Audit Log API Activity,Google Workspace Admin Audit addPrivilege,Microsoft 365 Defender Incident,Microsoft 365 Defender UserLoggedIn,Okta System Log Authentication,Palo Alto Networks Firewall Traffic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be ocsf_mapper. \nAllowed enum values: ocsf_mapper

default: ocsf_mapper

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 12

\n
\n

object

\n

The add_env_vars processor adds environment variable values to log events.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this processor in the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be add_env_vars. \nAllowed enum values: add_env_vars

default: add_env_vars

\n
\n \n
\n
\n
\n
\n
\n

variables [required]

\n
\n

[object]

\n

A list of environment variable mappings to apply to log fields.

\n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

The target field in the log event.

\n
\n \n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the environment variable to read.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

Option 13

\n
\n

object

\n

The dedupe processor removes duplicate fields in log events.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

A list of log field paths to check for duplicates.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

The deduplication mode to apply to the fields. \nAllowed enum values: match,ignore

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be dedupe. \nAllowed enum values: dedupe

default: dedupe

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
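A minimal dedupe entry combining the required fields above, with mode set to one of the two allowed values; the IDs and field paths are placeholders:

```json
{
  "id": "dedupe-1",
  "type": "dedupe",
  "include": "*",
  "inputs": ["my-source"],
  "fields": ["host", "service"],
  "mode": "match"
}
```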

Option 14

\n
\n

object

\n

The enrichment_table processor enriches logs using a static CSV file or GeoIP database.

\n
\n
\n
\n
\n
\n

file

\n
\n

object

\n

Defines a static enrichment table loaded from a CSV file.

\n
\n
\n
\n
\n
\n

encoding [required]

\n
\n

object

\n

File encoding format.

\n
\n
\n
\n
\n
\n

delimiter [required]

\n
\n

string

\n

The encoding delimiter.

\n
\n \n
\n
\n
\n
\n
\n

includes_headers [required]

\n
\n

boolean

\n

Whether the CSV file includes a header row.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Specifies the encoding format (e.g., CSV) used for enrichment tables. \nAllowed enum values: csv

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

key [required]

\n
\n

[object]

\n

Key fields used to look up enrichment values.

\n
\n
\n
\n
\n
\n

column [required]

\n
\n

string

\n

The enrichment table column to match against.

\n
\n \n
\n
\n
\n
\n
\n

comparison [required]

\n
\n

enum

\n

Defines how to compare key fields for enrichment table lookups. \nAllowed enum values: equals

\n
\n \n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

The log event field whose value is compared against the column.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

path [required]

\n
\n

string

\n

Path to the CSV file.

\n
\n \n
\n
\n
\n
\n
\n

schema [required]

\n
\n

[object]

\n

Schema defining column names and their types.

\n
\n
\n
\n
\n
\n

column [required]

\n
\n

string

\n

The name of the column this schema entry describes.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Declares allowed data types for enrichment table columns. \nAllowed enum values: string,boolean,integer,float,date,timestamp

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

geoip

\n
\n

object

\n

Uses a GeoIP database to enrich logs based on an IP field.

\n
\n
\n
\n
\n
\n

key_field [required]

\n
\n

string

\n

Path to the IP field in the log.

\n
\n \n
\n
\n
\n
\n
\n

locale [required]

\n
\n

string

\n

Locale used to resolve geographical names.

\n
\n \n
\n
\n
\n
\n
\n

path [required]

\n
\n

string

\n

Path to the GeoIP database file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

target [required]

\n
\n

string

\n

Path where enrichment results should be stored in the log.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be enrichment_table. \nAllowed enum values: enrichment_table

default: enrichment_table

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
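The file and geoip objects above are alternatives. A sketch of the CSV (file) variant, combining the encoding, key, and schema shapes just listed; the path, column names, and IDs are invented placeholders:

```json
{
  "id": "enrichment-1",
  "type": "enrichment_table",
  "include": "*",
  "inputs": ["my-source"],
  "target": "usr.enriched",
  "file": {
    "path": "/etc/pipeline/users.csv",
    "encoding": { "type": "csv", "delimiter": ",", "includes_headers": true },
    "key": [
      { "column": "user_id", "comparison": "equals", "field": "usr.id" }
    ],
    "schema": [
      { "column": "user_id", "type": "string" },
      { "column": "team", "type": "string" }
    ]
  }
}
```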

Option 15

\n
\n

object

\n

The reduce processor aggregates and merges logs based on matching keys and merge strategies.

\n
\n
\n
\n
\n
\n

group_by [required]

\n
\n

[string]

\n

A list of fields used to group log events for merging.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

merge_strategies [required]

\n
\n

[object]

\n

List of merge strategies defining how values from grouped events should be combined.

\n
\n
\n
\n
\n
\n

path [required]

\n
\n

string

\n

The field path in the log event.

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

The merge strategy to apply. \nAllowed enum values: discard,retain,sum,max,min,array,concat,concat_newline,concat_raw,shortest_array,longest_array,flat_unique

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be reduce. \nAllowed enum values: reduce

default: reduce

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
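A sketch of a reduce entry tying the group_by and merge_strategies shapes together; the strategy values come from the allowed enum above, everything else is a placeholder:

```json
{
  "id": "reduce-1",
  "type": "reduce",
  "include": "*",
  "inputs": ["my-source"],
  "group_by": ["host", "service"],
  "merge_strategies": [
    { "path": "message", "strategy": "concat_newline" },
    { "path": "network.bytes", "strategy": "sum" }
  ]
}
```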

Option 16

\n
\n

object

\n

The throttle processor limits the number of events that pass through over a given time window.

\n
\n
\n
\n
\n
\n

group_by

\n
\n

[string]

\n

Optional list of fields used to group events; the threshold is applied to each group independently.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

threshold [required]

\n
\n

int64

\n

The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be throttle. \nAllowed enum values: throttle

default: throttle

\n
\n \n
\n
\n
\n
\n
\n

window [required]

\n
\n

double

\n

The time window in seconds over which the threshold applies.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
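Putting the throttle fields together: the sketch below allows up to 1000 events per 60-second window for each service value. The IDs and the include query are placeholders:

```json
{
  "id": "throttle-1",
  "type": "throttle",
  "include": "*",
  "inputs": ["reduce-1"],
  "group_by": ["service"],
  "threshold": 1000,
  "window": 60.0
}
```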

sources [required]

\n
\n

[ <oneOf>]

\n

A list of configured data sources for the pipeline.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

The kafka source ingests data from Apache Kafka topics.

\n
\n
\n
\n
\n
\n

group_id [required]

\n
\n

string

\n

Consumer group ID used by the Kafka client.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

librdkafka_options

\n
\n

[object]

\n

Optional list of advanced Kafka client configuration options, defined as key-value pairs.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the librdkafka configuration option to set.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The value assigned to the specified librdkafka configuration option.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

sasl

\n
\n

object

\n

Specifies the SASL mechanism for authenticating with a Kafka cluster.

\n
\n
\n
\n
\n
\n

mechanism

\n
\n

enum

\n

SASL mechanism used for Kafka authentication. \nAllowed enum values: PLAIN,SCRAM-SHA-256,SCRAM-SHA-512

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

topics [required]

\n
\n

[string]

\n

A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be kafka. \nAllowed enum values: kafka

default: kafka

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
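A sketch of a kafka source entry combining the required and optional fields above. The librdkafka option shown is one illustrative client setting, and all names and file paths are placeholders:

```json
{
  "id": "kafka-source-1",
  "type": "kafka",
  "group_id": "pipeline-consumers",
  "topics": ["app-logs"],
  "sasl": { "mechanism": "SCRAM-SHA-512" },
  "librdkafka_options": [
    { "name": "fetch.message.max.bytes", "value": "1048576" }
  ],
  "tls": {
    "crt_file": "/etc/certs/client.crt",
    "key_file": "/etc/certs/client.key",
    "ca_file": "/etc/certs/ca.crt"
  }
}
```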

Option 2

\n
\n

object

\n

The datadog_agent source collects logs from the Datadog Agent.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be datadog_agent. \nAllowed enum values: datadog_agent

default: datadog_agent

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP.\nTLS is supported for secure transmission.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always splunk_tcp. \nAllowed enum values: splunk_tcp

default: splunk_tcp

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 4

\n
\n

object

\n

The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always splunk_hec. \nAllowed enum values: splunk_hec

default: splunk_hec

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 5

\n
\n

object

\n

The amazon_s3 source ingests logs from an Amazon S3 bucket.\nIt supports AWS authentication and TLS encryption.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

string

\n

AWS region where the S3 bucket resides.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always amazon_s3. \nAllowed enum values: amazon_s3

default: amazon_s3

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6

\n
\n

object

\n

The fluentd source ingests logs from a Fluentd-compatible service.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be fluentd. \nAllowed enum values: fluentd

default: fluentd

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 7

\n
\n

object

\n

The fluent_bit source ingests logs from Fluent Bit.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be fluent_bit. \nAllowed enum values: fluent_bit

default: fluent_bit

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 8

\n
\n

object

\n

The http_server source collects logs over HTTP POST from external services.

\n
\n
\n
\n
\n
\n

auth_strategy [required]

\n
\n

enum

\n

HTTP authentication method. \nAllowed enum values: none,plain

\n
\n \n
\n
\n
\n
\n
\n

decoding [required]

\n
\n

enum

\n

The decoding format used to interpret incoming logs. \nAllowed enum values: bytes,gelf,json,syslog

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique ID for the HTTP server source.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be http_server. \nAllowed enum values: http_server

default: http_server

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
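A minimal http_server source using only the required fields above, with plain authentication and JSON decoding; the ID is a placeholder:

```json
{
  "id": "http-server-1",
  "type": "http_server",
  "auth_strategy": "plain",
  "decoding": "json"
}
```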

Option 9

\n
\n

object

\n

The sumo_logic source receives logs from Sumo Logic collectors.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be sumo_logic. \nAllowed enum values: sumo_logic

default: sumo_logic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 10

\n
\n

object

\n

The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

Protocol used by the syslog source to receive messages. \nAllowed enum values: tcp,udp

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be rsyslog. \nAllowed enum values: rsyslog

default: rsyslog

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 11

\n
\n

object

\n

The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

Protocol used by the syslog source to receive messages. \nAllowed enum values: tcp,udp

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be syslog_ng. \nAllowed enum values: syslog_ng

default: syslog_ng

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 12

\n
\n

object

\n

The amazon_data_firehose source ingests logs from AWS Data Firehose.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be amazon_data_firehose. \nAllowed enum values: amazon_data_firehose

default: amazon_data_firehose

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 13

\n
\n

object

\n

The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

GCP credentials used to authenticate with Google Cloud services.

\n
\n
\n
\n
\n
\n

credentials_file [required]

\n
\n

string

\n

Path to the GCP service account key file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

decoding [required]

\n
\n

enum

\n

The decoding format used to interpret incoming logs. \nAllowed enum values: bytes,gelf,json,syslog

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

project [required]

\n
\n

string

\n

The GCP project ID that owns the Pub/Sub subscription.

\n
\n \n
\n
\n
\n
\n
\n

subscription [required]

\n
\n

string

\n

The Pub/Sub subscription name from which messages are consumed.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be google_pubsub. \nAllowed enum values: google_pubsub

default: google_pubsub

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
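A sketch of a google_pubsub source with the required project, subscription, decoding, and auth fields; the project and subscription names and the key file path are placeholders:

```json
{
  "id": "pubsub-source-1",
  "type": "google_pubsub",
  "project": "my-gcp-project",
  "subscription": "log-events-sub",
  "decoding": "json",
  "auth": { "credentials_file": "/var/secrets/gcp-key.json" }
}
```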

Option 14

\n
\n

object

\n

The http_client source scrapes logs from HTTP endpoints at regular intervals.

\n
\n
\n
\n
\n
\n

auth_strategy

\n
\n

enum

\n

Optional authentication strategy for HTTP requests. \nAllowed enum values: basic,bearer

\n
\n \n
\n
\n
\n
\n
\n

decoding [required]

\n
\n

enum

\n

The decoding format used to interpret incoming logs. \nAllowed enum values: bytes,gelf,json,syslog

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

scrape_interval_secs

\n
\n

int64

\n

The interval (in seconds) between HTTP scrape requests.

\n
\n \n
\n
\n
\n
\n
\n

scrape_timeout_secs

\n
\n

int64

\n

The timeout (in seconds) for each scrape request.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be http_client. \nAllowed enum values: http_client

default: http_client

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 15

\n
\n

object

\n

The logstash source ingests logs from a Logstash forwarder.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be logstash. \nAllowed enum values: logstash

default: logstash

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

Name of the pipeline.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

string

\n

The resource type identifier. For pipeline resources, this should always be set to pipelines.

default: pipelines

\n
\n \n
\n
\n
\n
" + "html": "
\n
\n
\n
\n

data [required]

\n
\n

object

\n

Contains the pipeline configuration.

\n
\n
\n
\n
\n
\n

attributes [required]

\n
\n

object

\n

Defines the pipeline’s name and its components (sources, processors, and destinations).

\n
\n
\n
\n
\n
\n

config [required]

\n
\n

object

\n

Specifies the pipeline's configuration, including its sources, processors, and destinations.

\n
\n
\n
\n
\n
\n

destinations [required]

\n
\n

[ <oneOf>]

\n

A list of destination components where processed logs are sent.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

The datadog_logs destination forwards logs to Datadog Log Management.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be datadog_logs. \nAllowed enum values: datadog_logs

default: datadog_logs

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bucket [required]

\n
\n

string

\n

S3 bucket name.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique identifier for the destination component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

key_prefix

\n
\n

string

\n

Optional prefix for object keys.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

string

\n

AWS region of the S3 bucket.

\n
\n \n
\n
\n
\n
\n
\n

storage_class [required]

\n
\n

enum

\n

S3 storage class. \nAllowed enum values: STANDARD,REDUCED_REDUNDANCY,INTELLIGENT_TIERING,STANDARD_IA,EXPRESS_ONEZONE,ONEZONE_IA,GLACIER,GLACIER_IR,DEEP_ARCHIVE

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always amazon_s3. \nAllowed enum values: amazon_s3

default: amazon_s3

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
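A sketch of an amazon_s3 destination entry in the destinations array, using a storage class from the allowed enum above; the bucket name, role ARN, and component IDs are placeholders:

```json
{
  "id": "s3-archive-1",
  "type": "amazon_s3",
  "inputs": ["throttle-1"],
  "bucket": "my-log-archive",
  "region": "us-east-1",
  "key_prefix": "archive/",
  "storage_class": "STANDARD_IA",
  "auth": { "assume_role": "arn:aws:iam::123456789012:role/log-archiver" }
}
```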

Option 3

\n
\n

object

\n

The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket.\nIt requires a bucket name, GCP authentication, and metadata fields.

\n
\n
\n
\n
\n
\n

acl [required]

\n
\n

enum

\n

Access control list setting for objects written to the bucket. \nAllowed enum values: private,project-private,public-read,authenticated-read,bucket-owner-read,bucket-owner-full-control

\n
\n \n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

GCP credentials used to authenticate with Google Cloud Storage.

\n
\n
\n
\n
\n
\n

credentials_file [required]

\n
\n

string

\n

Path to the GCP service account key file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bucket [required]

\n
\n

string

\n

Name of the GCS bucket.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique identifier for the destination component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

key_prefix

\n
\n

string

\n

Optional prefix for object keys within the GCS bucket.

\n
\n \n
\n
\n
\n
\n
\n

metadata

\n
\n

[object]

\n

Custom metadata to attach to each object uploaded to the GCS bucket.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The metadata key.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The metadata value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

storage_class [required]

\n
\n

enum

\n

Storage class used for objects stored in GCS. \nAllowed enum values: STANDARD,NEARLINE,COLDLINE,ARCHIVE

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always google_cloud_storage. \nAllowed enum values: google_cloud_storage

default: google_cloud_storage

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 4

\n
\n

object

\n

The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).

\n
\n
\n
\n
\n
\n

auto_extract_timestamp

\n
\n

boolean

\n

If true, Splunk tries to extract timestamps from incoming log events.\nIf false, Splunk assigns the time the event was received.

\n
\n \n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

Encoding format for log events. \nAllowed enum values: json,raw_message

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

index

\n
\n

string

\n

Optional name of the Splunk index where logs are written.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

sourcetype

\n
\n

string

\n

The Splunk sourcetype to assign to log events.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always splunk_hec. \nAllowed enum values: splunk_hec

default: splunk_hec

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 5

\n
\n

object

\n

The sumo_logic destination forwards logs to Sumo Logic.

\n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

The output encoding format. \nAllowed enum values: json,raw_message,logfmt

\n
\n \n
\n
\n
\n
\n
\n

header_custom_fields

\n
\n

[object]

\n

A list of custom headers to include in the request to Sumo Logic.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The header field name.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The header field value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

header_host_name

\n
\n

string

\n

Optional override for the host name header.

\n
\n \n
\n
\n
\n
\n
\n

header_source_category

\n
\n

string

\n

Optional override for the source category header.

\n
\n \n
\n
\n
\n
\n
\n

header_source_name

\n
\n

string

\n

Optional override for the source name header.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be sumo_logic. \nAllowed enum values: sumo_logic

default: sumo_logic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6

\n
\n

object

\n

The elasticsearch destination writes logs to an Elasticsearch cluster.

\n
\n
\n
\n
\n
\n

api_version

\n
\n

enum

\n

The Elasticsearch API version to use. Set to auto to auto-detect. \nAllowed enum values: auto,v6,v7,v8

\n
\n \n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to in Elasticsearch.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be elasticsearch. \nAllowed enum values: elasticsearch

default: elasticsearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 7

\n
\n

object

\n

The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

keepalive

\n
\n

int64

\n

Optional socket keepalive duration in milliseconds.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be rsyslog. \nAllowed enum values: rsyslog

default: rsyslog

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 8

\n
\n

object

\n

The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

keepalive

\n
\n

int64

\n

Optional socket keepalive duration in milliseconds.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be syslog_ng. \nAllowed enum values: syslog_ng

default: syslog_ng

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 9

\n
\n

object

\n

The azure_storage destination forwards logs to an Azure Blob Storage container.

\n
\n
\n
\n
\n
\n

blob_prefix

\n
\n

string

\n

Optional prefix for blobs written to the container.

\n
\n \n
\n
\n
\n
\n
\n

container_name [required]

\n
\n

string

\n

The name of the Azure Blob Storage container to store logs in.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be azure_storage. \nAllowed enum values: azure_storage

default: azure_storage

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 10

\n
\n

object

\n

The microsoft_sentinel destination forwards logs to Microsoft Sentinel.

\n
\n
\n
\n
\n
\n

client_id [required]

\n
\n

string

\n

Azure AD client ID used for authentication.

\n
\n \n
\n
\n
\n
\n
\n

dcr_immutable_id [required]

\n
\n

string

\n

The immutable ID of the Data Collection Rule (DCR).

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

table [required]

\n
\n

string

\n

The name of the Log Analytics table where logs are sent.

\n
\n \n
\n
\n
\n
\n
\n

tenant_id [required]

\n
\n

string

\n

Azure AD tenant ID.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be microsoft_sentinel. \nAllowed enum values: microsoft_sentinel

default: microsoft_sentinel

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 11

\n
\n

object

\n

The google_chronicle destination sends logs to Google Chronicle.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

GCP credentials used to authenticate with Google Cloud services.

\n
\n
\n
\n
\n
\n

credentials_file [required]

\n
\n

string

\n

Path to the GCP service account key file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

customer_id [required]

\n
\n

string

\n

The Google Chronicle customer ID.

\n
\n \n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

The encoding format for the logs sent to Chronicle. \nAllowed enum values: json,raw_message

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

log_type

\n
\n

string

\n

The log type metadata associated with the Chronicle destination.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be google_chronicle. \nAllowed enum values: google_chronicle

default: google_chronicle

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 12

\n
\n

object

\n

The new_relic destination sends logs to the New Relic platform.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

enum

\n

The New Relic region. \nAllowed enum values: us,eu

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be new_relic. \nAllowed enum values: new_relic

default: new_relic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 13

\n
\n

object

\n

The sentinel_one destination sends logs to SentinelOne.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

enum

\n

The SentinelOne region to send logs to. \nAllowed enum values: us,eu,ca,data_set_us

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be sentinel_one. \nAllowed enum values: sentinel_one

default: sentinel_one

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 14

\n
\n

object

\n

The opensearch destination writes logs to an OpenSearch cluster.

\n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be opensearch. \nAllowed enum values: opensearch

default: opensearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 15

\n
\n

object

\n

The amazon_opensearch destination writes logs to Amazon OpenSearch.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

Authentication settings for the Amazon OpenSearch destination.\nThe strategy field determines whether basic or AWS-based authentication is used.

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The ARN of the role to assume (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

aws_region

\n
\n

string

\n

AWS region.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

External ID for the assumed role (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

Session name for the assumed role (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

The authentication strategy to use. \nAllowed enum values: basic,aws

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be amazon_opensearch. \nAllowed enum values: amazon_opensearch

default: amazon_opensearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

processors

\n
\n

[ <oneOf>]

\n

A list of processors that transform or enrich log data.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be filter. \nAllowed enum values: filter

default: filter

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.

\n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

The name of the log field that contains a JSON string.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be parse_json. \nAllowed enum values: parse_json

default: parse_json

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

The quota processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.

\n
\n
\n
\n
\n
\n

drop_events [required]

\n
\n

boolean

\n

If set to true, logs that match the quota filter and are sent after the quota has been met are dropped; only logs that do not match the filter query continue through the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

ignore_when_missing_partitions

\n
\n

boolean

\n

If true, the processor skips quota checks when partition fields are missing from the logs.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

limit [required]

\n
\n

object

\n

The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.

\n
\n
\n
\n
\n
\n

enforce [required]

\n
\n

enum

\n

Unit for quota enforcement in bytes for data size or events for count. \nAllowed enum values: bytes,events

\n
\n \n
\n
\n
\n
\n
\n

limit [required]

\n
\n

int64

\n

The limit for quota enforcement.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

Name of the quota.

\n
\n \n
\n
\n
\n
\n
\n

overflow_action

\n
\n

enum

\n

The action to take when the quota is exceeded. Options: drop (drop the event), no_action (let the event pass through), or overflow_routing (route to an overflow destination). \nAllowed enum values: drop,no_action,overflow_routing

\n
\n \n
\n
\n
\n
\n
\n

overrides

\n
\n

[object]

\n

A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[object]

\n

A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The field name.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The field value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

limit [required]

\n
\n

object

\n

The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.

\n
\n
\n
\n
\n
\n

enforce [required]

\n
\n

enum

\n

Unit for quota enforcement in bytes for data size or events for count. \nAllowed enum values: bytes,events

\n
\n \n
\n
\n
\n
\n
\n

limit [required]

\n
\n

int64

\n

The limit for quota enforcement.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

partition_fields

\n
\n

[string]

\n

A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be quota. \nAllowed enum values: quota

default: quota

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
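Combining the quota fields above: the sketch below caps matching logs at roughly 10 GB per day, tracked separately per service, with an event-count override for one service. All names, limits, and IDs are placeholders:

```json
{
  "id": "quota-1",
  "type": "quota",
  "name": "daily-ingest-cap",
  "include": "service:web",
  "inputs": ["my-source"],
  "drop_events": true,
  "limit": { "enforce": "bytes", "limit": 10000000000 },
  "partition_fields": ["service"],
  "overflow_action": "drop",
  "overrides": [
    {
      "fields": [{ "name": "service", "value": "checkout" }],
      "limit": { "enforce": "events", "limit": 5000000 }
    }
  ]
}
```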

Option 4

\n
\n

object

\n

The add_fields processor adds static key-value fields to logs.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[object]

\n

A list of static fields (key-value pairs) that are added to each log event processed by this component.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The field name.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The field value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be add_fields. \nAllowed enum values: add_fields

default: add_fields

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 5

\n
\n

object

\n

The remove_fields processor deletes specified fields from logs.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

A list of field names to be removed from each log event.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be remove_fields. \nAllowed enum values: remove_fields

default: remove_fields

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6

\n
\n

object

\n

The rename_fields processor changes field names.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[object]

\n

A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.

\n
\n
\n
\n
\n
\n

destination [required]

\n
\n

string

\n

The field name to assign the renamed value to.

\n
\n \n
\n
\n
\n
\n
\n

preserve_source [required]

\n
\n

boolean

\n

Indicates whether the original field, as received from the source, should be kept (true) or removed (false) after renaming.

\n
\n \n
\n
\n
\n
\n
\n

source [required]

\n
\n

string

\n

The original field name in the log event that should be renamed.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be rename_fields. \nAllowed enum values: rename_fields

default: rename_fields

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 7

\n
\n

object

\n

The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog.\nMetrics can be counters, gauges, or distributions and optionally grouped by log fields.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

metrics [required]

\n
\n

[object]

\n

Configuration for generating individual metrics.

\n
\n
\n
\n
\n
\n

group_by

\n
\n

[string]

\n

Optional fields used to group the metric series.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

Datadog filter query to match logs for metric generation.

\n
\n \n
\n
\n
\n
\n
\n

metric_type [required]

\n
\n

enum

\n

Type of metric to create. \nAllowed enum values: count,gauge,distribution

\n
\n \n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

Name of the custom metric to be created.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

 <oneOf>

\n

Specifies how the value of the generated metric is computed.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Strategy that increments a generated metric by one for each matching event.

\n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

Increments the metric by 1 for each matching event. \nAllowed enum values: increment_by_one

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Strategy that increments a generated metric based on the value of a log field.

\n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

Name of the log field containing the numeric value to increment the metric by.

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

Uses a numeric field in the log event as the metric increment. \nAllowed enum values: increment_by_field

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. Always generate_datadog_metrics. \nAllowed enum values: generate_datadog_metrics

default: generate_datadog_metrics

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
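A sketch showing both value strategies described above: one count metric incremented per matching event, and one distribution fed from a numeric log field. Metric names, queries, and the field path are illustrative placeholders:

```json
{
  "id": "gen-metrics-1",
  "type": "generate_datadog_metrics",
  "include": "*",
  "inputs": ["my-source"],
  "metrics": [
    {
      "name": "logs.checkout.errors",
      "metric_type": "count",
      "include": "service:checkout status:error",
      "group_by": ["env"],
      "value": { "strategy": "increment_by_one" }
    },
    {
      "name": "logs.response.bytes",
      "metric_type": "distribution",
      "include": "service:checkout",
      "value": { "strategy": "increment_by_field", "field": "network.bytes_written" }
    }
  ]
}
```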

Option 8

\n
\n

object

\n

The sample processor allows probabilistic sampling of logs at a fixed rate.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

percentage

\n
\n

double

\n

The percentage of logs to sample.

\n
\n \n
\n
\n
\n
\n
\n

rate

\n
\n

int64

\n

Number of events to sample (1 in N).

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be sample. \nAllowed enum values: sample

default: sample

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 9

\n
\n

object

\n

The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.

\n
\n
\n
\n
\n
\n

disable_library_rules

\n
\n

boolean

\n

If set to true, disables the default Grok rules provided by Datadog.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

A unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

rules [required]

\n
\n

[object]

\n

The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.

\n
\n
\n
\n
\n
\n

match_rules [required]

\n
\n

[object]

\n

A list of Grok parsing rules that define how to extract fields from the source field.\nEach rule must contain a name and a valid Grok pattern.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the rule.

\n
\n \n
\n
\n
\n
\n
\n

rule [required]

\n
\n

string

\n

The definition of the Grok rule.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

source [required]

\n
\n

string

\n

The name of the field in the log event to apply the Grok rules to.

\n
\n \n
\n
\n
\n
\n
\n

support_rules [required]

\n
\n

[object]

\n

A list of Grok helper rules that can be referenced by the parsing rules.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the Grok helper rule.

\n
\n \n
\n
\n
\n
\n
\n

rule [required]

\n
\n

string

\n

The definition of the Grok helper rule.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be parse_grok. \nAllowed enum values: parse_grok

default: parse_grok

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
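An illustrative parse_grok entry showing where match_rules and support_rules sit in the structure; the rule names and Grok patterns below are placeholders, not patterns taken from the spec:

```json
{
  "id": "parse-grok-1",
  "type": "parse_grok",
  "include": "source:nginx",
  "inputs": ["datadog-agent-1"],
  "disable_library_rules": false,
  "rules": [
    {
      "source": "message",
      "match_rules": [
        { "name": "access_line", "rule": "%{client_ip} %{word:http.method} %{notSpace:http.url}" }
      ],
      "support_rules": [
        { "name": "client_ip", "rule": "%{ipv4:network.client.ip}" }
      ]
    }
  ]
}
```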

Option 10

\n
\n

object

\n

The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

rules [required]

\n
\n

[object]

\n

A list of rules for identifying and acting on sensitive data patterns.

\n
\n
\n
\n
\n
\n

keyword_options

\n
\n

object

\n

Configuration for keywords used to reinforce sensitive data pattern detection.

\n
\n
\n
\n
\n
\n

keywords [required]

\n
\n

[string]

\n

A list of keywords to match near the sensitive pattern.

\n
\n \n
\n
\n
\n
\n
\n

proximity [required]

\n
\n

int64

\n

Maximum number of tokens between a keyword and a sensitive value match.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

A name identifying the rule.

\n
\n \n
\n
\n
\n
\n
\n

on_match [required]

\n
\n

 <oneOf>

\n

Defines what action to take when sensitive data is matched.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Configuration for completely redacting matched sensitive data.

\n
\n
\n
\n
\n
\n

action [required]

\n
\n

enum

\n

Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. \nAllowed enum values: redact

\n
\n \n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Configuration for fully redacting sensitive data.

\n
\n
\n
\n
\n
\n

replace [required]

\n
\n

string

\n

The string used to replace the matched sensitive data.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Configuration for hashing matched sensitive values.

\n
\n
\n
\n
\n
\n

action [required]

\n
\n

enum

\n

Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. \nAllowed enum values: hash

\n
\n \n
\n
\n
\n
\n
\n

options

\n
\n

object

\n

Configuration options for the hash action.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

Configuration for partially redacting matched sensitive data.

\n
\n
\n
\n
\n
\n

action [required]

\n
\n

enum

\n

Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). \nAllowed enum values: partial_redact

\n
\n \n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Controls how partial redaction is applied, including character count and direction.

\n
\n
\n
\n
\n
\n

characters [required]

\n
\n

int64

\n

The number of characters to redact in the matched value.

\n
\n \n
\n
\n
\n
\n
\n

direction [required]

\n
\n

enum

\n

Indicates whether to redact characters from the first or last part of the matched value. \nAllowed enum values: first,last

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n

pattern [required]

\n
\n

 <oneOf>

\n

Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Defines a custom regex-based pattern for identifying sensitive data in logs.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Options for defining a custom regex pattern.

\n
\n
\n
\n
\n
\n

rule [required]

\n
\n

string

\n

A regular expression used to detect sensitive values. Must be a valid regex.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Indicates a custom regular expression is used for matching. \nAllowed enum values: custom

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Options for selecting a predefined library pattern and enabling keyword support.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Identifier for a predefined pattern from the sensitive data scanner pattern library.

\n
\n \n
\n
\n
\n
\n
\n

use_recommended_keywords

\n
\n

boolean

\n

Whether to augment the pattern with recommended keywords (optional).

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Indicates that a predefined library pattern is used. \nAllowed enum values: library

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

scope [required]

\n
\n

 <oneOf>

\n

Determines which parts of the log the pattern-matching rule should be applied to.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Includes only specific fields for sensitive data scanning.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Fields to which the scope rule applies.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

The list of field paths that the scope rule applies to.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

target [required]

\n
\n

enum

\n

Applies the rule only to included fields. \nAllowed enum values: include

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Excludes specific fields from sensitive data scanning.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Fields to which the scope rule applies.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

The list of field paths that the scope rule applies to.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

target [required]

\n
\n

enum

\n

Excludes specific fields from processing. \nAllowed enum values: exclude

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

Applies scanning across all available fields.

\n
\n
\n
\n
\n
\n

target [required]

\n
\n

enum

\n

Applies the rule to all fields. \nAllowed enum values: all

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

tags [required]

\n
\n

[string]

\n

Tags assigned to this rule for filtering and classification.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be sensitive_data_scanner. \nAllowed enum values: sensitive_data_scanner

default: sensitive_data_scanner

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
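Because each rule nests three oneOf blocks (pattern, scope, on_match), a worked sketch helps; every value below is an illustrative placeholder combining the custom pattern, all-fields scope, and redact action described above:

```json
{
  "id": "sds-1",
  "type": "sensitive_data_scanner",
  "include": "*",
  "inputs": ["parse-grok-1"],
  "rules": [
    {
      "name": "redact-api-keys",
      "tags": ["sensitive:api-key"],
      "pattern": { "type": "custom", "options": { "rule": "sk-[A-Za-z0-9]{16}" } },
      "scope": { "target": "all" },
      "on_match": { "action": "redact", "options": { "replace": "[REDACTED]" } },
      "keyword_options": { "keywords": ["api_key", "secret"], "proximity": 10 }
    }
  ]
}
```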

Option 11

\n
\n

object

\n

The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

mappings [required]

\n
\n

[object]

\n

A list of mapping rules to convert events to the OCSF format.

\n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to select the logs that this mapping should apply to.

\n
\n \n
\n
\n
\n
\n
\n

mapping [required]

\n
\n

 <oneOf>

\n

Defines a single mapping rule for transforming logs into the OCSF schema.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

enum

\n

Predefined library mappings for common log formats. \nAllowed enum values: CloudTrail Account Change,GCP Cloud Audit CreateBucket,GCP Cloud Audit CreateSink,GCP Cloud Audit SetIamPolicy,GCP Cloud Audit UpdateSink,Github Audit Log API Activity,Google Workspace Admin Audit addPrivilege,Microsoft 365 Defender Incident,Microsoft 365 Defender UserLoggedIn,Okta System Log Authentication,Palo Alto Networks Firewall Traffic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be ocsf_mapper. \nAllowed enum values: ocsf_mapper

default: ocsf_mapper

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
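An illustrative ocsf_mapper entry using one of the library mappings enumerated above; the IDs and queries are placeholders:

```json
{
  "id": "ocsf-mapper-1",
  "type": "ocsf_mapper",
  "include": "source:cloudtrail",
  "inputs": ["filter-1"],
  "mappings": [
    {
      "include": "service:cloudtrail",
      "mapping": "CloudTrail Account Change"
    }
  ]
}
```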

Option 12

\n
\n

object

\n

The add_env_vars processor adds environment variable values to log events.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this processor in the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be add_env_vars. \nAllowed enum values: add_env_vars

default: add_env_vars

\n
\n \n
\n
\n
\n
\n
\n

variables [required]

\n
\n

[object]

\n

A list of environment variable mappings to apply to log fields.

\n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

The target field in the log event.

\n
\n \n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the environment variable to read.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
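A minimal illustrative add_env_vars entry; the field path and environment variable name are placeholders:

```json
{
  "id": "add-env-vars-1",
  "type": "add_env_vars",
  "include": "*",
  "inputs": ["datadog-agent-1"],
  "variables": [
    { "field": "deployment.region", "name": "AWS_REGION" }
  ]
}
```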

Option 13

\n
\n

object

\n

The dedupe processor removes duplicate fields in log events.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

A list of log field paths to check for duplicates.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

The deduplication mode to apply to the fields. \nAllowed enum values: match,ignore

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be dedupe. \nAllowed enum values: dedupe

default: dedupe

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
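A minimal illustrative dedupe entry, with placeholder IDs and field paths; mode: match deduplicates on the listed fields:

```json
{
  "id": "dedupe-1",
  "type": "dedupe",
  "include": "*",
  "inputs": ["parse-json-1"],
  "fields": ["log.message", "log.host"],
  "mode": "match"
}
```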

Option 14

\n
\n

object

\n

The enrichment_table processor enriches logs using a static CSV file or GeoIP database.

\n
\n
\n
\n
\n
\n

file

\n
\n

object

\n

Defines a static enrichment table loaded from a CSV file.

\n
\n
\n
\n
\n
\n

encoding [required]

\n
\n

object

\n

File encoding format.

\n
\n
\n
\n
\n
\n

delimiter [required]

\n
\n

string

\n

The delimiter used to separate values in the file (for example, a comma for CSV).

\n
\n \n
\n
\n
\n
\n
\n

includes_headers [required]

\n
\n

boolean

\n

Whether the file includes a header row.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Specifies the encoding format (e.g., CSV) used for enrichment tables. \nAllowed enum values: csv

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

key [required]

\n
\n

[object]

\n

Key fields used to look up enrichment values.

\n
\n
\n
\n
\n
\n

column [required]

\n
\n

string

\n

The name of the enrichment table column to match against.

\n
\n \n
\n
\n
\n
\n
\n

comparison [required]

\n
\n

enum

\n

Defines how to compare key fields for enrichment table lookups. \nAllowed enum values: equals

\n
\n \n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

The path of the log field whose value is compared to the column.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

path [required]

\n
\n

string

\n

Path to the CSV file.

\n
\n \n
\n
\n
\n
\n
\n

schema [required]

\n
\n

[object]

\n

Schema defining column names and their types.

\n
\n
\n
\n
\n
\n

column [required]

\n
\n

string

\n

The name of a column in the CSV file.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Declares allowed data types for enrichment table columns. \nAllowed enum values: string,boolean,integer,float,date,timestamp

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

geoip

\n
\n

object

\n

Uses a GeoIP database to enrich logs based on an IP field.

\n
\n
\n
\n
\n
\n

key_field [required]

\n
\n

string

\n

Path to the IP field in the log.

\n
\n \n
\n
\n
\n
\n
\n

locale [required]

\n
\n

string

\n

Locale used to resolve geographical names.

\n
\n \n
\n
\n
\n
\n
\n

path [required]

\n
\n

string

\n

Path to the GeoIP database file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

target [required]

\n
\n

string

\n

Path where enrichment results should be stored in the log.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be enrichment_table. \nAllowed enum values: enrichment_table

default: enrichment_table

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
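Since file and geoip are alternative lookup modes, a file-based sketch shows how encoding, key, and schema fit together; the path, columns, and field names are illustrative placeholders:

```json
{
  "id": "enrichment-1",
  "type": "enrichment_table",
  "include": "*",
  "inputs": ["filter-1"],
  "target": "enriched",
  "file": {
    "path": "/etc/enrichment/hosts.csv",
    "encoding": { "type": "csv", "delimiter": ",", "includes_headers": true },
    "key": [
      { "column": "hostname", "comparison": "equals", "field": "host" }
    ],
    "schema": [
      { "column": "hostname", "type": "string" },
      { "column": "owner", "type": "string" }
    ]
  }
}
```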

Option 15

\n
\n

object

\n

The reduce processor aggregates and merges logs based on matching keys and merge strategies.

\n
\n
\n
\n
\n
\n

group_by [required]

\n
\n

[string]

\n

A list of fields used to group log events for merging.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

merge_strategies [required]

\n
\n

[object]

\n

List of merge strategies defining how values from grouped events should be combined.

\n
\n
\n
\n
\n
\n

path [required]

\n
\n

string

\n

The field path in the log event.

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

The merge strategy to apply. \nAllowed enum values: discard,retain,sum,max,min,array,concat,concat_newline,concat_raw,shortest_array,longest_array,flat_unique

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be reduce. \nAllowed enum values: reduce

default: reduce

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
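An illustrative reduce entry, assuming placeholder IDs and field paths; events sharing the same group_by values are merged using the listed strategies:

```json
{
  "id": "reduce-1",
  "type": "reduce",
  "include": "*",
  "inputs": ["filter-1"],
  "group_by": ["log.request_id"],
  "merge_strategies": [
    { "path": "log.duration_ms", "strategy": "sum" },
    { "path": "log.message", "strategy": "concat_newline" }
  ]
}
```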

Option 16

\n
\n

object

\n

The throttle processor limits the number of events that pass through over a given time window.

\n
\n
\n
\n
\n
\n

group_by

\n
\n

[string]

\n

Optional list of fields used to group events before applying the threshold.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

threshold [required]

\n
\n

int64

\n

The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be throttle. \nAllowed enum values: throttle

default: throttle

\n
\n \n
\n
\n
\n
\n
\n

window [required]

\n
\n

double

\n

The time window in seconds over which the threshold applies.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
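A minimal illustrative throttle entry with placeholder values: at most 1000 events per 60-second window, grouped by service:

```json
{
  "id": "throttle-1",
  "type": "throttle",
  "include": "*",
  "inputs": ["filter-1"],
  "threshold": 1000,
  "window": 60.0,
  "group_by": ["service"]
}
```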

Option 17

\n
\n

object

\n

The datadog_tags processor includes or excludes specific Datadog tags in your logs.

\n
\n
\n
\n
\n
\n

action [required]

\n
\n

enum

\n

The action to take on tags with matching keys. \nAllowed enum values: include,exclude

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

keys [required]

\n
\n

[string]

\n

A list of tag keys.

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

The processing mode. \nAllowed enum values: filter

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be datadog_tags. \nAllowed enum values: datadog_tags

default: datadog_tags

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
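A minimal illustrative datadog_tags entry with placeholder IDs; this sketch keeps only the listed tag keys:

```json
{
  "id": "datadog-tags-1",
  "type": "datadog_tags",
  "include": "*",
  "inputs": ["filter-1"],
  "mode": "filter",
  "action": "include",
  "keys": ["env", "service", "version"]
}
```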

sources [required]

\n
\n

[ <oneOf>]

\n

A list of configured data sources for the pipeline.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

The kafka source ingests data from Apache Kafka topics.

\n
\n
\n
\n
\n
\n

group_id [required]

\n
\n

string

\n

Consumer group ID used by the Kafka client.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

librdkafka_options

\n
\n

[object]

\n

Optional list of advanced Kafka client configuration options, defined as key-value pairs.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the librdkafka configuration option to set.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The value assigned to the specified librdkafka configuration option.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

sasl

\n
\n

object

\n

Specifies the SASL mechanism for authenticating with a Kafka cluster.

\n
\n
\n
\n
\n
\n

mechanism

\n
\n

enum

\n

SASL mechanism used for Kafka authentication. \nAllowed enum values: PLAIN,SCRAM-SHA-256,SCRAM-SHA-512

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

topics [required]

\n
\n

[string]

\n

A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be kafka. \nAllowed enum values: kafka

default: kafka

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
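An illustrative kafka source entry as it might appear in the sources array; the group ID, topic, certificate paths, and librdkafka option are placeholder values:

```json
{
  "id": "kafka-source-1",
  "type": "kafka",
  "group_id": "consumer-group-1",
  "topics": ["app-logs"],
  "sasl": { "mechanism": "SCRAM-SHA-256" },
  "tls": {
    "crt_file": "/etc/certs/client.crt",
    "key_file": "/etc/certs/client.key",
    "ca_file": "/etc/certs/ca.crt"
  },
  "librdkafka_options": [
    { "name": "fetch.message.max.bytes", "value": "1048576" }
  ]
}
```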

Option 2

\n
\n

object

\n

The datadog_agent source collects logs from the Datadog Agent.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be datadog_agent. \nAllowed enum values: datadog_agent

default: datadog_agent

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP.\nTLS is supported for secure transmission.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always splunk_tcp. \nAllowed enum values: splunk_tcp

default: splunk_tcp

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 4

\n
\n

object

\n

The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always splunk_hec. \nAllowed enum values: splunk_hec

default: splunk_hec

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 5

\n
\n

object

\n

The amazon_s3 source ingests logs from an Amazon S3 bucket.\nIt supports AWS authentication and TLS encryption.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

string

\n

AWS region where the S3 bucket resides.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always amazon_s3. \nAllowed enum values: amazon_s3

default: amazon_s3

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6

\n
\n

object

\n

The fluentd source ingests logs from a Fluentd-compatible service.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be fluentd. \nAllowed enum values: fluentd

default: fluentd

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 7

\n
\n

object

\n

The fluent_bit source ingests logs from Fluent Bit.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be fluent_bit. \nAllowed enum values: fluent_bit

default: fluent_bit

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 8

\n
\n

object

\n

The http_server source collects logs over HTTP POST from external services.

\n
\n
\n
\n
\n
\n

auth_strategy [required]

\n
\n

enum

\n

HTTP authentication method. \nAllowed enum values: none,plain

\n
\n \n
\n
\n
\n
\n
\n

decoding [required]

\n
\n

enum

\n

The decoding format used to interpret incoming logs. \nAllowed enum values: bytes,gelf,json,syslog

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique ID for the HTTP server source.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be http_server. \nAllowed enum values: http_server

default: http_server

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
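A minimal illustrative http_server source entry; the ID is a placeholder, and the optional tls block is omitted:

```json
{
  "id": "http-server-1",
  "type": "http_server",
  "auth_strategy": "plain",
  "decoding": "json"
}
```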

Option 9

\n
\n

object

\n

The sumo_logic source receives logs from Sumo Logic collectors.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be sumo_logic. \nAllowed enum values: sumo_logic

default: sumo_logic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 10

\n
\n

object

\n

The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

Protocol used by the syslog source to receive messages. \nAllowed enum values: tcp,udp

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be rsyslog. \nAllowed enum values: rsyslog

default: rsyslog

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 11

\n
\n

object

\n

The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

Protocol used by the syslog source to receive messages. \nAllowed enum values: tcp,udp

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be syslog_ng. \nAllowed enum values: syslog_ng

default: syslog_ng

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 12

\n
\n

object

\n

The amazon_data_firehose source ingests logs from AWS Data Firehose.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be amazon_data_firehose. \nAllowed enum values: amazon_data_firehose

default: amazon_data_firehose

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 13

\n
\n

object

\n

The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

GCP credentials used to authenticate with Google Cloud services.

\n
\n
\n
\n
\n
\n

credentials_file [required]

\n
\n

string

\n

Path to the GCP service account key file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

decoding [required]

\n
\n

enum

\n

The decoding format used to interpret incoming logs. \nAllowed enum values: bytes,gelf,json,syslog

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

project [required]

\n
\n

string

\n

The GCP project ID that owns the Pub/Sub subscription.

\n
\n \n
\n
\n
\n
\n
\n

subscription [required]

\n
\n

string

\n

The Pub/Sub subscription name from which messages are consumed.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be google_pubsub. \nAllowed enum values: google_pubsub

default: google_pubsub

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
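An illustrative google_pubsub source entry; the project, subscription, and credentials path are placeholder values:

```json
{
  "id": "pubsub-source-1",
  "type": "google_pubsub",
  "project": "my-gcp-project",
  "subscription": "log-ingest-sub",
  "decoding": "json",
  "auth": { "credentials_file": "/var/secrets/gcp-key.json" }
}
```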

Option 14

\n
\n

object

\n

The http_client source scrapes logs from HTTP endpoints at regular intervals.

\n
\n
\n
\n
\n
\n

auth_strategy

\n
\n

enum

\n

Optional authentication strategy for HTTP requests. \nAllowed enum values: basic,bearer

\n
\n \n
\n
\n
\n
\n
\n

decoding [required]

\n
\n

enum

\n

The decoding format used to interpret incoming logs. \nAllowed enum values: bytes,gelf,json,syslog

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

scrape_interval_secs

\n
\n

int64

\n

The interval (in seconds) between HTTP scrape requests.

\n
\n \n
\n
\n
\n
\n
\n

scrape_timeout_secs

\n
\n

int64

\n

The timeout (in seconds) for each scrape request.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be http_client. \nAllowed enum values: http_client

default: http_client

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 15

\n
\n

object

\n

The logstash source ingests logs from a Logstash forwarder.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be logstash. \nAllowed enum values: logstash

default: logstash

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

Name of the pipeline.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

string

\n

The resource type identifier. For pipeline resources, this should always be set to pipelines.

default: pipelines

\n
\n \n
\n
\n
\n
" } }, "DeletePipeline": { @@ -497,7 +497,7 @@ "type": "pipelines" } }, - "html": "
\n
\n
\n
\n

data [required]

\n
\n

object

\n

Contains the pipeline’s ID, type, and configuration attributes.

\n
\n
\n
\n
\n
\n

attributes [required]

\n
\n

object

\n

Defines the pipeline’s name and its components (sources, processors, and destinations).

\n
\n
\n
\n
\n
\n

config [required]

\n
\n

object

\n

Specifies the pipeline's configuration, including its sources, processors, and destinations.

\n
\n
\n
\n
\n
\n

destinations [required]

\n
\n

[ <oneOf>]

\n

A list of destination components where processed logs are sent.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

The datadog_logs destination forwards logs to Datadog Log Management.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be datadog_logs. \nAllowed enum values: datadog_logs

default: datadog_logs

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bucket [required]

\n
\n

string

\n

S3 bucket name.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique identifier for the destination component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

key_prefix

\n
\n

string

\n

Optional prefix for object keys.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

string

\n

AWS region of the S3 bucket.

\n
\n \n
\n
\n
\n
\n
\n

storage_class [required]

\n
\n

enum

\n

S3 storage class. \nAllowed enum values: STANDARD,REDUCED_REDUNDANCY,INTELLIGENT_TIERING,STANDARD_IA,EXPRESS_ONEZONE,ONEZONE_IA,GLACIER,GLACIER_IR,DEEP_ARCHIVE

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always amazon_s3. \nAllowed enum values: amazon_s3

default: amazon_s3

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket.\nIt requires a bucket name, GCP authentication, and metadata fields.

\n
\n
\n
\n
\n
\n

acl [required]

\n
\n

enum

\n

Access control list setting for objects written to the bucket. \nAllowed enum values: private,project-private,public-read,authenticated-read,bucket-owner-read,bucket-owner-full-control

\n
\n \n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

GCP credentials used to authenticate with Google Cloud Storage.

\n
\n
\n
\n
\n
\n

credentials_file [required]

\n
\n

string

\n

Path to the GCP service account key file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bucket [required]

\n
\n

string

\n

Name of the GCS bucket.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique identifier for the destination component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

key_prefix

\n
\n

string

\n

Optional prefix for object keys within the GCS bucket.

\n
\n \n
\n
\n
\n
\n
\n

metadata

\n
\n

[object]

\n

Custom metadata to attach to each object uploaded to the GCS bucket.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The metadata key.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The metadata value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

storage_class [required]

\n
\n

enum

\n

Storage class used for objects stored in GCS. \nAllowed enum values: STANDARD,NEARLINE,COLDLINE,ARCHIVE

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always google_cloud_storage. \nAllowed enum values: google_cloud_storage

default: google_cloud_storage

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
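An illustrative google_cloud_storage destination entry; the bucket, prefix, credentials path, and metadata values are placeholders:

```json
{
  "id": "gcs-dest-1",
  "type": "google_cloud_storage",
  "inputs": ["sds-1"],
  "bucket": "my-log-archive",
  "acl": "project-private",
  "storage_class": "NEARLINE",
  "key_prefix": "pipeline-logs/",
  "auth": { "credentials_file": "/var/secrets/gcp-key.json" },
  "metadata": [
    { "name": "team", "value": "platform" }
  ]
}
```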

Option 4

\n
\n

object

\n

The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).

\n
\n
\n
\n
\n
\n

auto_extract_timestamp

\n
\n

boolean

\n

If true, Splunk tries to extract timestamps from incoming log events.\nIf false, Splunk assigns the time the event was received.

\n
\n \n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

Encoding format for log events. \nAllowed enum values: json,raw_message

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

index

\n
\n

string

\n

Optional name of the Splunk index where logs are written.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

sourcetype

\n
\n

string

\n

The Splunk sourcetype to assign to log events.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always splunk_hec. \nAllowed enum values: splunk_hec

default: splunk_hec

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
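A minimal illustrative splunk_hec destination entry; the index and sourcetype are placeholder values:

```json
{
  "id": "splunk-hec-dest-1",
  "type": "splunk_hec",
  "inputs": ["filter-1"],
  "encoding": "json",
  "index": "main",
  "sourcetype": "observability_pipelines",
  "auto_extract_timestamp": true
}
```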

Option 5

\n
\n

object

\n

The sumo_logic destination forwards logs to Sumo Logic.

\n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

The output encoding format. \nAllowed enum values: json,raw_message,logfmt

\n
\n \n
\n
\n
\n
\n
\n

header_custom_fields

\n
\n

[object]

\n

A list of custom headers to include in the request to Sumo Logic.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The header field name.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The header field value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

header_host_name

\n
\n

string

\n

Optional override for the host name header.

\n
\n \n
\n
\n
\n
\n
\n

header_source_category

\n
\n

string

\n

Optional override for the source category header.

\n
\n \n
\n
\n
\n
\n
\n

header_source_name

\n
\n

string

\n

Optional override for the source name header.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be sumo_logic. \nAllowed enum values: sumo_logic

default: sumo_logic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6

\n
\n

object

\n

The elasticsearch destination writes logs to an Elasticsearch cluster.

\n
\n
\n
\n
\n
\n

api_version

\n
\n

enum

\n

The Elasticsearch API version to use. Set to auto to auto-detect. \nAllowed enum values: auto,v6,v7,v8

\n
\n \n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to in Elasticsearch.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be elasticsearch. \nAllowed enum values: elasticsearch

default: elasticsearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 7

\n
\n

object

\n

The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

keepalive

\n
\n

int64

\n

Optional socket keepalive duration in milliseconds.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be rsyslog. \nAllowed enum values: rsyslog

default: rsyslog

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 8

\n
\n

object

\n

The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

keepalive

\n
\n

int64

\n

Optional socket keepalive duration in milliseconds.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be syslog_ng. \nAllowed enum values: syslog_ng

default: syslog_ng

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 9

\n
\n

object

\n

The azure_storage destination forwards logs to an Azure Blob Storage container.

\n
\n
\n
\n
\n
\n

blob_prefix

\n
\n

string

\n

Optional prefix for blobs written to the container.

\n
\n \n
\n
\n
\n
\n
\n

container_name [required]

\n
\n

string

\n

The name of the Azure Blob Storage container to store logs in.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be azure_storage. \nAllowed enum values: azure_storage

default: azure_storage

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 10

\n
\n

object

\n

The microsoft_sentinel destination forwards logs to Microsoft Sentinel.

\n
\n
\n
\n
\n
\n

client_id [required]

\n
\n

string

\n

Azure AD client ID used for authentication.

\n
\n \n
\n
\n
\n
\n
\n

dcr_immutable_id [required]

\n
\n

string

\n

The immutable ID of the Data Collection Rule (DCR).

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

table [required]

\n
\n

string

\n

The name of the Log Analytics table where logs are sent.

\n
\n \n
\n
\n
\n
\n
\n

tenant_id [required]

\n
\n

string

\n

Azure AD tenant ID.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be microsoft_sentinel. \nAllowed enum values: microsoft_sentinel

default: microsoft_sentinel

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
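An illustrative microsoft_sentinel destination entry; the GUIDs, DCR ID, and table name below are placeholders:

```json
{
  "id": "sentinel-dest-1",
  "type": "microsoft_sentinel",
  "inputs": ["filter-1"],
  "client_id": "00000000-0000-0000-0000-000000000000",
  "tenant_id": "00000000-0000-0000-0000-000000000000",
  "dcr_immutable_id": "dcr-000000000000",
  "table": "Custom-OPLogs"
}
```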

Option 11

\n
\n

object

\n

The google_chronicle destination sends logs to Google Chronicle.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

GCP credentials used to authenticate with Google Cloud services.

\n
\n
\n
\n
\n
\n

credentials_file [required]

\n
\n

string

\n

Path to the GCP service account key file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

customer_id [required]

\n
\n

string

\n

The Google Chronicle customer ID.

\n
\n \n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

The encoding format for the logs sent to Chronicle. \nAllowed enum values: json,raw_message

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

log_type

\n
\n

string

\n

The log type metadata associated with the Chronicle destination.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be google_chronicle. \nAllowed enum values: google_chronicle

default: google_chronicle

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 12

\n
\n

object

\n

The new_relic destination sends logs to the New Relic platform.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

enum

\n

The New Relic region. \nAllowed enum values: us,eu

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be new_relic. \nAllowed enum values: new_relic

default: new_relic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 13

\n
\n

object

\n

The sentinel_one destination sends logs to SentinelOne.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

enum

\n

The SentinelOne region to send logs to. \nAllowed enum values: us,eu,ca,data_set_us

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be sentinel_one. \nAllowed enum values: sentinel_one

default: sentinel_one

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 14

\n
\n

object

\n

The opensearch destination writes logs to an OpenSearch cluster.

\n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be opensearch. \nAllowed enum values: opensearch

default: opensearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 15

\n
\n

object

\n

The amazon_opensearch destination writes logs to Amazon OpenSearch.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

Authentication settings for the Amazon OpenSearch destination.\nThe strategy field determines whether basic or AWS-based authentication is used.

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The ARN of the role to assume (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

aws_region

\n
\n

string

\n

The AWS region.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

External ID for the assumed role (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

Session name for the assumed role (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

The authentication strategy to use. \nAllowed enum values: basic,aws

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be amazon_opensearch. \nAllowed enum values: amazon_opensearch

default: amazon_opensearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
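To make the auth strategies concrete, here is a minimal sketch of an amazon_opensearch destination entry using the aws strategy. All IDs, ARNs, and values below are illustrative placeholders, not values taken from this spec:

    {
        "id": "amazon-opensearch-dest",
        "type": "amazon_opensearch",
        "inputs": ["my-filter-processor"],
        "bulk_index": "pipeline-logs",
        "auth": {
            "strategy": "aws",
            "aws_region": "us-east-1",
            "assume_role": "arn:aws:iam::123456789012:role/example-opensearch-writer",
            "external_id": "example-external-id",
            "session_name": "pipeline-session"
        }
    }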

processors ([ <oneOf>]): A list of processors that transform or enrich log data.

Option 1 (object): The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    type [required] (enum): The processor type. The value should always be filter. Allowed enum values: filter. default: filter
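A minimal sketch of a filter processor entry under this schema; the component IDs and the search query are illustrative assumptions:

    {
        "id": "filter-errors",
        "type": "filter",
        "include": "service:web AND status:error",
        "inputs": ["datadog-agent-source"]
    }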

Option 2 (object): The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.
    field [required] (string): The name of the log field that contains a JSON string.
    id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    type [required] (enum): The processor type. The value should always be parse_json. Allowed enum values: parse_json. default: parse_json

Option 3 (object): The quota processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.
    drop_events [required] (boolean): If set to true, logs that matched the quota filter and are sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    ignore_when_missing_partitions (boolean): If true, the processor skips quota checks when partition fields are missing from the logs.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
        enforce [required] (enum): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes,events
        limit [required] (int64): The limit for quota enforcement.
    name [required] (string): Name of the quota.
    overflow_action (enum): The action to take when the quota is exceeded. Options: drop (drop the event), no_action (let the event pass through), and overflow_routing (route to an overflow destination). Allowed enum values: drop,no_action,overflow_routing
    overrides ([object]): A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.
        fields [required] ([object]): A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.
            name [required] (string): The field name.
            value [required] (string): The field value.
        limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
            enforce [required] (enum): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes,events
            limit [required] (int64): The limit for quota enforcement.
    partition_fields ([string]): A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.
    type [required] (enum): The processor type. The value should always be quota. Allowed enum values: quota. default: quota
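Putting the quota fields together, a hypothetical daily byte quota with a per-service override might look like the following sketch (the name, IDs, and limits are placeholders):

    {
        "id": "quota-daily-intake",
        "type": "quota",
        "name": "daily-intake",
        "include": "*",
        "inputs": ["filter-errors"],
        "drop_events": true,
        "partition_fields": ["service"],
        "limit": { "enforce": "bytes", "limit": 10000000000 },
        "overrides": [
            {
                "fields": [{ "name": "service", "value": "checkout" }],
                "limit": { "enforce": "events", "limit": 500000 }
            }
        ]
    }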

Option 4 (object): The add_fields processor adds static key-value fields to logs.
    fields [required] ([object]): A list of static fields (key-value pairs) that are added to each log event processed by this component.
        name [required] (string): The field name.
        value [required] (string): The field value.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    type [required] (enum): The processor type. The value should always be add_fields. Allowed enum values: add_fields. default: add_fields

Option 5 (object): The remove_fields processor deletes specified fields from logs.
    fields [required] ([string]): A list of field names to be removed from each log event.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    type [required] (enum): The processor type. The value should always be remove_fields. Allowed enum values: remove_fields. default: remove_fields

Option 6 (object): The rename_fields processor changes field names.
    fields [required] ([object]): A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.
        destination [required] (string): The field name to assign the renamed value to.
        preserve_source [required] (boolean): Indicates whether the original field received from the source should be kept (true) or removed (false) after renaming.
        source [required] (string): The original field name in the log event that should be renamed.
    id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    type [required] (enum): The processor type. The value should always be rename_fields. Allowed enum values: rename_fields. default: rename_fields

Option 7 (object): The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions, optionally grouped by log fields.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    metrics [required] ([object]): Configuration for generating individual metrics.
        group_by ([string]): Optional fields used to group the metric series.
        include [required] (string): Datadog filter query to match logs for metric generation.
        metric_type [required] (enum): Type of metric to create. Allowed enum values: count,gauge,distribution
        name [required] (string): Name of the custom metric to be created.
        value [required] (<oneOf>): Specifies how the value of the generated metric is computed.
            Option 1 (object): Strategy that increments a generated metric by one for each matching event.
                strategy [required] (enum): Increments the metric by 1 for each matching event. Allowed enum values: increment_by_one
            Option 2 (object): Strategy that increments a generated metric based on the value of a log field.
                field [required] (string): Name of the log field containing the numeric value to increment the metric by.
                strategy [required] (enum): Uses a numeric field in the log event as the metric increment. Allowed enum values: increment_by_field
    type [required] (enum): The processor type. Always generate_datadog_metrics. Allowed enum values: generate_datadog_metrics. default: generate_datadog_metrics
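For example, the two value strategies could be combined in one processor as in the sketch below; the metric names, the numeric field duration_ms, and the component IDs are assumptions for illustration:

    {
        "id": "generate-metrics",
        "type": "generate_datadog_metrics",
        "include": "*",
        "inputs": ["quota-daily-intake"],
        "metrics": [
            {
                "name": "pipeline.requests.count",
                "metric_type": "count",
                "include": "service:web",
                "group_by": ["status"],
                "value": { "strategy": "increment_by_one" }
            },
            {
                "name": "pipeline.request.duration",
                "metric_type": "distribution",
                "include": "service:web",
                "value": { "strategy": "increment_by_field", "field": "duration_ms" }
            }
        ]
    }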

Option 8 (object): The sample processor allows probabilistic sampling of logs at a fixed rate.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    percentage (double): The percentage of logs to sample.
    rate (int64): Number of events to sample (1 in N).
    type [required] (enum): The processor type. The value should always be sample. Allowed enum values: sample. default: sample
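The schema lists both percentage and rate as optional, presumably as alternative ways to express the sampling ratio. A minimal sketch keeping roughly 1 in 10 matching events (IDs are placeholders):

    {
        "id": "sample-info-logs",
        "type": "sample",
        "include": "status:info",
        "inputs": ["filter-errors"],
        "rate": 10
    }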

Option 9 (object): The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.
    disable_library_rules (boolean): If set to true, disables the default Grok rules provided by Datadog.
    id [required] (string): A unique identifier for this processor.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    rules [required] ([object]): The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.
        match_rules [required] ([object]): A list of Grok parsing rules that define how to extract fields from the source field. Each rule must contain a name and a valid Grok pattern.
            name [required] (string): The name of the rule.
            rule [required] (string): The definition of the Grok rule.
        source [required] (string): The name of the field in the log event to apply the Grok rules to.
        support_rules [required] ([object]): A list of Grok helper rules that can be referenced by the parsing rules.
            name [required] (string): The name of the Grok helper rule.
            rule [required] (string): The definition of the Grok helper rule.
    type [required] (enum): The processor type. The value should always be parse_grok. Allowed enum values: parse_grok. default: parse_grok
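A sketch of a parse_grok processor with one parsing rule and one helper rule. The pattern syntax shown is illustrative only; this schema does not prescribe a specific Grok dialect, and all IDs are placeholders:

    {
        "id": "parse-access-logs",
        "type": "parse_grok",
        "include": "source:nginx",
        "inputs": ["sample-info-logs"],
        "disable_library_rules": false,
        "rules": [
            {
                "source": "message",
                "match_rules": [
                    { "name": "access_line", "rule": "%{ipaddr:client} %{word:method} %{notSpace:path}" }
                ],
                "support_rules": [
                    { "name": "ipaddr", "rule": "%{ipv4}" }
                ]
            }
        ]
    }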

Option 10 (object): The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    rules [required] ([object]): A list of rules for identifying and acting on sensitive data patterns.
        keyword_options (object): Configuration for keywords used to reinforce sensitive data pattern detection.
            keywords [required] ([string]): A list of keywords to match near the sensitive pattern.
            proximity [required] (int64): Maximum number of tokens between a keyword and a sensitive value match.
        name [required] (string): A name identifying the rule.
        on_match [required] (<oneOf>): Defines what action to take when sensitive data is matched.
            Option 1 (object): Configuration for completely redacting matched sensitive data.
                action [required] (enum): Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. Allowed enum values: redact
                options [required] (object): Configuration for fully redacting sensitive data.
                    replace [required] (string): The replacement string used in place of the matched sensitive data.
            Option 2 (object): Configuration for hashing matched sensitive values.
                action [required] (enum): Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. Allowed enum values: hash
                options (object): Options for the hash action.
            Option 3 (object): Configuration for partially redacting matched sensitive data.
                action [required] (enum): Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). Allowed enum values: partial_redact
                options [required] (object): Controls how partial redaction is applied, including character count and direction.
                    characters [required] (int64): The number of characters to redact from the matched value.
                    direction [required] (enum): Indicates whether to redact characters from the first or last part of the matched value. Allowed enum values: first,last
        pattern [required] (<oneOf>): Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.
            Option 1 (object): Defines a custom regex-based pattern for identifying sensitive data in logs.
                options [required] (object): Options for defining a custom regex pattern.
                    rule [required] (string): A regular expression used to detect sensitive values. Must be a valid regex.
                type [required] (enum): Indicates a custom regular expression is used for matching. Allowed enum values: custom
            Option 2 (object): Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.
                options [required] (object): Options for selecting a predefined library pattern and enabling keyword support.
                    id [required] (string): Identifier for a predefined pattern from the sensitive data scanner pattern library.
                    use_recommended_keywords (boolean): Whether to augment the pattern with recommended keywords (optional).
                type [required] (enum): Indicates that a predefined library pattern is used. Allowed enum values: library
        scope [required] (<oneOf>): Determines which parts of the log the pattern-matching rule should be applied to.
            Option 1 (object): Includes only specific fields for sensitive data scanning.
                options [required] (object): Fields to which the scope rule applies.
                    fields [required] ([string]): The list of fields to scan.
                target [required] (enum): Applies the rule only to included fields. Allowed enum values: include
            Option 2 (object): Excludes specific fields from sensitive data scanning.
                options [required] (object): Fields to which the scope rule applies.
                    fields [required] ([string]): The list of fields to exclude from scanning.
                target [required] (enum): Excludes specific fields from processing. Allowed enum values: exclude
            Option 3 (object): Applies scanning across all available fields.
                target [required] (enum): Applies the rule to all fields. Allowed enum values: all
        tags [required] ([string]): Tags assigned to this rule for filtering and classification.
    type [required] (enum): The processor type. The value should always be sensitive_data_scanner. Allowed enum values: sensitive_data_scanner. default: sensitive_data_scanner
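Combining pattern, scope, keyword_options, and on_match, a hypothetical rule that partially redacts card-like numbers might look like the following sketch; the regex, proximity, and character counts are assumptions chosen for illustration:

    {
        "id": "scan-sensitive-data",
        "type": "sensitive_data_scanner",
        "include": "*",
        "inputs": ["parse-access-logs"],
        "rules": [
            {
                "name": "mask-card-numbers",
                "tags": ["sensitive_data:credit_card"],
                "pattern": { "type": "custom", "options": { "rule": "\\b\\d{13,16}\\b" } },
                "scope": { "target": "include", "options": { "fields": ["message"] } },
                "keyword_options": { "keywords": ["card", "pan"], "proximity": 5 },
                "on_match": {
                    "action": "partial_redact",
                    "options": { "characters": 12, "direction": "first" }
                }
            }
        ]
    }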

Option 11 (object): The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    mappings [required] ([object]): A list of mapping rules to convert events to the OCSF format.
        include [required] (string): A Datadog search query used to select the logs that this mapping should apply to.
        mapping [required] (<oneOf>): Defines a single mapping rule for transforming logs into the OCSF schema.
            Option 1 (enum): Predefined library mappings for common log formats. Allowed enum values: CloudTrail Account Change,GCP Cloud Audit CreateBucket,GCP Cloud Audit CreateSink,GCP Cloud Audit SetIamPolicy,GCP Cloud Audit UpdateSink,Github Audit Log API Activity,Google Workspace Admin Audit addPrivilege,Microsoft 365 Defender Incident,Microsoft 365 Defender UserLoggedIn,Okta System Log Authentication,Palo Alto Networks Firewall Traffic
    type [required] (enum): The processor type. The value should always be ocsf_mapper. Allowed enum values: ocsf_mapper. default: ocsf_mapper

Option 12 (object): The add_env_vars processor adds environment variable values to log events.
    id [required] (string): The unique identifier for this component. Used to reference this processor in the pipeline.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    type [required] (enum): The processor type. The value should always be add_env_vars. Allowed enum values: add_env_vars. default: add_env_vars
    variables [required] ([object]): A list of environment variable mappings to apply to log fields.
        field [required] (string): The target field in the log event.
        name [required] (string): The name of the environment variable to read.

Option 13 (object): The dedupe processor removes duplicate fields in log events.
    fields [required] ([string]): A list of log field paths to check for duplicates.
    id [required] (string): The unique identifier for this processor.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    mode [required] (enum): The deduplication mode to apply to the fields. Allowed enum values: match,ignore
    type [required] (enum): The processor type. The value should always be dedupe. Allowed enum values: dedupe. default: dedupe

Option 14 (object): The enrichment_table processor enriches logs using a static CSV file or GeoIP database.
    file (object): Defines a static enrichment table loaded from a CSV file.
        encoding [required] (object): File encoding format.
            delimiter [required] (string): The encoding delimiter.
            includes_headers [required] (boolean): Whether the CSV file includes a header row.
            type [required] (enum): Specifies the encoding format (e.g., CSV) used for enrichment tables. Allowed enum values: csv
        key [required] ([object]): Key fields used to look up enrichment values.
            column [required] (string): The enrichment table column to match against.
            comparison [required] (enum): Defines how to compare key fields for enrichment table lookups. Allowed enum values: equals
            field [required] (string): The log field whose value is compared to the table column.
        path [required] (string): Path to the CSV file.
        schema [required] ([object]): Schema defining column names and their types.
            column [required] (string): The column name.
            type [required] (enum): Declares allowed data types for enrichment table columns. Allowed enum values: string,boolean,integer,float,date,timestamp
    geoip (object): Uses a GeoIP database to enrich logs based on an IP field.
        key_field [required] (string): Path to the IP field in the log.
        locale [required] (string): Locale used to resolve geographical names.
        path [required] (string): Path to the GeoIP database file.
    id [required] (string): The unique identifier for this processor.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    target [required] (string): Path where enrichment results should be stored in the log.
    type [required] (enum): The processor type. The value should always be enrichment_table. Allowed enum values: enrichment_table. default: enrichment_table
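A sketch of the CSV variant, joining logs to a hypothetical owners.csv on the service field; the file path, column names, and IDs are placeholders:

    {
        "id": "enrich-owners",
        "type": "enrichment_table",
        "include": "*",
        "inputs": ["scan-sensitive-data"],
        "target": "owner_info",
        "file": {
            "path": "/etc/pipeline/owners.csv",
            "encoding": { "type": "csv", "delimiter": ",", "includes_headers": true },
            "key": [
                { "column": "service", "comparison": "equals", "field": "service" }
            ],
            "schema": [
                { "column": "service", "type": "string" },
                { "column": "owner", "type": "string" }
            ]
        }
    }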

Option 15 (object): The reduce processor aggregates and merges logs based on matching keys and merge strategies.
    group_by [required] ([string]): A list of fields used to group log events for merging.
    id [required] (string): The unique identifier for this processor.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    merge_strategies [required] ([object]): List of merge strategies defining how values from grouped events should be combined.
        path [required] (string): The field path in the log event.
        strategy [required] (enum): The merge strategy to apply. Allowed enum values: discard,retain,sum,max,min,array,concat,concat_newline,concat_raw,shortest_array,longest_array,flat_unique
    type [required] (enum): The processor type. The value should always be reduce. Allowed enum values: reduce. default: reduce
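For instance, grouped events could be collapsed per host and service while summing a counter and concatenating messages; all field names and IDs in this sketch are illustrative:

    {
        "id": "reduce-duplicates",
        "type": "reduce",
        "include": "*",
        "inputs": ["enrich-owners"],
        "group_by": ["host", "service"],
        "merge_strategies": [
            { "path": "message", "strategy": "concat_newline" },
            { "path": "count", "strategy": "sum" }
        ]
    }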

Option 16 (object): The throttle processor limits the number of events that pass through over a given time window.
    group_by ([string]): Optional list of fields used to group events before the threshold is applied.
    id [required] (string): The unique identifier for this processor.
    include [required] (string): A Datadog search query used to determine which logs this processor targets.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
    threshold [required] (int64): The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.
    type [required] (enum): The processor type. The value should always be throttle. Allowed enum values: throttle. default: throttle
    window [required] (double): The time window in seconds over which the threshold applies.
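A sketch allowing at most 1000 events per service in each 60-second window (the numbers and IDs are placeholders):

    {
        "id": "throttle-per-service",
        "type": "throttle",
        "include": "*",
        "inputs": ["reduce-duplicates"],
        "threshold": 1000,
        "window": 60.0,
        "group_by": ["service"]
    }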

sources [required] ([ <oneOf>]): A list of configured data sources for the pipeline.

Option 1 (object): The kafka source ingests data from Apache Kafka topics.
    group_id [required] (string): Consumer group ID used by the Kafka client.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    librdkafka_options ([object]): Optional list of advanced Kafka client configuration options, defined as key-value pairs.
        name [required] (string): The name of the librdkafka configuration option to set.
        value [required] (string): The value assigned to the specified librdkafka configuration option.
    sasl (object): Specifies the SASL mechanism for authenticating with a Kafka cluster.
        mechanism (enum): SASL mechanism used for Kafka authentication. Allowed enum values: PLAIN,SCRAM-SHA-256,SCRAM-SHA-512
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    topics [required] ([string]): A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.
    type [required] (enum): The source type. The value should always be kafka. Allowed enum values: kafka. default: kafka
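A sketch of a kafka source with SASL and mutual TLS. The paths, topic names, and IDs are placeholders; the librdkafka option shown (fetch.message.max.bytes) is a real librdkafka setting, used here purely as an example:

    {
        "id": "kafka-source",
        "type": "kafka",
        "group_id": "observability-pipeline",
        "topics": ["app-logs", "audit-logs"],
        "sasl": { "mechanism": "SCRAM-SHA-256" },
        "tls": {
            "ca_file": "/etc/ssl/ca.pem",
            "crt_file": "/etc/ssl/client.crt",
            "key_file": "/etc/ssl/client.key"
        },
        "librdkafka_options": [
            { "name": "fetch.message.max.bytes", "value": "1048576" }
        ]
    }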

Option 2 (object): The datadog_agent source collects logs from the Datadog Agent.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be datadog_agent. Allowed enum values: datadog_agent. default: datadog_agent

Option 3 (object): The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS is supported for secure transmission.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. Always splunk_tcp. Allowed enum values: splunk_tcp. default: splunk_tcp

Option 4 (object): The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. Always splunk_hec. Allowed enum values: splunk_hec. default: splunk_hec

Option 5 (object): The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS authentication and TLS encryption.
    auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
        assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
        external_id (string): A unique identifier for cross-account role assumption.
        session_name (string): A session identifier used for logging and tracing the assumed role session.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    region [required] (string): AWS region where the S3 bucket resides.
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. Always amazon_s3. Allowed enum values: amazon_s3. default: amazon_s3

Option 6 (object): The fluentd source ingests logs from a Fluentd-compatible service.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be fluentd. Allowed enum values: fluentd. default: fluentd

Option 7 (object): The fluent_bit source ingests logs from Fluent Bit.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be fluent_bit. Allowed enum values: fluent_bit. default: fluent_bit

Option 8 (object): The http_server source collects logs over HTTP POST from external services.
    auth_strategy [required] (enum): HTTP authentication method. Allowed enum values: none,plain
    decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
    id [required] (string): Unique ID for the HTTP server source.
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be http_server. Allowed enum values: http_server. default: http_server

Option 9 (object): The sumo_logic source receives logs from Sumo Logic collectors.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    type [required] (enum): The source type. The value should always be sumo_logic. Allowed enum values: sumo_logic. default: sumo_logic

Option 10 (object): The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp,udp
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be rsyslog. Allowed enum values: rsyslog. default: rsyslog

Option 11 (object): The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp,udp
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be syslog_ng. Allowed enum values: syslog_ng. default: syslog_ng

Option 12 (object): The amazon_data_firehose source ingests logs from AWS Data Firehose.
    auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
        assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
        external_id (string): A unique identifier for cross-account role assumption.
        session_name (string): A session identifier used for logging and tracing the assumed role session.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be amazon_data_firehose. Allowed enum values: amazon_data_firehose. default: amazon_data_firehose

Option 13 (object): The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.
    auth [required] (object): GCP credentials used to authenticate with Google Cloud.
        credentials_file [required] (string): Path to the GCP service account key file.
    decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    project [required] (string): The GCP project ID that owns the Pub/Sub subscription.
    subscription [required] (string): The Pub/Sub subscription name from which messages are consumed.
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be google_pubsub. Allowed enum values: google_pubsub. default: google_pubsub
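A sketch of a google_pubsub source; the project ID, subscription name, and key path are placeholders:

    {
        "id": "pubsub-source",
        "type": "google_pubsub",
        "project": "example-gcp-project",
        "subscription": "pipeline-logs-sub",
        "decoding": "json",
        "auth": { "credentials_file": "/var/secrets/gcp-key.json" }
    }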

Option 14 (object): The http_client source scrapes logs from HTTP endpoints at regular intervals.
    auth_strategy (enum): Optional authentication strategy for HTTP requests. Allowed enum values: basic,bearer
    decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    scrape_interval_secs (int64): The interval (in seconds) between HTTP scrape requests.
    scrape_timeout_secs (int64): The timeout (in seconds) for each scrape request.
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be http_client. Allowed enum values: http_client. default: http_client

Option 15 (object): The logstash source ingests logs from a Logstash forwarder.
    id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The source type. The value should always be logstash. Allowed enum values: logstash. default: logstash

name [required] (string): Name of the pipeline.
id [required] (string): Unique identifier for the pipeline.
type [required] (string): The resource type identifier. For pipeline resources, this should always be set to pipelines. default: pipelines
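Reading the schema bottom-up, a complete request body composes these pieces roughly as follows. This is a hedged sketch: all names and IDs are invented, and the top-level id is omitted on the assumption that it is assigned by the service and returned in responses:

    {
        "data": {
            "type": "pipelines",
            "attributes": {
                "name": "example-pipeline",
                "config": {
                    "sources": [
                        { "id": "agent-source", "type": "datadog_agent" }
                    ],
                    "processors": [
                        { "id": "filter-errors", "type": "filter", "include": "status:error", "inputs": ["agent-source"] }
                    ],
                    "destinations": [
                        { "id": "dd-logs", "type": "datadog_logs", "inputs": ["filter-errors"] }
                    ]
                }
            }
        }
    }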
" + "html": "
data [required] (object): Contains the pipeline’s ID, type, and configuration attributes.
attributes [required] (object): Defines the pipeline’s name and its components (sources, processors, and destinations).
config [required] (object): Specifies the pipeline's configuration, including its sources, processors, and destinations.
destinations [required] ([ <oneOf>]): A list of destination components where processed logs are sent.

Option 1 (object): The datadog_logs destination forwards logs to Datadog Log Management.
    id [required] (string): The unique identifier for this component.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    type [required] (enum): The destination type. The value should always be datadog_logs. Allowed enum values: datadog_logs. default: datadog_logs

Option 2 (object): The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.
    auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
        assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
        external_id (string): A unique identifier for cross-account role assumption.
        session_name (string): A session identifier used for logging and tracing the assumed role session.
    bucket [required] (string): S3 bucket name.
    id [required] (string): Unique identifier for the destination component.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    key_prefix (string): Optional prefix for object keys.
    region [required] (string): AWS region of the S3 bucket.
    storage_class [required] (enum): S3 storage class. Allowed enum values: STANDARD,REDUCED_REDUNDANCY,INTELLIGENT_TIERING,STANDARD_IA,EXPRESS_ONEZONE,ONEZONE_IA,GLACIER,GLACIER_IR,DEEP_ARCHIVE
    tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
        ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
        crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
        key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    type [required] (enum): The destination type. Always amazon_s3. Allowed enum values: amazon_s3. default: amazon_s3

Option 3 (object): The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields.
    acl [required] (enum): Access control list setting for objects written to the bucket. Allowed enum values: private,project-private,public-read,authenticated-read,bucket-owner-read,bucket-owner-full-control
    auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
        credentials_file [required] (string): Path to the GCP service account key file.
    bucket [required] (string): Name of the GCS bucket.
    id [required] (string): Unique identifier for the destination component.
    inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
    key_prefix (string): Optional prefix for object keys within the GCS bucket.
    metadata ([object]): Custom metadata to attach to each object uploaded to the GCS bucket.
        name [required] (string): The metadata key.
        value [required] (string): The metadata value.
    storage_class [required] (enum): Storage class used for objects stored in GCS. Allowed enum values: STANDARD,NEARLINE,COLDLINE,ARCHIVE
    type [required] (enum): The destination type. Always google_cloud_storage. Allowed enum values: google_cloud_storage. default: google_cloud_storage

Option 4

\n
\n

object

\n

The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).

\n
\n
\n
\n
\n
\n

auto_extract_timestamp

\n
\n

boolean

\n

If true, Splunk tries to extract timestamps from incoming log events.\nIf false, Splunk assigns the time the event was received.

\n
\n \n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

Encoding format for log events. \nAllowed enum values: json,raw_message

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

index

\n
\n

string

\n

Optional name of the Splunk index where logs are written.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

sourcetype

\n
\n

string

\n

The Splunk sourcetype to assign to log events.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. Always splunk_hec. \nAllowed enum values: splunk_hec

default: splunk_hec

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
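As a hedged sketch of the splunk_hec destination described above, expressed as a Python dict mirroring the JSON body; the index and sourcetype values are hypothetical placeholders.

# Hypothetical splunk_hec destination; index and sourcetype are placeholders.
splunk_hec_destination = {
    "id": "splunk-out",                # unique component ID (placeholder)
    "type": "splunk_hec",              # fixed enum value for this destination
    "inputs": ["upstream-processor"],  # upstream component IDs (placeholder)
    "encoding": "json",                # or "raw_message"
    "index": "main",                   # optional Splunk index (placeholder)
    "sourcetype": "datadog_logs",      # optional sourcetype (placeholder)
    "auto_extract_timestamp": True,    # let Splunk extract event timestamps
}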

Option 5

\n
\n

object

\n

The sumo_logic destination forwards logs to Sumo Logic.

\n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

The output encoding format. \nAllowed enum values: json,raw_message,logfmt

\n
\n \n
\n
\n
\n
\n
\n

header_custom_fields

\n
\n

[object]

\n

A list of custom headers to include in the request to Sumo Logic.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The header field name.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The header field value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

header_host_name

\n
\n

string

\n

Optional override for the host name header.

\n
\n \n
\n
\n
\n
\n
\n

header_source_category

\n
\n

string

\n

Optional override for the source category header.

\n
\n \n
\n
\n
\n
\n
\n

header_source_name

\n
\n

string

\n

Optional override for the source name header.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be sumo_logic. \nAllowed enum values: sumo_logic

default: sumo_logic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6

\n
\n

object

\n

The elasticsearch destination writes logs to an Elasticsearch cluster.

\n
\n
\n
\n
\n
\n

api_version

\n
\n

enum

\n

The Elasticsearch API version to use. Set to auto to auto-detect. \nAllowed enum values: auto,v6,v7,v8

\n
\n \n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to in Elasticsearch.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be elasticsearch. \nAllowed enum values: elasticsearch

default: elasticsearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 7

\n
\n

object

\n

The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

keepalive

\n
\n

int64

\n

Optional socket keepalive duration in milliseconds.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be rsyslog. \nAllowed enum values: rsyslog

default: rsyslog

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 8

\n
\n

object

\n

The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

keepalive

\n
\n

int64

\n

Optional socket keepalive duration in milliseconds.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be syslog_ng. \nAllowed enum values: syslog_ng

default: syslog_ng

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 9

\n
\n

object

\n

The azure_storage destination forwards logs to an Azure Blob Storage container.

\n
\n
\n
\n
\n
\n

blob_prefix

\n
\n

string

\n

Optional prefix for blobs written to the container.

\n
\n \n
\n
\n
\n
\n
\n

container_name [required]

\n
\n

string

\n

The name of the Azure Blob Storage container to store logs in.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be azure_storage. \nAllowed enum values: azure_storage

default: azure_storage

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 10

\n
\n

object

\n

The microsoft_sentinel destination forwards logs to Microsoft Sentinel.

\n
\n
\n
\n
\n
\n

client_id [required]

\n
\n

string

\n

Azure AD client ID used for authentication.

\n
\n \n
\n
\n
\n
\n
\n

dcr_immutable_id [required]

\n
\n

string

\n

The immutable ID of the Data Collection Rule (DCR).

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

table [required]

\n
\n

string

\n

The name of the Log Analytics table where logs are sent.

\n
\n \n
\n
\n
\n
\n
\n

tenant_id [required]

\n
\n

string

\n

Azure AD tenant ID.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be microsoft_sentinel. \nAllowed enum values: microsoft_sentinel

default: microsoft_sentinel

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 11

\n
\n

object

\n

The google_chronicle destination sends logs to Google Chronicle.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

GCP credentials used to authenticate with Google Cloud services.

\n
\n
\n
\n
\n
\n

credentials_file [required]

\n
\n

string

\n

Path to the GCP service account key file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

customer_id [required]

\n
\n

string

\n

The Google Chronicle customer ID.

\n
\n \n
\n
\n
\n
\n
\n

encoding

\n
\n

enum

\n

The encoding format for the logs sent to Chronicle. \nAllowed enum values: json,raw_message

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

log_type

\n
\n

string

\n

The log type metadata associated with the Chronicle destination.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be google_chronicle. \nAllowed enum values: google_chronicle

default: google_chronicle

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 12

\n
\n

object

\n

The new_relic destination sends logs to the New Relic platform.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

enum

\n

The New Relic region. \nAllowed enum values: us,eu

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be new_relic. \nAllowed enum values: new_relic

default: new_relic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 13

\n
\n

object

\n

The sentinel_one destination sends logs to SentinelOne.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

enum

\n

The SentinelOne region to send logs to. \nAllowed enum values: us,eu,ca,data_set_us

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be sentinel_one. \nAllowed enum values: sentinel_one

default: sentinel_one

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 14

\n
\n

object

\n

The opensearch destination writes logs to an OpenSearch cluster.

\n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be opensearch. \nAllowed enum values: opensearch

default: opensearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 15

\n
\n

object

\n

The amazon_opensearch destination writes logs to Amazon OpenSearch.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

Authentication settings for the Amazon OpenSearch destination.\nThe strategy field determines whether basic or AWS-based authentication is used.

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The ARN of the role to assume (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

aws_region

\n
\n

string

\n

The AWS region.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

External ID for the assumed role (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

Session name for the assumed role (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

The authentication strategy to use. \nAllowed enum values: basic,aws

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be amazon_opensearch. \nAllowed enum values: amazon_opensearch

default: amazon_opensearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

processors

\n
\n

[ <oneOf>]

\n

A list of processors that transform or enrich log data.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be filter. \nAllowed enum values: filter

default: filter

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.

\n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

The name of the log field that contains a JSON string.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be parse_json. \nAllowed enum values: parse_json

default: parse_json

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.

\n
\n
\n
\n
\n
\n

drop_events [required]

\n
\n

boolean

\n

If set to true, logs that match the quota filter and are sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

ignore_when_missing_partitions

\n
\n

boolean

\n

If true, the processor skips quota checks when partition fields are missing from the logs.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

limit [required]

\n
\n

object

\n

The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.

\n
\n
\n
\n
\n
\n

enforce [required]

\n
\n

enum

\n

Unit for quota enforcement in bytes for data size or events for count. \nAllowed enum values: bytes,events

\n
\n \n
\n
\n
\n
\n
\n

limit [required]

\n
\n

int64

\n

The limit for quota enforcement.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

Name of the quota.

\n
\n \n
\n
\n
\n
\n
\n

overflow_action

\n
\n

enum

\n

The action to take when the quota is exceeded. Options:

  • drop: Drop the event.
  • no_action: Let the event pass through.
  • overflow_routing: Route to an overflow destination.

Allowed enum values: drop,no_action,overflow_routing
\n \n
\n
\n
\n
\n
\n

overrides

\n
\n

[object]

\n

A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[object]

\n

A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The field name.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The field value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

limit [required]

\n
\n

object

\n

The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.

\n
\n
\n
\n
\n
\n

enforce [required]

\n
\n

enum

\n

Unit for quota enforcement in bytes for data size or events for count. \nAllowed enum values: bytes,events

\n
\n \n
\n
\n
\n
\n
\n

limit [required]

\n
\n

int64

\n

The limit for quota enforcement.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

partition_fields

\n
\n

[string]

\n

A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be quota. \nAllowed enum values: quota

default: quota

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
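The interplay of limit, enforce, and drop_events is easier to see in a concrete object. Below is a minimal sketch of a quota processor as a Python dict mirroring the JSON body; the quota name, query, and limit value are hypothetical placeholders.

# Hypothetical quota processor; names, queries, and limits are placeholders.
quota_processor = {
    "id": "daily-quota",       # unique component ID (placeholder)
    "type": "quota",           # fixed enum value for this processor
    "include": "service:web",  # Datadog search query (placeholder)
    "inputs": ["source-1"],    # upstream component IDs (placeholder)
    "name": "web-daily-quota", # quota name (placeholder)
    "drop_events": True,       # drop matching logs once the quota is met
    "limit": {
        "enforce": "bytes",        # enforce by data size; "events" enforces by count
        "limit": 10_000_000_000,   # limit in the chosen unit (placeholder)
    },
}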

Option 4

\n
\n

object

\n

The add_fields processor adds static key-value fields to logs.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[object]

\n

A list of static fields (key-value pairs) that is added to each log event processed by this component.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The field name.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The field value.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be add_fields. \nAllowed enum values: add_fields

default: add_fields

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
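A minimal sketch of an add_fields processor, as a Python dict mirroring the JSON body; the field name and value are hypothetical placeholders.

# Hypothetical add_fields processor attaching one static key-value pair.
add_fields_processor = {
    "id": "tag-team",                  # unique component ID (placeholder)
    "type": "add_fields",              # fixed enum value for this processor
    "include": "*",                    # match all logs (placeholder query)
    "inputs": ["upstream-component"],  # upstream component IDs (placeholder)
    "fields": [
        {"name": "team", "value": "platform"},  # static field (placeholder)
    ],
}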

Option 5

\n
\n

object

\n

The remove_fields processor deletes specified fields from logs.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

A list of field names to be removed from each log event.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be remove_fields. \nAllowed enum values: remove_fields

default: remove_fields

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6

\n
\n

object

\n

The rename_fields processor changes field names.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[object]

\n

A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.

\n
\n
\n
\n
\n
\n

destination [required]

\n
\n

string

\n

The field name to assign the renamed value to.

\n
\n \n
\n
\n
\n
\n
\n

preserve_source [required]

\n
\n

boolean

\n

Indicates whether the original field received from the source should be kept (true) or removed (false) after renaming.

\n
\n \n
\n
\n
\n
\n
\n

source [required]

\n
\n

string

\n

The original field name in the log event that should be renamed.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be rename_fields. \nAllowed enum values: rename_fields

default: rename_fields

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 7

\n
\n

object

\n

The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog.\nMetrics can be counters, gauges, or distributions and optionally grouped by log fields.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

metrics [required]

\n
\n

[object]

\n

Configuration for generating individual metrics.

\n
\n
\n
\n
\n
\n

group_by

\n
\n

[string]

\n

Optional fields used to group the metric series.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

Datadog filter query to match logs for metric generation.

\n
\n \n
\n
\n
\n
\n
\n

metric_type [required]

\n
\n

enum

\n

Type of metric to create. \nAllowed enum values: count,gauge,distribution

\n
\n \n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

Name of the custom metric to be created.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

 <oneOf>

\n

Specifies how the value of the generated metric is computed.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Strategy that increments a generated metric by one for each matching event.

\n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

Increments the metric by 1 for each matching event. \nAllowed enum values: increment_by_one

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Strategy that increments a generated metric based on the value of a log field.

\n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

Name of the log field containing the numeric value to increment the metric by.

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

Uses a numeric field in the log event as the metric increment. \nAllowed enum values: increment_by_field

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. Always generate_datadog_metrics. \nAllowed enum values: generate_datadog_metrics

default: generate_datadog_metrics

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
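Tying the metrics array and its value strategies together, here is a minimal sketch of a generate_datadog_metrics processor as a Python dict mirroring the JSON body; the metric name and queries are hypothetical placeholders.

# Hypothetical generate_datadog_metrics processor counting error logs per service.
metrics_processor = {
    "id": "error-counter",             # unique component ID (placeholder)
    "type": "generate_datadog_metrics",
    "include": "*",                    # match all logs (placeholder query)
    "inputs": ["upstream-component"],  # upstream component IDs (placeholder)
    "metrics": [
        {
            "name": "logs.errors.count",  # custom metric name (placeholder)
            "metric_type": "count",       # "count", "gauge", or "distribution"
            "include": "status:error",    # filter query (placeholder)
            "group_by": ["service"],      # optional grouping fields
            "value": {"strategy": "increment_by_one"},  # +1 per matching event
        },
    ],
}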

Option 8

\n
\n

object

\n

The sample processor allows probabilistic sampling of logs at a fixed rate.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

percentage

\n
\n

double

\n

The percentage of logs to sample.

\n
\n \n
\n
\n
\n
\n
\n

rate

\n
\n

int64

\n

Number of events to sample (1 in N).

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be sample. \nAllowed enum values: sample

default: sample

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
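The schema exposes both percentage and rate; the sketch below uses rate, with placeholder values, written as a Python dict mirroring the JSON body.

# Hypothetical sample processor keeping roughly 1 in 10 matching events.
sample_processor = {
    "id": "sampler",                   # unique component ID (placeholder)
    "type": "sample",                  # fixed enum value for this processor
    "include": "status:info",          # placeholder query
    "inputs": ["upstream-component"],  # upstream component IDs (placeholder)
    "rate": 10,                        # keep 1 in N events; "percentage" is the alternative field
}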

Option 9

\n
\n

object

\n

The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.

\n
\n
\n
\n
\n
\n

disable_library_rules

\n
\n

boolean

\n

If set to true, disables the default Grok rules provided by Datadog.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

A unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

rules [required]

\n
\n

[object]

\n

The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.

\n
\n
\n
\n
\n
\n

match_rules [required]

\n
\n

[object]

\n

A list of Grok parsing rules that define how to extract fields from the source field.\nEach rule must contain a name and a valid Grok pattern.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the rule.

\n
\n \n
\n
\n
\n
\n
\n

rule [required]

\n
\n

string

\n

The definition of the Grok rule.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

source [required]

\n
\n

string

\n

The name of the field in the log event to apply the Grok rules to.

\n
\n \n
\n
\n
\n
\n
\n

support_rules [required]

\n
\n

[object]

\n

A list of Grok helper rules that can be referenced by the parsing rules.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the Grok helper rule.

\n
\n \n
\n
\n
\n
\n
\n

rule [required]

\n
\n

string

\n

The definition of the Grok helper rule.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be parse_grok. \nAllowed enum values: parse_grok

default: parse_grok

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
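A minimal sketch of a parse_grok processor as a Python dict mirroring the JSON body; the Grok rule below is illustrative only and all names are placeholders.

# Hypothetical parse_grok processor with one match rule and no helper rules.
parse_grok_processor = {
    "id": "grok-parser",               # unique component ID (placeholder)
    "type": "parse_grok",              # fixed enum value for this processor
    "include": "*",                    # placeholder query
    "inputs": ["upstream-component"],  # upstream component IDs (placeholder)
    "rules": [
        {
            "source": "message",       # log field the Grok rules apply to
            "match_rules": [
                {"name": "parse_status", "rule": "%{number:http.status_code}"},  # illustrative pattern
            ],
            "support_rules": [],       # helper rules referenced by match rules (none here)
        },
    ],
}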

Option 10

\n
\n

object

\n

The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

rules [required]

\n
\n

[object]

\n

A list of rules for identifying and acting on sensitive data patterns.

\n
\n
\n
\n
\n
\n

keyword_options

\n
\n

object

\n

Configuration for keywords used to reinforce sensitive data pattern detection.

\n
\n
\n
\n
\n
\n

keywords [required]

\n
\n

[string]

\n

A list of keywords to match near the sensitive pattern.

\n
\n \n
\n
\n
\n
\n
\n

proximity [required]

\n
\n

int64

\n

Maximum number of tokens between a keyword and a sensitive value match.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

A name identifying the rule.

\n
\n \n
\n
\n
\n
\n
\n

on_match [required]

\n
\n

 <oneOf>

\n

Defines what action to take when sensitive data is matched.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Configuration for completely redacting matched sensitive data.

\n
\n
\n
\n
\n
\n

action [required]

\n
\n

enum

\n

Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. \nAllowed enum values: redact

\n
\n \n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Configuration for fully redacting sensitive data.

\n
\n
\n
\n
\n
\n

replace [required]

\n
\n

string

\n

The replacement string used in place of the matched sensitive data.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Configuration for hashing matched sensitive values.

\n
\n
\n
\n
\n
\n

action [required]

\n
\n

enum

\n

Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. \nAllowed enum values: hash

\n
\n \n
\n
\n
\n
\n
\n

options

\n
\n

object

\n

Optional settings for the hash action.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

Configuration for partially redacting matched sensitive data.

\n
\n
\n
\n
\n
\n

action [required]

\n
\n

enum

\n

Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). \nAllowed enum values: partial_redact

\n
\n \n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Controls how partial redaction is applied, including character count and direction.

\n
\n
\n
\n
\n
\n

characters [required]

\n
\n

int64

\n

The number of characters the partial redaction applies to.

\n
\n \n
\n
\n
\n
\n
\n

direction [required]

\n
\n

enum

\n

Indicates whether to redact characters from the first or last part of the matched value. \nAllowed enum values: first,last

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n

pattern [required]

\n
\n

 <oneOf>

\n

Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Defines a custom regex-based pattern for identifying sensitive data in logs.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Options for defining a custom regex pattern.

\n
\n
\n
\n
\n
\n

rule [required]

\n
\n

string

\n

A regular expression used to detect sensitive values. Must be a valid regex.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Indicates a custom regular expression is used for matching. \nAllowed enum values: custom

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Options for selecting a predefined library pattern and enabling keyword support.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Identifier for a predefined pattern from the sensitive data scanner pattern library.

\n
\n \n
\n
\n
\n
\n
\n

use_recommended_keywords

\n
\n

boolean

\n

Whether to augment the pattern with recommended keywords (optional).

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Indicates that a predefined library pattern is used. \nAllowed enum values: library

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

scope [required]

\n
\n

 <oneOf>

\n

Determines which parts of the log the pattern-matching rule should be applied to.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

Includes only specific fields for sensitive data scanning.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Fields to which the scope rule applies.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

The list of field paths the rule is applied to.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

target [required]

\n
\n

enum

\n

Applies the rule only to included fields. \nAllowed enum values: include

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 2

\n
\n

object

\n

Excludes specific fields from sensitive data scanning.

\n
\n
\n
\n
\n
\n

options [required]

\n
\n

object

\n

Fields to which the scope rule applies.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

The list of field paths excluded from scanning.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

target [required]

\n
\n

enum

\n

Excludes specific fields from processing. \nAllowed enum values: exclude

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

Applies scanning across all available fields.

\n
\n
\n
\n
\n
\n

target [required]

\n
\n

enum

\n

Applies the rule to all fields. \nAllowed enum values: all

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

tags [required]

\n
\n

[string]

\n

Tags assigned to this rule for filtering and classification.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be sensitive_data_scanner. \nAllowed enum values: sensitive_data_scanner

default: sensitive_data_scanner

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
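Pulling together pattern, scope, and on_match, here is a minimal sketch of a sensitive_data_scanner processor with one custom-regex rule and full redaction, written as a Python dict mirroring the JSON body; every name and pattern is a hypothetical placeholder.

# Hypothetical sensitive_data_scanner processor; regex and names are placeholders.
sds_processor = {
    "id": "scrub-tokens",              # unique component ID (placeholder)
    "type": "sensitive_data_scanner",
    "include": "*",                    # placeholder query
    "inputs": ["upstream-component"],  # upstream component IDs (placeholder)
    "rules": [
        {
            "name": "redact-api-keys",      # rule name (placeholder)
            "tags": ["sensitive:api-key"],  # classification tags (placeholder)
            "pattern": {
                "type": "custom",           # custom regex rather than a library pattern
                "options": {"rule": "sk-[A-Za-z0-9]{32}"},  # placeholder regex
            },
            "scope": {"target": "all"},     # scan all fields
            "on_match": {
                "action": "redact",         # fully replace the match
                "options": {"replace": "[REDACTED]"},
            },
        },
    ],
}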

Option 11

\n
\n

object

\n

The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

mappings [required]

\n
\n

[object]

\n

A list of mapping rules to convert events to the OCSF format.

\n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to select the logs that this mapping should apply to.

\n
\n \n
\n
\n
\n
\n
\n

mapping [required]

\n
\n

 <oneOf>

\n

Defines a single mapping rule for transforming logs into the OCSF schema.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

enum

\n

Predefined library mappings for common log formats. \nAllowed enum values: CloudTrail Account Change,GCP Cloud Audit CreateBucket,GCP Cloud Audit CreateSink,GCP Cloud Audit SetIamPolicy,GCP Cloud Audit UpdateSink,Github Audit Log API Activity,Google Workspace Admin Audit addPrivilege,Microsoft 365 Defender Incident,Microsoft 365 Defender UserLoggedIn,Okta System Log Authentication,Palo Alto Networks Firewall Traffic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be ocsf_mapper. \nAllowed enum values: ocsf_mapper

default: ocsf_mapper

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 12

\n
\n

object

\n

The add_env_vars processor adds environment variable values to log events.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this processor in the pipeline.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be add_env_vars. \nAllowed enum values: add_env_vars

default: add_env_vars

\n
\n \n
\n
\n
\n
\n
\n

variables [required]

\n
\n

[object]

\n

A list of environment variable mappings to apply to log fields.

\n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

The target field in the log event.

\n
\n \n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the environment variable to read.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

Option 13

\n
\n

object

\n

The dedupe processor removes duplicate fields in log events.

\n
\n
\n
\n
\n
\n

fields [required]

\n
\n

[string]

\n

A list of log field paths to check for duplicates.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

The deduplication mode to apply to the fields. \nAllowed enum values: match,ignore

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be dedupe. \nAllowed enum values: dedupe

default: dedupe

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 14

\n
\n

object

\n

The enrichment_table processor enriches logs using a static CSV file or GeoIP database.

\n
\n
\n
\n
\n
\n

file

\n
\n

object

\n

Defines a static enrichment table loaded from a CSV file.

\n
\n
\n
\n
\n
\n

encoding [required]

\n
\n

object

\n

File encoding format.

\n
\n
\n
\n
\n
\n

delimiter [required]

\n
\n

string

\n

The delimiter used to separate values in the file (for example, a comma).

\n
\n \n
\n
\n
\n
\n
\n

includes_headers [required]

\n
\n

boolean

\n

Whether the file includes a header row.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Specifies the encoding format (e.g., CSV) used for enrichment tables. \nAllowed enum values: csv

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

key [required]

\n
\n

[object]

\n

Key fields used to look up enrichment values.

\n
\n
\n
\n
\n
\n

column [required]

\n
\n

string

\n

The name of the enrichment table column used for the lookup.

\n
\n \n
\n
\n
\n
\n
\n

comparison [required]

\n
\n

enum

\n

Defines how to compare key fields for enrichment table lookups. \nAllowed enum values: equals

\n
\n \n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

The name of the log field whose value is compared against the column.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

path [required]

\n
\n

string

\n

Path to the CSV file.

\n
\n \n
\n
\n
\n
\n
\n

schema [required]

\n
\n

[object]

\n

Schema defining column names and their types.

\n
\n
\n
\n
\n
\n

column [required]

\n
\n

string

\n

The name of a column in the enrichment table file.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

Declares allowed data types for enrichment table columns. \nAllowed enum values: string,boolean,integer,float,date,timestamp

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n

geoip

\n
\n

object

\n

Uses a GeoIP database to enrich logs based on an IP field.

\n
\n
\n
\n
\n
\n

key_field [required]

\n
\n

string

\n

Path to the IP field in the log.

\n
\n \n
\n
\n
\n
\n
\n

locale [required]

\n
\n

string

\n

Locale used to resolve geographical names.

\n
\n \n
\n
\n
\n
\n
\n

path [required]

\n
\n

string

\n

Path to the GeoIP database file.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

target [required]

\n
\n

string

\n

Path where enrichment results should be stored in the log.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be enrichment_table. \nAllowed enum values: enrichment_table

default: enrichment_table

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
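A minimal sketch of a CSV-backed enrichment_table processor as a Python dict mirroring the JSON body; the file path, columns, and fields are hypothetical placeholders.

# Hypothetical enrichment_table processor joining logs against a CSV file.
enrichment_processor = {
    "id": "enrich-owner",              # unique component ID (placeholder)
    "type": "enrichment_table",        # fixed enum value for this processor
    "include": "*",                    # placeholder query
    "inputs": ["upstream-component"],  # upstream component IDs (placeholder)
    "target": "owner_info",            # log path where enrichment results are stored
    "file": {
        "path": "/etc/enrichment/owners.csv",  # placeholder CSV path
        "encoding": {"type": "csv", "delimiter": ",", "includes_headers": True},
        "key": [
            # look up rows whose "service" column equals the log's "service" field
            {"column": "service", "field": "service", "comparison": "equals"},
        ],
        "schema": [
            {"column": "service", "type": "string"},
            {"column": "owner", "type": "string"},
        ],
    },
}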

Option 15

\n
\n

object

\n

The reduce processor aggregates and merges logs based on matching keys and merge strategies.

\n
\n
\n
\n
\n
\n

group_by [required]

\n
\n

[string]

\n

A list of fields used to group log events for merging.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

merge_strategies [required]

\n
\n

[object]

\n

List of merge strategies defining how values from grouped events should be combined.

\n
\n
\n
\n
\n
\n

path [required]

\n
\n

string

\n

The field path in the log event.

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

The merge strategy to apply. \nAllowed enum values: discard,retain,sum,max,min,array,concat,concat_newline,concat_raw,shortest_array,longest_array,flat_unique

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be reduce. \nAllowed enum values: reduce

default: reduce

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
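A minimal sketch of a reduce processor as a Python dict mirroring the JSON body; the grouping key and merge paths are hypothetical placeholders.

# Hypothetical reduce processor merging events that share a request ID.
reduce_processor = {
    "id": "merge-events",              # unique component ID (placeholder)
    "type": "reduce",                  # fixed enum value for this processor
    "include": "*",                    # placeholder query
    "inputs": ["upstream-component"],  # upstream component IDs (placeholder)
    "group_by": ["request_id"],        # merge events with the same request_id (placeholder)
    "merge_strategies": [
        {"path": "message", "strategy": "concat_newline"},  # join messages with newlines
        {"path": "bytes_sent", "strategy": "sum"},          # sum a numeric field
    ],
}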

Option 16

\n
\n

object

\n

The throttle processor limits the number of events that pass through over a given time window.

\n
\n
\n
\n
\n
\n

group_by

\n
\n

[string]

\n

Optional list of fields used to group events before the threshold is applied.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

threshold [required]

\n
\n

int64

\n

The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be throttle. \nAllowed enum values: throttle

default: throttle

\n
\n \n
\n
\n
\n
\n
\n

window [required]

\n
\n

double

\n

The time window in seconds over which the threshold applies.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
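Threshold and window act together: at most threshold events pass through per window seconds. A minimal sketch with placeholder values, written as a Python dict mirroring the JSON body:

# Hypothetical throttle processor: at most 1000 events per 60-second window.
throttle_processor = {
    "id": "rate-limit",                # unique component ID (placeholder)
    "type": "throttle",                # fixed enum value for this processor
    "include": "*",                    # placeholder query
    "inputs": ["upstream-component"],  # upstream component IDs (placeholder)
    "threshold": 1000,                 # events allowed per window
    "window": 60.0,                    # window length in seconds
    "group_by": ["service"],           # optional grouping fields (placeholder)
}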

Option 17

\n
\n

object

\n

The datadog_tags processor includes or excludes specific Datadog tags in your logs.

\n
\n
\n
\n
\n
\n

action [required]

\n
\n

enum

\n

The action to take on tags with matching keys. \nAllowed enum values: include,exclude

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

keys [required]

\n
\n

[string]

\n

A list of tag keys.

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

The processing mode. \nAllowed enum values: filter

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be datadog_tags. \nAllowed enum values: datadog_tags

default: datadog_tags

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
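A minimal sketch of a datadog_tags processor that keeps only two tag keys, written as a Python dict mirroring the JSON body; the keys are hypothetical placeholders.

# Hypothetical datadog_tags processor keeping only the env and service tags.
tags_processor = {
    "id": "tag-filter",                # unique component ID (placeholder)
    "type": "datadog_tags",            # fixed enum value for this processor
    "include": "*",                    # placeholder query
    "inputs": ["upstream-component"],  # upstream component IDs (placeholder)
    "action": "include",               # keep tags with matching keys ("exclude" drops them)
    "mode": "filter",                  # the only allowed processing mode
    "keys": ["env", "service"],        # tag keys to act on (placeholder)
}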

sources [required]

\n
\n

[ <oneOf>]

\n

A list of configured data sources for the pipeline.

\n
\n
\n
\n
\n
\n

Option 1

\n
\n

object

\n

The kafka source ingests data from Apache Kafka topics.

\n
\n
\n
\n
\n
\n

group_id [required]

\n
\n

string

\n

Consumer group ID used by the Kafka client.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

librdkafka_options

\n
\n

[object]

\n

Optional list of advanced Kafka client configuration options, defined as key-value pairs.

\n
\n
\n
\n
\n
\n

name [required]

\n
\n

string

\n

The name of the librdkafka configuration option to set.

\n
\n \n
\n
\n
\n
\n
\n

value [required]

\n
\n

string

\n

The value assigned to the specified librdkafka configuration option.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

sasl

\n
\n

object

\n

Specifies the SASL mechanism for authenticating with a Kafka cluster.

\n
\n
\n
\n
\n
\n

mechanism

\n
\n

enum

\n

SASL mechanism used for Kafka authentication. \nAllowed enum values: PLAIN,SCRAM-SHA-256,SCRAM-SHA-512

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

topics [required]

\n
\n

[string]

\n

A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be kafka. \nAllowed enum values: kafka

default: kafka

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
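A minimal sketch of a kafka source as a Python dict mirroring the JSON body; the group ID, topic, and librdkafka option are hypothetical placeholders.

# Hypothetical kafka source; group ID, topics, and options are placeholders.
kafka_source = {
    "id": "kafka-in",                     # unique component ID (placeholder)
    "type": "kafka",                      # fixed enum value for this source
    "group_id": "dd-pipeline-consumers",  # Kafka consumer group ID (placeholder)
    "topics": ["app-logs"],               # topics to subscribe to (placeholder)
    "sasl": {"mechanism": "SCRAM-SHA-256"},  # optional SASL authentication
    "librdkafka_options": [
        {"name": "fetch.message.max.bytes", "value": "1048576"},  # placeholder tuning option
    ],
}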

Option 2

\n
\n

object

\n

The datadog_agent source collects logs from the Datadog Agent.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be datadog_agent. \nAllowed enum values: datadog_agent

default: datadog_agent

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 3

\n
\n

object

\n

The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP.\nTLS is supported for secure transmission.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always splunk_tcp. \nAllowed enum values: splunk_tcp

default: splunk_tcp

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 4

\n
\n

object

\n

The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always splunk_hec. \nAllowed enum values: splunk_hec

default: splunk_hec

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 5

\n
\n

object

\n

The amazon_s3 source ingests logs from an Amazon S3 bucket.\nIt supports AWS authentication and TLS encryption.

\n
\n
\n
\n
\n
\n

auth

\n
\n

object

\n

AWS authentication credentials used for accessing AWS services such as S3.\nIf omitted, the system’s default credentials are used (for example, the IAM role and environment variables).

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The Amazon Resource Name (ARN) of the role to assume.

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

A unique identifier for cross-account role assumption.

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

A session identifier used for logging and tracing the assumed role session.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

region [required]

\n
\n

string

\n

AWS region where the S3 bucket resides.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. Always amazon_s3. \nAllowed enum values: amazon_s3

default: amazon_s3

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 6

\n
\n

object

\n

The fluentd source ingests logs from a Fluentd-compatible service.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be fluentd. \nAllowed enum values: fluentd

default: fluentd

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 7

\n
\n

object

\n

The fluent_bit source ingests logs from Fluent Bit.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be fluent_bit. \nAllowed enum values: fluent_bit

default: fluent_bit

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 8

\n
\n

object

\n

The http_server source collects logs over HTTP POST from external services.

\n
\n
\n
\n
\n
\n

auth_strategy [required]

\n
\n

enum

\n

HTTP authentication method. \nAllowed enum values: none,plain

\n
\n \n
\n
\n
\n
\n
\n

decoding [required]

\n
\n

enum

\n

The decoding format used to interpret incoming logs. \nAllowed enum values: bytes,gelf,json,syslog

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

Unique ID for the HTTP server source.

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be http_server. \nAllowed enum values: http_server

default: http_server

\n
\n \n
\n
\n
\n
\n
\n
\n
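A minimal sketch of an http_server source as a Python dict mirroring the JSON body; the ID is a hypothetical placeholder.

# Hypothetical http_server source accepting JSON payloads without authentication.
http_server_source = {
    "id": "http-in",            # unique component ID (placeholder)
    "type": "http_server",      # fixed enum value for this source
    "auth_strategy": "none",    # or "plain" for HTTP authentication
    "decoding": "json",         # decode request bodies as JSON
}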
\n

Option 9

\n
\n

object

\n

The sumo_logic source receives logs from Sumo Logic collectors.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be sumo_logic. \nAllowed enum values: sumo_logic

default: sumo_logic

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 10

\n
\n

object

\n

The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

Protocol used by the syslog source to receive messages. \nAllowed enum values: tcp,udp

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be rsyslog. \nAllowed enum values: rsyslog

default: rsyslog

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 11

\n
\n

object

\n

The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.

\n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

mode [required]

\n
\n

enum

\n

Protocol used by the syslog source to receive messages. \nAllowed enum values: tcp,udp

\n
\n \n
\n
\n
\n
\n
\n

tls

\n
\n

object

\n

Configuration for enabling TLS encryption between the pipeline component and external services.

\n
\n
\n
\n
\n
\n

ca_file

\n
\n

string

\n

Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.

\n
\n \n
\n
\n
\n
\n
\n

crt_file [required]

\n
\n

string

\n

Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.

\n
\n \n
\n
\n
\n
\n
\n

key_file

\n
\n

string

\n

Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The source type. The value should always be syslog_ng. \nAllowed enum values: syslog_ng

default: syslog_ng

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 12 (object): The amazon_data_firehose source ingests logs from AWS Data Firehose.
  - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
      - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
      - external_id (string): A unique identifier for cross-account role assumption.
      - session_name (string): A session identifier used for logging and tracing the assumed role session.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - type [required] (enum): The source type. The value should always be amazon_data_firehose. Allowed enum values: amazon_data_firehose. Default: amazon_data_firehose.

Option 13 (object): The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.
  - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
      - credentials_file [required] (string): Path to the GCP service account key file.
  - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - project [required] (string): The GCP project ID that owns the Pub/Sub subscription.
  - subscription [required] (string): The Pub/Sub subscription name from which messages are consumed.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - type [required] (enum): The source type. The value should always be google_pubsub. Allowed enum values: google_pubsub. Default: google_pubsub.
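A minimal sketch of a google_pubsub source entry using only the fields above; the project, subscription, and key-file path are placeholders:

    {
      "id": "pubsub-source-1",
      "type": "google_pubsub",
      "project": "my-gcp-project",
      "subscription": "logs-subscription",
      "decoding": "json",
      "auth": {
        "credentials_file": "/var/secrets/gcp/key.json"
      }
    }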

Option 14 (object): The http_client source scrapes logs from HTTP endpoints at regular intervals.
  - auth_strategy (enum): Optional authentication strategy for HTTP requests. Allowed enum values: basic, bearer.
  - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - scrape_interval_secs (int64): The interval (in seconds) between HTTP scrape requests.
  - scrape_timeout_secs (int64): The timeout (in seconds) for each scrape request.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - type [required] (enum): The source type. The value should always be http_client. Allowed enum values: http_client. Default: http_client.
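A minimal sketch of an http_client source entry based on the fields above; the id and interval values are illustrative:

    {
      "id": "http-client-source-1",
      "type": "http_client",
      "decoding": "json",
      "auth_strategy": "bearer",
      "scrape_interval_secs": 60,
      "scrape_timeout_secs": 10
    }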

Option 15 (object): The logstash source ingests logs from a Logstash forwarder.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - type [required] (enum): The source type. The value should always be logstash. Allowed enum values: logstash. Default: logstash.

name [required] (string): Name of the pipeline.

id [required] (string): Unique identifier for the pipeline.

type [required] (string): The resource type identifier. For pipeline resources, this should always be set to pipelines. Default: pipelines.
" }, "403": { "json": { @@ -580,7 +580,7 @@ "type": "pipelines" } }, - "html": "
data [required] (object): Contains the pipeline’s ID, type, and configuration attributes.

attributes [required] (object): Defines the pipeline’s name and its components (sources, processors, and destinations).

config [required] (object): Specifies the pipeline's configuration, including its sources, processors, and destinations.

destinations [required] ([ <oneOf>]): A list of destination components where processed logs are sent.

Option 1 (object): The datadog_logs destination forwards logs to Datadog Log Management.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be datadog_logs. Allowed enum values: datadog_logs. Default: datadog_logs.

Option 2 (object): The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.
  - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
      - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
      - external_id (string): A unique identifier for cross-account role assumption.
      - session_name (string): A session identifier used for logging and tracing the assumed role session.
  - bucket [required] (string): S3 bucket name.
  - id [required] (string): Unique identifier for the destination component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - key_prefix (string): Optional prefix for object keys.
  - region [required] (string): AWS region of the S3 bucket.
  - storage_class [required] (enum): S3 storage class. Allowed enum values: STANDARD, REDUCED_REDUNDANCY, INTELLIGENT_TIERING, STANDARD_IA, EXPRESS_ONEZONE, ONEZONE_IA, GLACIER, GLACIER_IR, DEEP_ARCHIVE.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - type [required] (enum): The destination type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3.
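A minimal sketch of an amazon_s3 destination entry using only the fields above; the bucket, region, and upstream component ID are placeholders:

    {
      "id": "s3-archive-1",
      "type": "amazon_s3",
      "inputs": ["filter-processor-1"],
      "bucket": "my-log-archive",
      "region": "us-east-1",
      "key_prefix": "datadog/",
      "storage_class": "STANDARD"
    }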

Option 3 (object): The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields.
  - acl [required] (enum): Access control list setting for objects written to the bucket. Allowed enum values: private, project-private, public-read, authenticated-read, bucket-owner-read, bucket-owner-full-control.
  - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
      - credentials_file [required] (string): Path to the GCP service account key file.
  - bucket [required] (string): Name of the GCS bucket.
  - id [required] (string): Unique identifier for the destination component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - key_prefix (string): Optional prefix for object keys within the GCS bucket.
  - metadata ([object]): Custom metadata to attach to each object uploaded to the GCS bucket.
      - name [required] (string): The metadata key.
      - value [required] (string): The metadata value.
  - storage_class [required] (enum): Storage class used for objects stored in GCS. Allowed enum values: STANDARD, NEARLINE, COLDLINE, ARCHIVE.
  - type [required] (enum): The destination type. Always google_cloud_storage. Allowed enum values: google_cloud_storage. Default: google_cloud_storage.

Option 4 (object): The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).
  - auto_extract_timestamp (boolean): If true, Splunk tries to extract timestamps from incoming log events. If false, Splunk assigns the time the event was received.
  - encoding (enum): Encoding format for log events. Allowed enum values: json, raw_message.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - index (string): Optional name of the Splunk index where logs are written.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - sourcetype (string): The Splunk sourcetype to assign to log events.
  - type [required] (enum): The destination type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec.
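A minimal sketch of a splunk_hec destination entry; the index, sourcetype, and input ID are placeholders:

    {
      "id": "splunk-hec-dest-1",
      "type": "splunk_hec",
      "inputs": ["parse-json-1"],
      "encoding": "json",
      "index": "main",
      "sourcetype": "datadog_pipeline",
      "auto_extract_timestamp": true
    }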

Option 5 (object): The sumo_logic destination forwards logs to Sumo Logic.
  - encoding (enum): The output encoding format. Allowed enum values: json, raw_message, logfmt.
  - header_custom_fields ([object]): A list of custom headers to include in the request to Sumo Logic.
      - name [required] (string): The header field name.
      - value [required] (string): The header field value.
  - header_host_name (string): Optional override for the host name header.
  - header_source_category (string): Optional override for the source category header.
  - header_source_name (string): Optional override for the source name header.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic.

Option 6 (object): The elasticsearch destination writes logs to an Elasticsearch cluster.
  - api_version (enum): The Elasticsearch API version to use. Set to auto to auto-detect. Allowed enum values: auto, v6, v7, v8.
  - bulk_index (string): The index to write logs to in Elasticsearch.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be elasticsearch. Allowed enum values: elasticsearch. Default: elasticsearch.

Option 7 (object): The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - keepalive (int64): Optional socket keepalive duration in milliseconds.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - type [required] (enum): The destination type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog.

Option 8 (object): The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - keepalive (int64): Optional socket keepalive duration in milliseconds.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - type [required] (enum): The destination type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng.

Option 9 (object): The azure_storage destination forwards logs to an Azure Blob Storage container.
  - blob_prefix (string): Optional prefix for blobs written to the container.
  - container_name [required] (string): The name of the Azure Blob Storage container to store logs in.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be azure_storage. Allowed enum values: azure_storage. Default: azure_storage.

Option 10 (object): The microsoft_sentinel destination forwards logs to Microsoft Sentinel.
  - client_id [required] (string): Azure AD client ID used for authentication.
  - dcr_immutable_id [required] (string): The immutable ID of the Data Collection Rule (DCR).
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - table [required] (string): The name of the Log Analytics table where logs are sent.
  - tenant_id [required] (string): Azure AD tenant ID.
  - type [required] (enum): The destination type. The value should always be microsoft_sentinel. Allowed enum values: microsoft_sentinel. Default: microsoft_sentinel.
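A minimal sketch of a microsoft_sentinel destination entry; the IDs and table name are placeholder values:

    {
      "id": "sentinel-dest-1",
      "type": "microsoft_sentinel",
      "inputs": ["filter-processor-1"],
      "client_id": "00000000-0000-0000-0000-000000000000",
      "tenant_id": "11111111-1111-1111-1111-111111111111",
      "dcr_immutable_id": "dcr-00000000000000000000000000000000",
      "table": "Custom-DatadogLogs"
    }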

Option 11 (object): The google_chronicle destination sends logs to Google Chronicle.
  - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
      - credentials_file [required] (string): Path to the GCP service account key file.
  - customer_id [required] (string): The Google Chronicle customer ID.
  - encoding (enum): The encoding format for the logs sent to Chronicle. Allowed enum values: json, raw_message.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - log_type (string): The log type metadata associated with the Chronicle destination.
  - type [required] (enum): The destination type. The value should always be google_chronicle. Allowed enum values: google_chronicle. Default: google_chronicle.

Option 12 (object): The new_relic destination sends logs to the New Relic platform.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - region [required] (enum): The New Relic region. Allowed enum values: us, eu.
  - type [required] (enum): The destination type. The value should always be new_relic. Allowed enum values: new_relic. Default: new_relic.

Option 13 (object): The sentinel_one destination sends logs to SentinelOne.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - region [required] (enum): The SentinelOne region to send logs to. Allowed enum values: us, eu, ca, data_set_us.
  - type [required] (enum): The destination type. The value should always be sentinel_one. Allowed enum values: sentinel_one. Default: sentinel_one.

Option 14 (object): The opensearch destination writes logs to an OpenSearch cluster.
  - bulk_index (string): The index to write logs to.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be opensearch. Allowed enum values: opensearch. Default: opensearch.

Option 15 (object): The amazon_opensearch destination writes logs to Amazon OpenSearch.
  - auth [required] (object): Authentication settings for the Amazon OpenSearch destination. The strategy field determines whether basic or AWS-based authentication is used.
      - assume_role (string): The ARN of the role to assume (used with the aws strategy).
      - aws_region (string): AWS region.
      - external_id (string): External ID for the assumed role (used with the aws strategy).
      - session_name (string): Session name for the assumed role (used with the aws strategy).
      - strategy [required] (enum): The authentication strategy to use. Allowed enum values: basic, aws.
  - bulk_index (string): The index to write logs to.
  - id [required] (string): The unique identifier for this component.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The destination type. The value should always be amazon_opensearch. Allowed enum values: amazon_opensearch. Default: amazon_opensearch.

processors ([ <oneOf>]): A list of processors that transform or enrich log data.

Option 1 (object): The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be filter. Allowed enum values: filter. Default: filter.
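A minimal sketch of a filter processor entry; the search query and source ID are placeholders:

    {
      "id": "filter-processor-1",
      "type": "filter",
      "inputs": ["datadog-agent-source-1"],
      "include": "service:web AND status:error"
    }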

Option 2 (object): The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.
  - field [required] (string): The name of the log field that contains a JSON string.
  - id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be parse_json. Allowed enum values: parse_json. Default: parse_json.

Option 3 (object): The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.
  - drop_events [required] (boolean): If set to true, logs that matched the quota filter and are sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - ignore_when_missing_partitions (boolean): If true, the processor skips quota checks when partition fields are missing from the logs.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
      - enforce [required] (enum): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events.
      - limit [required] (int64): The limit for quota enforcement.
  - name [required] (string): Name of the quota.
  - overflow_action (enum): The action to take when the quota is exceeded. Options:
      - drop: Drop the event.
      - no_action: Let the event pass through.
      - overflow_routing: Route to an overflow destination.
    Allowed enum values: drop, no_action, overflow_routing.
  - overrides ([object]): A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.
      - fields [required] ([object]): A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.
          - name [required] (string): The field name.
          - value [required] (string): The field value.
      - limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
          - enforce [required] (enum): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events.
          - limit [required] (int64): The limit for quota enforcement.
  - partition_fields ([string]): A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.
  - type [required] (enum): The processor type. The value should always be quota. Allowed enum values: quota. Default: quota.
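A minimal sketch of a quota processor entry combining the fields above; the limits and the service override are placeholder values:

    {
      "id": "quota-processor-1",
      "type": "quota",
      "inputs": ["datadog-agent-source-1"],
      "include": "*",
      "name": "daily-intake-quota",
      "drop_events": true,
      "partition_fields": ["service"],
      "limit": { "enforce": "bytes", "limit": 10000000000 },
      "overrides": [
        {
          "fields": [{ "name": "service", "value": "checkout" }],
          "limit": { "enforce": "bytes", "limit": 20000000000 }
        }
      ]
    }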

Option 4 (object): The add_fields processor adds static key-value fields to logs.
  - fields [required] ([object]): A list of static fields (key-value pairs) that are added to each log event processed by this component.
      - name [required] (string): The field name.
      - value [required] (string): The field value.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be add_fields. Allowed enum values: add_fields. Default: add_fields.

Option 5 (object): The remove_fields processor deletes specified fields from logs.
  - fields [required] ([string]): A list of field names to be removed from each log event.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be remove_fields. Allowed enum values: remove_fields. Default: remove_fields.

Option 6 (object): The rename_fields processor changes field names.
  - fields [required] ([object]): A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.
      - destination [required] (string): The field name to assign the renamed value to.
      - preserve_source [required] (boolean): Indicates whether the original field, as received from the source, should be kept (true) or removed (false) after renaming.
      - source [required] (string): The original field name in the log event that should be renamed.
  - id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be rename_fields. Allowed enum values: rename_fields. Default: rename_fields.

Option 7 (object): The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log fields.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - metrics [required] ([object]): Configuration for generating individual metrics.
      - group_by ([string]): Optional fields used to group the metric series.
      - include [required] (string): Datadog filter query to match logs for metric generation.
      - metric_type [required] (enum): Type of metric to create. Allowed enum values: count, gauge, distribution.
      - name [required] (string): Name of the custom metric to be created.
      - value [required] (<oneOf>): Specifies how the value of the generated metric is computed.
          - Option 1 (object): Strategy that increments a generated metric by one for each matching event.
              - strategy [required] (enum): Increments the metric by 1 for each matching event. Allowed enum values: increment_by_one.
          - Option 2 (object): Strategy that increments a generated metric based on the value of a log field.
              - field [required] (string): Name of the log field containing the numeric value to increment the metric by.
              - strategy [required] (enum): Uses a numeric field in the log event as the metric increment. Allowed enum values: increment_by_field.
  - type [required] (enum): The processor type. Always generate_datadog_metrics. Allowed enum values: generate_datadog_metrics. Default: generate_datadog_metrics.
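A minimal sketch of a generate_datadog_metrics processor entry; the metric name and queries are placeholders:

    {
      "id": "metrics-processor-1",
      "type": "generate_datadog_metrics",
      "inputs": ["filter-processor-1"],
      "include": "*",
      "metrics": [
        {
          "name": "pipeline.error_logs",
          "metric_type": "count",
          "include": "status:error",
          "group_by": ["service"],
          "value": { "strategy": "increment_by_one" }
        }
      ]
    }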

Option 8 (object): The sample processor allows probabilistic sampling of logs at a fixed rate.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - percentage (double): The percentage of logs to sample.
  - rate (int64): Number of events to sample (1 in N).
  - type [required] (enum): The processor type. The value should always be sample. Allowed enum values: sample. Default: sample.

Option 9 (object): The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.
  - disable_library_rules (boolean): If set to true, disables the default Grok rules provided by Datadog.
  - id [required] (string): A unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - rules [required] ([object]): The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.
      - match_rules [required] ([object]): A list of Grok parsing rules that define how to extract fields from the source field. Each rule must contain a name and a valid Grok pattern.
          - name [required] (string): The name of the rule.
          - rule [required] (string): The definition of the Grok rule.
      - source [required] (string): The name of the field in the log event to apply the Grok rules to.
      - support_rules [required] ([object]): A list of Grok helper rules that can be referenced by the parsing rules.
          - name [required] (string): The name of the Grok helper rule.
          - rule [required] (string): The definition of the Grok helper rule.
  - type [required] (enum): The processor type. The value should always be parse_grok. Allowed enum values: parse_grok. Default: parse_grok.
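A minimal sketch of a parse_grok processor entry; the rule shown is an assumed example pattern, not part of the spec:

    {
      "id": "grok-processor-1",
      "type": "parse_grok",
      "inputs": ["datadog-agent-source-1"],
      "include": "source:nginx",
      "disable_library_rules": false,
      "rules": [
        {
          "source": "message",
          "match_rules": [
            {
              "name": "access_line",
              "rule": "%{ip:client_ip} %{word:method} %{notSpace:uri}"
            }
          ],
          "support_rules": []
        }
      ]
    }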

Option 10 (object): The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - rules [required] ([object]): A list of rules for identifying and acting on sensitive data patterns.
      - keyword_options (object): Configuration for keywords used to reinforce sensitive data pattern detection.
          - keywords [required] ([string]): A list of keywords to match near the sensitive pattern.
          - proximity [required] (int64): Maximum number of tokens between a keyword and a sensitive value match.
      - name [required] (string): A name identifying the rule.
      - on_match [required] (<oneOf>): Defines what action to take when sensitive data is matched.
          - Option 1 (object): Configuration for completely redacting matched sensitive data.
              - action [required] (enum): Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. Allowed enum values: redact.
              - options [required] (object): Configuration for fully redacting sensitive data.
                  - replace [required] (string): The replacement string used in place of the matched sensitive data.
          - Option 2 (object): Configuration for hashing matched sensitive values.
              - action [required] (enum): Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. Allowed enum values: hash.
              - options (object): Options for the hash action.
          - Option 3 (object): Configuration for partially redacting matched sensitive data.
              - action [required] (enum): Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). Allowed enum values: partial_redact.
              - options [required] (object): Controls how partial redaction is applied, including character count and direction.
                  - characters [required] (int64): The number of characters the partial redaction applies to.
                  - direction [required] (enum): Indicates whether to redact characters from the first or last part of the matched value. Allowed enum values: first, last.
      - pattern [required] (<oneOf>): Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.
          - Option 1 (object): Defines a custom regex-based pattern for identifying sensitive data in logs.
              - options [required] (object): Options for defining a custom regex pattern.
                  - rule [required] (string): A regular expression used to detect sensitive values. Must be a valid regex.
              - type [required] (enum): Indicates a custom regular expression is used for matching. Allowed enum values: custom.
          - Option 2 (object): Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.
              - options [required] (object): Options for selecting a predefined library pattern and enabling keyword support.
                  - id [required] (string): Identifier for a predefined pattern from the sensitive data scanner pattern library.
                  - use_recommended_keywords (boolean): Whether to augment the pattern with recommended keywords (optional).
              - type [required] (enum): Indicates that a predefined library pattern is used. Allowed enum values: library.
      - scope [required] (<oneOf>): Determines which parts of the log the pattern-matching rule should be applied to.
          - Option 1 (object): Includes only specific fields for sensitive data scanning.
              - options [required] (object): Fields to which the scope rule applies.
                  - fields [required] ([string]): The list of log fields the scope rule applies to.
              - target [required] (enum): Applies the rule only to included fields. Allowed enum values: include.
          - Option 2 (object): Excludes specific fields from sensitive data scanning.
              - options [required] (object): Fields to which the scope rule applies.
                  - fields [required] ([string]): The list of log fields the scope rule applies to.
              - target [required] (enum): Excludes specific fields from processing. Allowed enum values: exclude.
          - Option 3 (object): Applies scanning across all available fields.
              - target [required] (enum): Applies the rule to all fields. Allowed enum values: all.
      - tags [required] ([string]): Tags assigned to this rule for filtering and classification.
  - type [required] (enum): The processor type. The value should always be sensitive_data_scanner. Allowed enum values: sensitive_data_scanner. Default: sensitive_data_scanner.
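A minimal sketch of a sensitive_data_scanner rule that partially redacts a custom pattern; the regex and counts are placeholder values:

    {
      "id": "sds-processor-1",
      "type": "sensitive_data_scanner",
      "inputs": ["datadog-agent-source-1"],
      "include": "*",
      "rules": [
        {
          "name": "mask-card-numbers",
          "tags": ["pci"],
          "pattern": {
            "type": "custom",
            "options": { "rule": "\\b\\d{13,16}\\b" }
          },
          "scope": { "target": "all" },
          "on_match": {
            "action": "partial_redact",
            "options": { "characters": 4, "direction": "last" }
          }
        }
      ]
    }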

Option 11 (object): The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - mappings [required] ([object]): A list of mapping rules to convert events to the OCSF format.
      - include [required] (string): A Datadog search query used to select the logs that this mapping should apply to.
      - mapping [required] (<oneOf>): Defines a single mapping rule for transforming logs into the OCSF schema.
          - Option 1 (enum): Predefined library mappings for common log formats. Allowed enum values: CloudTrail Account Change, GCP Cloud Audit CreateBucket, GCP Cloud Audit CreateSink, GCP Cloud Audit SetIamPolicy, GCP Cloud Audit UpdateSink, Github Audit Log API Activity, Google Workspace Admin Audit addPrivilege, Microsoft 365 Defender Incident, Microsoft 365 Defender UserLoggedIn, Okta System Log Authentication, Palo Alto Networks Firewall Traffic.
  - type [required] (enum): The processor type. The value should always be ocsf_mapper. Allowed enum values: ocsf_mapper. Default: ocsf_mapper.

Option 12 (object): The add_env_vars processor adds environment variable values to log events.
  - id [required] (string): The unique identifier for this component. Used to reference this processor in the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - type [required] (enum): The processor type. The value should always be add_env_vars. Allowed enum values: add_env_vars. Default: add_env_vars.
  - variables [required] ([object]): A list of environment variable mappings to apply to log fields.
      - field [required] (string): The target field in the log event.
      - name [required] (string): The name of the environment variable to read.

Option 13 (object): The dedupe processor removes duplicate fields in log events.
  - fields [required] ([string]): A list of log field paths to check for duplicates.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - mode [required] (enum): The deduplication mode to apply to the fields. Allowed enum values: match, ignore.
  - type [required] (enum): The processor type. The value should always be dedupe. Allowed enum values: dedupe. Default: dedupe.

Option 14 (object): The enrichment_table processor enriches logs using a static CSV file or GeoIP database.
  - file (object): Defines a static enrichment table loaded from a CSV file.
      - encoding [required] (object): File encoding format.
          - delimiter [required] (string): The delimiter used in the CSV file.
          - includes_headers [required] (boolean): Whether the CSV file includes a header row.
          - type [required] (enum): Specifies the encoding format (e.g., CSV) used for enrichment tables. Allowed enum values: csv.
      - key [required] ([object]): Key fields used to look up enrichment values.
          - column [required] (string): The column name in the enrichment table to match against.
          - comparison [required] (enum): Defines how to compare key fields for enrichment table lookups. Allowed enum values: equals.
          - field [required] (string): The log event field to compare with the column value.
      - path [required] (string): Path to the CSV file.
      - schema [required] ([object]): Schema defining column names and their types.
          - column [required] (string): The column name in the enrichment table.
          - type [required] (enum): Declares allowed data types for enrichment table columns. Allowed enum values: string, boolean, integer, float, date, timestamp.
  - geoip (object): Uses a GeoIP database to enrich logs based on an IP field.
      - key_field [required] (string): Path to the IP field in the log.
      - locale [required] (string): Locale used to resolve geographical names.
      - path [required] (string): Path to the GeoIP database file.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - target [required] (string): Path where enrichment results should be stored in the log.
  - type [required] (enum): The processor type. The value should always be enrichment_table. Allowed enum values: enrichment_table. Default: enrichment_table.
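A minimal sketch of a CSV-backed enrichment_table processor entry; the file path, columns, and target are placeholders:

    {
      "id": "enrichment-processor-1",
      "type": "enrichment_table",
      "inputs": ["filter-processor-1"],
      "include": "*",
      "target": "service_metadata",
      "file": {
        "path": "/etc/pipeline/tables/services.csv",
        "encoding": {
          "type": "csv",
          "delimiter": ",",
          "includes_headers": true
        },
        "key": [
          { "column": "service_name", "comparison": "equals", "field": "service" }
        ],
        "schema": [
          { "column": "service_name", "type": "string" },
          { "column": "owner", "type": "string" }
        ]
      }
    }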

Option 15 (object): The reduce processor aggregates and merges logs based on matching keys and merge strategies.
  - group_by [required] ([string]): A list of fields used to group log events for merging.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - merge_strategies [required] ([object]): List of merge strategies defining how values from grouped events should be combined.
      - path [required] (string): The field path in the log event.
      - strategy [required] (enum): The merge strategy to apply. Allowed enum values: discard, retain, sum, max, min, array, concat, concat_newline, concat_raw, shortest_array, longest_array, flat_unique.
  - type [required] (enum): The processor type. The value should always be reduce. Allowed enum values: reduce. Default: reduce.

Option 16 (object): The throttle processor limits the number of events that pass through over a given time window.
  - group_by ([string]): Optional list of fields used to group events before applying the threshold.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - threshold [required] (int64): The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.
  - type [required] (enum): The processor type. The value should always be throttle. Allowed enum values: throttle. Default: throttle.
  - window [required] (double): The time window in seconds over which the threshold applies.
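A minimal sketch of a throttle processor entry; the threshold and window are placeholder values:

    {
      "id": "throttle-processor-1",
      "type": "throttle",
      "inputs": ["datadog-agent-source-1"],
      "include": "*",
      "threshold": 1000,
      "window": 60.0,
      "group_by": ["service"]
    }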

sources [required] ([ <oneOf>]): A list of configured data sources for the pipeline.

Option 1 (object): The kafka source ingests data from Apache Kafka topics.
  - group_id [required] (string): Consumer group ID used by the Kafka client.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - librdkafka_options ([object]): Optional list of advanced Kafka client configuration options, defined as key-value pairs.
      - name [required] (string): The name of the librdkafka configuration option to set.
      - value [required] (string): The value assigned to the specified librdkafka configuration option.
  - sasl (object): Specifies the SASL mechanism for authenticating with a Kafka cluster.
      - mechanism (enum): SASL mechanism used for Kafka authentication. Allowed enum values: PLAIN, SCRAM-SHA-256, SCRAM-SHA-512.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - topics [required] ([string]): A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.
  - type [required] (enum): The source type. The value should always be kafka. Allowed enum values: kafka. Default: kafka.
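A minimal sketch of a kafka source entry; the group ID, topic, and librdkafka option are placeholder values:

    {
      "id": "kafka-source-1",
      "type": "kafka",
      "group_id": "pipeline-consumers",
      "topics": ["app-logs"],
      "sasl": { "mechanism": "SCRAM-SHA-512" },
      "librdkafka_options": [
        { "name": "fetch.message.max.bytes", "value": "1048576" }
      ]
    }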

Option 2 (object): The datadog_agent source collects logs from the Datadog Agent.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - type [required] (enum): The source type. The value should always be datadog_agent. Allowed enum values: datadog_agent. Default: datadog_agent.

Option 3 (object): The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS is supported for secure transmission.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. Always splunk_tcp. Allowed enum values: splunk_tcp. Default: splunk_tcp.

Option 4 (object): The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec.

Option 5 (object): The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS authentication and TLS encryption.
  auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
    assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    external_id (string): A unique identifier for cross-account role assumption.
    session_name (string): A session identifier used for logging and tracing the assumed role session.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  region [required] (string): AWS region where the S3 bucket resides.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3.
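
For comparison, a sketch of an amazon_s3 source entry using the documented auth and region fields; the region, ARN, external ID, and session name are placeholders:

    {
      "id": "s3-source-1",
      "type": "amazon_s3",
      "region": "us-east-1",
      "auth": {
        "assume_role": "arn:aws:iam::123456789012:role/example-role",
        "external_id": "example-external-id",
        "session_name": "pipeline-session"
      }
    }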

Option 6 (object): The fluentd source ingests logs from a Fluentd-compatible service.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be fluentd. Allowed enum values: fluentd. Default: fluentd.

Option 7 (object): The fluent_bit source ingests logs from Fluent Bit.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be fluent_bit. Allowed enum values: fluent_bit. Default: fluent_bit.

Option 8 (object): The http_server source collects logs over HTTP POST from external services.
  auth_strategy [required] (enum): HTTP authentication method. Allowed enum values: none,plain
  decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
  id [required] (string): Unique ID for the HTTP server source.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be http_server. Allowed enum values: http_server. Default: http_server.

Option 9 (object): The sumo_logic source receives logs from Sumo Logic collectors.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  type [required] (enum): The source type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic.

Option 10 (object): The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp,udp
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog.

Option 11 (object): The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp,udp
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng.

Option 12 (object): The amazon_data_firehose source ingests logs from AWS Data Firehose.
  auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
    assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    external_id (string): A unique identifier for cross-account role assumption.
    session_name (string): A session identifier used for logging and tracing the assumed role session.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be amazon_data_firehose. Allowed enum values: amazon_data_firehose. Default: amazon_data_firehose.

Option 13 (object): The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.
  auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
    credentials_file [required] (string): Path to the GCP service account key file.
  decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  project [required] (string): The GCP project ID that owns the Pub/Sub subscription.
  subscription [required] (string): The Pub/Sub subscription name from which messages are consumed.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be google_pubsub. Allowed enum values: google_pubsub. Default: google_pubsub.
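
A sketch of a google_pubsub source entry under the same conventions; the project, subscription, and key-file path are placeholders:

    {
      "id": "pubsub-source-1",
      "type": "google_pubsub",
      "project": "example-project",
      "subscription": "example-subscription",
      "decoding": "json",
      "auth": { "credentials_file": "/var/secrets/gcp-key.json" }
    }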

Option 14 (object): The http_client source scrapes logs from HTTP endpoints at regular intervals.
  auth_strategy (enum): Optional authentication strategy for HTTP requests. Allowed enum values: basic,bearer
  decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes,gelf,json,syslog
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  scrape_interval_secs (int64): The interval (in seconds) between HTTP scrape requests.
  scrape_timeout_secs (int64): The timeout (in seconds) for each scrape request.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be http_client. Allowed enum values: http_client. Default: http_client.

Option 15 (object): The logstash source ingests logs from a Logstash forwarder.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be logstash. Allowed enum values: logstash. Default: logstash.

name [required] (string): Name of the pipeline.
id [required] (string): Unique identifier for the pipeline.
type [required] (string): The resource type identifier. For pipeline resources, this should always be set to pipelines. Default: pipelines.
" + "html": "
data [required] (object): Contains the pipeline’s ID, type, and configuration attributes.
  attributes [required] (object): Defines the pipeline’s name and its components (sources, processors, and destinations).
    config [required] (object): Specifies the pipeline's configuration, including its sources, processors, and destinations.
      destinations [required] ([<oneOf>]): A list of destination components where processed logs are sent.

Option 1 (object): The datadog_logs destination forwards logs to Datadog Log Management.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be datadog_logs. Allowed enum values: datadog_logs. Default: datadog_logs.
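
Because a destination is wired to upstream components through inputs, a minimal datadog_logs destination needs only the three required fields; the component IDs are illustrative placeholders:

    {
      "id": "datadog-logs-destination",
      "type": "datadog_logs",
      "inputs": ["filter-processor-1"]
    }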

Option 2 (object): The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.
  auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
    assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    external_id (string): A unique identifier for cross-account role assumption.
    session_name (string): A session identifier used for logging and tracing the assumed role session.
  bucket [required] (string): S3 bucket name.
  id [required] (string): Unique identifier for the destination component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  key_prefix (string): Optional prefix for object keys.
  region [required] (string): AWS region of the S3 bucket.
  storage_class [required] (enum): S3 storage class. Allowed enum values: STANDARD,REDUCED_REDUNDANCY,INTELLIGENT_TIERING,STANDARD_IA,EXPRESS_ONEZONE,ONEZONE_IA,GLACIER,GLACIER_IR,DEEP_ARCHIVE
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The destination type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3.
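
A sketch of an amazon_s3 destination entry built from the fields documented above; the bucket, region, prefix, and input IDs are placeholders:

    {
      "id": "s3-archive-destination",
      "type": "amazon_s3",
      "inputs": ["quota-processor-1"],
      "bucket": "example-log-archive",
      "region": "us-east-1",
      "key_prefix": "pipeline-logs/",
      "storage_class": "STANDARD"
    }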

Option 3 (object): The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields.
  acl [required] (enum): Access control list setting for objects written to the bucket. Allowed enum values: private,project-private,public-read,authenticated-read,bucket-owner-read,bucket-owner-full-control
  auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
    credentials_file [required] (string): Path to the GCP service account key file.
  bucket [required] (string): Name of the GCS bucket.
  id [required] (string): Unique identifier for the destination component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  key_prefix (string): Optional prefix for object keys within the GCS bucket.
  metadata ([object]): Custom metadata to attach to each object uploaded to the GCS bucket.
    name [required] (string): The metadata key.
    value [required] (string): The metadata value.
  storage_class [required] (enum): Storage class used for objects stored in GCS. Allowed enum values: STANDARD,NEARLINE,COLDLINE,ARCHIVE
  type [required] (enum): The destination type. Always google_cloud_storage. Allowed enum values: google_cloud_storage. Default: google_cloud_storage.

Option 4 (object): The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).
  auto_extract_timestamp (boolean): If true, Splunk tries to extract timestamps from incoming log events. If false, Splunk assigns the time the event was received.
  encoding (enum): Encoding format for log events. Allowed enum values: json,raw_message
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  index (string): Optional name of the Splunk index where logs are written.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  sourcetype (string): The Splunk sourcetype to assign to log events.
  type [required] (enum): The destination type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec.

Option 5 (object): The sumo_logic destination forwards logs to Sumo Logic.
  encoding (enum): The output encoding format. Allowed enum values: json,raw_message,logfmt
  header_custom_fields ([object]): A list of custom headers to include in the request to Sumo Logic.
    name [required] (string): The header field name.
    value [required] (string): The header field value.
  header_host_name (string): Optional override for the host name header.
  header_source_category (string): Optional override for the source category header.
  header_source_name (string): Optional override for the source name header.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic.

Option 6 (object): The elasticsearch destination writes logs to an Elasticsearch cluster.
  api_version (enum): The Elasticsearch API version to use. Set to auto to auto-detect. Allowed enum values: auto,v6,v7,v8
  bulk_index (string): The index to write logs to in Elasticsearch.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be elasticsearch. Allowed enum values: elasticsearch. Default: elasticsearch.

Option 7 (object): The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  keepalive (int64): Optional socket keepalive duration in milliseconds.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The destination type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog.

Option 8 (object): The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  keepalive (int64): Optional socket keepalive duration in milliseconds.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The destination type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng.

Option 9 (object): The azure_storage destination forwards logs to an Azure Blob Storage container.
  blob_prefix (string): Optional prefix for blobs written to the container.
  container_name [required] (string): The name of the Azure Blob Storage container to store logs in.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be azure_storage. Allowed enum values: azure_storage. Default: azure_storage.

Option 10 (object): The microsoft_sentinel destination forwards logs to Microsoft Sentinel.
  client_id [required] (string): Azure AD client ID used for authentication.
  dcr_immutable_id [required] (string): The immutable ID of the Data Collection Rule (DCR).
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  table [required] (string): The name of the Log Analytics table where logs are sent.
  tenant_id [required] (string): Azure AD tenant ID.
  type [required] (enum): The destination type. The value should always be microsoft_sentinel. Allowed enum values: microsoft_sentinel. Default: microsoft_sentinel.

Option 11 (object): The google_chronicle destination sends logs to Google Chronicle.
  auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
    credentials_file [required] (string): Path to the GCP service account key file.
  customer_id [required] (string): The Google Chronicle customer ID.
  encoding (enum): The encoding format for the logs sent to Chronicle. Allowed enum values: json,raw_message
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  log_type (string): The log type metadata associated with the Chronicle destination.
  type [required] (enum): The destination type. The value should always be google_chronicle. Allowed enum values: google_chronicle. Default: google_chronicle.

Option 12 (object): The new_relic destination sends logs to the New Relic platform.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  region [required] (enum): The New Relic region. Allowed enum values: us,eu
  type [required] (enum): The destination type. The value should always be new_relic. Allowed enum values: new_relic. Default: new_relic.

Option 13 (object): The sentinel_one destination sends logs to SentinelOne.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  region [required] (enum): The SentinelOne region to send logs to. Allowed enum values: us,eu,ca,data_set_us
  type [required] (enum): The destination type. The value should always be sentinel_one. Allowed enum values: sentinel_one. Default: sentinel_one.

Option 14 (object): The opensearch destination writes logs to an OpenSearch cluster.
  bulk_index (string): The index to write logs to.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be opensearch. Allowed enum values: opensearch. Default: opensearch.

Option 15 (object): The amazon_opensearch destination writes logs to Amazon OpenSearch.
  auth [required] (object): Authentication settings for the Amazon OpenSearch destination. The strategy field determines whether basic or AWS-based authentication is used.
    assume_role (string): The ARN of the role to assume (used with aws strategy).
    aws_region (string): AWS region.
    external_id (string): External ID for the assumed role (used with aws strategy).
    session_name (string): Session name for the assumed role (used with aws strategy).
    strategy [required] (enum): The authentication strategy to use. Allowed enum values: basic,aws
  bulk_index (string): The index to write logs to.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be amazon_opensearch. Allowed enum values: amazon_opensearch. Default: amazon_opensearch.

processors ([<oneOf>]): A list of processors that transform or enrich log data.

Option 1 (object): The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The processor type. The value should always be filter. Allowed enum values: filter. Default: filter.
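
A sketch of a filter processor entry; the include query and component IDs are illustrative:

    {
      "id": "filter-processor-1",
      "type": "filter",
      "include": "service:web-backend status:error",
      "inputs": ["datadog-agent-source-1"]
    }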

Option 2 (object): The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.
  field [required] (string): The name of the log field that contains a JSON string.
  id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The processor type. The value should always be parse_json. Allowed enum values: parse_json. Default: parse_json.
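
A sketch of a parse_json processor that flattens an embedded JSON string out of the message field; the IDs are placeholders, and an include of "*" targets every log:

    {
      "id": "parse-json-1",
      "type": "parse_json",
      "field": "message",
      "include": "*",
      "inputs": ["filter-processor-1"]
    }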

Option 3 (object): The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.
  drop_events [required] (boolean): If set to true, logs that matched the quota filter and are sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  ignore_when_missing_partitions (boolean): If true, the processor skips quota checks when partition fields are missing from the logs.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
    enforce [required] (enum): Unit for quota enforcement in bytes for data size or events for count. Allowed enum values: bytes,events
    limit [required] (int64): The limit for quota enforcement.
  name [required] (string): Name of the quota.
  overflow_action (enum): The action to take when the quota is exceeded. Options: drop (drop the event), no_action (let the event pass through), and overflow_routing (route to an overflow destination). Allowed enum values: drop,no_action,overflow_routing
  overrides ([object]): A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.
    fields [required] ([object]): A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.
      name [required] (string): The field name.
      value [required] (string): The field value.
    limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
      enforce [required] (enum): Unit for quota enforcement in bytes for data size or events for count. Allowed enum values: bytes,events
      limit [required] (int64): The limit for quota enforcement.
  partition_fields ([string]): A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.
  type [required] (enum): The processor type. The value should always be quota. Allowed enum values: quota. Default: quota.
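
Putting the limit, override, and partition fields together, a sketch of a quota processor that enforces a 10 GB quota per service, with a higher limit for one service; all names and numbers are illustrative:

    {
      "id": "quota-processor-1",
      "type": "quota",
      "name": "daily-ingest-quota",
      "include": "*",
      "inputs": ["parse-json-1"],
      "drop_events": true,
      "partition_fields": ["service"],
      "limit": { "enforce": "bytes", "limit": 10000000000 },
      "overrides": [
        {
          "fields": [{ "name": "service", "value": "example-high-volume-service" }],
          "limit": { "enforce": "bytes", "limit": 50000000000 }
        }
      ]
    }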

Option 4 (object): The add_fields processor adds static key-value fields to logs.
  fields [required] ([object]): A list of static fields (key-value pairs) that are added to each log event processed by this component.
    name [required] (string): The field name.
    value [required] (string): The field value.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The processor type. The value should always be add_fields. Allowed enum values: add_fields. Default: add_fields.

Option 5 (object): The remove_fields processor deletes specified fields from logs.
  fields [required] ([string]): A list of field names to be removed from each log event.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The processor type. The value should always be remove_fields. Allowed enum values: remove_fields. Default: remove_fields.

Option 6 (object): The rename_fields processor changes field names.
  fields [required] ([object]): A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.
    destination [required] (string): The field name to assign the renamed value to.
    preserve_source [required] (boolean): Indicates whether the original field received from the source should be kept (true) or removed (false) after renaming.
    source [required] (string): The original field name in the log event that should be renamed.
  id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The processor type. The value should always be rename_fields. Allowed enum values: rename_fields. Default: rename_fields.

Option 7 (object): The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log fields.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  metrics [required] ([object]): Configuration for generating individual metrics.
    group_by ([string]): Optional fields used to group the metric series.
    include [required] (string): Datadog filter query to match logs for metric generation.
    metric_type [required] (enum): Type of metric to create. Allowed enum values: count,gauge,distribution
    name [required] (string): Name of the custom metric to be created.
    value [required] (<oneOf>): Specifies how the value of the generated metric is computed.
      Option 1 (object): Strategy that increments a generated metric by one for each matching event.
        strategy [required] (enum): Increments the metric by 1 for each matching event. Allowed enum values: increment_by_one
      Option 2 (object): Strategy that increments a generated metric based on the value of a log field.
        field [required] (string): Name of the log field containing the numeric value to increment the metric by.
        strategy [required] (enum): Uses a numeric field in the log event as the metric increment. Allowed enum values: increment_by_field
  type [required] (enum): The processor type. Always generate_datadog_metrics. Allowed enum values: generate_datadog_metrics. Default: generate_datadog_metrics.
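
A sketch of a generate_datadog_metrics processor showing both value strategies; the metric names, queries, and the duration_ms field are illustrative:

    {
      "id": "metrics-processor-1",
      "type": "generate_datadog_metrics",
      "include": "*",
      "inputs": ["filter-processor-1"],
      "metrics": [
        {
          "name": "logs.errors.count",
          "metric_type": "count",
          "include": "status:error",
          "group_by": ["service"],
          "value": { "strategy": "increment_by_one" }
        },
        {
          "name": "logs.request.duration",
          "metric_type": "distribution",
          "include": "service:web-backend",
          "value": { "strategy": "increment_by_field", "field": "duration_ms" }
        }
      ]
    }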

Option 8 (object): The sample processor allows probabilistic sampling of logs at a fixed rate.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  percentage (double): The percentage of logs to sample.
  rate (int64): Number of events to sample (1 in N).
  type [required] (enum): The processor type. The value should always be sample. Allowed enum values: sample. Default: sample.
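
The schema lists both percentage and rate; a sketch using rate alone, keeping one event in ten (IDs and query illustrative):

    {
      "id": "sample-processor-1",
      "type": "sample",
      "include": "status:info",
      "inputs": ["filter-processor-1"],
      "rate": 10
    }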

Option 9 (object): The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.
  disable_library_rules (boolean): If set to true, disables the default Grok rules provided by Datadog.
  id [required] (string): A unique identifier for this processor.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  rules [required] ([object]): The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.
    match_rules [required] ([object]): A list of Grok parsing rules that define how to extract fields from the source field. Each rule must contain a name and a valid Grok pattern.
      name [required] (string): The name of the rule.
      rule [required] (string): The definition of the Grok rule.
    source [required] (string): The name of the field in the log event to apply the Grok rules to.
    support_rules [required] ([object]): A list of Grok helper rules that can be referenced by the parsing rules.
      name [required] (string): The name of the Grok helper rule.
      rule [required] (string): The definition of the Grok helper rule.
  type [required] (enum): The processor type. The value should always be parse_grok. Allowed enum values: parse_grok. Default: parse_grok.
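
A sketch of a parse_grok processor with a single match rule applied to the message field; the rule name and pattern are illustrative and assume Datadog-style Grok matchers:

    {
      "id": "grok-processor-1",
      "type": "parse_grok",
      "include": "*",
      "inputs": ["filter-processor-1"],
      "disable_library_rules": false,
      "rules": [
        {
          "source": "message",
          "match_rules": [
            {
              "name": "access_log",
              "rule": "%{ip:client_ip} %{word:http_method} %{notSpace:url}"
            }
          ],
          "support_rules": []
        }
      ]
    }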

Option 10 (object): The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  rules [required] ([object]): A list of rules for identifying and acting on sensitive data patterns.
    keyword_options (object): Configuration for keywords used to reinforce sensitive data pattern detection.
      keywords [required] ([string]): A list of keywords to match near the sensitive pattern.
      proximity [required] (int64): Maximum number of tokens between a keyword and a sensitive value match.
    name [required] (string): A name identifying the rule.
    on_match [required] (<oneOf>): Defines what action to take when sensitive data is matched.
      Option 1 (object): Configuration for completely redacting matched sensitive data.
        action [required] (enum): Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. Allowed enum values: redact
        options [required] (object): Configuration for fully redacting sensitive data.
          replace [required] (string): The replacement string used in place of the matched sensitive data.
      Option 2 (object): Configuration for hashing matched sensitive values.
        action [required] (enum): Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. Allowed enum values: hash
        options (object): Options for the hash action.
      Option 3 (object): Configuration for partially redacting matched sensitive data.
        action [required] (enum): Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). Allowed enum values: partial_redact
        options [required] (object): Controls how partial redaction is applied, including character count and direction.
          characters [required] (int64): The number of characters used when applying partial redaction.
          direction [required] (enum): Indicates whether to redact characters from the first or last part of the matched value. Allowed enum values: first,last
    pattern [required] (<oneOf>): Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.
      Option 1 (object): Defines a custom regex-based pattern for identifying sensitive data in logs.
        options [required] (object): Options for defining a custom regex pattern.
          rule [required] (string): A regular expression used to detect sensitive values. Must be a valid regex.
        type [required] (enum): Indicates a custom regular expression is used for matching. Allowed enum values: custom
      Option 2 (object): Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.
        options [required] (object): Options for selecting a predefined library pattern and enabling keyword support.
          id [required] (string): Identifier for a predefined pattern from the sensitive data scanner pattern library.
          use_recommended_keywords (boolean): Whether to augment the pattern with recommended keywords (optional).
        type [required] (enum): Indicates that a predefined library pattern is used. Allowed enum values: library
    scope [required] (<oneOf>): Determines which parts of the log the pattern-matching rule should be applied to.
      Option 1 (object): Includes only specific fields for sensitive data scanning.
        options [required] (object): Fields to which the scope rule applies.
          fields [required] ([string]): The list of fields the scope rule applies to.
        target [required] (enum): Applies the rule only to included fields. Allowed enum values: include
      Option 2 (object): Excludes specific fields from sensitive data scanning.
        options [required] (object): Fields to which the scope rule applies.
          fields [required] ([string]): The list of fields the scope rule applies to.
        target [required] (enum): Excludes specific fields from processing. Allowed enum values: exclude
      Option 3 (object): Applies scanning across all available fields.
        target [required] (enum): Applies the rule to all fields. Allowed enum values: all
    tags [required] ([string]): Tags assigned to this rule for filtering and classification.
  type [required] (enum): The processor type. The value should always be sensitive_data_scanner. Allowed enum values: sensitive_data_scanner. Default: sensitive_data_scanner.
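
Combining a custom pattern, an include scope, and a redact action, a sketch of a sensitive_data_scanner rule follows; the rule name, regex, tags, and replacement string are illustrative:

    {
      "id": "sds-processor-1",
      "type": "sensitive_data_scanner",
      "include": "*",
      "inputs": ["parse-json-1"],
      "rules": [
        {
          "name": "redact-card-numbers",
          "tags": ["sensitive:credit-card"],
          "pattern": {
            "type": "custom",
            "options": { "rule": "\\b\\d{13,16}\\b" }
          },
          "scope": {
            "target": "include",
            "options": { "fields": ["message"] }
          },
          "on_match": {
            "action": "redact",
            "options": { "replace": "[REDACTED]" }
          }
        }
      ]
    }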

- Option 11 (object): The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration. A configuration sketch follows the field list.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - mappings [required] ([object]): A list of mapping rules to convert events to the OCSF format.
    - include [required] (string): A Datadog search query used to select the logs that this mapping should apply to.
    - mapping [required] (<oneOf>): Defines a single mapping rule for transforming logs into the OCSF schema.
      - Option 1 (enum): Predefined library mappings for common log formats. Allowed enum values: CloudTrail Account Change, GCP Cloud Audit CreateBucket, GCP Cloud Audit CreateSink, GCP Cloud Audit SetIamPolicy, GCP Cloud Audit UpdateSink, Github Audit Log API Activity, Google Workspace Admin Audit addPrivilege, Microsoft 365 Defender Incident, Microsoft 365 Defender UserLoggedIn, Okta System Log Authentication, Palo Alto Networks Firewall Traffic
  - type [required] (enum): The processor type. The value should always be ocsf_mapper. Allowed enum values: ocsf_mapper. Default: ocsf_mapper
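A minimal sketch of an ocsf_mapper entry using one of the library mappings listed above; IDs and queries are placeholders.

```python
# Hypothetical example only; IDs and queries are placeholders.
ocsf_processor = {
    "id": "ocsf-mapper-1",
    "type": "ocsf_mapper",
    "include": "source:cloudtrail",               # placeholder search query
    "inputs": ["filter-1"],                       # upstream component IDs
    "mappings": [
        {
            "include": "source:cloudtrail",       # logs this mapping applies to
            "mapping": "CloudTrail Account Change",  # library mapping (enum Option 1)
        }
    ],
}
```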

- Option 12 (object): The add_env_vars processor adds environment variable values to log events. A configuration sketch follows the field list.
  - id [required] (string): The unique identifier for this component. Used to reference this processor in the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - type [required] (enum): The processor type. The value should always be add_env_vars. Allowed enum values: add_env_vars. Default: add_env_vars
  - variables [required] ([object]): A list of environment variable mappings to apply to log fields.
    - field [required] (string): The target field in the log event.
    - name [required] (string): The name of the environment variable to read.
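A minimal sketch of an add_env_vars entry, based only on the fields above; the variable name and field are placeholders.

```python
# Hypothetical example only; IDs, query, and variable names are placeholders.
add_env_vars_processor = {
    "id": "add-env-vars-1",
    "type": "add_env_vars",
    "include": "*",
    "inputs": ["ocsf-mapper-1"],
    "variables": [
        # Read the DD_SITE environment variable into the log field "dd_site".
        {"field": "dd_site", "name": "DD_SITE"},
    ],
}
```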

- Option 13 (object): The dedupe processor removes duplicate fields in log events. A configuration sketch follows the field list.
  - fields [required] ([string]): A list of log field paths to check for duplicates.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - mode [required] (enum): The deduplication mode to apply to the fields. Allowed enum values: match, ignore
  - type [required] (enum): The processor type. The value should always be dedupe. Allowed enum values: dedupe. Default: dedupe
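A minimal sketch of a dedupe entry in a pipeline’s processors array; the IDs, query, and field paths are placeholders.

```python
# Hypothetical dedupe processor entry; field values are illustrative only.
dedupe_processor = {
    "id": "dedupe-1",                     # unique processor ID (placeholder)
    "type": "dedupe",
    "include": "service:web",             # Datadog search query (placeholder)
    "inputs": ["add-env-vars-1"],         # upstream component IDs (placeholder)
    "fields": ["host", "http.method"],    # field paths checked for duplicates
    "mode": "match",                      # allowed: "match" or "ignore"
}
```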

- Option 14 (object): The enrichment_table processor enriches logs using a static CSV file or GeoIP database. A configuration sketch follows the field list.
  - file (object): Defines a static enrichment table loaded from a CSV file.
    - encoding [required] (object): File encoding format.
      - delimiter [required] (string): The encoding delimiter.
      - includes_headers [required] (boolean): The encoding includes_headers.
      - type [required] (enum): Specifies the encoding format (e.g., CSV) used for enrichment tables. Allowed enum values: csv
    - key [required] ([object]): Key fields used to look up enrichment values.
      - column [required] (string): The items column.
      - comparison [required] (enum): Defines how to compare key fields for enrichment table lookups. Allowed enum values: equals
      - field [required] (string): The items field.
    - path [required] (string): Path to the CSV file.
    - schema [required] ([object]): Schema defining column names and their types.
      - column [required] (string): The items column.
      - type [required] (enum): Declares allowed data types for enrichment table columns. Allowed enum values: string, boolean, integer, float, date, timestamp
  - geoip (object): Uses a GeoIP database to enrich logs based on an IP field.
    - key_field [required] (string): Path to the IP field in the log.
    - locale [required] (string): Locale used to resolve geographical names.
    - path [required] (string): Path to the GeoIP database file.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - target [required] (string): Path where enrichment results should be stored in the log.
  - type [required] (enum): The processor type. The value should always be enrichment_table. Allowed enum values: enrichment_table. Default: enrichment_table
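A minimal sketch of the CSV-file variant of enrichment_table; the file path, columns, and IDs are placeholders.

```python
# Hypothetical example only; paths, IDs, and column names are placeholders.
enrichment_processor = {
    "id": "enrichment-1",
    "type": "enrichment_table",
    "include": "*",
    "inputs": ["dedupe-1"],
    "target": "enriched",              # log path where lookup results are stored
    "file": {
        "path": "/etc/tables/owners.csv",
        "encoding": {"type": "csv", "delimiter": ",", "includes_headers": True},
        "key": [
            # Match the log field "service" against the CSV column "service".
            {"column": "service", "comparison": "equals", "field": "service"},
        ],
        "schema": [
            {"column": "service", "type": "string"},
            {"column": "owner", "type": "string"},
        ],
    },
}
```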

- Option 15 (object): The reduce processor aggregates and merges logs based on matching keys and merge strategies. A configuration sketch follows the field list.
  - group_by [required] ([string]): A list of fields used to group log events for merging.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - merge_strategies [required] ([object]): List of merge strategies defining how values from grouped events should be combined.
    - path [required] (string): The field path in the log event.
    - strategy [required] (enum): The merge strategy to apply. Allowed enum values: discard, retain, sum, max, min, array, concat, concat_newline, concat_raw, shortest_array, longest_array, flat_unique
  - type [required] (enum): The processor type. The value should always be reduce. Allowed enum values: reduce. Default: reduce
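A minimal sketch of a reduce entry that merges grouped events, using two of the strategies listed above; all field names are placeholders.

```python
# Hypothetical example only; field names are placeholders.
reduce_processor = {
    "id": "reduce-1",
    "type": "reduce",
    "include": "*",
    "inputs": ["enrichment-1"],
    "group_by": ["host", "service"],    # events with equal values are merged
    "merge_strategies": [
        {"path": "message", "strategy": "concat_newline"},  # join messages
        {"path": "bytes_sent", "strategy": "sum"},          # add numeric values
    ],
}
```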

- Option 16 (object): The throttle processor limits the number of events that pass through over a given time window. A configuration sketch follows the field list.
  - group_by ([string]): Optional list of fields used to group events before the threshold has been reached.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - threshold [required] (int64): The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.
  - type [required] (enum): The processor type. The value should always be throttle. Allowed enum values: throttle. Default: throttle
  - window [required] (double): The time window in seconds over which the threshold applies.
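A minimal sketch of a throttle entry; the numbers are illustrative only.

```python
# Hypothetical example only; numbers are illustrative.
throttle_processor = {
    "id": "throttle-1",
    "type": "throttle",
    "include": "*",
    "inputs": ["reduce-1"],
    "threshold": 1000,        # events allowed per window; the rest are dropped
    "window": 60.0,           # window length in seconds
    "group_by": ["service"],  # optional: track one budget per service
}
```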

- Option 17 (object): The datadog_tags processor includes or excludes specific Datadog tags in your logs. A configuration sketch follows the field list.
  - action [required] (enum): The action to take on tags with matching keys. Allowed enum values: include, exclude
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - keys [required] ([string]): A list of tag keys.
  - mode [required] (enum): The processing mode. Allowed enum values: filter
  - type [required] (enum): The processor type. The value should always be datadog_tags. Allowed enum values: datadog_tags. Default: datadog_tags
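A minimal sketch of a datadog_tags entry that keeps only a whitelist of tag keys; the keys are placeholders.

```python
# Hypothetical example only; tag keys are placeholders.
datadog_tags_processor = {
    "id": "datadog-tags-1",
    "type": "datadog_tags",
    "include": "*",
    "inputs": ["throttle-1"],
    "mode": "filter",                  # the only documented mode
    "action": "include",               # keep only tags with matching keys
    "keys": ["env", "service", "version"],
}
```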

- sources [required] ([ <oneOf>]): A list of configured data sources for the pipeline.
  - Option 1 (object): The kafka source ingests data from Apache Kafka topics. A configuration sketch follows the field list.
    - group_id [required] (string): Consumer group ID used by the Kafka client.
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - librdkafka_options ([object]): Optional list of advanced Kafka client configuration options, defined as key-value pairs.
      - name [required] (string): The name of the librdkafka configuration option to set.
      - value [required] (string): The value assigned to the specified librdkafka configuration option.
    - sasl (object): Specifies the SASL mechanism for authenticating with a Kafka cluster.
      - mechanism (enum): SASL mechanism used for Kafka authentication. Allowed enum values: PLAIN, SCRAM-SHA-256, SCRAM-SHA-512
    - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
      - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
      - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
      - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
    - topics [required] ([string]): A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.
    - type [required] (enum): The source type. The value should always be kafka. Allowed enum values: kafka. Default: kafka
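A minimal sketch of a kafka source with SASL and mutual TLS; the group, topics, and paths are placeholders (fetch.message.max.bytes is a standard librdkafka option used here purely as an illustration).

```python
# Hypothetical example only; group, topics, and certificate paths are placeholders.
kafka_source = {
    "id": "kafka-source-1",
    "type": "kafka",
    "group_id": "op-pipeline-consumers",
    "topics": ["app-logs", "audit-logs"],
    "sasl": {"mechanism": "SCRAM-SHA-512"},
    "librdkafka_options": [
        # Advanced client tuning passed straight to librdkafka as strings.
        {"name": "fetch.message.max.bytes", "value": "1048576"},
    ],
    "tls": {
        "crt_file": "/etc/certs/client.crt",   # required when tls is set
        "key_file": "/etc/certs/client.key",
        "ca_file": "/etc/certs/ca.crt",
    },
}
```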

  - Option 2 (object): The datadog_agent source collects logs from the Datadog Agent.
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services (ca_file, crt_file [required], key_file; same fields as described for the kafka source above).
    - type [required] (enum): The source type. The value should always be datadog_agent. Allowed enum values: datadog_agent. Default: datadog_agent
  - Option 3 (object): The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS is supported for secure transmission.
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. Always splunk_tcp. Allowed enum values: splunk_tcp. Default: splunk_tcp
  - Option 4 (object): The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec

  - Option 5 (object): The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS authentication and TLS encryption. A configuration sketch follows the field list.
    - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
      - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
      - external_id (string): A unique identifier for cross-account role assumption.
      - session_name (string): A session identifier used for logging and tracing the assumed role session.
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - region [required] (string): AWS region where the S3 bucket resides.
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3
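A minimal sketch of an amazon_s3 source with an assumed role; the ARN, external ID, and region are placeholders.

```python
# Hypothetical example only; the ARN, external ID, and region are placeholders.
s3_source = {
    "id": "s3-source-1",
    "type": "amazon_s3",
    "region": "us-east-1",
    "auth": {  # omit to fall back to the environment's default AWS credentials
        "assume_role": "arn:aws:iam::123456789012:role/pipeline-reader",
        "external_id": "op-pipelines",
        "session_name": "op-worker",
    },
}
```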

  - Option 6 (object): The fluentd source ingests logs from a Fluentd-compatible service.
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. The value should always be fluentd. Allowed enum values: fluentd. Default: fluentd

  - Option 7 (object): The fluent_bit source ingests logs from Fluent Bit.
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. The value should always be fluent_bit. Allowed enum values: fluent_bit. Default: fluent_bit

  - Option 8 (object): The http_server source collects logs over HTTP POST from external services. A configuration sketch follows the field list.
    - auth_strategy [required] (enum): HTTP authentication method. Allowed enum values: none, plain
    - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog
    - id [required] (string): Unique ID for the HTTP server source.
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. The value should always be http_server. Allowed enum values: http_server. Default: http_server
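A minimal sketch of an http_server source; the ID is a placeholder.

```python
# Hypothetical example only; the ID is a placeholder.
http_server_source = {
    "id": "http-server-source-1",
    "type": "http_server",
    "auth_strategy": "plain",   # or "none"
    "decoding": "json",         # bytes, gelf, json, or syslog
}
```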

  - Option 9 (object): The sumo_logic source receives logs from Sumo Logic collectors.
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - type [required] (enum): The source type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic
  - Option 10 (object): The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog
  - Option 11 (object): The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng

  - Option 12 (object): The amazon_data_firehose source ingests logs from AWS Data Firehose.
    - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). Contains assume_role, external_id, and session_name (as described for the amazon_s3 source above).
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. The value should always be amazon_data_firehose. Allowed enum values: amazon_data_firehose. Default: amazon_data_firehose

  - Option 13 (object): The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription. A configuration sketch follows the field list.
    - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
      - credentials_file [required] (string): Path to the GCP service account key file.
    - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - project [required] (string): The GCP project ID that owns the Pub/Sub subscription.
    - subscription [required] (string): The Pub/Sub subscription name from which messages are consumed.
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. The value should always be google_pubsub. Allowed enum values: google_pubsub. Default: google_pubsub
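A minimal sketch of a google_pubsub source; the project, subscription, and key path are placeholders.

```python
# Hypothetical example only; project, subscription, and key path are placeholders.
pubsub_source = {
    "id": "pubsub-source-1",
    "type": "google_pubsub",
    "project": "my-gcp-project",
    "subscription": "op-logs-subscription",
    "decoding": "json",
    "auth": {"credentials_file": "/var/secrets/gcp-key.json"},
}
```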

  - Option 14 (object): The http_client source scrapes logs from HTTP endpoints at regular intervals. A configuration sketch follows the field list.
    - auth_strategy (enum): Optional authentication strategy for HTTP requests. Allowed enum values: basic, bearer
    - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - scrape_interval_secs (int64): The interval (in seconds) between HTTP scrape requests.
    - scrape_timeout_secs (int64): The timeout (in seconds) for each scrape request.
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. The value should always be http_client. Allowed enum values: http_client. Default: http_client
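A minimal sketch of an http_client source; the interval and timeout values are illustrative.

```python
# Hypothetical example only; the interval and timeout are illustrative.
http_client_source = {
    "id": "http-client-source-1",
    "type": "http_client",
    "decoding": "json",
    "auth_strategy": "bearer",      # optional: "basic" or "bearer"
    "scrape_interval_secs": 30,     # poll the endpoint every 30 s
    "scrape_timeout_secs": 5,       # give up on a request after 5 s
}
```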

  - Option 15 (object): The logstash source ingests logs from a Logstash forwarder.
    - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
    - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
    - type [required] (enum): The source type. The value should always be logstash. Allowed enum values: logstash. Default: logstash

- name [required] (string): Name of the pipeline.
- id [required] (string): Unique identifier for the pipeline.
- type [required] (string): The resource type identifier. For pipeline resources, this should always be set to pipelines. Default: pipelines
" }, "400": { "json": { @@ -706,7 +706,7 @@ "type": "pipelines" } }, - "html": "
- data [required] (object): Contains the pipeline’s ID, type, and configuration attributes.
  - attributes [required] (object): Defines the pipeline’s name and its components (sources, processors, and destinations).
    - config [required] (object): Specifies the pipeline’s configuration, including its sources, processors, and destinations.
      - destinations [required] ([ <oneOf>]): A list of destination components where processed logs are sent.
        - Option 1 (object): The datadog_logs destination forwards logs to Datadog Log Management.
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - type [required] (enum): The destination type. The value should always be datadog_logs. Allowed enum values: datadog_logs. Default: datadog_logs

        - Option 2 (object): The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.
          - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). Contains assume_role, external_id, and session_name (as described for the amazon_s3 source above).
          - bucket [required] (string): S3 bucket name.
          - id [required] (string): Unique identifier for the destination component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - key_prefix (string): Optional prefix for object keys.
          - region [required] (string): AWS region of the S3 bucket.
          - storage_class [required] (enum): S3 storage class. Allowed enum values: STANDARD, REDUCED_REDUNDANCY, INTELLIGENT_TIERING, STANDARD_IA, EXPRESS_ONEZONE, ONEZONE_IA, GLACIER, GLACIER_IR, DEEP_ARCHIVE
          - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
          - type [required] (enum): The destination type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3

        - Option 3 (object): The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields. A configuration sketch follows the field list.
          - acl [required] (enum): Access control list setting for objects written to the bucket. Allowed enum values: private, project-private, public-read, authenticated-read, bucket-owner-read, bucket-owner-full-control
          - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
            - credentials_file [required] (string): Path to the GCP service account key file.
          - bucket [required] (string): Name of the GCS bucket.
          - id [required] (string): Unique identifier for the destination component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - key_prefix (string): Optional prefix for object keys within the GCS bucket.
          - metadata ([object]): Custom metadata to attach to each object uploaded to the GCS bucket.
            - name [required] (string): The metadata key.
            - value [required] (string): The metadata value.
          - storage_class [required] (enum): Storage class used for objects stored in GCS. Allowed enum values: STANDARD, NEARLINE, COLDLINE, ARCHIVE
          - type [required] (enum): The destination type. Always google_cloud_storage. Allowed enum values: google_cloud_storage. Default: google_cloud_storage
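A minimal sketch of a google_cloud_storage destination; the bucket, key prefix, paths, and metadata are placeholders.

```python
# Hypothetical example only; bucket, key prefix, and metadata are placeholders.
gcs_destination = {
    "id": "gcs-archive-1",
    "type": "google_cloud_storage",
    "inputs": ["datadog-tags-1"],        # upstream component IDs
    "bucket": "acme-log-archive",
    "key_prefix": "op/",
    "acl": "project-private",
    "storage_class": "NEARLINE",
    "auth": {"credentials_file": "/var/secrets/gcp-key.json"},
    "metadata": [{"name": "pipeline", "value": "main"}],
}
```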

        - Option 4 (object): The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).
          - auto_extract_timestamp (boolean): If true, Splunk tries to extract timestamps from incoming log events. If false, Splunk assigns the time the event was received.
          - encoding (enum): Encoding format for log events. Allowed enum values: json, raw_message
          - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
          - index (string): Optional name of the Splunk index where logs are written.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - sourcetype (string): The Splunk sourcetype to assign to log events.
          - type [required] (enum): The destination type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec

        - Option 5 (object): The sumo_logic destination forwards logs to Sumo Logic.
          - encoding (enum): The output encoding format. Allowed enum values: json, raw_message, logfmt
          - header_custom_fields ([object]): A list of custom headers to include in the request to Sumo Logic.
            - name [required] (string): The header field name.
            - value [required] (string): The header field value.
          - header_host_name (string): Optional override for the host name header.
          - header_source_category (string): Optional override for the source category header.
          - header_source_name (string): Optional override for the source name header.
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - type [required] (enum): The destination type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic

        - Option 6 (object): The elasticsearch destination writes logs to an Elasticsearch cluster.
          - api_version (enum): The Elasticsearch API version to use. Set to auto to auto-detect. Allowed enum values: auto, v6, v7, v8
          - bulk_index (string): The index to write logs to in Elasticsearch.
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - type [required] (enum): The destination type. The value should always be elasticsearch. Allowed enum values: elasticsearch. Default: elasticsearch

        - Option 7 (object): The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - keepalive (int64): Optional socket keepalive duration in milliseconds.
          - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
          - type [required] (enum): The destination type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog
        - Option 8 (object): The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - keepalive (int64): Optional socket keepalive duration in milliseconds.
          - tls (object): TLS configuration (ca_file, crt_file [required], key_file; as above).
          - type [required] (enum): The destination type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng

        - Option 9 (object): The azure_storage destination forwards logs to an Azure Blob Storage container.
          - blob_prefix (string): Optional prefix for blobs written to the container.
          - container_name [required] (string): The name of the Azure Blob Storage container to store logs in.
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - type [required] (enum): The destination type. The value should always be azure_storage. Allowed enum values: azure_storage. Default: azure_storage

        - Option 10 (object): The microsoft_sentinel destination forwards logs to Microsoft Sentinel.
          - client_id [required] (string): Azure AD client ID used for authentication.
          - dcr_immutable_id [required] (string): The immutable ID of the Data Collection Rule (DCR).
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - table [required] (string): The name of the Log Analytics table where logs are sent.
          - tenant_id [required] (string): Azure AD tenant ID.
          - type [required] (enum): The destination type. The value should always be microsoft_sentinel. Allowed enum values: microsoft_sentinel. Default: microsoft_sentinel

        - Option 11 (object): The google_chronicle destination sends logs to Google Chronicle.
          - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
            - credentials_file [required] (string): Path to the GCP service account key file.
          - customer_id [required] (string): The Google Chronicle customer ID.
          - encoding (enum): The encoding format for the logs sent to Chronicle. Allowed enum values: json, raw_message
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - log_type (string): The log type metadata associated with the Chronicle destination.
          - type [required] (enum): The destination type. The value should always be google_chronicle. Allowed enum values: google_chronicle. Default: google_chronicle

        - Option 12 (object): The new_relic destination sends logs to the New Relic platform.
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - region [required] (enum): The New Relic region. Allowed enum values: us, eu
          - type [required] (enum): The destination type. The value should always be new_relic. Allowed enum values: new_relic. Default: new_relic
        - Option 13 (object): The sentinel_one destination sends logs to SentinelOne.
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - region [required] (enum): The SentinelOne region to send logs to. Allowed enum values: us, eu, ca, data_set_us
          - type [required] (enum): The destination type. The value should always be sentinel_one. Allowed enum values: sentinel_one. Default: sentinel_one

        - Option 14 (object): The opensearch destination writes logs to an OpenSearch cluster.
          - bulk_index (string): The index to write logs to.
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - type [required] (enum): The destination type. The value should always be opensearch. Allowed enum values: opensearch. Default: opensearch
        - Option 15 (object): The amazon_opensearch destination writes logs to Amazon OpenSearch.
          - auth [required] (object): Authentication settings for the Amazon OpenSearch destination. The strategy field determines whether basic or AWS-based authentication is used.
            - assume_role (string): The ARN of the role to assume (used with the aws strategy).
            - aws_region (string): AWS region.
            - external_id (string): External ID for the assumed role (used with the aws strategy).
            - session_name (string): Session name for the assumed role (used with the aws strategy).
            - strategy [required] (enum): The authentication strategy to use. Allowed enum values: basic, aws
          - bulk_index (string): The index to write logs to.
          - id [required] (string): The unique identifier for this component.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - type [required] (enum): The destination type. The value should always be amazon_opensearch. Allowed enum values: amazon_opensearch. Default: amazon_opensearch

      - processors ([ <oneOf>]): A list of processors that transform or enrich log data.
        - Option 1 (object): The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded. A configuration sketch follows the field list.
          - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
          - include [required] (string): A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - type [required] (enum): The processor type. The value should always be filter. Allowed enum values: filter. Default: filter
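A minimal sketch of a filter processor entry; the query and IDs are placeholders.

```python
# Hypothetical example only; the query and IDs are placeholders.
filter_processor = {
    "id": "filter-1",
    "type": "filter",
    "include": "status:error OR status:warn",  # matching logs pass through
    "inputs": ["kafka-source-1"],              # upstream component IDs
}
```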

        - Option 2 (object): The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.
          - field [required] (string): The name of the log field that contains a JSON string.
          - id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
          - include [required] (string): A Datadog search query used to determine which logs this processor targets.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - type [required] (enum): The processor type. The value should always be parse_json. Allowed enum values: parse_json. Default: parse_json

        - Option 3 (object): The quota processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. A configuration sketch follows the field list.
          - drop_events [required] (boolean): If set to true, logs that matched the quota filter and are sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline.
          - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
          - ignore_when_missing_partitions (boolean): If true, the processor skips quota checks when partition fields are missing from the logs.
          - include [required] (string): A Datadog search query used to determine which logs this processor targets.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
            - enforce [required] (enum): Unit for quota enforcement, in bytes for data size or events for count. Allowed enum values: bytes, events
            - limit [required] (int64): The limit for quota enforcement.
          - name [required] (string): Name of the quota.
          - overflow_action (enum): The action to take when the quota is exceeded. Options: drop (drop the event), no_action (let the event pass through), or overflow_routing (route to an overflow destination). Allowed enum values: drop, no_action, overflow_routing
          - overrides ([object]): A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.
            - fields [required] ([object]): A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.
              - name [required] (string): The field name.
              - value [required] (string): The field value.
            - limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events (enforce, limit; as above).
          - partition_fields ([string]): A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.
          - type [required] (enum): The processor type. The value should always be quota. Allowed enum values: quota. Default: quota
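A minimal sketch of a quota processor with a per-service partition and one override; all names, limits, and field values are placeholders.

```python
# Hypothetical example only; names, limits, and field values are placeholders.
quota_processor = {
    "id": "quota-1",
    "type": "quota",
    "name": "daily-intake",
    "include": "*",
    "inputs": ["filter-1"],
    "drop_events": False,                       # keep passing events once met
    "limit": {"enforce": "bytes", "limit": 10_000_000_000},  # 10 GB budget
    "partition_fields": ["service"],            # one budget per service value
    "overrides": [
        {   # a tighter budget for one noisy service
            "fields": [{"name": "service", "value": "debug-worker"}],
            "limit": {"enforce": "events", "limit": 1_000_000},
        }
    ],
}
```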

        - Option 4 (object): The add_fields processor adds static key-value fields to logs.
          - fields [required] ([object]): A list of static fields (key-value pairs) that is added to each log event processed by this component.
            - name [required] (string): The field name.
            - value [required] (string): The field value.
          - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
          - include [required] (string): A Datadog search query used to determine which logs this processor targets.
          - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
          - type [required] (enum): The processor type. The value should always be add_fields. Allowed enum values: add_fields. Default: add_fields

        - Option 5 (object): The remove_fields processor deletes specified fields from logs.
          - fields [required] ([string]): A list of field names to be removed from each log event.
          - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
          - include [required] (string): A Datadog search query used to determine which logs this processor targets.
          - inputs [required] ([string]): The PipelineRemoveFieldsProcessor inputs.
          - type [required] (enum): The processor type. The value should always be remove_fields. Allowed enum values: remove_fields. Default: remove_fields

Option 6 (object): The rename_fields processor changes field names.
  fields [required] ([object]): A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.
    destination [required] (string): The field name to assign the renamed value to.
    preserve_source [required] (boolean): Indicates whether the original field, as received from the source, should be kept (true) or removed (false) after renaming.
    source [required] (string): The original field name in the log event that should be renamed.
  id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The processor type. The value should always be rename_fields. Allowed enum values: rename_fields. Default: rename_fields.
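A minimal sketch of a rename rule under the schema above; the IDs and field names are hypothetical.

  {
    "id": "rename-fields-processor",
    "type": "rename_fields",
    "include": "*",
    "inputs": ["add-fields-processor"],
    "fields": [
      { "source": "http.status", "destination": "status_code", "preserve_source": false }
    ]
  }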

Option 7 (object): The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions, and can optionally be grouped by log fields.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  metrics [required] ([object]): Configuration for generating individual metrics.
    group_by ([string]): Optional fields used to group the metric series.
    include [required] (string): Datadog filter query to match logs for metric generation.
    metric_type [required] (enum): Type of metric to create. Allowed enum values: count, gauge, distribution.
    name [required] (string): Name of the custom metric to be created.
    value [required] (<oneOf>): Specifies how the value of the generated metric is computed.
      Option 1 (object): Strategy that increments a generated metric by one for each matching event.
        strategy [required] (enum): Increments the metric by 1 for each matching event. Allowed enum values: increment_by_one.
      Option 2 (object): Strategy that increments a generated metric based on the value of a log field.
        field [required] (string): Name of the log field containing the numeric value to increment the metric by.
        strategy [required] (enum): Uses a numeric field in the log event as the metric increment. Allowed enum values: increment_by_field.
  type [required] (enum): The processor type. Always generate_datadog_metrics. Allowed enum values: generate_datadog_metrics. Default: generate_datadog_metrics.
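A sketch of one metric definition using the increment_by_field strategy; the metric name, query, and field are assumptions for illustration.

  {
    "id": "generate-metrics-processor",
    "type": "generate_datadog_metrics",
    "include": "*",
    "inputs": ["rename-fields-processor"],
    "metrics": [
      {
        "name": "observability_pipelines.request.duration",
        "metric_type": "distribution",
        "include": "service:web",
        "group_by": ["status_code"],
        "value": { "strategy": "increment_by_field", "field": "duration_ms" }
      }
    ]
  }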

Option 8 (object): The sample processor allows probabilistic sampling of logs at a fixed rate.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  percentage (double): The percentage of logs to sample.
  rate (int64): Number of events to sample (1 in N).
  type [required] (enum): The processor type. The value should always be sample. Allowed enum values: sample. Default: sample.
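A minimal sketch using rate-based sampling (keep 1 in 10 matching events); percentage is the alternative to rate. IDs and the query are illustrative.

  {
    "id": "sample-processor",
    "type": "sample",
    "include": "status:info",
    "inputs": ["datadog-agent-source"],
    "rate": 10
  }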

Option 9 (object): The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.
  disable_library_rules (boolean): If set to true, disables the default Grok rules provided by Datadog.
  id [required] (string): A unique identifier for this processor.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  rules [required] ([object]): The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.
    match_rules [required] ([object]): A list of Grok parsing rules that define how to extract fields from the source field. Each rule must contain a name and a valid Grok pattern.
      name [required] (string): The name of the rule.
      rule [required] (string): The definition of the Grok rule.
    source [required] (string): The name of the field in the log event to apply the Grok rules to.
    support_rules [required] ([object]): A list of Grok helper rules that can be referenced by the parsing rules.
      name [required] (string): The name of the Grok helper rule.
      rule [required] (string): The definition of the Grok helper rule.
  type [required] (enum): The processor type. The value should always be parse_grok. Allowed enum values: parse_grok. Default: parse_grok.
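A sketch of a single parsing rule applied to the message field; the rule name and Grok pattern are hypothetical and only meant to show the shape of rules, match_rules, and support_rules.

  {
    "id": "parse-grok-processor",
    "type": "parse_grok",
    "include": "source:nginx",
    "inputs": ["sample-processor"],
    "disable_library_rules": false,
    "rules": [
      {
        "source": "message",
        "match_rules": [
          { "name": "access_line", "rule": "%{ipv4:network.client.ip} %{notSpace:http.method} %{notSpace:http.url}" }
        ],
        "support_rules": []
      }
    ]
  }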

Option 10 (object): The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  rules [required] ([object]): A list of rules for identifying and acting on sensitive data patterns.
    keyword_options (object): Configuration for keywords used to reinforce sensitive data pattern detection.
      keywords [required] ([string]): A list of keywords to match near the sensitive pattern.
      proximity [required] (int64): Maximum number of tokens between a keyword and a sensitive value match.
    name [required] (string): A name identifying the rule.
    on_match [required] (<oneOf>): Defines what action to take when sensitive data is matched.
      Option 1 (object): Configuration for completely redacting matched sensitive data.
        action [required] (enum): Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. Allowed enum values: redact.
        options [required] (object): Configuration for fully redacting sensitive data.
          replace [required] (string): The replacement string substituted for the matched sensitive data.
      Option 2 (object): Configuration for hashing matched sensitive values.
        action [required] (enum): Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. Allowed enum values: hash.
        options (object): Options for the hash action.
      Option 3 (object): Configuration for partially redacting matched sensitive data.
        action [required] (enum): Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). Allowed enum values: partial_redact.
        options [required] (object): Controls how partial redaction is applied, including character count and direction.
          characters [required] (int64): The number of characters affected by the partial redaction.
          direction [required] (enum): Indicates whether to redact characters from the first or last part of the matched value. Allowed enum values: first, last.
    pattern [required] (<oneOf>): Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.
      Option 1 (object): Defines a custom regex-based pattern for identifying sensitive data in logs.
        options [required] (object): Options for defining a custom regex pattern.
          rule [required] (string): A regular expression used to detect sensitive values. Must be a valid regex.
        type [required] (enum): Indicates a custom regular expression is used for matching. Allowed enum values: custom.
      Option 2 (object): Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.
        options [required] (object): Options for selecting a predefined library pattern and enabling keyword support.
          id [required] (string): Identifier for a predefined pattern from the sensitive data scanner pattern library.
          use_recommended_keywords (boolean): Whether to augment the pattern with recommended keywords (optional).
        type [required] (enum): Indicates that a predefined library pattern is used. Allowed enum values: library.
    scope [required] (<oneOf>): Determines which parts of the log the pattern-matching rule should be applied to.
      Option 1 (object): Includes only specific fields for sensitive data scanning.
        options [required] (object): Fields to which the scope rule applies.
          fields [required] ([string]): The list of field names the scope rule applies to.
        target [required] (enum): Applies the rule only to included fields. Allowed enum values: include.
      Option 2 (object): Excludes specific fields from sensitive data scanning.
        options [required] (object): Fields to which the scope rule applies.
          fields [required] ([string]): The list of field names the scope rule applies to.
        target [required] (enum): Excludes specific fields from processing. Allowed enum values: exclude.
      Option 3 (object): Applies scanning across all available fields.
        target [required] (enum): Applies the rule to all fields. Allowed enum values: all.
    tags [required] ([string]): Tags assigned to this rule for filtering and classification.
  type [required] (enum): The processor type. The value should always be sensitive_data_scanner. Allowed enum values: sensitive_data_scanner. Default: sensitive_data_scanner.
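A sketch of one scanning rule combining a custom regex pattern, an all-fields scope, and a full-redact action; the rule name, regex, and keyword are assumptions for illustration.

  {
    "id": "sds-processor",
    "type": "sensitive_data_scanner",
    "include": "*",
    "inputs": ["parse-grok-processor"],
    "rules": [
      {
        "name": "redact-api-keys",
        "tags": ["sensitive:api-key"],
        "pattern": { "type": "custom", "options": { "rule": "api_key=[A-Za-z0-9]{32}" } },
        "scope": { "target": "all" },
        "on_match": { "action": "redact", "options": { "replace": "[REDACTED]" } },
        "keyword_options": { "keywords": ["api_key"], "proximity": 5 }
      }
    ]
  }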

Option 11 (object): The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  mappings [required] ([object]): A list of mapping rules to convert events to the OCSF format.
    include [required] (string): A Datadog search query used to select the logs that this mapping should apply to.
    mapping [required] (<oneOf>): Defines a single mapping rule for transforming logs into the OCSF schema.
      Option 1 (enum): Predefined library mappings for common log formats. Allowed enum values: CloudTrail Account Change, GCP Cloud Audit CreateBucket, GCP Cloud Audit CreateSink, GCP Cloud Audit SetIamPolicy, GCP Cloud Audit UpdateSink, Github Audit Log API Activity, Google Workspace Admin Audit addPrivilege, Microsoft 365 Defender Incident, Microsoft 365 Defender UserLoggedIn, Okta System Log Authentication, Palo Alto Networks Firewall Traffic.
  type [required] (enum): The processor type. The value should always be ocsf_mapper. Allowed enum values: ocsf_mapper. Default: ocsf_mapper.

Option 12 (object): The add_env_vars processor adds environment variable values to log events.
  id [required] (string): The unique identifier for this component. Used to reference this processor in the pipeline.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  type [required] (enum): The processor type. The value should always be add_env_vars. Allowed enum values: add_env_vars. Default: add_env_vars.
  variables [required] ([object]): A list of environment variable mappings to apply to log fields.
    field [required] (string): The target field in the log event.
    name [required] (string): The name of the environment variable to read.

Option 13 (object): The dedupe processor removes duplicate fields in log events.
  fields [required] ([string]): A list of log field paths to check for duplicates.
  id [required] (string): The unique identifier for this processor.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  mode [required] (enum): The deduplication mode to apply to the fields. Allowed enum values: match, ignore.
  type [required] (enum): The processor type. The value should always be dedupe. Allowed enum values: dedupe. Default: dedupe.
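A minimal sketch of a dedupe entry in match mode; the field paths and IDs are hypothetical.

  {
    "id": "dedupe-processor",
    "type": "dedupe",
    "include": "*",
    "inputs": ["sds-processor"],
    "fields": ["log.message", "log.host"],
    "mode": "match"
  }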

Option 14 (object): The enrichment_table processor enriches logs using a static CSV file or GeoIP database.
  file (object): Defines a static enrichment table loaded from a CSV file.
    encoding [required] (object): File encoding format.
      delimiter [required] (string): The delimiter used by the CSV encoding.
      includes_headers [required] (boolean): Whether the CSV file includes a header row.
      type [required] (enum): Specifies the encoding format (e.g., CSV) used for enrichment tables. Allowed enum values: csv.
    key [required] ([object]): Key fields used to look up enrichment values.
      column [required] (string): The name of the enrichment table column to compare against.
      comparison [required] (enum): Defines how to compare key fields for enrichment table lookups. Allowed enum values: equals.
      field [required] (string): The log event field used for the lookup.
    path [required] (string): Path to the CSV file.
    schema [required] ([object]): Schema defining column names and their types.
      column [required] (string): The name of the column.
      type [required] (enum): Declares allowed data types for enrichment table columns. Allowed enum values: string, boolean, integer, float, date, timestamp.
  geoip (object): Uses a GeoIP database to enrich logs based on an IP field.
    key_field [required] (string): Path to the IP field in the log.
    locale [required] (string): Locale used to resolve geographical names.
    path [required] (string): Path to the GeoIP database file.
  id [required] (string): The unique identifier for this processor.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  target [required] (string): Path where enrichment results should be stored in the log.
  type [required] (enum): The processor type. The value should always be enrichment_table. Allowed enum values: enrichment_table. Default: enrichment_table.
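A sketch of the CSV variant; the file path, column names, and lookup field are assumptions chosen only to show how key and schema relate.

  {
    "id": "enrichment-processor",
    "type": "enrichment_table",
    "include": "*",
    "inputs": ["dedupe-processor"],
    "target": "enriched",
    "file": {
      "path": "/etc/enrichment/services.csv",
      "encoding": { "type": "csv", "delimiter": ",", "includes_headers": true },
      "key": [
        { "column": "service_name", "comparison": "equals", "field": "service" }
      ],
      "schema": [
        { "column": "service_name", "type": "string" },
        { "column": "owner", "type": "string" }
      ]
    }
  }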

Option 15 (object): The reduce processor aggregates and merges logs based on matching keys and merge strategies.
  group_by [required] ([string]): A list of fields used to group log events for merging.
  id [required] (string): The unique identifier for this processor.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  merge_strategies [required] ([object]): List of merge strategies defining how values from grouped events should be combined.
    path [required] (string): The field path in the log event.
    strategy [required] (enum): The merge strategy to apply. Allowed enum values: discard, retain, sum, max, min, array, concat, concat_newline, concat_raw, shortest_array, longest_array, flat_unique.
  type [required] (enum): The processor type. The value should always be reduce. Allowed enum values: reduce. Default: reduce.
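A minimal sketch pairing group_by keys with per-path merge strategies; the field paths are hypothetical.

  {
    "id": "reduce-processor",
    "type": "reduce",
    "include": "*",
    "inputs": ["enrichment-processor"],
    "group_by": ["host", "service"],
    "merge_strategies": [
      { "path": "message", "strategy": "concat_newline" },
      { "path": "bytes_sent", "strategy": "sum" }
    ]
  }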

Option 16 (object): The throttle processor limits the number of events that pass through over a given time window.
  group_by ([string]): Optional list of fields used to group events before the threshold is applied.
  id [required] (string): The unique identifier for this processor.
  include [required] (string): A Datadog search query used to determine which logs this processor targets.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  threshold [required] (int64): The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped.
  type [required] (enum): The processor type. The value should always be throttle. Allowed enum values: throttle. Default: throttle.
  window [required] (double): The time window in seconds over which the threshold applies.
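A minimal sketch allowing 1000 events per 60-second window per service; all values are illustrative.

  {
    "id": "throttle-processor",
    "type": "throttle",
    "include": "*",
    "inputs": ["reduce-processor"],
    "threshold": 1000,
    "window": 60.0,
    "group_by": ["service"]
  }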

sources [required] ([<oneOf>]): A list of configured data sources for the pipeline.

Option 1 (object): The kafka source ingests data from Apache Kafka topics.
  group_id [required] (string): Consumer group ID used by the Kafka client.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  librdkafka_options ([object]): Optional list of advanced Kafka client configuration options, defined as key-value pairs.
    name [required] (string): The name of the librdkafka configuration option to set.
    value [required] (string): The value assigned to the specified librdkafka configuration option.
  sasl (object): Specifies the SASL mechanism for authenticating with a Kafka cluster.
    mechanism (enum): SASL mechanism used for Kafka authentication. Allowed enum values: PLAIN, SCRAM-SHA-256, SCRAM-SHA-512.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  topics [required] ([string]): A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.
  type [required] (enum): The source type. The value should always be kafka. Allowed enum values: kafka. Default: kafka.
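A sketch of a kafka source with SASL and mutual TLS; the topic, certificate paths, and the librdkafka option shown (fetch.message.max.bytes, a standard librdkafka setting) are illustrative choices, not values from the spec.

  {
    "id": "kafka-source",
    "type": "kafka",
    "group_id": "observability-pipelines",
    "topics": ["app-logs"],
    "sasl": { "mechanism": "SCRAM-SHA-512" },
    "tls": {
      "crt_file": "/etc/certs/client.crt",
      "key_file": "/etc/certs/client.key",
      "ca_file": "/etc/certs/ca.crt"
    },
    "librdkafka_options": [
      { "name": "fetch.message.max.bytes", "value": "1048576" }
    ]
  }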

Option 2 (object): The datadog_agent source collects logs from the Datadog Agent.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be datadog_agent. Allowed enum values: datadog_agent. Default: datadog_agent.

Option 3 (object): The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS is supported for secure transmission.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. Always splunk_tcp. Allowed enum values: splunk_tcp. Default: splunk_tcp.

Option 4 (object): The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec.

Option 5 (object): The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS authentication and TLS encryption.
  auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
    assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    external_id (string): A unique identifier for cross-account role assumption.
    session_name (string): A session identifier used for logging and tracing the assumed role session.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  region [required] (string): AWS region where the S3 bucket resides.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3.
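A minimal sketch of an amazon_s3 source assuming a cross-account role; the ARN, external ID, and session name are placeholders.

  {
    "id": "s3-source",
    "type": "amazon_s3",
    "region": "us-east-1",
    "auth": {
      "assume_role": "arn:aws:iam::123456789012:role/pipeline-role",
      "external_id": "my-external-id",
      "session_name": "pipeline-session"
    }
  }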

Option 6 (object): The fluentd source ingests logs from a Fluentd-compatible service.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be fluentd. Allowed enum values: fluentd. Default: fluentd.

Option 7 (object): The fluent_bit source ingests logs from Fluent Bit.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be fluent_bit. Allowed enum values: fluent_bit. Default: fluent_bit.

Option 8 (object): The http_server source collects logs over HTTP POST from external services.
  auth_strategy [required] (enum): HTTP authentication method. Allowed enum values: none, plain.
  decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog.
  id [required] (string): Unique ID for the HTTP server source.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be http_server. Allowed enum values: http_server. Default: http_server.
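A minimal sketch of an http_server source accepting JSON payloads over TLS; the certificate paths are hypothetical.

  {
    "id": "http-server-source",
    "type": "http_server",
    "auth_strategy": "plain",
    "decoding": "json",
    "tls": {
      "crt_file": "/etc/certs/server.crt",
      "key_file": "/etc/certs/server.key"
    }
  }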

Option 9 (object): The sumo_logic source receives logs from Sumo Logic collectors.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  type [required] (enum): The source type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic.

Option 10 (object): The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog.

Option 11 (object): The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng.

Option 12 (object): The amazon_data_firehose source ingests logs from AWS Data Firehose.
  auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
    assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    external_id (string): A unique identifier for cross-account role assumption.
    session_name (string): A session identifier used for logging and tracing the assumed role session.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be amazon_data_firehose. Allowed enum values: amazon_data_firehose. Default: amazon_data_firehose.

Option 13 (object): The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.
  auth [required] (object): GCP credentials used to authenticate with Google Cloud services.
    credentials_file [required] (string): Path to the GCP service account key file.
  decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  project [required] (string): The GCP project ID that owns the Pub/Sub subscription.
  subscription [required] (string): The Pub/Sub subscription name from which messages are consumed.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be google_pubsub. Allowed enum values: google_pubsub. Default: google_pubsub.
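A minimal sketch of a google_pubsub source; the project, subscription, and key-file path are placeholders.

  {
    "id": "pubsub-source",
    "type": "google_pubsub",
    "project": "my-gcp-project",
    "subscription": "log-ingest-sub",
    "decoding": "json",
    "auth": { "credentials_file": "/var/secrets/gcp-key.json" }
  }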

Option 14 (object): The http_client source scrapes logs from HTTP endpoints at regular intervals.
  auth_strategy (enum): Optional authentication strategy for HTTP requests. Allowed enum values: basic, bearer.
  decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  scrape_interval_secs (int64): The interval (in seconds) between HTTP scrape requests.
  scrape_timeout_secs (int64): The timeout (in seconds) for each scrape request.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be http_client. Allowed enum values: http_client. Default: http_client.

Option 15 (object): The logstash source ingests logs from a Logstash forwarder.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The source type. The value should always be logstash. Allowed enum values: logstash. Default: logstash.

name [required] (string): Name of the pipeline.
id [required] (string): Unique identifier for the pipeline.
type [required] (string): The resource type identifier. For pipeline resources, this should always be set to pipelines. Default: pipelines.

+ "html":
data [required] (object): Contains the pipeline’s ID, type, and configuration attributes.
  attributes [required] (object): Defines the pipeline’s name and its components (sources, processors, and destinations).
    config [required] (object): Specifies the pipeline's configuration, including its sources, processors, and destinations.
      destinations [required] ([<oneOf>]): A list of destination components where processed logs are sent.

Option 1 (object): The datadog_logs destination forwards logs to Datadog Log Management.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be datadog_logs. Allowed enum values: datadog_logs. Default: datadog_logs.

Option 2 (object): The amazon_s3 destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving.
  auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
    assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    external_id (string): A unique identifier for cross-account role assumption.
    session_name (string): A session identifier used for logging and tracing the assumed role session.
  bucket [required] (string): S3 bucket name.
  id [required] (string): Unique identifier for the destination component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  key_prefix (string): Optional prefix for object keys.
  region [required] (string): AWS region of the S3 bucket.
  storage_class [required] (enum): S3 storage class. Allowed enum values: STANDARD, REDUCED_REDUNDANCY, INTELLIGENT_TIERING, STANDARD_IA, EXPRESS_ONEZONE, ONEZONE_IA, GLACIER, GLACIER_IR, DEEP_ARCHIVE.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The destination type. Always amazon_s3. Allowed enum values: amazon_s3. Default: amazon_s3.

Option 3 (object): The google_cloud_storage destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields.
  acl [required] (enum): Access control list setting for objects written to the bucket. Allowed enum values: private, project-private, public-read, authenticated-read, bucket-owner-read, bucket-owner-full-control.
  auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
    credentials_file [required] (string): Path to the GCP service account key file.
  bucket [required] (string): Name of the GCS bucket.
  id [required] (string): Unique identifier for the destination component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  key_prefix (string): Optional prefix for object keys within the GCS bucket.
  metadata ([object]): Custom metadata to attach to each object uploaded to the GCS bucket.
    name [required] (string): The metadata key.
    value [required] (string): The metadata value.
  storage_class [required] (enum): Storage class used for objects stored in GCS. Allowed enum values: STANDARD, NEARLINE, COLDLINE, ARCHIVE.
  type [required] (enum): The destination type. Always google_cloud_storage. Allowed enum values: google_cloud_storage. Default: google_cloud_storage.

Option 4 (object): The splunk_hec destination forwards logs to Splunk using the HTTP Event Collector (HEC).
  auto_extract_timestamp (boolean): If true, Splunk tries to extract timestamps from incoming log events. If false, Splunk assigns the time the event was received.
  encoding (enum): Encoding format for log events. Allowed enum values: json, raw_message.
  id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  index (string): Optional name of the Splunk index where logs are written.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  sourcetype (string): The Splunk sourcetype to assign to log events.
  type [required] (enum): The destination type. Always splunk_hec. Allowed enum values: splunk_hec. Default: splunk_hec.
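A minimal sketch of a splunk_hec destination; the index and sourcetype values are illustrative.

  {
    "id": "splunk-hec-destination",
    "type": "splunk_hec",
    "inputs": ["throttle-processor"],
    "index": "main",
    "sourcetype": "observability_pipelines",
    "encoding": "json",
    "auto_extract_timestamp": true
  }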

Option 5 (object): The sumo_logic destination forwards logs to Sumo Logic.
  encoding (enum): The output encoding format. Allowed enum values: json, raw_message, logfmt.
  header_custom_fields ([object]): A list of custom headers to include in the request to Sumo Logic.
    name [required] (string): The header field name.
    value [required] (string): The header field value.
  header_host_name (string): Optional override for the host name header.
  header_source_category (string): Optional override for the source category header.
  header_source_name (string): Optional override for the source name header.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be sumo_logic. Allowed enum values: sumo_logic. Default: sumo_logic.

Option 6 (object): The elasticsearch destination writes logs to an Elasticsearch cluster.
  api_version (enum): The Elasticsearch API version to use. Set to auto to auto-detect. Allowed enum values: auto, v6, v7, v8.
  bulk_index (string): The index to write logs to in Elasticsearch.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be elasticsearch. Allowed enum values: elasticsearch. Default: elasticsearch.

Option 7 (object): The rsyslog destination forwards logs to an external rsyslog server over TCP or UDP using the syslog protocol.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  keepalive (int64): Optional socket keepalive duration in milliseconds.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The destination type. The value should always be rsyslog. Allowed enum values: rsyslog. Default: rsyslog.

Option 8 (object): The syslog_ng destination forwards logs to an external syslog-ng server over TCP or UDP using the syslog protocol.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  keepalive (int64): Optional socket keepalive duration in milliseconds.
  tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  type [required] (enum): The destination type. The value should always be syslog_ng. Allowed enum values: syslog_ng. Default: syslog_ng.

Option 9 (object): The azure_storage destination forwards logs to an Azure Blob Storage container.
  blob_prefix (string): Optional prefix for blobs written to the container.
  container_name [required] (string): The name of the Azure Blob Storage container to store logs in.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  type [required] (enum): The destination type. The value should always be azure_storage. Allowed enum values: azure_storage. Default: azure_storage.

Option 10 (object): The microsoft_sentinel destination forwards logs to Microsoft Sentinel.
  client_id [required] (string): Azure AD client ID used for authentication.
  dcr_immutable_id [required] (string): The immutable ID of the Data Collection Rule (DCR).
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  table [required] (string): The name of the Log Analytics table where logs are sent.
  tenant_id [required] (string): Azure AD tenant ID.
  type [required] (enum): The destination type. The value should always be microsoft_sentinel. Allowed enum values: microsoft_sentinel. Default: microsoft_sentinel.

Option 11 (object): The google_chronicle destination sends logs to Google Chronicle.
  auth [required] (object): GCP credentials used to authenticate with Google Cloud services.
    credentials_file [required] (string): Path to the GCP service account key file.
  customer_id [required] (string): The Google Chronicle customer ID.
  encoding (enum): The encoding format for the logs sent to Chronicle. Allowed enum values: json, raw_message.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  log_type (string): The log type metadata associated with the Chronicle destination.
  type [required] (enum): The destination type. The value should always be google_chronicle. Allowed enum values: google_chronicle. Default: google_chronicle.

Option 12 (object): The new_relic destination sends logs to the New Relic platform.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  region [required] (enum): The New Relic region. Allowed enum values: us, eu.
  type [required] (enum): The destination type. The value should always be new_relic. Allowed enum values: new_relic. Default: new_relic.

Option 13 (object): The sentinel_one destination sends logs to SentinelOne.
  id [required] (string): The unique identifier for this component.
  inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  region [required] (enum): The SentinelOne region to send logs to. Allowed enum values: us, eu, ca, data_set_us.
  type [required] (enum): The destination type. The value should always be sentinel_one. Allowed enum values: sentinel_one. Default: sentinel_one.

Option 14

\n
\n

object

\n

The opensearch destination writes logs to an OpenSearch cluster.

\n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be opensearch. \nAllowed enum values: opensearch

default: opensearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 15

\n
\n

object

\n

The amazon_opensearch destination writes logs to Amazon OpenSearch.

\n
\n
\n
\n
\n
\n

auth [required]

\n
\n

object

\n

Authentication settings for the Amazon OpenSearch destination.\nThe strategy field determines whether basic or AWS-based authentication is used.

\n
\n
\n
\n
\n
\n

assume_role

\n
\n

string

\n

The ARN of the role to assume (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

aws_region

\n
\n

string

\n

AWS region

\n
\n \n
\n
\n
\n
\n
\n

external_id

\n
\n

string

\n

External ID for the assumed role (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

session_name

\n
\n

string

\n

Session name for the assumed role (used with aws strategy).

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

The authentication strategy to use. \nAllowed enum values: basic,aws

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

bulk_index

\n
\n

string

\n

The index to write logs to.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this component.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The destination type. The value should always be amazon_opensearch. \nAllowed enum values: amazon_opensearch

default: amazon_opensearch

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
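A minimal sketch of an amazon_opensearch destination using the aws auth strategy; with strategy basic, the role fields would be omitted. All ARNs, IDs, and index names here are illustrative.

    {
      "id": "opensearch-destination",
      "type": "amazon_opensearch",
      "inputs": ["quota-processor"],
      "bulk_index": "logs-index",
      "auth": {
        "strategy": "aws",
        "aws_region": "us-east-1",
        "assume_role": "arn:aws:iam::123456789012:role/example-pipeline-role",
        "external_id": "example-external-id",
        "session_name": "pipeline-session"
      }
    }
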

processors ([ <oneOf>]): A list of processors that transform or enrich log data.

Option 1 (object): The filter processor allows conditional processing of logs based on a Datadog search query. Logs that match the include query are passed through; others are discarded.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be filter. Allowed enum values: filter. default: filter
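For example, a filter processor that keeps only error logs from one service; the query and component IDs are illustrative.

    {
      "id": "filter-processor",
      "type": "filter",
      "include": "service:web-store AND status:error",
      "inputs": ["datadog-agent-source"]
    }
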

Option 2

\n
\n

object

\n

The parse_json processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string.

\n
\n
\n
\n
\n
\n

field [required]

\n
\n

string

\n

The name of the log field that contains a JSON string.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this component.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be parse_json. \nAllowed enum values: parse_json

default: parse_json

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
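A sketch of a parse_json processor that expands an embedded JSON string found in the message field (IDs are illustrative):

    {
      "id": "parse-json-processor",
      "type": "parse_json",
      "field": "message",
      "include": "*",
      "inputs": ["filter-processor"]
    }
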

Option 3 (object): The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert.
  - drop_events [required] (boolean): If set to true, logs that matched the quota filter and are sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - ignore_when_missing_partitions (boolean): If true, the processor skips quota checks when partition fields are missing from the logs.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
    - enforce [required] (enum): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events
    - limit [required] (int64): The limit for quota enforcement.
  - name [required] (string): Name of the quota.
  - overflow_action (enum): The action to take when the quota is exceeded. Options: drop (drop the event), no_action (let the event pass through), and overflow_routing (route to an overflow destination). Allowed enum values: drop, no_action, overflow_routing
  - overrides ([object]): A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit.
    - fields [required] ([object]): A list of field matchers used to apply a specific override. If an event matches all listed key-value pairs, the corresponding override limit is enforced.
      - name [required] (string): The field name.
      - value [required] (string): The field value.
    - limit [required] (object): The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events.
      - enforce [required] (enum): Unit for quota enforcement: bytes for data size or events for count. Allowed enum values: bytes, events
      - limit [required] (int64): The limit for quota enforcement.
  - partition_fields ([string]): A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values.
  - type [required] (enum): The processor type. The value should always be quota. Allowed enum values: quota. default: quota
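Putting the quota fields together, a sketch that enforces a 10 GB byte quota per service, with a tighter event-count override for one service; all names and limits are illustrative:

    {
      "id": "quota-processor",
      "type": "quota",
      "name": "daily-ingest-quota",
      "include": "*",
      "inputs": ["parse-json-processor"],
      "drop_events": true,
      "partition_fields": ["service"],
      "limit": { "enforce": "bytes", "limit": 10000000000 },
      "overrides": [
        {
          "fields": [{ "name": "service", "value": "checkout" }],
          "limit": { "enforce": "events", "limit": 500000 }
        }
      ]
    }
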

Option 4 (object): The add_fields processor adds static key-value fields to logs.
  - fields [required] ([object]): A list of static fields (key-value pairs) added to each log event processed by this component.
    - name [required] (string): The field name.
    - value [required] (string): The field value.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be add_fields. Allowed enum values: add_fields. default: add_fields

Option 5 (object): The remove_fields processor deletes specified fields from logs.
  - fields [required] ([string]): A list of field names to be removed from each log event.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be remove_fields. Allowed enum values: remove_fields. default: remove_fields

Option 6 (object): The rename_fields processor changes field names.
  - fields [required] ([object]): A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields.
    - destination [required] (string): The field name to assign the renamed value to.
    - preserve_source [required] (boolean): Indicates whether the original field received from the source should be kept (true) or removed (false) after renaming.
    - source [required] (string): The original field name in the log event that should be renamed.
  - id [required] (string): A unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - type [required] (enum): The processor type. The value should always be rename_fields. Allowed enum values: rename_fields. default: rename_fields

Option 7 (object): The generate_datadog_metrics processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log fields.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - metrics [required] ([object]): Configuration for generating individual metrics.
    - group_by ([string]): Optional fields used to group the metric series.
    - include [required] (string): Datadog filter query to match logs for metric generation.
    - metric_type [required] (enum): Type of metric to create. Allowed enum values: count, gauge, distribution
    - name [required] (string): Name of the custom metric to be created.
    - value [required] (<oneOf>): Specifies how the value of the generated metric is computed.
      - Option 1 (object): Strategy that increments a generated metric by one for each matching event.
        - strategy [required] (enum): Increments the metric by 1 for each matching event. Allowed enum values: increment_by_one
      - Option 2 (object): Strategy that increments a generated metric based on the value of a log field.
        - field [required] (string): Name of the log field containing the numeric value to increment the metric by.
        - strategy [required] (enum): Uses a numeric field in the log event as the metric increment. Allowed enum values: increment_by_field
  - type [required] (enum): The processor type. Always generate_datadog_metrics. Allowed enum values: generate_datadog_metrics. default: generate_datadog_metrics
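A sketch of a generate_datadog_metrics processor that turns a numeric log field into a distribution metric with the increment_by_field strategy; the metric and field names are assumptions:

    {
      "id": "metrics-processor",
      "type": "generate_datadog_metrics",
      "include": "*",
      "inputs": ["quota-processor"],
      "metrics": [
        {
          "name": "logs.request.duration",
          "metric_type": "distribution",
          "include": "source:nginx",
          "group_by": ["service", "env"],
          "value": { "strategy": "increment_by_field", "field": "duration_ms" }
        }
      ]
    }
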

Option 8 (object): The sample processor allows probabilistic sampling of logs at a fixed rate.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - percentage (double): The percentage of logs to sample.
  - rate (int64): Number of events to sample (1 in N).
  - type [required] (enum): The processor type. The value should always be sample. Allowed enum values: sample. default: sample
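percentage and rate are alternative ways to express the sampling amount; a sketch using percentage, assuming a value of 10 keeps roughly one matching log in ten:

    {
      "id": "sample-processor",
      "type": "sample",
      "include": "status:info",
      "inputs": ["metrics-processor"],
      "percentage": 10
    }
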

Option 9 (object): The parse_grok processor extracts structured fields from unstructured log messages using Grok patterns.
  - disable_library_rules (boolean): If set to true, disables the default Grok rules provided by Datadog.
  - id [required] (string): A unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - rules [required] ([object]): The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied.
    - match_rules [required] ([object]): A list of Grok parsing rules that define how to extract fields from the source field. Each rule must contain a name and a valid Grok pattern.
      - name [required] (string): The name of the rule.
      - rule [required] (string): The definition of the Grok rule.
    - source [required] (string): The name of the field in the log event to apply the Grok rules to.
    - support_rules [required] ([object]): A list of Grok helper rules that can be referenced by the parsing rules.
      - name [required] (string): The name of the Grok helper rule.
      - rule [required] (string): The definition of the Grok helper rule.
  - type [required] (enum): The processor type. The value should always be parse_grok. Allowed enum values: parse_grok. default: parse_grok
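A sketch of a parse_grok processor with one match rule and no helper rules; the %{matcher:field} pattern syntax follows Datadog Grok conventions, and the rule and field names are assumptions:

    {
      "id": "grok-processor",
      "type": "parse_grok",
      "include": "source:nginx",
      "inputs": ["sample-processor"],
      "disable_library_rules": false,
      "rules": [
        {
          "source": "message",
          "match_rules": [
            {
              "name": "parse_access_log",
              "rule": "%{ip:network.client.ip} %{word:http.method} %{notSpace:http.url}"
            }
          ],
          "support_rules": []
        }
      ]
    }
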

Option 10 (object): The sensitive_data_scanner processor detects and optionally redacts sensitive data in log events.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - rules [required] ([object]): A list of rules for identifying and acting on sensitive data patterns.
    - keyword_options (object): Configuration for keywords used to reinforce sensitive data pattern detection.
      - keywords [required] ([string]): A list of keywords to match near the sensitive pattern.
      - proximity [required] (int64): Maximum number of tokens between a keyword and a sensitive value match.
    - name [required] (string): A name identifying the rule.
    - on_match [required] (<oneOf>): Defines what action to take when sensitive data is matched.
      - Option 1 (object): Configuration for completely redacting matched sensitive data.
        - action [required] (enum): Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. Allowed enum values: redact
        - options [required] (object): Configuration for fully redacting sensitive data.
          - replace [required] (string): The fixed string that replaces the matched sensitive data.
      - Option 2 (object): Configuration for hashing matched sensitive values.
        - action [required] (enum): Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content. Allowed enum values: hash
        - options (object): Options for the hash action.
      - Option 3 (object): Configuration for partially redacting matched sensitive data.
        - action [required] (enum): Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card). Allowed enum values: partial_redact
        - options [required] (object): Controls how partial redaction is applied, including character count and direction.
          - characters [required] (int64): The number of characters the partial redaction applies to.
          - direction [required] (enum): Indicates whether to redact characters from the first or last part of the matched value. Allowed enum values: first, last
    - pattern [required] (<oneOf>): Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.
      - Option 1 (object): Defines a custom regex-based pattern for identifying sensitive data in logs.
        - options [required] (object): Options for defining a custom regex pattern.
          - rule [required] (string): A regular expression used to detect sensitive values. Must be a valid regex.
        - type [required] (enum): Indicates a custom regular expression is used for matching. Allowed enum values: custom
      - Option 2 (object): Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.
        - options [required] (object): Options for selecting a predefined library pattern and enabling keyword support.
          - id [required] (string): Identifier for a predefined pattern from the sensitive data scanner pattern library.
          - use_recommended_keywords (boolean): Whether to augment the pattern with recommended keywords (optional).
        - type [required] (enum): Indicates that a predefined library pattern is used. Allowed enum values: library
    - scope [required] (<oneOf>): Determines which parts of the log the pattern-matching rule should be applied to.
      - Option 1 (object): Includes only specific fields for sensitive data scanning.
        - options [required] (object): Fields to which the scope rule applies.
          - fields [required] ([string]): The list of field paths the scope rule applies to.
        - target [required] (enum): Applies the rule only to included fields. Allowed enum values: include
      - Option 2 (object): Excludes specific fields from sensitive data scanning.
        - options [required] (object): Fields to which the scope rule applies.
          - fields [required] ([string]): The list of field paths the scope rule applies to.
        - target [required] (enum): Excludes specific fields from processing. Allowed enum values: exclude
      - Option 3 (object): Applies scanning across all available fields.
        - target [required] (enum): Applies the rule to all fields. Allowed enum values: all
    - tags [required] ([string]): Tags assigned to this rule for filtering and classification.
  - type [required] (enum): The processor type. The value should always be sensitive_data_scanner. Allowed enum values: sensitive_data_scanner. default: sensitive_data_scanner
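Combining a custom pattern, an include scope, and a partial_redact action gives a rule like the following sketch; the regex, field paths, and character counts are illustrative only:

    {
      "id": "sds-processor",
      "type": "sensitive_data_scanner",
      "include": "*",
      "inputs": ["grok-processor"],
      "rules": [
        {
          "name": "mask-card-numbers",
          "tags": ["sensitive_data:credit_card"],
          "pattern": {
            "type": "custom",
            "options": { "rule": "\\b\\d{4}-\\d{4}-\\d{4}-\\d{4}\\b" }
          },
          "scope": {
            "target": "include",
            "options": { "fields": ["payment.card_number"] }
          },
          "on_match": {
            "action": "partial_redact",
            "options": { "characters": 12, "direction": "first" }
          }
        }
      ]
    }
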

Option 11 (object): The ocsf_mapper processor transforms logs into the OCSF schema using a predefined mapping configuration.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - mappings [required] ([object]): A list of mapping rules to convert events to the OCSF format.
    - include [required] (string): A Datadog search query used to select the logs that this mapping should apply to.
    - mapping [required] (<oneOf>): Defines a single mapping rule for transforming logs into the OCSF schema.
      - Option 1 (enum): Predefined library mappings for common log formats. Allowed enum values: CloudTrail Account Change, GCP Cloud Audit CreateBucket, GCP Cloud Audit CreateSink, GCP Cloud Audit SetIamPolicy, GCP Cloud Audit UpdateSink, Github Audit Log API Activity, Google Workspace Admin Audit addPrivilege, Microsoft 365 Defender Incident, Microsoft 365 Defender UserLoggedIn, Okta System Log Authentication, Palo Alto Networks Firewall Traffic
  - type [required] (enum): The processor type. The value should always be ocsf_mapper. Allowed enum values: ocsf_mapper. default: ocsf_mapper

Option 12 (object): The add_env_vars processor adds environment variable values to log events.
  - id [required] (string): The unique identifier for this component. Used to reference this processor in the pipeline.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - type [required] (enum): The processor type. The value should always be add_env_vars. Allowed enum values: add_env_vars. default: add_env_vars
  - variables [required] ([object]): A list of environment variable mappings to apply to log fields.
    - field [required] (string): The target field in the log event.
    - name [required] (string): The name of the environment variable to read.

Option 13 (object): The dedupe processor removes duplicate fields in log events.
  - fields [required] ([string]): A list of log field paths to check for duplicates.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - mode [required] (enum): The deduplication mode to apply to the fields. Allowed enum values: match, ignore
  - type [required] (enum): The processor type. The value should always be dedupe. Allowed enum values: dedupe. default: dedupe

Option 14 (object): The enrichment_table processor enriches logs using a static CSV file or GeoIP database.
  - file (object): Defines a static enrichment table loaded from a CSV file.
    - encoding [required] (object): File encoding format.
      - delimiter [required] (string): The encoding delimiter.
      - includes_headers [required] (boolean): Whether the file includes a header row.
      - type [required] (enum): Specifies the encoding format (e.g., CSV) used for enrichment tables. Allowed enum values: csv
    - key [required] ([object]): Key fields used to look up enrichment values.
      - column [required] (string): The enrichment table column to match against.
      - comparison [required] (enum): Defines how to compare key fields for enrichment table lookups. Allowed enum values: equals
      - field [required] (string): The log event field whose value is compared to the column.
    - path [required] (string): Path to the CSV file.
    - schema [required] ([object]): Schema defining column names and their types.
      - column [required] (string): The column name.
      - type [required] (enum): Declares allowed data types for enrichment table columns. Allowed enum values: string, boolean, integer, float, date, timestamp
  - geoip (object): Uses a GeoIP database to enrich logs based on an IP field.
    - key_field [required] (string): Path to the IP field in the log.
    - locale [required] (string): Locale used to resolve geographical names.
    - path [required] (string): Path to the GeoIP database file.
  - id [required] (string): The unique identifier for this processor.
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this processor.
  - target [required] (string): Path where enrichment results should be stored in the log.
  - type [required] (enum): The processor type. The value should always be enrichment_table. Allowed enum values: enrichment_table. default: enrichment_table
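file and geoip are the two table kinds; a sketch of the CSV variant that joins on a service column and writes results under an enriched key (paths and column names are assumptions):

    {
      "id": "enrichment-processor",
      "type": "enrichment_table",
      "include": "*",
      "inputs": ["sds-processor"],
      "target": "enriched",
      "file": {
        "path": "/etc/pipeline/service-owners.csv",
        "encoding": { "type": "csv", "delimiter": ",", "includes_headers": true },
        "key": [
          { "column": "service_name", "comparison": "equals", "field": "service" }
        ],
        "schema": [
          { "column": "service_name", "type": "string" },
          { "column": "owner_team", "type": "string" }
        ]
      }
    }
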
\n

Option 15

\n
\n

object

\n

The reduce processor aggregates and merges logs based on matching keys and merge strategies.

\n
\n
\n
\n
\n
\n

group_by [required]

\n
\n

[string]

\n

A list of fields used to group log events for merging.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

merge_strategies [required]

\n
\n

[object]

\n

List of merge strategies defining how values from grouped events should be combined.

\n
\n
\n
\n
\n
\n

path [required]

\n
\n

string

\n

The field path in the log event.

\n
\n \n
\n
\n
\n
\n
\n

strategy [required]

\n
\n

enum

\n

The merge strategy to apply. \nAllowed enum values: discard,retain,sum,max,min,array,concat,concat_newline,concat_raw,shortest_array,longest_array,flat_unique

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be reduce. \nAllowed enum values: reduce

default: reduce

\n
\n \n
\n
\n
\n
\n
\n
\n
\n

Option 16

\n
\n

object

\n

The throttle processor limits the number of events that pass through over a given time window.

\n
\n
\n
\n
\n
\n

group_by

\n
\n

[string]

\n

Optional list of fields used to group events before the threshold has been reached.

\n
\n \n
\n
\n
\n
\n
\n

id [required]

\n
\n

string

\n

The unique identifier for this processor.

\n
\n \n
\n
\n
\n
\n
\n

include [required]

\n
\n

string

\n

A Datadog search query used to determine which logs this processor targets.

\n
\n \n
\n
\n
\n
\n
\n

inputs [required]

\n
\n

[string]

\n

A list of component IDs whose output is used as the input for this processor.

\n
\n \n
\n
\n
\n
\n
\n

threshold [required]

\n
\n

int64

\n

the number of events allowed in a given time window. Events sent after the threshold has been reached, are dropped.

\n
\n \n
\n
\n
\n
\n
\n

type [required]

\n
\n

enum

\n

The processor type. The value should always be throttle. \nAllowed enum values: throttle

default: throttle

\n
\n \n
\n
\n
\n
\n
\n

window [required]

\n
\n

double

\n

The time window in seconds over which the threshold applies.

\n
\n \n
\n
\n
\n
\n
\n
\n
\n
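For example, a throttle processor that allows at most 1000 events per service per 60-second window (the values are illustrative):

    {
      "id": "throttle-processor",
      "type": "throttle",
      "include": "*",
      "inputs": ["enrichment-processor"],
      "threshold": 1000,
      "window": 60.0,
      "group_by": ["service"]
    }
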

Option 17 (object): The datadog_tags processor includes or excludes specific Datadog tags in your logs.
  - action [required] (enum): The action to take on tags with matching keys. Allowed enum values: include, exclude
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - include [required] (string): A Datadog search query used to determine which logs this processor targets.
  - inputs [required] ([string]): A list of component IDs whose output is used as the input for this component.
  - keys [required] ([string]): A list of tag keys.
  - mode [required] (enum): The processing mode. Allowed enum values: filter
  - type [required] (enum): The processor type. The value should always be datadog_tags. Allowed enum values: datadog_tags. default: datadog_tags
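The spec changes later in this diff ship example values for each of these fields; assembled into a single component they form:

    {
      "id": "datadog-tags-processor",
      "type": "datadog_tags",
      "include": "service:my-service",
      "inputs": ["datadog-agent-source"],
      "action": "include",
      "mode": "filter",
      "keys": ["env", "service", "version"]
    }
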

sources [required] ([ <oneOf>]): A list of configured data sources for the pipeline.

Option 1 (object): The kafka source ingests data from Apache Kafka topics.
  - group_id [required] (string): Consumer group ID used by the Kafka client.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - librdkafka_options ([object]): Optional list of advanced Kafka client configuration options, defined as key-value pairs.
    - name [required] (string): The name of the librdkafka configuration option to set.
    - value [required] (string): The value assigned to the specified librdkafka configuration option.
  - sasl (object): Specifies the SASL mechanism for authenticating with a Kafka cluster.
    - mechanism (enum): SASL mechanism used for Kafka authentication. Allowed enum values: PLAIN, SCRAM-SHA-256, SCRAM-SHA-512
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services.
    - ca_file (string): Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate.
    - crt_file [required] (string): Path to the TLS client certificate file used to authenticate the pipeline component with upstream or downstream services.
    - key_file (string): Path to the private key file associated with the TLS client certificate. Used for mutual TLS authentication.
  - topics [required] ([string]): A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified.
  - type [required] (enum): The source type. The value should always be kafka. Allowed enum values: kafka. default: kafka
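A sketch of a kafka source with SASL authentication and mutual TLS; the topics, certificate paths, and the librdkafka option shown are illustrative:

    {
      "id": "kafka-source",
      "type": "kafka",
      "group_id": "pipeline-consumer-group",
      "topics": ["app-logs"],
      "sasl": { "mechanism": "SCRAM-SHA-256" },
      "librdkafka_options": [
        { "name": "fetch.message.max.bytes", "value": "1048576" }
      ],
      "tls": {
        "ca_file": "/etc/certs/ca.crt",
        "crt_file": "/etc/certs/client.crt",
        "key_file": "/etc/certs/client.key"
      }
    }
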

Option 2 (object): The datadog_agent source collects logs from the Datadog Agent.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. The value should always be datadog_agent. Allowed enum values: datadog_agent. default: datadog_agent

Option 3 (object): The splunk_tcp source receives logs from a Splunk Universal Forwarder over TCP. TLS is supported for secure transmission.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. Always splunk_tcp. Allowed enum values: splunk_tcp. default: splunk_tcp

Option 4 (object): The splunk_hec source implements the Splunk HTTP Event Collector (HEC) API.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. Always splunk_hec. Allowed enum values: splunk_hec. default: splunk_hec

Option 5 (object): The amazon_s3 source ingests logs from an Amazon S3 bucket. It supports AWS authentication and TLS encryption.
  - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
    - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    - external_id (string): A unique identifier for cross-account role assumption.
    - session_name (string): A session identifier used for logging and tracing the assumed role session.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - region [required] (string): AWS region where the S3 bucket resides.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. Always amazon_s3. Allowed enum values: amazon_s3. default: amazon_s3

Option 6 (object): The fluentd source ingests logs from a Fluentd-compatible service.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. The value should always be fluentd. Allowed enum values: fluentd. default: fluentd

Option 7 (object): The fluent_bit source ingests logs from Fluent Bit.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. The value should always be fluent_bit. Allowed enum values: fluent_bit. default: fluent_bit

Option 8 (object): The http_server source collects logs over HTTP POST from external services.
  - auth_strategy [required] (enum): HTTP authentication method. Allowed enum values: none, plain
  - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog
  - id [required] (string): Unique ID for the HTTP server source.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. The value should always be http_server. Allowed enum values: http_server. default: http_server

Option 9 (object): The sumo_logic source receives logs from Sumo Logic collectors.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - type [required] (enum): The source type. The value should always be sumo_logic. Allowed enum values: sumo_logic. default: sumo_logic

Option 10 (object): The rsyslog source listens for logs over TCP or UDP from an rsyslog server using the syslog protocol.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. The value should always be rsyslog. Allowed enum values: rsyslog. default: rsyslog

Option 11 (object): The syslog_ng source listens for logs over TCP or UDP from a syslog-ng server using the syslog protocol.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - mode [required] (enum): Protocol used by the syslog source to receive messages. Allowed enum values: tcp, udp
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. The value should always be syslog_ng. Allowed enum values: syslog_ng. default: syslog_ng

Option 12 (object): The amazon_data_firehose source ingests logs from AWS Data Firehose.
  - auth (object): AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables).
    - assume_role (string): The Amazon Resource Name (ARN) of the role to assume.
    - external_id (string): A unique identifier for cross-account role assumption.
    - session_name (string): A session identifier used for logging and tracing the assumed role session.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. The value should always be amazon_data_firehose. Allowed enum values: amazon_data_firehose. default: amazon_data_firehose

Option 13 (object): The google_pubsub source ingests logs from a Google Cloud Pub/Sub subscription.
  - auth [required] (object): GCP credentials used to authenticate with Google Cloud Storage.
    - credentials_file [required] (string): Path to the GCP service account key file.
  - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - project [required] (string): The GCP project ID that owns the Pub/Sub subscription.
  - subscription [required] (string): The Pub/Sub subscription name from which messages are consumed.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. The value should always be google_pubsub. Allowed enum values: google_pubsub. default: google_pubsub

Option 14 (object): The http_client source scrapes logs from HTTP endpoints at regular intervals.
  - auth_strategy (enum): Optional authentication strategy for HTTP requests. Allowed enum values: basic, bearer
  - decoding [required] (enum): The decoding format used to interpret incoming logs. Allowed enum values: bytes, gelf, json, syslog
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - scrape_interval_secs (int64): The interval (in seconds) between HTTP scrape requests.
  - scrape_timeout_secs (int64): The timeout (in seconds) for each scrape request.
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. The value should always be http_client. Allowed enum values: http_client. default: http_client

Option 15 (object): The logstash source ingests logs from a Logstash forwarder.
  - id [required] (string): The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components).
  - tls (object): Configuration for enabling TLS encryption between the pipeline component and external services. Contains the same ca_file, crt_file [required], and key_file fields described for the kafka source.
  - type [required] (enum): The source type. The value should always be logstash. Allowed enum values: logstash. default: logstash

name [required] (string): Name of the pipeline.

id [required] (string): Unique identifier for the pipeline.

type [required] (string): The resource type identifier. For pipeline resources, this should always be set to pipelines. default: pipelines
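Tying the schema together, a minimal pipeline resource body wires one source through one processor to one destination; every ID and name below is a placeholder:

    {
      "data": {
        "type": "pipelines",
        "attributes": {
          "name": "example-pipeline",
          "config": {
            "sources": [
              { "id": "datadog-agent-source", "type": "datadog_agent" }
            ],
            "processors": [
              {
                "id": "filter-processor",
                "type": "filter",
                "include": "service:my-service",
                "inputs": ["datadog-agent-source"]
              }
            ],
            "destinations": [
              {
                "id": "datadog-logs-destination",
                "type": "datadog_logs",
                "inputs": ["filter-processor"]
              }
            ]
          }
        }
      }
    }
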
" } } } \ No newline at end of file diff --git a/data/api/v2/full_spec.yaml b/data/api/v2/full_spec.yaml index 3420ec8812f23..84fb359b2fa24 100644 --- a/data/api/v2/full_spec.yaml +++ b/data/api/v2/full_spec.yaml @@ -25971,6 +25971,7 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. oneOf: @@ -26080,6 +26081,80 @@ components: type: string x-enum-varnames: - DATADOG_LOGS + ObservabilityPipelineDatadogTagsProcessor: + description: The `datadog_tags` processor includes or excludes specific Datadog + tags in your logs. + properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessorAction' + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: datadog-tags-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + keys: + description: A list of tag keys. + example: + - env + - service + - version + items: + type: string + type: array + mode: + $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessorMode' + type: + $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessorType' + required: + - id + - type + - include + - mode + - action + - keys + - inputs + type: object + ObservabilityPipelineDatadogTagsProcessorAction: + description: The action to take on tags with matching keys. + enum: + - include + - exclude + example: include + type: string + x-enum-varnames: + - INCLUDE + - EXCLUDE + ObservabilityPipelineDatadogTagsProcessorMode: + description: The processing mode. + enum: + - filter + example: filter + type: string + x-enum-varnames: + - FILTER + ObservabilityPipelineDatadogTagsProcessorType: + default: datadog_tags + description: The processor type. The value should always be `datadog_tags`. + enum: + - datadog_tags + example: datadog_tags + type: string + x-enum-varnames: + - DATADOG_TAGS ObservabilityPipelineDecoding: description: The decoding format used to interpret incoming logs. enum: diff --git a/data/api/v2/full_spec_deref.json b/data/api/v2/full_spec_deref.json index 53a67bdb84d90..293506b51afee 100644 --- a/data/api/v2/full_spec_deref.json +++ b/data/api/v2/full_spec_deref.json @@ -147449,6 +147449,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -173679,6 +173762,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -178303,6 +178469,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -182192,6 +182441,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -186096,6 +186428,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -190047,6 +190462,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. 
The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -191162,6 +191660,125 @@ "DATADOG_LOGS" ] }, + "ObservabilityPipelineDatadogTagsProcessor": { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" + }, + "ObservabilityPipelineDatadogTagsProcessorAction": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "ObservabilityPipelineDatadogTagsProcessorMode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "ObservabilityPipelineDatadogTagsProcessorType": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + }, "ObservabilityPipelineDecoding": { "description": "The decoding format used to interpret incoming logs.", "enum": [ @@ -199535,6 +200152,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -203489,6 +204189,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -472046,6 +472829,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -476134,6 +477000,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -480099,6 +481048,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -484221,6 +485253,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -488519,6 +489634,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -492576,6 +493774,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -496547,6 +497828,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, diff --git a/static/resources/json/full_spec_v2.json b/static/resources/json/full_spec_v2.json index 53a67bdb84d90..293506b51afee 100644 --- a/static/resources/json/full_spec_v2.json +++ b/static/resources/json/full_spec_v2.json @@ -147449,6 +147449,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. 
The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -173679,6 +173762,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -178303,6 +178469,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. 
The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -182192,6 +182441,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -186096,6 +186428,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. 
The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -190047,6 +190462,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -191162,6 +191660,125 @@ "DATADOG_LOGS" ] }, + "ObservabilityPipelineDatadogTagsProcessor": { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" + }, + "ObservabilityPipelineDatadogTagsProcessorAction": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "ObservabilityPipelineDatadogTagsProcessorMode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "ObservabilityPipelineDatadogTagsProcessorType": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + }, "ObservabilityPipelineDecoding": { "description": "The decoding format used to interpret incoming logs.", "enum": [ @@ -199535,6 +200152,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -203489,6 +204189,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -472046,6 +472829,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -476134,6 +477000,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -480099,6 +481048,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -484221,6 +485253,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -488519,6 +489634,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -492576,6 +493774,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] }, @@ -496547,6 +497828,89 @@ "window" ], "type": "object" + }, + { + "description": "The `datadog_tags` processor includes or excludes specific Datadog tags in your logs.", + "properties": { + "action": { + "description": "The action to take on tags with matching keys.", + "enum": [ + "include", + "exclude" + ], + "example": "include", + "type": "string", + "x-enum-varnames": [ + "INCLUDE", + "EXCLUDE" + ] + }, + "id": { + "description": "The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components).", + "example": "datadog-tags-processor", + "type": "string" + }, + "include": { + "description": "A Datadog search query used to determine which logs this processor targets.", + "example": "service:my-service", + "type": "string" + }, + "inputs": { + "description": "A list of component IDs whose output is used as the `input` for this component.", + "example": [ + "datadog-agent-source" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "description": "A list of tag keys.", + "example": [ + "env", + "service", + "version" + ], + "items": { + "type": "string" + }, + "type": "array" + }, + "mode": { + "description": "The processing mode.", + "enum": [ + "filter" + ], + "example": "filter", + "type": "string", + "x-enum-varnames": [ + "FILTER" + ] + }, + "type": { + "default": "datadog_tags", + "description": "The processor type. The value should always be `datadog_tags`.", + "enum": [ + "datadog_tags" + ], + "example": "datadog_tags", + "type": "string", + "x-enum-varnames": [ + "DATADOG_TAGS" + ] + } + }, + "required": [ + "id", + "type", + "include", + "mode", + "action", + "keys", + "inputs" + ], + "type": "object" } ] },
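For reference, the `ObservabilityPipelineDatadogTagsProcessor` schema that this change adds across the v2 specs describes a processor component shaped like the following. This is a minimal sketch built only from the example values embedded in the schema itself, with all seven required fields (`id`, `type`, `include`, `mode`, `action`, `keys`, `inputs`) present:

    {
      "id": "datadog-tags-processor",
      "type": "datadog_tags",
      "include": "service:my-service",
      "mode": "filter",
      "action": "include",
      "keys": ["env", "service", "version"],
      "inputs": ["datadog-agent-source"]
    }

With `action` set to `include`, the processor keeps only tags whose keys appear in `keys`; with `exclude`, it drops them instead. `filter` is currently the only allowed value for `mode`, and `datadog_tags` the only allowed value for `type`.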