[exporter/datadogexporter] Add Working Examples (#29631)
**Description:** 

Adds working example configs.
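
For a quick local run of one of these examples, a compose file along the following lines works. This is a sketch, not part of the commit; it assumes the otel/opentelemetry-collector-contrib image with batch-memory.yaml in the current directory.

# Hypothetical docker-compose.yaml for trying batch-memory.yaml locally (not part of this commit).
services:
  otel-collector:
    image: otel/opentelemetry-collector-contrib:latest
    network_mode: host # the example binds OTLP to localhost, so share the host network (Linux only)
    command: ["--config=/etc/otelcol/config.yaml"]
    volumes:
      - ./batch-memory.yaml:/etc/otelcol/config.yaml:ro
    environment:
      - DD_API_KEY # forwarded from the shell environment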

**Link to tracking Issue:** <Issue number if applicable>

**Testing:** <Describe what testing was performed and which tests were added.>

**Documentation:** <Describe the documentation added.>
mackjmr authored Dec 7, 2023
1 parent 4255836 commit f99e52d
Showing 14 changed files with 748 additions and 54 deletions.
36 changes: 36 additions & 0 deletions exporter/datadogexporter/examples/batch-memory.yaml
@@ -0,0 +1,36 @@
receivers:
  otlp:
    protocols:
      http:
        endpoint: "localhost:4318"
      grpc:
        endpoint: "localhost:4317"

processors:
  batch:
    send_batch_max_size: 1000
    send_batch_size: 100
    timeout: 10s
  memory_limiter:
    check_interval: 1s
    limit_mib: 1000

exporters:
  datadog:
    api:
      key: ${env:DD_API_KEY}

service:
  pipelines:
    traces:
      receivers: [otlp]
      processors: [memory_limiter, batch] # memory_limiter should be the first processor in the pipeline
      exporters: [datadog]
    metrics:
      receivers: [otlp]
      processors: [memory_limiter, batch]
      exporters: [datadog]
    logs:
      receivers: [otlp]
      processors: [memory_limiter, batch]
      exporters: [datadog]
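
This example sets limit_mib but leaves spike_limit_mib at its default, which is 20% of limit_mib. For bursty traffic it can be pinned explicitly; a sketch:

processors:
  memory_limiter:
    check_interval: 1s
    limit_mib: 1000
    spike_limit_mib: 200 # defaults to 20% of limit_mib when omitted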
27 changes: 27 additions & 0 deletions exporter/datadogexporter/examples/collector-metrics.yaml
@@ -0,0 +1,27 @@
receivers:
  prometheus:
    config:
      scrape_configs:
        - job_name: 'otelcol'
          scrape_interval: 10s
          static_configs:
            - targets: ['0.0.0.0:8888']

processors:
  batch:
    send_batch_max_size: 1000
    send_batch_size: 100
    timeout: 10s

exporters:
  datadog:
    api:
      site: ${env:DD_SITE}
      key: ${env:DD_API_KEY}

service:
  pipelines:
    metrics:
      receivers: [prometheus]
      processors: [batch]
      exporters: [datadog]
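
The prometheus receiver above scrapes the collector's own internal metrics, which the collector serves on port 8888 by default. If self-telemetry has been customized, the scrape target must match the telemetry address; a sketch, assuming the service::telemetry::metrics::address setting available in collector builds from this era:

service:
  telemetry:
    metrics:
      level: detailed          # emit more self-observability metrics than the default
      address: "0.0.0.0:8888"  # must match the prometheus receiver's scrape target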
37 changes: 37 additions & 0 deletions exporter/datadogexporter/examples/docker-stats.yaml
@@ -0,0 +1,37 @@
receivers:
  docker_stats:
    endpoint: unix:///var/run/docker.sock # (default)
    metrics:
      container.network.io.usage.rx_packets:
        enabled: true
      container.network.io.usage.tx_packets:
        enabled: true
      container.cpu.usage.system:
        enabled: true
      container.memory.rss:
        enabled: true
      container.blockio.io_serviced_recursive:
        enabled: true
      container.uptime:
        enabled: true
      container.memory.hierarchical_memory_limit:
        enabled: true

processors:
  batch:
    send_batch_max_size: 1000
    send_batch_size: 100
    timeout: 10s

exporters:
  datadog:
    api:
      site: ${env:DD_SITE}
      key: ${env:DD_API_KEY}

service:
  pipelines:
    metrics:
      receivers: [docker_stats]
      processors: [batch]
      exporters: [datadog]
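
The docker_stats receiver needs read access to the Docker socket. A compose sketch (the image name and paths are assumptions, not from the commit):

services:
  otel-collector:
    image: otel/opentelemetry-collector-contrib:latest
    command: ["--config=/etc/otelcol/config.yaml"]
    volumes:
      - ./docker-stats.yaml:/etc/otelcol/config.yaml:ro
      - /var/run/docker.sock:/var/run/docker.sock:ro # grants the receiver access to Docker stats
    environment:
      - DD_API_KEY
      - DD_SITE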
46 changes: 46 additions & 0 deletions exporter/datadogexporter/examples/host-metrics.yaml
@@ -0,0 +1,46 @@
receivers:
  hostmetrics:
    collection_interval: 10s
    scrapers:
      paging:
        metrics:
          system.paging.utilization:
            enabled: true
      cpu:
        metrics:
          system.cpu.utilization:
            enabled: true
          system.cpu.physical.count:
            enabled: true
          system.cpu.logical.count:
            enabled: true
          system.cpu.frequency:
            enabled: true
      disk:
      filesystem:
        metrics:
          system.filesystem.utilization:
            enabled: true
      load:
      memory:
      network:
      processes:

processors:
  batch:
    send_batch_max_size: 1000
    send_batch_size: 100
    timeout: 10s

exporters:
  datadog:
    api:
      site: ${env:DD_SITE}
      key: ${env:DD_API_KEY}

service:
  pipelines:
    metrics:
      receivers: [hostmetrics]
      processors: [batch]
      exporters: [datadog]
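
One caveat: when this config runs inside a container, the scrapers read the container's own /proc and /sys. The hostmetrics receiver supports a root_path to point at the host filesystem instead; a sketch, assuming the host's / is mounted read-only at /hostfs:

receivers:
  hostmetrics:
    collection_interval: 10s
    root_path: /hostfs # e.g. started with -v /:/hostfs:ro so scrapers see host /proc and /sys
    scrapers:
      cpu:
      memory:
      load: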
92 changes: 92 additions & 0 deletions exporter/datadogexporter/examples/k8s-chart/k8s-values.yaml
@@ -0,0 +1,92 @@
mode: daemonset
presets:
  kubernetesAttributes:
    enabled: true
extraEnvs:
  - name: POD_IP
    valueFrom:
      fieldRef:
        fieldPath: status.podIP
  - name: OTEL_RESOURCE_ATTRIBUTES
    value: "k8s.pod.ip=$(POD_IP)"
config:
  processors:
    k8sattributes:
      passthrough: false
      auth_type: "serviceAccount"
      pod_association:
        - sources:
            - from: resource_attribute
              name: k8s.pod.ip
      extract:
        metadata:
          - k8s.pod.name
          - k8s.pod.uid
          - k8s.deployment.name
          - k8s.node.name
          - k8s.namespace.name
          - k8s.pod.start_time
          - k8s.replicaset.name
          - k8s.replicaset.uid
          - k8s.daemonset.name
          - k8s.daemonset.uid
          - k8s.job.name
          - k8s.job.uid
          - k8s.cronjob.name
          - k8s.statefulset.name
          - k8s.statefulset.uid
          - container.image.name
          - container.image.tag
          - container.id
          - k8s.container.name
        labels:
          - tag_name: kube_app_name
            key: app.kubernetes.io/name
            from: pod
          - tag_name: kube_app_instance
            key: app.kubernetes.io/instance
            from: pod
          - tag_name: kube_app_version
            key: app.kubernetes.io/version
            from: pod
          - tag_name: kube_app_component
            key: app.kubernetes.io/component
            from: pod
          - tag_name: kube_app_part_of
            key: app.kubernetes.io/part-of
            from: pod
          - tag_name: kube_app_managed_by
            key: app.kubernetes.io/managed-by
            from: pod
    resourcedetection:
      detectors: [env, eks, ec2, system]
      timeout: 2s
      override: false
    batch:
      send_batch_max_size: 1000
      send_batch_size: 100
      timeout: 10s
  exporters:
    datadog:
      api:
        site: ${env:DD_SITE}
        key: ${env:DD_API_KEY}
      traces:
        trace_buffer: 500
  service:
    pipelines:
      metrics:
        receivers: [otlp]
        processors: [resourcedetection, k8sattributes, batch] # batch runs last, per upstream recommendations
        exporters: [datadog]
      traces:
        receivers: [otlp]
        processors: [resourcedetection, k8sattributes, batch]
        exporters: [datadog]
      logs:
        receivers: [otlp]
        processors: [resourcedetection, k8sattributes, batch]
        exporters: [datadog]
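
Note that these values reference ${env:DD_SITE} and ${env:DD_API_KEY} without defining them; one way to supply them is through the chart's extraEnvs, for example from a Kubernetes Secret (the Secret name and key below are hypothetical):

extraEnvs:
  - name: DD_API_KEY
    valueFrom:
      secretKeyRef:
        name: datadog-secret # hypothetical Secret holding the API key
        key: api-key
  - name: DD_SITE
    value: "datadoghq.com" # or your Datadog site, e.g. datadoghq.eu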
35 changes: 35 additions & 0 deletions exporter/datadogexporter/examples/logs.yaml
@@ -0,0 +1,35 @@
receivers:
  filelog:
    include_file_path: true
    poll_interval: 500ms
    include:
      - /var/log/*/app.log
    operators:
      - type: json_parser
      # If your logs are not JSON, make sure to use the regex_parser to parse the timestamp into attributes.time.
      # - type: regex_parser
      #   id: parser-timestamp
      #   ...
      # The layout needs to match the log timestamp format. If this section is removed, the timestamp will correspond to the time the log was ingested by Datadog.
      - type: time_parser
        parse_from: attributes.time
        layout: '%Y-%m-%dT%H:%M:%S%z'

processors:
  batch:
    send_batch_max_size: 1000
    send_batch_size: 100
    timeout: 10s

exporters:
  datadog:
    api:
      site: ${env:DD_SITE}
      key: ${env:DD_API_KEY}

service:
  pipelines:
    logs:
      receivers: [filelog]
      processors: [batch]
      exporters: [datadog]
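
The regex_parser placeholder in the operators above is elided; for a log line such as `2023-12-07T15:04:05+0000 INFO starting up`, a hypothetical parser could look like the following (the regex and field names are illustrative, not part of the commit):

operators:
  - type: regex_parser
    id: parser-timestamp
    regex: '^(?P<time>\S+) (?P<severity>\w+) (?P<message>.*)$' # illustrative; match your own format
  - type: time_parser
    parse_from: attributes.time
    layout: '%Y-%m-%dT%H:%M:%S%z' # must match the captured timestamp format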
113 changes: 113 additions & 0 deletions exporter/datadogexporter/examples/ootb-ec2.yaml
@@ -0,0 +1,113 @@
receivers:
  otlp:
    protocols:
      http:
        endpoint: "localhost:4318"
      grpc:
        endpoint: "localhost:4317"

  hostmetrics:
    collection_interval: 10s
    scrapers:
      paging:
        metrics:
          system.paging.utilization:
            enabled: true
      cpu:
        metrics:
          system.cpu.utilization:
            enabled: true
          system.cpu.physical.count:
            enabled: true
          system.cpu.logical.count:
            enabled: true
          system.cpu.frequency:
            enabled: true
      disk:
      filesystem:
        metrics:
          system.filesystem.utilization:
            enabled: true
      load:
      memory:
      network:
      processes:

  prometheus:
    config:
      scrape_configs:
        - job_name: 'otelcol'
          scrape_interval: 10s
          static_configs:
            - targets: ['0.0.0.0:8888']

  filelog:
    include_file_path: true
    poll_interval: 500ms
    include:
      - /var/log/*/app.log
    operators:
      - type: json_parser
      # If your logs are not JSON, make sure to use the regex_parser to parse the timestamp into attributes.time.
      # - type: regex_parser
      #   id: parser-timestamp
      #   ...
      # The layout needs to match the log timestamp format. If this section is removed, the timestamp will correspond to the time the log was ingested by Datadog.
      - type: time_parser
        parse_from: attributes.time
        layout: '%Y-%m-%dT%H:%M:%S%z'

processors:
  batch:
    send_batch_max_size: 1000
    send_batch_size: 100
    timeout: 10s
  memory_limiter:
    check_interval: 1s
    limit_mib: 4000
    spike_limit_mib: 800
  resourcedetection:
    detectors: [env, ec2, system]
    system:
      resource_attributes:
        os.description:
          enabled: true
        host.arch:
          enabled: true
        host.cpu.vendor.id:
          enabled: true
        host.cpu.family:
          enabled: true
        host.cpu.model.id:
          enabled: true
        host.cpu.model.name:
          enabled: true
        host.cpu.stepping:
          enabled: true
        host.cpu.cache.l2.size:
          enabled: true

exporters:
  datadog:
    api:
      site: ${env:DD_SITE}
      key: ${env:DD_API_KEY}
    traces:
      trace_buffer: 500

service:
  pipelines:
    metrics:
      receivers: [otlp, hostmetrics, prometheus] # include the prometheus receiver defined above so it is not left unused
      processors: [memory_limiter, resourcedetection, batch] # memory_limiter should be the first processor in the pipeline
      exporters: [datadog]
    traces:
      receivers: [otlp]
      processors: [memory_limiter, resourcedetection, batch]
      exporters: [datadog]
    logs:
      receivers: [otlp, filelog]
      processors: [memory_limiter, resourcedetection, batch]
      exporters: [datadog]
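
limit_mib: 4000 presumes an instance with well over 4 GiB of memory. On smaller EC2 instances the memory_limiter also accepts percentage-based settings; a sketch:

processors:
  memory_limiter:
    check_interval: 1s
    limit_percentage: 80       # cap the collector at 80% of total memory
    spike_limit_percentage: 25 # burst headroom, as a share of total memory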