[processor/metricstransformprocessor] Support count aggregation type (open-telemetry#32935)

**Description:** Implements open-telemetry#24978.
lopes-felipe authored Jun 17, 2024
1 parent 3ee48c4 commit 73e67ba
Showing 5 changed files with 94 additions and 3 deletions.
27 changes: 27 additions & 0 deletions .chloggen/count-aggregation-type.yaml
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: enhancement

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: metricstransformprocessor

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: "Adds the 'count' aggregation type to the Metrics Transform Processor."

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [24978]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: [user]
4 changes: 2 additions & 2 deletions processor/metricstransformprocessor/README.md
@@ -86,7 +86,7 @@ processors:
# new_name specifies the updated name of the metric; if action is insert or combine, new_name is required
new_name: <new_metric_name_inserted>
# aggregation_type defines how combined data points will be aggregated; if action is combine, aggregation_type is required
aggregation_type: {sum, mean, min, max}
aggregation_type: {sum, mean, min, max, count}
# submatch_case specifies the case that should be used when adding label values based on regexp submatches when performing a combine action; leave blank to use the submatch value as is
submatch_case: {lower, upper}
# operations contain a list of operations that will be performed on the resulting metric(s)
@@ -106,7 +106,7 @@ processors:
# label_set contains a list of labels that will remain after aggregation; if action is aggregate_labels, label_set is required
label_set: [labels...]
# aggregation_type defines how data points will be aggregated; if action is aggregate_labels or aggregate_label_values, aggregation_type is required
aggregation_type: {sum, mean, min, max}
aggregation_type: {sum, mean, min, max, count}
# experimental_scale specifies the scalar to apply to values
experimental_scale: <scalar>
# value_actions contain a list of operations that will be performed on the selected label
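For context on the README change, a minimal collector configuration that exercises the new option might look like the sketch below. The pipeline, metric name, and label names are hypothetical and not part of this commit; only the `aggregation_type: count` value comes from the diff. Each group of data points sharing the same `status` value is replaced by a single point whose value is the number of points in that group.

```yaml
processors:
  metricstransform:
    transforms:
      # Hypothetical metric and label names, shown only to illustrate `count`.
      - include: http.server.request_count
        action: update
        operations:
          - action: aggregate_labels
            label_set: [status]
            aggregation_type: count
```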
5 changes: 4 additions & 1 deletion processor/metricstransformprocessor/config.go
@@ -231,9 +231,12 @@ const (

// max indicates taking the max of the aggregated data.
max aggregationType = "max"

// count indicates taking the count of the aggregated data.
count aggregationType = "count"
)

var aggregationTypes = []aggregationType{sum, mean, min, max}
var aggregationTypes = []aggregationType{sum, mean, min, max, count}

func (at aggregationType) isValid() bool {
for _, aggregationType := range aggregationTypes {
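The diff view cuts `isValid` off after the loop header. Below is a self-contained sketch of how the membership check presumably completes, with the constants from the hunk above repeated so it runs on its own; the function body past the shown line is an assumption, not the file's verbatim remainder.

```go
package main

import "fmt"

type aggregationType string

const (
	sum   aggregationType = "sum"
	mean  aggregationType = "mean"
	min   aggregationType = "min"
	max   aggregationType = "max"
	count aggregationType = "count"
)

var aggregationTypes = []aggregationType{sum, mean, min, max, count}

// isValid reports whether at is one of the known aggregation types.
// Assumed body: a plain membership check over aggregationTypes.
func (at aggregationType) isValid() bool {
	for _, t := range aggregationTypes {
		if at == t {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(aggregationType("count").isValid())  // true
	fmt.Println(aggregationType("median").isValid()) // false
}
```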
@@ -298,6 +298,35 @@ var (
addIntDatapoint(1, 2, 3, "label1-value1").build(),
},
},
{
name: "metric_label_aggregation_count_int_update",
transforms: []internalTransform{
{
MetricIncludeFilter: internalFilterStrict{include: "metric1"},
Action: Update,
Operations: []internalOperation{
{
configOperation: Operation{
Action: aggregateLabels,
AggregationType: count,
LabelSet: []string{"label1"},
},
labelSetMap: map[string]bool{"label1": true},
},
},
},
},
in: []pmetric.Metric{
metricBuilder(pmetric.MetricTypeGauge, "metric1", "label1", "label2").
addIntDatapoint(1, 2, 1, "label1-value1", "label2-value1").
addIntDatapoint(1, 2, 4, "label1-value1", "label2-value2").
addIntDatapoint(1, 2, 2, "label1-value1", "label2-value2").build(),
},
out: []pmetric.Metric{
metricBuilder(pmetric.MetricTypeGauge, "metric1", "label1").
addIntDatapoint(1, 2, 3, "label1-value1").build(),
},
},
{
name: "metric_label_aggregation_min_int_update",
transforms: []internalTransform{
@@ -411,6 +440,34 @@ var (
addDoubleDatapoint(1, 2, 3, "label1-value1").build(),
},
},
{
name: "metric_label_aggregation_count_double_update",
transforms: []internalTransform{
{
MetricIncludeFilter: internalFilterStrict{include: "metric1"},
Action: Update,
Operations: []internalOperation{
{
configOperation: Operation{
Action: aggregateLabels,
AggregationType: count,
LabelSet: []string{"label1"},
},
labelSetMap: map[string]bool{"label1": true},
},
},
},
},
in: []pmetric.Metric{
metricBuilder(pmetric.MetricTypeGauge, "metric1", "label1", "label2").
addDoubleDatapoint(1, 2, 3, "label1-value1", "label2-value1").
addDoubleDatapoint(1, 2, 1, "label1-value1", "label2-value2").build(),
},
out: []pmetric.Metric{
metricBuilder(pmetric.MetricTypeGauge, "metric1", "label1").
addDoubleDatapoint(1, 2, 2, "label1-value1").build(),
},
},
{
name: "metric_label_aggregation_min_double_update",
transforms: []internalTransform{
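The two test cases above pin down the semantics: the aggregated value is the number of data points in each retained-label group, not a function of their values. In the int case, the three points with values 1, 4, and 2 all share label1-value1, so the expected output value is 3 (the point count), not 7 (their sum); in the double case, two points collapse to the value 2. Here is a tiny standalone sketch of that behavior, using a hypothetical `point` type instead of the pdata builders the tests rely on.

```go
package main

import "fmt"

// point is a hypothetical stand-in for a metric data point: the value of the
// retained label plus the point's numeric value.
type point struct {
	label string
	value int64
}

// countByLabel mimics the count aggregation: every group of points sharing
// the retained label collapses to a single value equal to the group's size,
// regardless of the original point values.
func countByLabel(points []point) map[string]int64 {
	out := map[string]int64{}
	for _, p := range points {
		out[p.label]++
	}
	return out
}

func main() {
	// Mirrors the int test case: values 1, 4 and 2 under "label1-value1"
	// become one point with value 3.
	in := []point{
		{"label1-value1", 1},
		{"label1-value1", 4},
		{"label1-value1", 2},
	}
	fmt.Println(countByLabel(in)) // map[label1-value1:3]
}
```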
@@ -167,6 +167,8 @@ func mergeNumberDataPoints(dpsMap map[string]pmetric.NumberDataPointSlice, agg a
dp.SetDoubleValue(math.Max(dp.DoubleValue(), doubleVal(dps.At(i))))
case min:
dp.SetDoubleValue(math.Min(dp.DoubleValue(), doubleVal(dps.At(i))))
case count:
dp.SetDoubleValue(float64(dps.Len()))
}
if dps.At(i).StartTimestamp() < dp.StartTimestamp() {
dp.SetStartTimestamp(dps.At(i).StartTimestamp())
@@ -188,6 +190,8 @@ func mergeNumberDataPoints(dpsMap map[string]pmetric.NumberDataPointSlice, agg a
if dp.IntValue() > intVal(dps.At(i)) {
dp.SetIntValue(intVal(dps.At(i)))
}
case count:
dp.SetIntValue(int64(dps.Len()))
}
if dps.At(i).StartTimestamp() < dp.StartTimestamp() {
dp.SetStartTimestamp(dps.At(i).StartTimestamp())
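In the merge loop itself, the new count branches simply overwrite the surviving point's value with the length of the slice being merged, once for double points and once for int points. Below is a minimal sketch against the pdata API showing just that step; it is not the processor's own loop, which also keeps the earliest start timestamp across the merged points.

```go
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/pmetric"
)

func main() {
	// Two double points that would end up in the same group after the label
	// set is reduced.
	dps := pmetric.NewNumberDataPointSlice()
	dps.AppendEmpty().SetDoubleValue(3)
	dps.AppendEmpty().SetDoubleValue(1)

	// The count branch keeps only the number of merged points as the value,
	// matching the double test case above (two inputs -> value 2).
	merged := pmetric.NewNumberDataPoint()
	merged.SetDoubleValue(float64(dps.Len()))
	fmt.Println(merged.DoubleValue()) // 2
}
```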
