Skip to content

Commit

Permalink
transform: epoch nanosecond number to timestamp
Browse files Browse the repository at this point in the history
  • Loading branch information
nikhilsinhaparseable committed Jan 3, 2025
1 parent dd64c83 commit a7b690a
Show file tree
Hide file tree
Showing 4 changed files with 54 additions and 16 deletions.
11 changes: 8 additions & 3 deletions src/otel/logs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ use serde_json::Value;
use std::collections::BTreeMap;

use super::otel_utils::collect_json_from_values;
use super::otel_utils::convert_epoch_nano_to_timestamp;
use super::otel_utils::insert_attributes;

/// otel log event has severity number
Expand Down Expand Up @@ -51,11 +52,15 @@ pub fn flatten_log_record(log_record: &LogRecord) -> BTreeMap<String, Value> {
let mut log_record_json: BTreeMap<String, Value> = BTreeMap::new();
log_record_json.insert(
"time_unix_nano".to_string(),
Value::Number(log_record.time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
log_record.time_unix_nano as i64,
)),
);
log_record_json.insert(
"observable_time_unix_nano".to_string(),
Value::Number(log_record.observed_time_unix_nano.into()),
"observed_time_unix_nano".to_string(),
Value::String(convert_epoch_nano_to_timestamp(
log_record.observed_time_unix_nano as i64,
)),
);

log_record_json.extend(flatten_severity(log_record.severity_number));
Expand Down
40 changes: 30 additions & 10 deletions src/otel/metrics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,9 @@ use opentelemetry_proto::tonic::metrics::v1::{
};
use serde_json::Value;

use super::otel_utils::{insert_attributes, insert_number_if_some};
use super::otel_utils::{
convert_epoch_nano_to_timestamp, insert_attributes, insert_number_if_some,
};

/// otel metrics event has json array for exemplar
/// this function flattens the exemplar json array
Expand All @@ -37,7 +39,9 @@ fn flatten_exemplar(exemplars: &[Exemplar]) -> BTreeMap<String, Value> {
insert_attributes(&mut exemplar_json, &exemplar.filtered_attributes);
exemplar_json.insert(
"exemplar_time_unix_nano".to_string(),
Value::Number(exemplar.time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
exemplar.time_unix_nano as i64,
)),
);
exemplar_json.insert(
"exemplar_span_id".to_string(),
Expand Down Expand Up @@ -79,11 +83,15 @@ fn flatten_number_data_points(data_points: &[NumberDataPoint]) -> Vec<BTreeMap<S
insert_attributes(&mut data_point_json, &data_point.attributes);
data_point_json.insert(
"start_time_unix_nano".to_string(),
Value::Number(data_point.start_time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
data_point.start_time_unix_nano as i64,
)),
);
data_point_json.insert(
"time_unix_nano".to_string(),
Value::Number(data_point.time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
data_point.time_unix_nano as i64,
)),
);
let exemplar_json = flatten_exemplar(&data_point.exemplars);
for (key, value) in exemplar_json {
Expand Down Expand Up @@ -167,11 +175,15 @@ fn flatten_histogram(histogram: &Histogram) -> Vec<BTreeMap<String, Value>> {
insert_attributes(&mut data_point_json, &data_point.attributes);
data_point_json.insert(
"histogram_start_time_unix_nano".to_string(),
Value::Number(data_point.start_time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
data_point.start_time_unix_nano as i64,
)),
);
data_point_json.insert(
"histogram_time_unix_nano".to_string(),
Value::Number(data_point.time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
data_point.time_unix_nano as i64,
)),
);
data_point_json.insert(
"histogram_data_point_count".to_string(),
Expand Down Expand Up @@ -242,11 +254,15 @@ fn flatten_exp_histogram(exp_histogram: &ExponentialHistogram) -> Vec<BTreeMap<S
insert_attributes(&mut data_point_json, &data_point.attributes);
data_point_json.insert(
"exponential_histogram_start_time_unix_nano".to_string(),
Value::Number(data_point.start_time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
data_point.start_time_unix_nano as i64,
)),
);
data_point_json.insert(
"exponential_histogram_time_unix_nano".to_string(),
Value::Number(data_point.time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
data_point.time_unix_nano as i64,
)),
);
data_point_json.insert(
"exponential_histogram_data_point_count".to_string(),
Expand Down Expand Up @@ -306,11 +322,15 @@ fn flatten_summary(summary: &Summary) -> Vec<BTreeMap<String, Value>> {
insert_attributes(&mut data_point_json, &data_point.attributes);
data_point_json.insert(
"summary_start_time_unix_nano".to_string(),
Value::Number(data_point.start_time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
data_point.start_time_unix_nano as i64,
)),
);
data_point_json.insert(
"summary_time_unix_nano".to_string(),
Value::Number(data_point.time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
data_point.time_unix_nano as i64,
)),
);
data_point_json.insert(
"summary_data_point_count".to_string(),
Expand Down
6 changes: 6 additions & 0 deletions src/otel/otel_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
*
*/

use chrono::DateTime;
use opentelemetry_proto::tonic::common::v1::{any_value::Value as OtelValue, AnyValue, KeyValue};
use serde_json::Value;
use std::collections::BTreeMap;
Expand Down Expand Up @@ -154,3 +155,8 @@ pub fn insert_attributes(map: &mut BTreeMap<String, Value>, attributes: &Vec<Key
map.insert(key, value);
}
}

/// Converts an epoch timestamp expressed in nanoseconds into a UTC
/// timestamp string of the form `YYYY-MM-DDTHH:MM:SS.ffffffZ`
/// (microsecond precision, per the `%.6f` format specifier).
pub fn convert_epoch_nano_to_timestamp(epoch_ns: i64) -> String {
    // `DateTime::from_timestamp_nanos` yields a `DateTime<Utc>`; formatting it
    // directly produces the same text as formatting its naive UTC counterpart,
    // since the value is already in UTC. The trailing `Z` is a literal in the
    // format string marking the zone.
    let utc_time = DateTime::from_timestamp_nanos(epoch_ns);
    utc_time.format("%Y-%m-%dT%H:%M:%S%.6fZ").to_string()
}
13 changes: 10 additions & 3 deletions src/otel/traces.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ use opentelemetry_proto::tonic::trace::v1::TracesData;
use serde_json::Value;
use std::collections::BTreeMap;

use super::otel_utils::convert_epoch_nano_to_timestamp;
use super::otel_utils::insert_attributes;

/// this function flattens the `ScopeSpans` object
Expand Down Expand Up @@ -116,7 +117,9 @@ fn flatten_events(events: &[Event]) -> Vec<BTreeMap<String, Value>> {
let mut event_json = BTreeMap::new();
event_json.insert(
"event_time_unix_nano".to_string(),
Value::Number(event.time_unix_nano.into()),
Value::String(
convert_epoch_nano_to_timestamp(event.time_unix_nano as i64).to_string(),
),
);
event_json.insert("event_name".to_string(), Value::String(event.name.clone()));
insert_attributes(&mut event_json, &event.attributes);
Expand Down Expand Up @@ -261,11 +264,15 @@ fn flatten_span_record(span_record: &Span) -> Vec<BTreeMap<String, Value>> {
span_record_json.extend(flatten_kind(span_record.kind));
span_record_json.insert(
"span_start_time_unix_nano".to_string(),
Value::Number(span_record.start_time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
span_record.start_time_unix_nano as i64,
)),
);
span_record_json.insert(
"span_end_time_unix_nano".to_string(),
Value::Number(span_record.end_time_unix_nano.into()),
Value::String(convert_epoch_nano_to_timestamp(
span_record.end_time_unix_nano as i64,
)),
);
insert_attributes(&mut span_record_json, &span_record.attributes);
span_record_json.insert(
Expand Down

0 comments on commit a7b690a

Please sign in to comment.