diff --git a/Cargo.toml b/Cargo.toml index bf0200d8..3f8d68e3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,10 +12,10 @@ resolver = "2" debug = 1 [workspace.dependencies] -opentelemetry = "0.24" -opentelemetry-appender-tracing = "0.5" -opentelemetry-http = "0.13" -opentelemetry-proto = { version = "0.7", default-features = false } -opentelemetry_sdk = { version = "0.24", default-features = false } -opentelemetry-stdout = "0.5" -opentelemetry-semantic-conventions = "0.16" +opentelemetry = "0.25" +opentelemetry-appender-tracing = "0.25" +opentelemetry-http = "0.25" +opentelemetry-proto = { version = "0.25", default-features = false } +opentelemetry_sdk = { version = "0.25", default-features = false } +opentelemetry-stdout = "0.25" +opentelemetry-semantic-conventions = "0.25" diff --git a/opentelemetry-aws/CHANGELOG.md b/opentelemetry-aws/CHANGELOG.md index 72a2483d..478a0bfa 100644 --- a/opentelemetry-aws/CHANGELOG.md +++ b/opentelemetry-aws/CHANGELOG.md @@ -1,12 +1,16 @@ # Changelog -## vNext +## v0.13.0 ### Added - `LambdaResourceDetector` has been added to the crate to detect AWS Lambda attributes. To enable it in your code, use the feature `detector-aws-lambda`. -### v0.12.0 +### Changed + +- Bump opentelemetry and opentelemetry_sdk versions to 0.25.0 + +## v0.12.0 - Bump opentelemetry and opentelemetry_sdk versions to 0.24.0 - Update hyper to 1.4.1 diff --git a/opentelemetry-aws/Cargo.toml b/opentelemetry-aws/Cargo.toml index 3db4863d..03558e11 100644 --- a/opentelemetry-aws/Cargo.toml +++ b/opentelemetry-aws/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "opentelemetry-aws" -version = "0.12.0" +version = "0.13.0" description = "AWS exporters and propagators for OpenTelemetry" homepage = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-aws" repository = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-aws" diff --git a/opentelemetry-contrib/CHANGELOG.md b/opentelemetry-contrib/CHANGELOG.md index ea5bc45f..5dfdf3de 100644 --- a/opentelemetry-contrib/CHANGELOG.md +++ b/opentelemetry-contrib/CHANGELOG.md @@ -2,6 +2,13 @@ ## vNext +## v0.17.0 + +### Changed + +- Bump opentelemetry and opentelemetry_sdk versions to 0.25 +- Bump opentelemetry-semantic-conventions version to 0.25 + ## v0.16.0 ### Changed diff --git a/opentelemetry-contrib/Cargo.toml b/opentelemetry-contrib/Cargo.toml index f9478ee4..6e2727be 100644 --- a/opentelemetry-contrib/Cargo.toml +++ b/opentelemetry-contrib/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "opentelemetry-contrib" -version = "0.16.0" +version = "0.17.0" description = "Rust contrib repo for OpenTelemetry" homepage = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-contrib" repository = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-contrib" diff --git a/opentelemetry-datadog/CHANGELOG.md b/opentelemetry-datadog/CHANGELOG.md index a10ee726..20b5c75e 100644 --- a/opentelemetry-datadog/CHANGELOG.md +++ b/opentelemetry-datadog/CHANGELOG.md @@ -2,6 +2,12 @@ ## vNext +## v0.13.0 + +### Changed + +- Bump opentelemetry and opentelemetry_sdk version to 0.25 + ## v0.12.0 ### Changed diff --git a/opentelemetry-datadog/Cargo.toml b/opentelemetry-datadog/Cargo.toml index 0f7e51d2..dce1865e 100644 --- a/opentelemetry-datadog/Cargo.toml +++ b/opentelemetry-datadog/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "opentelemetry-datadog" -version = "0.12.0" +version = "0.13.0" description = "Datadog exporters and propagators for 
OpenTelemetry" homepage = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-datadog" repository = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-datadog" diff --git a/opentelemetry-etw-logs/CHANGELOG.md b/opentelemetry-etw-logs/CHANGELOG.md index 4c641c8f..51e03074 100644 --- a/opentelemetry-etw-logs/CHANGELOG.md +++ b/opentelemetry-etw-logs/CHANGELOG.md @@ -2,6 +2,12 @@ ## vNext +## v0.4.0 + +### Changed + +- Bump opentelemetry and opentelemetry_sdk versions to 0.25 + ## v0.3.0 ### Changed diff --git a/opentelemetry-etw-logs/Cargo.toml b/opentelemetry-etw-logs/Cargo.toml index 066875bd..fc868914 100644 --- a/opentelemetry-etw-logs/Cargo.toml +++ b/opentelemetry-etw-logs/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "opentelemetry-etw-logs" description = "OpenTelemetry logs exporter to ETW (Event Tracing for Windows)" -version = "0.3.0" +version = "0.4.0" edition = "2021" homepage = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-etw-logs" repository = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-etw-logs" diff --git a/opentelemetry-etw-logs/src/logs/converters.rs b/opentelemetry-etw-logs/src/logs/converters.rs index 214227d9..43354e33 100644 --- a/opentelemetry-etw-logs/src/logs/converters.rs +++ b/opentelemetry-etw-logs/src/logs/converters.rs @@ -44,7 +44,7 @@ mod tests { #[test] fn test_convert_vec_of_any_value_to_string() { - let vec = vec![ + let vec = [ AnyValue::Int(1), AnyValue::Int(2), AnyValue::Int(3), @@ -57,17 +57,17 @@ mod tests { let result = [].as_json_value(); assert_eq!(result, json!([])); - let array = [AnyValue::ListAny(vec![ + let array = [AnyValue::ListAny(Box::new(vec![ AnyValue::Int(1), AnyValue::Int(2), AnyValue::Int(3), - ])]; + ]))]; let result = array.as_json_value(); assert_eq!(result, json!([[1, 2, 3]])); let array = [ - AnyValue::ListAny(vec![AnyValue::Int(1), AnyValue::Int(2)]), - AnyValue::ListAny(vec![AnyValue::Int(3), AnyValue::Int(4)]), + AnyValue::ListAny(Box::new(vec![AnyValue::Int(1), AnyValue::Int(2)])), + AnyValue::ListAny(Box::new(vec![AnyValue::Int(3), AnyValue::Int(4)])), ]; let result = array.as_json_value(); assert_eq!(result, json!([[1, 2], [3, 4]])); @@ -105,8 +105,8 @@ mod tests { #[should_panic] fn test_convert_bytes_panics() { let array = [ - AnyValue::Bytes(vec![97u8, 98u8, 99u8]), - AnyValue::Bytes(vec![]), + AnyValue::Bytes(Box::new(vec![97u8, 98u8, 99u8])), + AnyValue::Bytes(Box::default()), ]; let result = array.as_json_value(); assert_eq!(result, json!(["abc", ""])); @@ -134,7 +134,7 @@ mod tests { let mut map = HashMap::new(); map.insert(Key::new("d"), AnyValue::Int(4)); map.insert(Key::new("e"), AnyValue::Int(5)); - map.insert(Key::new("f"), AnyValue::Map(inner_map)); + map.insert(Key::new("f"), AnyValue::Map(Box::new(inner_map))); let result = map.as_json_value(); assert_eq!(result, json!({"d":4,"e":5,"f":{"a":1,"b":2,"c":3}})); @@ -183,24 +183,30 @@ mod tests { let empty_vec = vec![]; let mut complex_map = HashMap::new(); - complex_map.insert(Key::new("a"), AnyValue::Map(simple_map.clone())); - complex_map.insert(Key::new("b"), AnyValue::Map(empty_map.clone())); - complex_map.insert(Key::new("c"), AnyValue::ListAny(simple_vec.clone())); - complex_map.insert(Key::new("d"), AnyValue::ListAny(empty_vec.clone())); + complex_map.insert(Key::new("a"), AnyValue::Map(Box::new(simple_map.clone()))); + complex_map.insert(Key::new("b"), AnyValue::Map(Box::new(empty_map.clone()))); + 
complex_map.insert( + Key::new("c"), + AnyValue::ListAny(Box::new(simple_vec.clone())), + ); + complex_map.insert( + Key::new("d"), + AnyValue::ListAny(Box::new(empty_vec.clone())), + ); let result = complex_map.as_json_value(); assert_eq!(result, json!({"a":{"a":1,"b":2},"b":{},"c":[1,2],"d":[]})); - let complex_vec = vec![ - AnyValue::Map(simple_map.clone()), - AnyValue::Map(empty_map.clone()), - AnyValue::ListAny(simple_vec.clone()), - AnyValue::ListAny(empty_vec.clone()), + let complex_vec = [ + AnyValue::Map(Box::new(simple_map.clone())), + AnyValue::Map(Box::new(empty_map.clone())), + AnyValue::ListAny(Box::new(simple_vec.clone())), + AnyValue::ListAny(Box::new(empty_vec.clone())), ]; let result = complex_vec.as_json_value(); assert_eq!(result, json!([{"a":1,"b":2},{},[1,2],[]])); let mut nested_complex_map = HashMap::new(); - nested_complex_map.insert(Key::new("a"), AnyValue::Map(complex_map.clone())); + nested_complex_map.insert(Key::new("a"), AnyValue::Map(Box::new(complex_map.clone()))); let result = nested_complex_map.as_json_value(); assert_eq!( result, diff --git a/opentelemetry-etw-logs/src/logs/exporter.rs b/opentelemetry-etw-logs/src/logs/exporter.rs index ab14a052..e07fbd20 100644 --- a/opentelemetry-etw-logs/src/logs/exporter.rs +++ b/opentelemetry-etw-logs/src/logs/exporter.rs @@ -173,14 +173,14 @@ impl ETWExporter { pub(crate) fn export_log_data( &self, - log_data: &opentelemetry_sdk::export::logs::LogData, + log_record: &opentelemetry_sdk::logs::LogRecord, + instrumentation: &opentelemetry::InstrumentationLibrary, ) -> opentelemetry_sdk::export::logs::ExportResult { - let level = - self.get_severity_level(log_data.record.severity_number.unwrap_or(Severity::Debug)); + let level = self.get_severity_level(log_record.severity_number.unwrap_or(Severity::Debug)); let keyword = match self .exporter_config - .get_log_keyword_or_default(log_data.instrumentation.name.as_ref()) + .get_log_keyword_or_default(instrumentation.name.as_ref()) { Some(keyword) => keyword, _ => return Ok(()), @@ -199,11 +199,11 @@ impl ETWExporter { event.add_u16("__csver__", 0x0401u16, tld::OutType::Hex, field_tag); - self.populate_part_a(&mut event, log_data, field_tag); + self.populate_part_a(&mut event, log_record, field_tag); - let (event_id, event_name) = self.populate_part_c(&mut event, log_data, field_tag); + let (event_id, event_name) = self.populate_part_c(&mut event, log_record, field_tag); - self.populate_part_b(&mut event, log_data, level, event_id, event_name); + self.populate_part_b(&mut event, log_record, level, event_id, event_name); // Write event to ETW let result = event.write(&self.provider, None, None); @@ -217,13 +217,12 @@ impl ETWExporter { fn populate_part_a( &self, event: &mut tld::EventBuilder, - log_data: &opentelemetry_sdk::export::logs::LogData, + log_record: &opentelemetry_sdk::logs::LogRecord, field_tag: u32, ) { - let event_time: SystemTime = log_data - .record + let event_time: SystemTime = log_record .timestamp - .or(log_data.record.observed_timestamp) + .or(log_record.observed_timestamp) .unwrap_or_else(SystemTime::now); const COUNT_TIME: u8 = 1u8; @@ -238,7 +237,7 @@ impl ETWExporter { fn populate_part_b( &self, event: &mut tld::EventBuilder, - log_data: &opentelemetry_sdk::export::logs::LogData, + log_record: &opentelemetry_sdk::logs::LogRecord, level: tld::Level, event_id: Option, event_name: Option<&str>, @@ -249,8 +248,8 @@ impl ETWExporter { let field_count = COUNT_TYPE_NAME + COUNT_SEVERITY_NUMBER - + log_data.record.body.is_some() as u8 - + 
log_data.record.severity_text.is_some() as u8 + + log_record.body.is_some() as u8 + + log_record.severity_text.is_some() as u8 + event_id.is_some() as u8 + event_name.is_some() as u8; @@ -260,19 +259,14 @@ impl ETWExporter { // Fill fields of PartB struct event.add_str8("_typeName", "Logs", tld::OutType::Default, 0); - if let Some(body) = log_data.record.body.clone() { + if let Some(body) = log_record.body.clone() { add_attribute_to_event(event, &Key::new("body"), &body); } event.add_u8("severityNumber", level.as_int(), tld::OutType::Default, 0); - if let Some(severity_text) = &log_data.record.severity_text { - event.add_str8( - "severityText", - severity_text.as_ref(), - tld::OutType::Default, - 0, - ); + if let Some(severity_text) = &log_record.severity_text { + event.add_str8("severityText", severity_text, tld::OutType::Default, 0); } if let Some(event_id) = event_id { @@ -287,55 +281,52 @@ impl ETWExporter { fn populate_part_c<'a>( &'a self, event: &mut tld::EventBuilder, - log_data: &'a opentelemetry_sdk::export::logs::LogData, + log_record: &'a opentelemetry_sdk::logs::LogRecord, field_tag: u32, ) -> (Option, Option<&str>) { //populate CS PartC let mut event_id: Option = None; let mut event_name: Option<&str> = None; - if let Some(attr_list) = &log_data.record.attributes { - let mut cs_c_count = 0; - + let mut cs_c_count = 0; + for (key, value) in log_record.attributes_iter() { // find if we have PartC and its information - for (key, value) in attr_list.iter() { - match (key.as_str(), &value) { - (EVENT_ID, AnyValue::Int(value)) => { - event_id = Some(*value); - continue; - } - (EVENT_NAME_PRIMARY, AnyValue::String(value)) => { + match (key.as_str(), &value) { + (EVENT_ID, AnyValue::Int(value)) => { + event_id = Some(*value); + continue; + } + (EVENT_NAME_PRIMARY, AnyValue::String(value)) => { + event_name = Some(value.as_str()); + continue; + } + (EVENT_NAME_SECONDARY, AnyValue::String(value)) => { + if event_name.is_none() { event_name = Some(value.as_str()); - continue; - } - (EVENT_NAME_SECONDARY, AnyValue::String(value)) => { - if event_name.is_none() { - event_name = Some(value.as_str()); - } - continue; - } - _ => { - cs_c_count += 1; } + continue; + } + _ => { + cs_c_count += 1; } } + } + + // If there are additional PartC attributes, add them to the event + if cs_c_count > 0 { + event.add_struct("PartC", cs_c_count, field_tag); - if cs_c_count > 0 { - event.add_struct("PartC", cs_c_count, field_tag); - - for (key, value) in attr_list.iter() { - match (key.as_str(), &value) { - (EVENT_ID, _) | (EVENT_NAME_PRIMARY, _) | (EVENT_NAME_SECONDARY, _) => { - continue; - } - _ => { - add_attribute_to_event(event, key, value); - } + for (key, value) in log_record.attributes_iter() { + match (key.as_str(), &value) { + (EVENT_ID, _) | (EVENT_NAME_PRIMARY, _) | (EVENT_NAME_SECONDARY, _) => { + continue; + } + _ => { + add_attribute_to_event(event, key, value); } } } } - (event_id, event_name) } } @@ -348,12 +339,12 @@ impl Debug for ETWExporter { #[async_trait] impl opentelemetry_sdk::export::logs::LogExporter for ETWExporter { - async fn export<'a>( + async fn export( &mut self, - batch: Vec>, + batch: opentelemetry_sdk::export::logs::LogBatch<'_>, ) -> opentelemetry::logs::LogResult<()> { - for log_data in batch { - let _ = self.export_log_data(&log_data); + for (log_record, instrumentation) in batch.iter() { + let _ = self.export_log_data(log_record, instrumentation); } Ok(()) } @@ -417,7 +408,6 @@ fn add_attribute_to_event(event: &mut tld::EventBuilder, key: &Key, value: &AnyV mod 
tests { use super::*; use opentelemetry::logs::Severity; - use opentelemetry_sdk::export::logs::LogData; #[test] fn test_export_log_data() { @@ -427,13 +417,10 @@ mod tests { None, ExporterConfig::default(), ); + let record = Default::default(); + let instrumentation = Default::default(); - let log_data = LogData { - instrumentation: Default::default(), - record: Default::default(), - }; - - let result = exporter.export_log_data(&log_data); + let result = exporter.export_log_data(&record, &instrumentation); assert!(result.is_ok()); } diff --git a/opentelemetry-etw-logs/src/logs/reentrant_logprocessor.rs b/opentelemetry-etw-logs/src/logs/reentrant_logprocessor.rs index 95627245..7057a717 100644 --- a/opentelemetry-etw-logs/src/logs/reentrant_logprocessor.rs +++ b/opentelemetry-etw-logs/src/logs/reentrant_logprocessor.rs @@ -1,7 +1,8 @@ use std::fmt::Debug; use opentelemetry::logs::LogResult; -use opentelemetry_sdk::export::logs::LogData; +use opentelemetry::InstrumentationLibrary; +use opentelemetry_sdk::logs::LogRecord; #[cfg(feature = "logs_level_enabled")] use opentelemetry_sdk::export::logs::LogExporter; @@ -32,8 +33,8 @@ impl ReentrantLogProcessor { } impl opentelemetry_sdk::logs::LogProcessor for ReentrantLogProcessor { - fn emit(&self, data: &mut LogData) { - _ = self.event_exporter.export_log_data(data); + fn emit(&self, data: &mut LogRecord, instrumentation: &InstrumentationLibrary) { + _ = self.event_exporter.export_log_data(data, instrumentation); } // This is a no-op as this processor doesn't keep anything @@ -97,11 +98,8 @@ mod tests { ExporterConfig::default(), ); - let mut log_data = LogData { - instrumentation: Default::default(), - record: Default::default(), - }; - - processor.emit(&mut log_data); + let mut record = Default::default(); + let instrumentation = Default::default(); + processor.emit(&mut record, &instrumentation); } } diff --git a/opentelemetry-etw-metrics/src/exporter/mod.rs b/opentelemetry-etw-metrics/src/exporter/mod.rs index 84083d91..14d8bc59 100644 --- a/opentelemetry-etw-metrics/src/exporter/mod.rs +++ b/opentelemetry-etw-metrics/src/exporter/mod.rs @@ -4,7 +4,10 @@ use opentelemetry::{ }; use opentelemetry_proto::tonic::collector::metrics::v1::ExportMetricsServiceRequest; use opentelemetry_sdk::metrics::{ - data::{ResourceMetrics, Temporality}, + data::{ + self, ExponentialBucket, ExponentialHistogramDataPoint, Metric, ResourceMetrics, + ScopeMetrics, Temporality, + }, exporter::PushMetricsExporter, reader::{AggregationSelector, DefaultAggregationSelector, TemporalitySelector}, Aggregation, InstrumentKind, @@ -63,32 +66,278 @@ impl Debug for MetricsExporter { #[async_trait] impl PushMetricsExporter for MetricsExporter { async fn export(&self, metrics: &mut ResourceMetrics) -> Result<()> { - let proto_message: ExportMetricsServiceRequest = (&*metrics).into(); - - let mut byte_array = Vec::new(); - proto_message - .encode(&mut byte_array) - .map_err(|err| MetricsError::Other(err.to_string()))?; - - if (byte_array.len()) > etw::MAX_EVENT_SIZE { - global::handle_error(MetricsError::Other(format!( - "Exporting failed due to event size {} exceeding the maximum size of {} bytes", - byte_array.len(), - etw::MAX_EVENT_SIZE - ))); - } else { - let result = etw::write(&byte_array); - // TODO: Better logging/internal metrics needed here for non-failure - // case Uncomment the line below to see the exported bytes until a - // better logging solution is implemented - // println!("Exported {} bytes to ETW", byte_array.len()); - if result != 0 { - 
global::handle_error(MetricsError::Other(format!( - "Failed to write ETW event with error code: {}", - result - ))); + for scope_metric in &metrics.scope_metrics { + for metric in &scope_metric.metrics { + let mut resource_metrics = Vec::new(); + + let data = &metric.data.as_any(); + if let Some(hist) = data.downcast_ref::>() { + for data_point in &hist.data_points { + let resource_metric = ResourceMetrics { + resource: metrics.resource.clone(), + scope_metrics: vec![ScopeMetrics { + scope: scope_metric.scope.clone(), + metrics: vec![Metric { + name: metric.name.clone(), + description: metric.description.clone(), + unit: metric.unit.clone(), + data: Box::new(data::Histogram { + temporality: hist.temporality, + data_points: vec![data_point.clone()], + }), + }], + }], + }; + resource_metrics.push(resource_metric); + } + } else if let Some(hist) = data.downcast_ref::>() { + for data_point in &hist.data_points { + let resource_metric = ResourceMetrics { + resource: metrics.resource.clone(), + scope_metrics: vec![ScopeMetrics { + scope: scope_metric.scope.clone(), + metrics: vec![Metric { + name: metric.name.clone(), + description: metric.description.clone(), + unit: metric.unit.clone(), + data: Box::new(data::Histogram { + temporality: hist.temporality, + data_points: vec![data_point.clone()], + }), + }], + }], + }; + resource_metrics.push(resource_metric); + } + } else if let Some(hist) = data.downcast_ref::>() { + for data_point in &hist.data_points { + let resource_metric = ResourceMetrics { + resource: metrics.resource.clone(), + scope_metrics: vec![ScopeMetrics { + scope: scope_metric.scope.clone(), + metrics: vec![Metric { + name: metric.name.clone(), + description: metric.description.clone(), + unit: metric.unit.clone(), + data: Box::new(data::ExponentialHistogram { + temporality: hist.temporality, + data_points: vec![ExponentialHistogramDataPoint { + attributes: data_point.attributes.clone(), + count: data_point.count, + start_time: data_point.start_time, + time: data_point.time, + min: data_point.min, + max: data_point.max, + sum: data_point.sum, + scale: data_point.scale, + zero_count: data_point.zero_count, + zero_threshold: data_point.zero_threshold, + positive_bucket: ExponentialBucket { + offset: data_point.positive_bucket.offset, + counts: data_point.positive_bucket.counts.clone(), + }, + negative_bucket: ExponentialBucket { + offset: data_point.negative_bucket.offset, + counts: data_point.negative_bucket.counts.clone(), + }, + exemplars: data_point.exemplars.clone(), + }], + }), + }], + }], + }; + resource_metrics.push(resource_metric); + } + } else if let Some(hist) = data.downcast_ref::>() { + for data_point in &hist.data_points { + let resource_metric = ResourceMetrics { + resource: metrics.resource.clone(), + scope_metrics: vec![ScopeMetrics { + scope: scope_metric.scope.clone(), + metrics: vec![Metric { + name: metric.name.clone(), + description: metric.description.clone(), + unit: metric.unit.clone(), + data: Box::new(data::ExponentialHistogram { + temporality: hist.temporality, + data_points: vec![ExponentialHistogramDataPoint { + attributes: data_point.attributes.clone(), + count: data_point.count, + start_time: data_point.start_time, + time: data_point.time, + min: data_point.min, + max: data_point.max, + sum: data_point.sum, + scale: data_point.scale, + zero_count: data_point.zero_count, + zero_threshold: data_point.zero_threshold, + positive_bucket: ExponentialBucket { + offset: data_point.positive_bucket.offset, + counts: data_point.positive_bucket.counts.clone(), + 
}, + negative_bucket: ExponentialBucket { + offset: data_point.negative_bucket.offset, + counts: data_point.negative_bucket.counts.clone(), + }, + exemplars: data_point.exemplars.clone(), + }], + }), + }], + }], + }; + resource_metrics.push(resource_metric); + } + } else if let Some(sum) = data.downcast_ref::>() { + for data_point in &sum.data_points { + let resource_metric = ResourceMetrics { + resource: metrics.resource.clone(), + scope_metrics: vec![ScopeMetrics { + scope: scope_metric.scope.clone(), + metrics: vec![Metric { + name: metric.name.clone(), + description: metric.description.clone(), + unit: metric.unit.clone(), + data: Box::new(data::Sum { + temporality: sum.temporality, + data_points: vec![data_point.clone()], + is_monotonic: sum.is_monotonic, + }), + }], + }], + }; + resource_metrics.push(resource_metric); + } + } else if let Some(sum) = data.downcast_ref::>() { + for data_point in &sum.data_points { + let resource_metric = ResourceMetrics { + resource: metrics.resource.clone(), + scope_metrics: vec![ScopeMetrics { + scope: scope_metric.scope.clone(), + metrics: vec![Metric { + name: metric.name.clone(), + description: metric.description.clone(), + unit: metric.unit.clone(), + data: Box::new(data::Sum { + temporality: sum.temporality, + data_points: vec![data_point.clone()], + is_monotonic: sum.is_monotonic, + }), + }], + }], + }; + resource_metrics.push(resource_metric); + } + } else if let Some(sum) = data.downcast_ref::>() { + for data_point in &sum.data_points { + let resource_metric = ResourceMetrics { + resource: metrics.resource.clone(), + scope_metrics: vec![ScopeMetrics { + scope: scope_metric.scope.clone(), + metrics: vec![Metric { + name: metric.name.clone(), + description: metric.description.clone(), + unit: metric.unit.clone(), + data: Box::new(data::Sum { + temporality: sum.temporality, + data_points: vec![data_point.clone()], + is_monotonic: sum.is_monotonic, + }), + }], + }], + }; + resource_metrics.push(resource_metric); + } + } else if let Some(gauge) = data.downcast_ref::>() { + for data_point in &gauge.data_points { + let resource_metric = ResourceMetrics { + resource: metrics.resource.clone(), + scope_metrics: vec![ScopeMetrics { + scope: scope_metric.scope.clone(), + metrics: vec![Metric { + name: metric.name.clone(), + description: metric.description.clone(), + unit: metric.unit.clone(), + data: Box::new(data::Gauge { + data_points: vec![data_point.clone()], + }), + }], + }], + }; + resource_metrics.push(resource_metric); + } + } else if let Some(gauge) = data.downcast_ref::>() { + for data_point in &gauge.data_points { + let resource_metric = ResourceMetrics { + resource: metrics.resource.clone(), + scope_metrics: vec![ScopeMetrics { + scope: scope_metric.scope.clone(), + metrics: vec![Metric { + name: metric.name.clone(), + description: metric.description.clone(), + unit: metric.unit.clone(), + data: Box::new(data::Gauge { + data_points: vec![data_point.clone()], + }), + }], + }], + }; + resource_metrics.push(resource_metric); + } + } else if let Some(gauge) = data.downcast_ref::>() { + for data_point in &gauge.data_points { + let resource_metric = ResourceMetrics { + resource: metrics.resource.clone(), + scope_metrics: vec![ScopeMetrics { + scope: scope_metric.scope.clone(), + metrics: vec![Metric { + name: metric.name.clone(), + description: metric.description.clone(), + unit: metric.unit.clone(), + data: Box::new(data::Gauge { + data_points: vec![data_point.clone()], + }), + }], + }], + }; + resource_metrics.push(resource_metric); + } + } else 
{ + global::handle_error(MetricsError::Other(format!( + "Unsupported aggregation type: {:?}", + data + ))); + } + + for resource_metric in resource_metrics { + let mut byte_array = Vec::new(); + let proto_message: ExportMetricsServiceRequest = (&resource_metric).into(); + proto_message + .encode(&mut byte_array) + .map_err(|err| MetricsError::Other(err.to_string()))?; + + if (byte_array.len()) > etw::MAX_EVENT_SIZE { + global::handle_error(MetricsError::Other(format!( + "Exporting failed due to event size {} exceeding the maximum size of {} bytes", + byte_array.len(), + etw::MAX_EVENT_SIZE + ))); + } else { + let result = etw::write(&byte_array); + // TODO: Better logging/internal metrics needed here for non-failure + // case Uncomment the line below to see the exported bytes until a + // better logging solution is implemented + // println!("Exported {} bytes to ETW", byte_array.len()); + if result != 0 { + global::handle_error(MetricsError::Other(format!( + "Failed to write ETW event with error code: {}", + result + ))); + } + } + } } } + Ok(()) } @@ -102,3 +351,137 @@ impl PushMetricsExporter for MetricsExporter { Ok(()) } } + +#[cfg(test)] +mod tests { + use opentelemetry::{metrics::MeterProvider as _, KeyValue}; + use opentelemetry_sdk::{ + metrics::{PeriodicReader, SdkMeterProvider}, + runtime, Resource, + }; + + use crate::etw; + + #[tokio::test(flavor = "multi_thread")] + async fn emit_metrics_that_combined_exceed_etw_max_event_size() { + let exporter = super::MetricsExporter::new(); + let reader = PeriodicReader::builder(exporter, runtime::Tokio).build(); + let meter_provider = SdkMeterProvider::builder() + .with_resource(Resource::new(vec![KeyValue::new( + "service.name", + "service-name", + )])) + .with_reader(reader) + .build(); + + let meter = meter_provider.meter("user-event-test"); + + let u64_histogram = meter + .u64_histogram("Testu64Histogram") + .with_description("u64_histogram_test_description") + .with_unit("u64_histogram_test_unit") + .init(); + + let f64_histogram = meter + .f64_histogram("TestHistogram") + .with_description("f64_histogram_test_description") + .with_unit("f64_histogram_test_unit") + .init(); + + let u64_counter = meter + .u64_counter("Testu64Counter") + .with_description("u64_counter_test_description") + .with_unit("u64_counter_test_units") + .init(); + + let f64_counter = meter + .f64_counter("Testf64Counter") + .with_description("f64_counter_test_description") + .with_unit("f64_counter_test_units") + .init(); + + let i64_counter = meter + .i64_up_down_counter("Testi64Counter") + .with_description("i64_counter_test_description") + .with_unit("i64_counter_test_units") + .init(); + + let u64_gauge = meter + .u64_gauge("Testu64Gauge") + .with_description("u64_gauge_test_description") + .with_unit("u64_gauge_test_unit") + .init(); + + let i64_gauge = meter + .i64_gauge("Testi64Gauge") + .with_description("i64_gauge_test_description") + .with_unit("i64_gauge_test_unit") + .init(); + + let f64_gauge = meter + .f64_gauge("Testf64Gauge") + .with_description("f64_gauge_test_description") + .with_unit("f64_gauge_test_unit") + .init(); + + // Create a key that is 1/10th the size of the MAX_EVENT_SIZE + let key_size = etw::MAX_EVENT_SIZE / 10; + let large_key = "a".repeat(key_size); + + for index in 0..11 { + u64_histogram.record( + 1, + [KeyValue::new(large_key.clone(), format!("{index}"))].as_ref(), + ); + } + + for index in 0..11 { + f64_histogram.record( + 1.0, + [KeyValue::new(large_key.clone(), format!("{index}"))].as_ref(), + ); + } + + for index in 0..11 
{ + u64_counter.add( + 1, + [KeyValue::new(large_key.clone(), format!("{index}"))].as_ref(), + ); + } + + for index in 0..11 { + f64_counter.add( + 1.0, + [KeyValue::new(large_key.clone(), format!("{index}"))].as_ref(), + ); + } + + for index in 0..11 { + i64_counter.add( + 1, + [KeyValue::new(large_key.clone(), format!("{index}"))].as_ref(), + ); + } + + for index in 0..11 { + u64_gauge.record( + 1, + [KeyValue::new(large_key.clone(), format!("{index}"))].as_ref(), + ); + } + + for index in 0..11 { + i64_gauge.record( + 1, + [KeyValue::new(large_key.clone(), format!("{index}"))].as_ref(), + ); + } + + for index in 0..11 { + f64_gauge.record( + 1.0, + [KeyValue::new(large_key.clone(), format!("{index}"))].as_ref(), + ); + } + } +} diff --git a/opentelemetry-resource-detectors/CHANGELOG.md b/opentelemetry-resource-detectors/CHANGELOG.md index 1b4e1c90..82bd9b8b 100644 --- a/opentelemetry-resource-detectors/CHANGELOG.md +++ b/opentelemetry-resource-detectors/CHANGELOG.md @@ -2,6 +2,13 @@ ## vNext +## v0.4.0 + +### Changed + +- Bump opentelemetry and opentelemetry_sdk versions to 0.25 +- Bump opentelemetry-semantic-conventions version to 0.25 + ## v0.3.0 ### Changed diff --git a/opentelemetry-resource-detectors/Cargo.toml b/opentelemetry-resource-detectors/Cargo.toml index afeb2c6f..3580ed62 100644 --- a/opentelemetry-resource-detectors/Cargo.toml +++ b/opentelemetry-resource-detectors/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "opentelemetry-resource-detectors" -version = "0.3.0" +version = "0.4.0" edition = "2021" description = "A collection of community supported resource detectors for OpenTelemetry" homepage = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-resource-detectors" diff --git a/opentelemetry-stackdriver/tests/generate.rs b/opentelemetry-stackdriver/tests/generate.rs index c138ab41..d32c21a4 100644 --- a/opentelemetry-stackdriver/tests/generate.rs +++ b/opentelemetry-stackdriver/tests/generate.rs @@ -41,7 +41,6 @@ async fn sync_schemas() { /// This test will fail if the code currently in the repository is different from the /// newly generated code, and will update it in place in that case. #[test] -#[ignore] fn generated_code_is_fresh() { // Generate code into a temporary directory. 
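The log-exporter changes above (opentelemetry-etw-logs) and the opentelemetry-user-events-logs changes below follow the same 0.25 pattern: `LogExporter::export` now receives a borrowed `LogBatch` of `(LogRecord, InstrumentationLibrary)` pairs instead of `Vec<Cow<LogData>>`, `LogProcessor::emit` gets the record and instrumentation scope separately, and attributes are read through `LogRecord::attributes_iter()`. Below is a minimal sketch of a custom exporter against that surface, assuming the 0.25 crates and the `async-trait` crate (as the exporters in this diff use); the struct name and the printed output are illustrative only, not part of this change set.

```rust
use async_trait::async_trait;
use opentelemetry::logs::LogResult;
use opentelemetry_sdk::export::logs::{LogBatch, LogExporter};

#[derive(Debug, Default)]
struct PrintExporter;

#[async_trait]
impl LogExporter for PrintExporter {
    // 0.25: a borrowed batch of (record, instrumentation scope) pairs replaces
    // the owned Vec<Cow<'a, LogData>> of 0.24.
    async fn export(&mut self, batch: LogBatch<'_>) -> LogResult<()> {
        for (record, instrumentation) in batch.iter() {
            // Attributes are no longer an Option<Vec<_>> field on the record;
            // they are reached through attributes_iter().
            let attribute_count = record.attributes_iter().count();
            println!(
                "scope={} has_body={} attributes={}",
                instrumentation.name.as_ref(),
                record.body.is_some(),
                attribute_count
            );
        }
        Ok(())
    }
}
```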
diff --git a/opentelemetry-user-events-logs/CHANGELOG.md b/opentelemetry-user-events-logs/CHANGELOG.md index dccaf162..cd9c975f 100644 --- a/opentelemetry-user-events-logs/CHANGELOG.md +++ b/opentelemetry-user-events-logs/CHANGELOG.md @@ -2,6 +2,12 @@ ## vNext +## v0.6.0 + +### Changed + +- Bump opentelemetry and opentelemetry_sdk versions to 0.25 + ## v0.5.0 - **BREAKING** Decouple Exporter creation with the Reentrant processor [#82](https://github.com/open-telemetry/opentelemetry-rust-contrib/pull/82) diff --git a/opentelemetry-user-events-logs/Cargo.toml b/opentelemetry-user-events-logs/Cargo.toml index 0c745f7f..723d893d 100644 --- a/opentelemetry-user-events-logs/Cargo.toml +++ b/opentelemetry-user-events-logs/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "opentelemetry-user-events-logs" description = "OpenTelemetry-Rust exporter to userevents" -version = "0.5.0" +version = "0.6.0" edition = "2021" homepage = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-user-events-logs" repository = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-user-events-logs" diff --git a/opentelemetry-user-events-logs/src/logs/exporter.rs b/opentelemetry-user-events-logs/src/logs/exporter.rs index cb0eafda..75b0c651 100644 --- a/opentelemetry-user-events-logs/src/logs/exporter.rs +++ b/opentelemetry-user-events-logs/src/logs/exporter.rs @@ -106,9 +106,9 @@ impl UserEventsExporter { } } - fn add_attribute_to_event(&self, eb: &mut EventBuilder, attrib: &(Key, AnyValue)) { - let field_name = &attrib.0.to_string(); - match attrib.1.to_owned() { + fn add_attribute_to_event(&self, eb: &mut EventBuilder, (key, value): (&Key, &AnyValue)) { + let field_name = key.as_str(); + match value.to_owned() { AnyValue::Boolean(b) => { eb.add_value(field_name, b, FieldFormat::Boolean, 0); } @@ -166,16 +166,17 @@ impl UserEventsExporter { pub(crate) fn export_log_data( &self, - log_data: &opentelemetry_sdk::export::logs::LogData, + log_record: &opentelemetry_sdk::logs::LogRecord, + instrumentation: &opentelemetry::InstrumentationLibrary, ) -> opentelemetry_sdk::export::logs::ExportResult { let mut level: Level = Level::Invalid; - if log_data.record.severity_number.is_some() { - level = self.get_severity_level(log_data.record.severity_number.unwrap()); + if log_record.severity_number.is_some() { + level = self.get_severity_level(log_record.severity_number.unwrap()); } let keyword = self .exporter_config - .get_log_keyword_or_default(log_data.instrumentation.name.as_ref()); + .get_log_keyword_or_default(instrumentation.name.as_ref()); if keyword.is_none() { return Ok(()); @@ -193,17 +194,16 @@ impl UserEventsExporter { EBW.with(|eb| { let mut eb = eb.borrow_mut(); let event_tags: u32 = 0; // TBD name and event_tag values - eb.reset(log_data.instrumentation.name.as_ref(), event_tags as u16); + eb.reset(instrumentation.name.as_ref(), event_tags as u16); eb.opcode(Opcode::Info); eb.add_value("__csver__", 0x0401u16, FieldFormat::HexInt, 0); // populate CS PartA let mut cs_a_count = 0; - let event_time: SystemTime = log_data - .record + let event_time: SystemTime = log_record .timestamp - .or(log_data.record.observed_timestamp) + .or(log_record.observed_timestamp) .unwrap_or_else(SystemTime::now); cs_a_count += 1; // for event_time eb.add_struct("PartA", cs_a_count, 0); @@ -216,35 +216,33 @@ impl UserEventsExporter { //populate CS PartC let (mut is_event_id, mut event_id) = (false, 0); let (mut is_event_name, mut event_name) = (false, ""); - - if let 
Some(attr_list) = &log_data.record.attributes { - let (mut is_part_c_present, mut cs_c_bookmark, mut cs_c_count) = (false, 0, 0); - for attrib in attr_list.iter() { - match (attrib.0.as_str(), &attrib.1) { - (EVENT_ID, AnyValue::Int(value)) => { - is_event_id = true; - event_id = *value; - continue; - } - (EVENT_NAME_PRIMARY, AnyValue::String(value)) => { - is_event_name = true; + let (mut is_part_c_present, mut cs_c_bookmark, mut cs_c_count) = (false, 0, 0); + + for (key, value) in log_record.attributes_iter() { + match (key.as_str(), value) { + (EVENT_ID, AnyValue::Int(value)) => { + is_event_id = true; + event_id = *value; + continue; + } + (EVENT_NAME_PRIMARY, AnyValue::String(value)) => { + is_event_name = true; + event_name = value.as_str(); + continue; + } + (EVENT_NAME_SECONDARY, AnyValue::String(value)) => { + if !is_event_name { event_name = value.as_str(); - continue; - } - (EVENT_NAME_SECONDARY, AnyValue::String(value)) => { - if !is_event_name { - event_name = value.as_str(); - } - continue; } - _ => { - if !is_part_c_present { - eb.add_struct_with_bookmark("PartC", 1, 0, &mut cs_c_bookmark); - is_part_c_present = true; - } - self.add_attribute_to_event(&mut eb, attrib); - cs_c_count += 1; + continue; + } + _ => { + if !is_part_c_present { + eb.add_struct_with_bookmark("PartC", 1, 0, &mut cs_c_bookmark); + is_part_c_present = true; } + self.add_attribute_to_event(&mut eb, (key, value)); + cs_c_count += 1; } } @@ -259,10 +257,10 @@ impl UserEventsExporter { eb.add_str("_typeName", "Logs", FieldFormat::Default, 0); cs_b_count += 1; - if log_data.record.body.is_some() { + if log_record.body.is_some() { eb.add_str( "body", - match log_data.record.body.as_ref().unwrap() { + match log_record.body.as_ref().unwrap() { AnyValue::Int(value) => value.to_string(), AnyValue::String(value) => value.to_string(), AnyValue::Boolean(value) => value.to_string(), @@ -280,10 +278,10 @@ impl UserEventsExporter { eb.add_value("severityNumber", level.as_int(), FieldFormat::SignedInt, 0); cs_b_count += 1; } - if log_data.record.severity_text.is_some() { + if log_record.severity_text.is_some() { eb.add_str( "severityText", - log_data.record.severity_text.as_ref().unwrap().as_ref(), + log_record.severity_text.as_ref().unwrap(), FieldFormat::SignedInt, 0, ); @@ -315,12 +313,12 @@ impl Debug for UserEventsExporter { #[async_trait] impl opentelemetry_sdk::export::logs::LogExporter for UserEventsExporter { - async fn export<'a>( + async fn export( &mut self, - batch: Vec>, + batch: opentelemetry_sdk::export::logs::LogBatch<'_>, ) -> opentelemetry::logs::LogResult<()> { - for log_data in batch { - let _ = self.export_log_data(&log_data); + for (record, instrumentation) in batch.iter() { + let _ = self.export_log_data(record, instrumentation); } Ok(()) } diff --git a/opentelemetry-user-events-logs/src/logs/reentrant_logprocessor.rs b/opentelemetry-user-events-logs/src/logs/reentrant_logprocessor.rs index 3e913443..3cada92a 100644 --- a/opentelemetry-user-events-logs/src/logs/reentrant_logprocessor.rs +++ b/opentelemetry-user-events-logs/src/logs/reentrant_logprocessor.rs @@ -1,7 +1,6 @@ use std::fmt::Debug; use opentelemetry::logs::LogResult; -use opentelemetry_sdk::export::logs::LogData; #[cfg(feature = "logs_level_enabled")] use opentelemetry_sdk::export::logs::LogExporter; @@ -27,8 +26,12 @@ impl ReentrantLogProcessor { } impl opentelemetry_sdk::logs::LogProcessor for ReentrantLogProcessor { - fn emit(&self, data: &mut LogData) { - _ = self.event_exporter.export_log_data(data); + fn emit( + &self, + 
record: &mut opentelemetry_sdk::logs::LogRecord, + instrumentation: &opentelemetry::InstrumentationLibrary, + ) { + _ = self.event_exporter.export_log_data(record, instrumentation); } // This is a no-op as this processor doesn't keep anything diff --git a/opentelemetry-user-events-metrics/CHANGELOG.md b/opentelemetry-user-events-metrics/CHANGELOG.md index 20007c4d..6c3b2e9e 100644 --- a/opentelemetry-user-events-metrics/CHANGELOG.md +++ b/opentelemetry-user-events-metrics/CHANGELOG.md @@ -2,6 +2,13 @@ ## vNext +## v0.6.0 + +### Changed + +- Bump opentelemetry and opentelemetry_sdk versions to 0.25, + opentelemetry-proto version to 0.25. + ## v0.5.0 ### Changed diff --git a/opentelemetry-user-events-metrics/Cargo.toml b/opentelemetry-user-events-metrics/Cargo.toml index cddc0132..7b212595 100644 --- a/opentelemetry-user-events-metrics/Cargo.toml +++ b/opentelemetry-user-events-metrics/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "opentelemetry-user-events-metrics" -version = "0.5.0" +version = "0.6.0" description = "OpenTelemetry metrics exporter to user events" homepage = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-user-events-metrics" repository = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-user-events-metrics" diff --git a/opentelemetry-user-events-metrics/examples/basic-metrics.rs b/opentelemetry-user-events-metrics/examples/basic-metrics.rs index 232e6581..80eee892 100644 --- a/opentelemetry-user-events-metrics/examples/basic-metrics.rs +++ b/opentelemetry-user-events-metrics/examples/basic-metrics.rs @@ -55,109 +55,97 @@ async fn main() -> Result<(), Box> { .init(); // Create a ObservableGauge instrument and register a callback that reports the measurement. - let gauge = meter + let _gauge = meter .f64_observable_gauge("observable_gauge_f64_test") .with_unit("test_unit") .with_description("test_description") + .with_callback(|observer| { + observer.observe( + 1.0, + &[ + KeyValue::new("mykey1", "myvalue1"), + KeyValue::new("mykey2", "myvalue2"), + ], + ) + }) .init(); - let gauge2 = meter + let _gauge2 = meter .u64_observable_gauge("observable_gauge_u64_test") .with_unit("test_unit") .with_description("test_description") + .with_callback(|observer| { + observer.observe( + 1, + &[ + KeyValue::new("mykey1", "myvalue1"), + KeyValue::new("mykey2", "myvalue2"), + ], + ) + }) .init(); - meter.register_callback(&[gauge.as_any()], move |observer| { - observer.observe_f64( - &gauge, - 1.0, - &[ - KeyValue::new("mykey1", "myvalue1"), - KeyValue::new("mykey2", "myvalue2"), - ], - ) - })?; - - meter.register_callback(&[gauge2.as_any()], move |observer| { - observer.observe_u64( - &gauge2, - 1, - &[ - KeyValue::new("mykey1", "myvalue1"), - KeyValue::new("mykey2", "myvalue2"), - ], - ) - })?; - // Create a ObservableCounter instrument and register a callback that reports the measurement. 
- let observable_counter = meter + let _observable_counter = meter .u64_observable_counter("observable_counter_u64_test") .with_description("test_description") .with_unit("test_unit") + .with_callback(|observer| { + observer.observe( + 100, + &[ + KeyValue::new("mykey1", "myvalue1"), + KeyValue::new("mykey2", "myvalue2"), + ], + ) + }) .init(); - let observable_counter2 = meter + let _observable_counter2 = meter .f64_observable_counter("observable_counter_f64_test") .with_description("test_description") .with_unit("test_unit") + .with_callback(|observer| { + observer.observe( + 100.0, + &[ + KeyValue::new("mykey1", "myvalue1"), + KeyValue::new("mykey2", "myvalue2"), + ], + ) + }) .init(); - meter.register_callback(&[observable_counter.as_any()], move |observer| { - observer.observe_u64( - &observable_counter, - 100, - &[ - KeyValue::new("mykey1", "myvalue1"), - KeyValue::new("mykey2", "myvalue2"), - ], - ) - })?; - - meter.register_callback(&[observable_counter2.as_any()], move |observer| { - observer.observe_f64( - &observable_counter2, - 100.0, - &[ - KeyValue::new("mykey1", "myvalue1"), - KeyValue::new("mykey2", "myvalue2"), - ], - ) - })?; - // Create a Observable UpDownCounter instrument and register a callback that reports the measurement. - let observable_up_down_counter = meter + let _observable_up_down_counter = meter .i64_observable_up_down_counter("observable_up_down_counter_i64_test") .with_description("test_description") .with_unit("test_unit") + .with_callback(|observer| { + observer.observe( + 100, + &[ + KeyValue::new("mykey1", "myvalue1"), + KeyValue::new("mykey2", "myvalue2"), + ], + ) + }) .init(); - let observable_up_down_counter2 = meter + let _observable_up_down_counter2 = meter .f64_observable_up_down_counter("observable_up_down_counter_f64_test") .with_description("test_description") .with_unit("test_unit") + .with_callback(|observer| { + observer.observe( + 100.0, + &[ + KeyValue::new("mykey1", "myvalue1"), + KeyValue::new("mykey2", "myvalue2"), + ], + ) + }) .init(); - meter.register_callback(&[observable_up_down_counter.as_any()], move |observer| { - observer.observe_i64( - &observable_up_down_counter, - 100, - &[ - KeyValue::new("mykey1", "myvalue1"), - KeyValue::new("mykey2", "myvalue2"), - ], - ) - })?; - - meter.register_callback(&[observable_up_down_counter2.as_any()], move |observer| { - observer.observe_f64( - &observable_up_down_counter2, - 100.0, - &[ - KeyValue::new("mykey1", "myvalue1"), - KeyValue::new("mykey2", "myvalue2"), - ], - ) - })?; - loop { // Record measurements using the Counter instrument. counter.add( diff --git a/opentelemetry-zpages/CHANGELOG.md b/opentelemetry-zpages/CHANGELOG.md index f37320e2..25a89dfd 100644 --- a/opentelemetry-zpages/CHANGELOG.md +++ b/opentelemetry-zpages/CHANGELOG.md @@ -2,6 +2,12 @@ ## vNext +## v0.10.0 + +### Changed + +- Bump opentelemetry version to 0.25, opentelemetry_sdk version to 0.25 + ## v0.9.0 ### Changed diff --git a/opentelemetry-zpages/Cargo.toml b/opentelemetry-zpages/Cargo.toml index bc2b018c..e99c5f43 100644 --- a/opentelemetry-zpages/Cargo.toml +++ b/opentelemetry-zpages/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "opentelemetry-zpages" -version = "0.9.0" +version = "0.10.0" description = "ZPages implementation for OpenTelemetry" homepage = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-zpages" repository = "https://github.com/open-telemetry/opentelemetry-rust-contrib/tree/main/opentelemetry-zpages"
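For reference, the basic-metrics example changes above move from `meter.register_callback(...)` against instrument handles to attaching the callback on the instrument builder itself. The following is a minimal, self-contained sketch of that 0.25 builder surface, assuming opentelemetry / opentelemetry_sdk 0.25; the provider here has no reader attached (a `PeriodicReader` plus an exporter would be used in practice, as in the example) and the instrument and attribute names are made up.

```rust
use opentelemetry::{metrics::MeterProvider as _, KeyValue};
use opentelemetry_sdk::metrics::SdkMeterProvider;

fn main() {
    // No reader is attached here, so the callback never actually fires; this
    // only demonstrates the builder-based callback registration used in 0.25.
    let provider = SdkMeterProvider::builder().build();
    let meter = provider.meter("callback-example");

    // The callback is attached via with_callback() on the builder instead of
    // a separate meter.register_callback(&[instrument.as_any()], ...) call.
    let _observable_counter = meter
        .u64_observable_counter("example_observable_counter")
        .with_description("example_description")
        .with_unit("example_unit")
        .with_callback(|observer| {
            observer.observe(1, &[KeyValue::new("example_key", "example_value")]);
        })
        .init();
}
```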