diff --git a/examples/self-diagnostics/Cargo.toml b/examples/self-diagnostics/Cargo.toml
index 40e574a7e5..8e8b1cd394 100644
--- a/examples/self-diagnostics/Cargo.toml
+++ b/examples/self-diagnostics/Cargo.toml
@@ -7,13 +7,13 @@ publish = false
 
 [dependencies]
 opentelemetry = { path = "../../opentelemetry" }
-opentelemetry_sdk = { path = "../../opentelemetry-sdk", features = ["rt-tokio", "experimental-internal-logs"]}
+opentelemetry_sdk = { path = "../../opentelemetry-sdk", features = ["rt-tokio"]}
 opentelemetry-stdout = { path = "../../opentelemetry-stdout"}
 opentelemetry-appender-tracing = { path = "../../opentelemetry-appender-tracing"}
 tokio = { workspace = true, features = ["full"] }
 tracing = { workspace = true, features = ["std"]}
 tracing-core = { workspace = true }
 tracing-subscriber = { version = "0.3.18", features = ["env-filter","registry", "std"]}
-opentelemetry-otlp = { path = "../../opentelemetry-otlp", features = ["http-proto", "reqwest-client", "logs", "experimental-internal-logs"] }
+opentelemetry-otlp = { path = "../../opentelemetry-otlp", features = ["http-proto", "reqwest-client", "logs"] }
 once_cell ={ version = "1.19.0"}
 ctrlc = "3.4"
diff --git a/opentelemetry-appender-tracing/Cargo.toml b/opentelemetry-appender-tracing/Cargo.toml
index eb52a7c02a..5fffd11900 100644
--- a/opentelemetry-appender-tracing/Cargo.toml
+++ b/opentelemetry-appender-tracing/Cargo.toml
@@ -22,6 +22,7 @@ tracing-subscriber = { workspace = true, features = ["registry", "std"] }
 log = { workspace = true }
 opentelemetry-stdout = { path = "../opentelemetry-stdout", features = ["logs"] }
 opentelemetry_sdk = { path = "../opentelemetry-sdk", features = ["logs", "testing"] }
+tracing-subscriber = { workspace = true, features = ["registry", "std", "env-filter"] }
 tracing-log = "0.2"
 async-trait = { workspace = true }
 criterion = { workspace = true }
diff --git a/opentelemetry-appender-tracing/src/layer.rs b/opentelemetry-appender-tracing/src/layer.rs
index 8148f75069..1bfbef0fcf 100644
--- a/opentelemetry-appender-tracing/src/layer.rs
+++ b/opentelemetry-appender-tracing/src/layer.rs
@@ -217,7 +217,8 @@ mod tests {
     use opentelemetry_sdk::trace;
     use opentelemetry_sdk::trace::{Sampler, TracerProvider};
     use tracing::error;
-    use tracing_subscriber::layer::SubscriberExt;
+    use tracing_subscriber::prelude::__tracing_subscriber_SubscriberExt;
+    use tracing_subscriber::Layer;
 
     pub fn attributes_contains(log_record: &LogRecord, key: &Key, value: &AnyValue) -> bool {
         log_record
@@ -225,6 +226,17 @@ mod tests {
             .any(|(k, v)| k == key && v == value)
     }
 
+    fn create_tracing_subscriber(
+        _exporter: InMemoryLogsExporter,
+        logger_provider: &LoggerProvider,
+    ) -> impl tracing::Subscriber {
+        let level_filter = tracing_subscriber::filter::LevelFilter::WARN; // Capture WARN and ERROR levels
+        let layer =
+            layer::OpenTelemetryTracingBridge::new(logger_provider).with_filter(level_filter); // No filter based on target, only based on log level
+
+        tracing_subscriber::registry().with(layer)
+    }
+
     // cargo test --features=testing
     #[test]
     fn tracing_appender_standalone() {
@@ -234,8 +246,7 @@ mod tests {
             .with_simple_exporter(exporter.clone())
             .build();
 
-        let layer = layer::OpenTelemetryTracingBridge::new(&logger_provider);
-        let subscriber = tracing_subscriber::registry().with(layer);
+        let subscriber = create_tracing_subscriber(exporter.clone(), &logger_provider);
 
         // avoiding setting tracing subscriber as global as that does not
         // play well with unit tests.
@@ -315,8 +326,7 @@ mod tests {
             .with_simple_exporter(exporter.clone())
             .build();
 
-        let layer = layer::OpenTelemetryTracingBridge::new(&logger_provider);
-        let subscriber = tracing_subscriber::registry().with(layer);
+        let subscriber = create_tracing_subscriber(exporter.clone(), &logger_provider);
 
         // avoiding setting tracing subscriber as global as that does not
         // play well with unit tests.
@@ -427,8 +437,7 @@ mod tests {
             .with_simple_exporter(exporter.clone())
             .build();
 
-        let layer = layer::OpenTelemetryTracingBridge::new(&logger_provider);
-        let subscriber = tracing_subscriber::registry().with(layer);
+        let subscriber = create_tracing_subscriber(exporter.clone(), &logger_provider);
 
         // avoiding setting tracing subscriber as global as that does not
         // play well with unit tests.
@@ -436,7 +445,7 @@ mod tests {
         drop(tracing_log::LogTracer::init());
 
         // Act
-        log::error!("log from log crate");
+        log::error!(target: "my-system", "log from log crate");
         logger_provider.force_flush();
 
         // Assert TODO: move to helper methods
@@ -493,8 +502,7 @@ mod tests {
             .with_simple_exporter(exporter.clone())
            .build();
 
-        let layer = layer::OpenTelemetryTracingBridge::new(&logger_provider);
-        let subscriber = tracing_subscriber::registry().with(layer);
+        let subscriber = create_tracing_subscriber(exporter.clone(), &logger_provider);
 
         // avoiding setting tracing subscriber as global as that does not
         // play well with unit tests.
@@ -513,7 +521,7 @@ mod tests {
             let span_id = cx.span().span_context().span_id();
 
             // logging is done inside span context.
-            log::error!("log from log crate");
+            log::error!(target: "my-system", "log from log crate");
 
             (trace_id, span_id)
         });
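A side note on the bridge wiring shown in these tests: now that the SDK emits its own diagnostics through `tracing`, an application that installs `OpenTelemetryTracingBridge` globally could feed those internal events back into the very pipeline they describe. A minimal sketch, not part of this diff, of keeping them out of the bridge by filtering on the target prefix (this assumes the convention used by the macros introduced below, where `target` is the emitting crate's package name):

```rust
use opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge;
use opentelemetry_sdk::logs::LoggerProvider;
use tracing_subscriber::prelude::*;

fn init_bridge(logger_provider: &LoggerProvider) -> impl tracing::Subscriber {
    // Drop events whose target starts with "opentelemetry" so the SDK's own
    // internal diagnostics are not re-exported through this bridge.
    let not_internal =
        tracing_subscriber::filter::filter_fn(|meta| !meta.target().starts_with("opentelemetry"));
    let bridge = OpenTelemetryTracingBridge::new(logger_provider).with_filter(not_internal);
    tracing_subscriber::registry().with(bridge)
}
```

The helper mirrors `create_tracing_subscriber` above, swapping the level filter for a target-based one.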
diff --git a/opentelemetry-otlp/Cargo.toml b/opentelemetry-otlp/Cargo.toml
index 07ad6127df..272481053e 100644
--- a/opentelemetry-otlp/Cargo.toml
+++ b/opentelemetry-otlp/Cargo.toml
@@ -58,12 +58,12 @@ trace = ["opentelemetry/trace", "opentelemetry_sdk/trace", "opentelemetry-proto/trace"]
 metrics = ["opentelemetry/metrics", "opentelemetry_sdk/metrics", "opentelemetry-proto/metrics"]
 logs = ["opentelemetry/logs", "opentelemetry_sdk/logs", "opentelemetry-proto/logs"]
 populate-logs-event-name = ["opentelemetry-proto/populate-logs-event-name"]
-experimental-internal-logs = ["tracing"]
+internal-logs = ["tracing"]
 
 # add ons
 serialize = ["serde", "serde_json"]
 
-default = ["grpc-tonic", "trace", "metrics", "logs"]
+default = ["grpc-tonic", "trace", "metrics", "logs", "internal-logs"]
 
 # grpc using tonic
 grpc-tonic = ["tonic", "prost", "http", "tokio", "opentelemetry-proto/gen-tonic"]
diff --git a/opentelemetry-otlp/src/metric.rs b/opentelemetry-otlp/src/metric.rs
index 55124f5355..83474e94ff 100644
--- a/opentelemetry-otlp/src/metric.rs
+++ b/opentelemetry-otlp/src/metric.rs
@@ -294,29 +294,7 @@ impl TemporalitySelector for MetricsExporter {
 #[async_trait]
 impl PushMetricsExporter for MetricsExporter {
     async fn export(&self, metrics: &mut ResourceMetrics) -> Result<()> {
-        #[cfg(feature = "experimental-internal-logs")]
-        tracing::debug!(
-            name = "export_metrics",
-            target = "opentelemetry-otlp",
-            metrics_count = metrics
-                .scope_metrics
-                .iter()
-                .map(|scope| scope.metrics.len())
-                .sum::<usize>(),
-            status = "started"
-        );
-        let result = self.client.export(metrics).await;
-        #[cfg(feature = "experimental-internal-logs")]
-        tracing::debug!(
-            name = "export_metrics",
-            target = "opentelemetry-otlp",
-            status = if result.is_ok() {
-                "completed"
-            } else {
-                "failed"
-            }
-        );
-        result
+        self.client.export(metrics).await
     }
 
     async fn force_flush(&self) -> Result<()> {
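The inline `tracing::debug!` calls around metrics export are removed here rather than migrated. Purely for illustration, an author of a custom exporter could reinstate similar diagnostics with the `otel_debug!`/`otel_error!` macros introduced later in this diff. The helper below is hypothetical; because the macros expand to `tracing::...` calls gated on the calling crate's own `internal-logs` feature, that crate needs a `tracing` dependency and a feature of that name for anything to be emitted.

```rust
use std::fmt::Debug;
use std::future::Future;

use opentelemetry::{otel_debug, otel_error};

/// Hypothetical wrapper an exporter author might place around an export call.
async fn export_with_diagnostics<F, T, E>(operation: F) -> Result<T, E>
where
    F: Future<Output = Result<T, E>>,
    E: Debug,
{
    otel_debug!(name: "exporter_export", status = "started");
    let result = operation.await;
    match &result {
        Ok(_) => {
            otel_debug!(name: "exporter_export", status = "completed");
        }
        Err(err) => {
            otel_error!(name: "exporter_export", error = format!("{:?}", err));
        }
    }
    result
}
```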
diff --git a/opentelemetry-sdk/Cargo.toml b/opentelemetry-sdk/Cargo.toml
index 86380d45ca..5fc542a943 100644
--- a/opentelemetry-sdk/Cargo.toml
+++ b/opentelemetry-sdk/Cargo.toml
@@ -42,7 +42,7 @@ temp-env = { workspace = true }
 pprof = { version = "0.13", features = ["flamegraph", "criterion"] }
 
 [features]
-default = ["trace", "metrics", "logs"]
+default = ["trace", "metrics", "logs", "internal-logs"]
 trace = ["opentelemetry/trace", "rand", "async-trait", "percent-encoding"]
 jaeger_remote_sampler = ["trace", "opentelemetry-http", "http", "serde", "serde_json", "url"]
 logs = ["opentelemetry/logs", "async-trait", "serde_json"]
@@ -52,7 +52,7 @@ testing = ["opentelemetry/testing", "trace", "metrics", "logs", "rt-async-std", "rt-tokio"]
 rt-tokio = ["tokio", "tokio-stream"]
 rt-tokio-current-thread = ["tokio", "tokio-stream"]
 rt-async-std = ["async-std"]
-experimental-internal-logs = ["tracing"]
+internal-logs = ["tracing"]
 
 [[bench]]
 name = "context"
diff --git a/opentelemetry-sdk/src/logs/log_emitter.rs b/opentelemetry-sdk/src/logs/log_emitter.rs
index 7463b19059..1b4e1b8c2c 100644
--- a/opentelemetry-sdk/src/logs/log_emitter.rs
+++ b/opentelemetry-sdk/src/logs/log_emitter.rs
@@ -1,5 +1,6 @@
 use super::{BatchLogProcessor, LogProcessor, LogRecord, SimpleLogProcessor, TraceContext};
 use crate::{export::logs::LogExporter, runtime::RuntimeChannel, Resource};
+use opentelemetry::otel_warn;
 use opentelemetry::{
     global,
     logs::{LogError, LogResult},
@@ -49,7 +50,6 @@ impl opentelemetry::logs::LoggerProvider for LoggerProvider {
         attributes: Option<Vec<KeyValue>>,
     ) -> Logger {
         let name = name.into();
-
         let component_name = if name.is_empty() {
             Cow::Borrowed(DEFAULT_COMPONENT_NAME)
         } else {
@@ -114,6 +114,10 @@ impl LoggerProvider {
         let mut errs = vec![];
         for processor in &self.inner.processors {
             if let Err(err) = processor.shutdown() {
+                otel_warn!(
+                    name: "logger_provider_shutdown_error",
+                    error = format!("{:?}", err)
+                );
                 errs.push(err);
             }
         }
@@ -124,6 +128,9 @@ impl LoggerProvider {
                 Err(LogError::Other(format!("{errs:?}").into()))
             }
         } else {
+            otel_warn!(
+                name: "logger_provider_already_shutdown"
+            );
             Err(LogError::Other("logger provider already shut down".into()))
         }
     }
diff --git a/opentelemetry-sdk/src/logs/log_processor.rs b/opentelemetry-sdk/src/logs/log_processor.rs
index b3a13c22bb..e6578c7f7e 100644
--- a/opentelemetry-sdk/src/logs/log_processor.rs
+++ b/opentelemetry-sdk/src/logs/log_processor.rs
@@ -14,8 +14,9 @@ use opentelemetry::logs::Severity;
 use opentelemetry::{
     global,
     logs::{LogError, LogResult},
-    InstrumentationLibrary,
+    otel_error, otel_warn, InstrumentationLibrary,
 };
+
 use std::sync::atomic::AtomicBool;
 use std::{cmp::min, env, sync::Mutex};
 use std::{
@@ -98,16 +99,12 @@ impl LogProcessor for SimpleLogProcessor {
     fn emit(&self, record: &mut LogRecord, instrumentation: &InstrumentationLibrary) {
         // noop after shutdown
         if self.is_shutdown.load(std::sync::atomic::Ordering::Relaxed) {
+            otel_warn!(
+                name: "simple_log_processor_emit_after_shutdown"
+            );
             return;
         }
 
-        #[cfg(feature = "experimental-internal-logs")]
-        tracing::debug!(
-            name: "simple_log_processor_emit",
-            target: "opentelemetry-sdk",
-            event_name = record.event_name
-        );
-
         let result = self
             .exporter
             .lock()
@@ -117,6 +114,10 @@ impl LogProcessor for SimpleLogProcessor {
                 futures_executor::block_on(exporter.export(LogBatch::new(log_tuple)))
             });
         if let Err(err) = result {
+            otel_error!(
+                name: "simple_log_processor_emit_error",
+                error = format!("{:?}", err)
+            );
             global::handle_error(err);
         }
     }
@@ -132,6 +133,9 @@ impl LogProcessor for SimpleLogProcessor {
             exporter.shutdown();
             Ok(())
         } else {
+            otel_error!(
+                name: "simple_log_processor_shutdown_error"
+            );
             Err(LogError::Other(
                 "simple logprocessor mutex poison during shutdown".into(),
             ))
@@ -167,6 +171,10 @@ impl LogProcessor for BatchLogProcessor {
        )));
 
         if let Err(err) = result {
+            otel_error!(
+                name: "batch_log_processor_emit_error",
+                error = format!("{:?}", err)
+            );
             global::handle_error(LogError::Other(err.into()));
         }
     }
@@ -224,13 +232,6 @@ impl BatchLogProcessor {
                         // Log has finished, add to buffer of pending logs.
                         BatchMessage::ExportLog(log) => {
                             logs.push(log);
-                            #[cfg(feature = "experimental-internal-logs")]
-                            tracing::debug!(
-                                name: "batch_log_processor_record_count",
-                                target: "opentelemetry-sdk",
-                                current_batch_size = logs.len()
-                            );
-
                             if logs.len() == config.max_export_batch_size {
                                 let result = export_with_timeout(
                                     config.max_export_timeout,
@@ -241,6 +242,10 @@ impl BatchLogProcessor {
                                 .await;
 
                                 if let Err(err) = result {
+                                    otel_error!(
+                                        name: "batch_log_processor_export_error",
+                                        error = format!("{:?}", err)
+                                    );
                                     global::handle_error(err);
                                 }
                             }
@@ -261,8 +266,18 @@ impl BatchLogProcessor {
                                         "failed to send flush result: {:?}",
                                         result
                                     )));
+                                    otel_error!(
+                                        name: "batch_log_processor_flush_error",
+                                        error = format!("{:?}", result),
+                                        message = "Failed to send flush result"
+                                    );
                                 }
                             } else if let Err(err) = result {
+                                otel_error!(
+                                    name: "batch_log_processor_flush_error",
+                                    error = format!("{:?}", err),
+                                    message = "Flush failed"
+                                );
                                 global::handle_error(err);
                             }
                         }
@@ -279,6 +294,11 @@ impl BatchLogProcessor {
                             exporter.shutdown();
 
                             if let Err(result) = ch.send(result) {
+                                otel_error!(
+                                    name: "batch_log_processor_shutdown_error",
+                                    error = format!("{:?}", result),
+                                    message = "Failed to send shutdown result"
+                                );
                                 global::handle_error(LogError::from(format!(
                                     "failed to send batch processor shutdown result: {:?}",
                                     result
                                 )));
@@ -295,7 +315,6 @@ impl BatchLogProcessor {
                 }
             }
         }));
-
        // Return batch processor with link to worker
         BatchLogProcessor { message_sender }
     }
@@ -338,7 +357,13 @@ where
     pin_mut!(timeout);
     match future::select(export, timeout).await {
         Either::Left((export_res, _)) => export_res,
-        Either::Right((_, _)) => ExportResult::Err(LogError::ExportTimedOut(time_out)),
+        Either::Right((_, _)) => {
+            otel_error!(
+                name: "export_with_timeout_timeout",
+                timeout_duration = time_out.as_millis()
+            );
+            ExportResult::Err(LogError::ExportTimedOut(time_out))
+        }
     }
 }
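With the `log_emitter.rs` and `log_processor.rs` changes above, conditions such as a second shutdown now surface as internal warnings instead of failing silently. A rough sketch of observing the `logger_provider_already_shutdown` event during development; it assumes the SDK's `testing` feature (for `InMemoryLogsExporter`) and any `tracing` subscriber, here a plain `fmt` one:

```rust
use opentelemetry_sdk::logs::LoggerProvider;
use opentelemetry_sdk::testing::logs::InMemoryLogsExporter;

fn main() {
    // Surface the SDK's internal diagnostics (emitted via `tracing`) at WARN and above.
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::WARN)
        .init();

    let exporter = InMemoryLogsExporter::default();
    let provider = LoggerProvider::builder()
        .with_simple_exporter(exporter)
        .build();

    let _ = provider.shutdown();
    // Second shutdown hits the `else` branch above: the provider emits
    // `logger_provider_already_shutdown` through otel_warn! and returns an error.
    let _ = provider.shutdown();
}
```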
diff --git a/opentelemetry-sdk/src/metrics/internal/mod.rs b/opentelemetry-sdk/src/metrics/internal/mod.rs
index abc691b2fc..7a79fe4653 100644
--- a/opentelemetry-sdk/src/metrics/internal/mod.rs
+++ b/opentelemetry-sdk/src/metrics/internal/mod.rs
@@ -17,7 +17,7 @@ pub(crate) use aggregate::{AggregateBuilder, ComputeAggregation, Measure};
 pub(crate) use exponential_histogram::{EXPO_MAX_SCALE, EXPO_MIN_SCALE};
 use once_cell::sync::Lazy;
 use opentelemetry::metrics::MetricsError;
-use opentelemetry::{global, KeyValue};
+use opentelemetry::{global, otel_warn, KeyValue};
 
 use crate::metrics::AttributeSet;
 
@@ -147,6 +147,9 @@ impl, T: Number, O: Operation> ValueMap {
                 O::update_tracker(&new_tracker, measurement, index);
                 trackers.insert(STREAM_OVERFLOW_ATTRIBUTES.clone(), Arc::new(new_tracker));
                 global::handle_error(MetricsError::Other("Warning: Maximum data points for metric stream exceeded. Entry added to overflow. Subsequent overflows to same metric until next collect will not be logged.".into()));
+                otel_warn!( name: "ValueMap.measure",
+                    message = "Maximum data points for metric stream exceeded. Entry added to overflow. Subsequent overflows to same metric until next collect will not be logged."
+                );
             }
         }
     }
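The overflow branch above now also reports through `otel_warn!` in addition to `global::handle_error`. A hypothetical helper that exercises it by recording more distinct attribute sets than a stream allows (the exact cardinality limit is an SDK internal; the loop bound below is simply "large enough"):

```rust
use opentelemetry::metrics::Meter;
use opentelemetry::KeyValue;

// Assumes a meter obtained from a provider with a configured reader;
// without one, the measurements are no-ops and the overflow path never runs.
fn record_high_cardinality(meter: &Meter) {
    // `.init()` matches the API at the time of this diff; newer SDKs use `.build()`.
    let counter = meter.u64_counter("requests").init();
    for i in 0..10_000u64 {
        // Each distinct `user_id` value creates a new attribute set; once the
        // limit is exceeded, later points go to the overflow entry and the
        // `ValueMap.measure` warning above is emitted.
        counter.add(1, &[KeyValue::new("user_id", i.to_string())]);
    }
}
```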
diff --git a/opentelemetry-sdk/src/metrics/periodic_reader.rs b/opentelemetry-sdk/src/metrics/periodic_reader.rs
index 721b979078..30604acc48 100644
--- a/opentelemetry-sdk/src/metrics/periodic_reader.rs
+++ b/opentelemetry-sdk/src/metrics/periodic_reader.rs
@@ -14,6 +14,7 @@ use futures_util::{
 use opentelemetry::{
     global,
     metrics::{MetricsError, Result},
+    otel_error,
 };
 
 use crate::runtime::Runtime;
@@ -234,8 +235,6 @@ struct PeriodicReaderWorker {
 
 impl PeriodicReaderWorker {
     async fn collect_and_export(&mut self) -> Result<()> {
-        #[cfg(feature = "experimental-internal-logs")]
-        tracing::debug!(name: "metrics_collect_and_export", target: "opentelemetry-sdk", status = "started");
         self.reader.collect(&mut self.rm)?;
         if self.rm.scope_metrics.is_empty() {
             // No metrics to export.
@@ -249,20 +248,11 @@ impl PeriodicReaderWorker {
         match future::select(export, timeout).await {
             Either::Left((res, _)) => {
-                #[cfg(feature = "experimental-internal-logs")]
-                tracing::debug!(
-                    name: "collect_and_export",
-                    target: "opentelemetry-sdk",
-                    status = "completed",
-                    result = ?res
-                );
                 res // return the status of export.
             }
             Either::Right(_) => {
-                #[cfg(feature = "experimental-internal-logs")]
-                tracing::error!(
-                    name = "collect_and_export",
-                    target = "opentelemetry-sdk",
+                otel_error!(
+                    name: "collect_and_export",
                     status = "timed_out"
                 );
                 Err(MetricsError::Other("export timed out".into()))
@@ -273,23 +263,17 @@ impl PeriodicReaderWorker {
     async fn process_message(&mut self, message: Message) -> bool {
         match message {
             Message::Export => {
-                #[cfg(feature = "experimental-internal-logs")]
-                tracing::debug!(name: "process_message", target: "opentelemetry-sdk", message_type = "export");
                 if let Err(err) = self.collect_and_export().await {
                     global::handle_error(err)
                 }
             }
             Message::Flush(ch) => {
-                #[cfg(feature = "experimental-internal-logs")]
-                tracing::debug!(name: "process_message", target: "opentelemetry-sdk", message_type = "flush");
                 let res = self.collect_and_export().await;
                 if ch.send(res).is_err() {
                     global::handle_error(MetricsError::Other("flush channel closed".into()))
                 }
             }
             Message::Shutdown(ch) => {
-                #[cfg(feature = "experimental-internal-logs")]
-                tracing::debug!(name: "process_message", target: "opentelemetry-sdk", message_type = "shutdown");
                 let res = self.collect_and_export().await;
                 let _ = self.reader.exporter.shutdown();
                 if ch.send(res).is_err() {
diff --git a/opentelemetry/src/global/internal_logging.rs b/opentelemetry/src/global/internal_logging.rs
new file mode 100644
index 0000000000..3a5c24ba69
--- /dev/null
+++ b/opentelemetry/src/global/internal_logging.rs
@@ -0,0 +1,114 @@
+#![allow(unused_macros)]
+///
+/// **Note**: These macros (`otel_info!`, `otel_warn!`, `otel_debug!`, and `otel_error!`) are intended to be used
+/// **internally within OpenTelemetry code** or for **custom exporters and processors**. They are not designed
+/// for general application logging and should not be used for that purpose.
+///
+
+/// Macro for logging informational messages in OpenTelemetry.
+///
+/// # Fields:
+/// - `name`: The operation or action being logged.
+/// - Additional optional key-value pairs can be passed as attributes.
+///
+/// # Example:
+/// ```rust
+/// use opentelemetry::otel_info;
+/// otel_info!(name: "sdk_start", version = "1.0.0", schema_url = "http://example.com");
+/// ```
+#[macro_export]
+macro_rules! otel_info {
+    (name: $name:expr $(,)?) => {
+        #[cfg(feature = "internal-logs")]
+        {
+            tracing::info!( name: $name, target: env!("CARGO_PKG_NAME"), "");
+        }
+    };
+    (name: $name:expr, $($key:ident = $value:expr),+ $(,)?) => {
+        #[cfg(feature = "internal-logs")]
+        {
+            tracing::info!(name: $name, target: env!("CARGO_PKG_NAME"), $($key = $value),+, "");
+        }
+    };
+}
+
+/// Macro for logging warning messages in OpenTelemetry.
+///
+/// # Fields:
+/// - `name`: The operation or action being logged.
+/// - Additional optional key-value pairs can be passed as attributes.
+///
+/// # Example:
+/// ```rust
+/// use opentelemetry::otel_warn;
+/// otel_warn!(name: "export_warning", error_code = 404, version = "1.0.0");
+/// ```
+#[macro_export]
+macro_rules! otel_warn {
+    (name: $name:expr $(,)?) => {
+        #[cfg(feature = "internal-logs")]
+        {
+            tracing::warn!(name: $name, target: env!("CARGO_PKG_NAME"), "");
+        }
+    };
+    (name: $name:expr, $($key:ident = $value:expr),+ $(,)?) => {
+        #[cfg(feature = "internal-logs")]
+        {
+            tracing::warn!(name: $name, target: env!("CARGO_PKG_NAME"), $($key = $value),+, "");
+        }
+    };
+}
+
+/// Macro for logging debug messages in OpenTelemetry.
+///
+/// # Fields:
+/// - `name`: The operation or action being logged.
+/// - Additional optional key-value pairs can be passed as attributes.
+///
+/// # Example:
+/// ```rust
+/// use opentelemetry::otel_debug;
+/// otel_debug!(name: "debug_operation", debug_level = "high", version = "1.0.0");
+/// ```
+#[macro_export]
+macro_rules! otel_debug {
+    (name: $name:expr $(,)?) => {
+        #[cfg(feature = "internal-logs")]
+        {
+            tracing::debug!(name: $name, target: env!("CARGO_PKG_NAME"),"");
+        }
+    };
+    (name: $name:expr, $($key:ident = $value:expr),+ $(,)?) => {
+        #[cfg(feature = "internal-logs")]
+        {
+            tracing::debug!(name: $name, target: env!("CARGO_PKG_NAME"), $($key = $value),+, "");
+        }
+    };
+}
+
+/// Macro for logging error messages in OpenTelemetry.
+///
+/// # Fields:
+/// - `name`: The operation or action being logged.
+/// - Additional optional key-value pairs can be passed as attributes.
+///
+/// # Example:
+/// ```rust
+/// use opentelemetry::otel_error;
+/// otel_error!(name: "export_failure", error_code = 500, version = "1.0.0");
+/// ```
+#[macro_export]
+macro_rules! otel_error {
+    (name: $name:expr $(,)?) => {
+        #[cfg(feature = "internal-logs")]
+        {
+            tracing::error!(name: $name, target: env!("CARGO_PKG_NAME"), "");
+        }
+    };
+    (name: $name:expr, $($key:ident = $value:expr),+ $(,)?) => {
+        #[cfg(feature = "internal-logs")]
+        {
+            tracing::error!(name: $name, target: env!("CARGO_PKG_NAME"), $($key = $value),+, "");
+        }
+    };
+}
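Because every macro stamps `target: env!("CARGO_PKG_NAME")`, internal events carry targets such as `opentelemetry-sdk` or `opentelemetry-otlp` and can be routed per crate. A sketch, not part of this diff, using `tracing-subscriber`'s `Targets` filter to show only the crates of interest:

```rust
use tracing::Level;
use tracing_subscriber::{filter::Targets, fmt, prelude::*};

fn main() {
    // Show SDK internals at DEBUG, OTLP exporter internals at WARN, nothing else.
    let internal_logs = Targets::new()
        .with_target("opentelemetry-sdk", Level::DEBUG)
        .with_target("opentelemetry-otlp", Level::WARN);

    tracing_subscriber::registry()
        .with(fmt::layer().with_filter(internal_logs))
        .init();
}
```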
diff --git a/opentelemetry/src/global/mod.rs b/opentelemetry/src/global/mod.rs
index a1e7b1da72..5a6d335431 100644
--- a/opentelemetry/src/global/mod.rs
+++ b/opentelemetry/src/global/mod.rs
@@ -126,6 +126,7 @@
 //! [`set_meter_provider`]: crate::global::set_meter_provider
 
 mod error_handler;
+mod internal_logging;
 #[cfg(feature = "metrics")]
 mod metrics;
 #[cfg(feature = "trace")]
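As the module note says, these macros are aimed at OpenTelemetry internals and at custom exporters and processors. A hypothetical third-party `LogProcessor` using them might look like the sketch below. Note that the `cfg` check and the `tracing::` path resolve at the call site, so the defining crate would need its own `internal-logs` feature and a `tracing` dependency for the events to actually be emitted.

```rust
use opentelemetry::logs::LogResult;
use opentelemetry::{otel_debug, otel_warn, InstrumentationLibrary};
use opentelemetry_sdk::logs::{LogProcessor, LogRecord};

/// Hypothetical processor that does no exporting and only demonstrates the macros.
/// It would be registered via `LoggerProvider::builder().with_log_processor(...)`.
#[derive(Debug, Default)]
struct NoopDiagnosticProcessor;

impl LogProcessor for NoopDiagnosticProcessor {
    fn emit(&self, _record: &mut LogRecord, _instrumentation: &InstrumentationLibrary) {
        otel_debug!(name: "noop_processor_emit");
    }

    fn force_flush(&self) -> LogResult<()> {
        Ok(())
    }

    fn shutdown(&self) -> LogResult<()> {
        otel_warn!(name: "noop_processor_shutdown");
        Ok(())
    }
}
```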