Remove Global Error handler. #2260

Merged · 18 commits · Nov 5, 2024
Changes from all commits
115 changes: 36 additions & 79 deletions examples/self-diagnostics/src/main.rs
@@ -1,56 +1,18 @@
use opentelemetry::global::{self, set_error_handler, Error as OtelError};
use opentelemetry::global::{self, Error as OtelError};
use opentelemetry::KeyValue;
use opentelemetry_appender_tracing::layer;
use opentelemetry_otlp::{LogExporter, MetricExporter, WithExportConfig};
use opentelemetry_sdk::metrics::PeriodicReader;
use tracing_subscriber::filter::{EnvFilter, LevelFilter};
use tracing_subscriber::fmt;
use tracing_subscriber::prelude::*;

use std::error::Error;
use tracing::error;

use once_cell::sync::Lazy;
use std::collections::HashSet;
use std::sync::{Arc, Mutex};

use std::sync::mpsc::channel;

struct ErrorState {
seen_errors: Mutex<HashSet<String>>,
}

impl ErrorState {
fn new() -> Self {
ErrorState {
seen_errors: Mutex::new(HashSet::new()),
}
}

fn mark_as_seen(&self, err: &OtelError) -> bool {
let mut seen_errors = self.seen_errors.lock().unwrap();
seen_errors.insert(err.to_string())
}
}

static GLOBAL_ERROR_STATE: Lazy<Arc<ErrorState>> = Lazy::new(|| Arc::new(ErrorState::new()));

fn custom_error_handler(err: OtelError) {
if GLOBAL_ERROR_STATE.mark_as_seen(&err) {
// log error not already seen
match err {
OtelError::Metric(err) => error!("OpenTelemetry metrics error occurred: {}", err),
OtelError::Trace(err) => error!("OpenTelemetry trace error occurred: {}", err),
OtelError::Log(err) => error!("OpenTelemetry log error occurred: {}", err),
OtelError::Propagation(err) => {
error!("OpenTelemetry propagation error occurred: {}", err)
}
OtelError::Other(err_msg) => error!("OpenTelemetry error occurred: {}", err_msg),
_ => error!("OpenTelemetry error occurred: {:?}", err),
}
}
}

fn init_logger_provider() -> opentelemetry_sdk::logs::LoggerProvider {
let exporter = LogExporter::builder()
.with_http()
@@ -64,46 +26,46 @@ fn init_logger_provider() -> opentelemetry_sdk::logs::LoggerProvider {

let cloned_provider = provider.clone();

// Add a tracing filter to filter events from crates used by opentelemetry-otlp.
// The filter levels are set as follows:
// - Allow `info` level and above by default.
// - Restrict `hyper`, `tonic`, and `reqwest` to `error` level logs only.
// This ensures events generated from these crates within the OTLP Exporter are not looped back,
// thus preventing infinite event generation.
// Note: This will also drop events from these crates used outside the OTLP Exporter.
// For more details, see: https://github.com/open-telemetry/opentelemetry-rust/issues/761
let filter = EnvFilter::new("info")
.add_directive("hyper=error".parse().unwrap())
.add_directive("tonic=error".parse().unwrap())
.add_directive("reqwest=error".parse().unwrap());

// Configuring the formatting layer specifically for OpenTelemetry internal logs.
// These logs starts with "opentelemetry" prefix in target. This allows specific logs
// from the OpenTelemetry-related components to be filtered and handled separately
// from the application logs

let opentelemetry_filter = tracing_subscriber::filter::filter_fn(|metadata| {
metadata.target().starts_with("opentelemetry")
// Specialized filter to process
// - ERROR logs from specific targets
// - ERROR logs generated internally.
let internal_and_dependency_filter = tracing_subscriber::filter::filter_fn(|metadata| {
let target = metadata.target();

// Only allow ERROR logs from specific targets
(target.starts_with("hyper")
|| target.starts_with("hyper_util")
|| target.starts_with("hyper")
|| target.starts_with("tonic")
|| target.starts_with("tower")
|| target.starts_with("reqwest")
|| target.starts_with("opentelemetry"))
&& metadata.level() == &tracing::Level::ERROR
});

let fmt_opentelemetry_layer = fmt::layer()
.with_filter(LevelFilter::DEBUG)
.with_filter(opentelemetry_filter);

// Configures the appender tracing layer, filtering out OpenTelemetry internal logs
// to prevent infinite logging loops.

let non_opentelemetry_filter = tracing_subscriber::filter::filter_fn(|metadata| {
!metadata.target().starts_with("opentelemetry")
// Configure fmt::Layer to print detailed log information, including structured fields
let fmt_internal_and_dependency_layer =
tracing_subscriber::fmt::layer().with_filter(internal_and_dependency_filter.clone());

// Application filter to exclude specific targets entirely, regardless of level
let application_filter = tracing_subscriber::filter::filter_fn(|metadata| {
let target = metadata.target();

// Exclude logs from specific targets for the application layer
!(target.starts_with("hyper")
|| target.starts_with("hyper_util")
|| target.starts_with("hyper")
|| target.starts_with("tonic")
|| target.starts_with("tower")
|| target.starts_with("reqwest")
|| target.starts_with("opentelemetry"))
});

let otel_layer = layer::OpenTelemetryTracingBridge::new(&cloned_provider)
.with_filter(non_opentelemetry_filter.clone());
let application_layer = layer::OpenTelemetryTracingBridge::new(&cloned_provider)
.with_filter(application_filter.clone());

tracing_subscriber::registry()
.with(fmt_opentelemetry_layer)
.with(fmt::layer().with_filter(filter))
.with(otel_layer)
.with(fmt_internal_and_dependency_layer)
.with(application_layer)
.init();
provider
}
@@ -130,11 +92,6 @@ fn init_meter_provider() -> opentelemetry_sdk::metrics::SdkMeterProvider {

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error + Send + Sync + 'static>> {
// Set the custom error handler
if let Err(err) = set_error_handler(custom_error_handler) {
eprintln!("Failed to set custom error handler: {}", err);
}

let logger_provider = init_logger_provider();

// Initialize the MeterProvider with the stdout Exporter.
6 changes: 6 additions & 0 deletions opentelemetry/CHANGELOG.md
@@ -33,6 +33,12 @@ let counter = meter.u64_counter("my_counter").build();
- Replaced `global::meter_with_version` with `global::meter_with_scope`
- Added `global::tracer_with_scope`

- **Breaking change**: [#2260](https://github.com/open-telemetry/opentelemetry-rust/pull/2260)
- Removed `global::set_error_handler` and `global::handle_error`.
- Usage of `global::handle_error` inside the opentelemetry crates has been replaced with the `otel_info!`, `otel_warn!`, `otel_debug!`, and `otel_error!` macros, chosen according to the severity of the internal log.
- The default behavior of `global::handle_error` was to log the error with `eprintln!`. With the otel macros, internal logs are emitted as `tracing` events of matching severity, so users now need to configure a `tracing` layer to capture them.
- Refer to the PR description for a migration guide, and to the [self-diagnostics](https://github.com/open-telemetry/opentelemetry-rust/tree/main/examples/self-diagnostics) example for how to configure a tracing layer for internal logs; a minimal sketch follows below.
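As an illustration of that last point, here is a minimal sketch (not part of this PR) of a `tracing-subscriber` setup that captures the SDK's internal logs. It assumes the `tracing-subscriber` crate with default features, relies on internal events using targets prefixed with `opentelemetry`, and the filter and layer names are illustrative.

```rust
use tracing_subscriber::filter::filter_fn;
use tracing_subscriber::prelude::*;

fn main() {
    // Internal SDK events carry targets starting with "opentelemetry",
    // so a filtered fmt layer prints them without touching application logs.
    let otel_internal_filter =
        filter_fn(|metadata| metadata.target().starts_with("opentelemetry"));

    tracing_subscriber::registry()
        .with(tracing_subscriber::fmt::layer().with_filter(otel_internal_filter))
        .init();

    // Initialize OpenTelemetry providers here; internal warnings and errors
    // that previously went through the global error handler now surface as
    // `tracing` events at the severity chosen by the otel_* macros.
}
```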

## v0.26.0
Released 2024-Sep-30

41 changes: 0 additions & 41 deletions opentelemetry/src/global/error_handler.rs
@@ -1,5 +1,4 @@
use std::sync::PoisonError;
use std::sync::RwLock;

#[cfg(feature = "logs")]
use crate::logs::LogError;
@@ -8,9 +7,6 @@ use crate::metrics::MetricError;
use crate::propagation::PropagationError;
#[cfg(feature = "trace")]
use crate::trace::TraceError;
use once_cell::sync::Lazy;

static GLOBAL_ERROR_HANDLER: Lazy<RwLock<Option<ErrorHandler>>> = Lazy::new(|| RwLock::new(None));

/// Wrapper for error from both tracing and metrics part of open telemetry.
#[derive(thiserror::Error, Debug)]
@@ -47,40 +43,3 @@ impl<T> From<PoisonError<T>> for Error {
Error::Other(err.to_string())
}
}

struct ErrorHandler(Box<dyn Fn(Error) + Send + Sync>);

/// Handle error using the globally configured error handler.
///
/// Writes to stderr if unset.
pub fn handle_error<T: Into<Error>>(err: T) {
match GLOBAL_ERROR_HANDLER.read() {
Ok(handler) if handler.is_some() => (handler.as_ref().unwrap().0)(err.into()),
_ => match err.into() {
#[cfg(feature = "metrics")]
#[cfg_attr(docsrs, doc(cfg(feature = "metrics")))]
Error::Metric(err) => eprintln!("OpenTelemetry metrics error occurred. {}", err),
#[cfg(feature = "trace")]
#[cfg_attr(docsrs, doc(cfg(feature = "trace")))]
Error::Trace(err) => eprintln!("OpenTelemetry trace error occurred. {}", err),
#[cfg(feature = "logs")]
#[cfg_attr(docsrs, doc(cfg(feature = "logs")))]
Error::Log(err) => eprintln!("OpenTelemetry log error occurred. {}", err),
Error::Propagation(err) => {
eprintln!("OpenTelemetry propagation error occurred. {}", err)
}
Error::Other(err_msg) => eprintln!("OpenTelemetry error occurred. {}", err_msg),
},
}
}

/// Set global error handler.
pub fn set_error_handler<F>(f: F) -> std::result::Result<(), Error>
where
F: Fn(Error) + Send + Sync + 'static,
{
GLOBAL_ERROR_HANDLER
.write()
.map(|mut handler| *handler = Some(ErrorHandler(Box::new(f))))
.map_err(Into::into)
}
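For context on how internal call sites changed: where an error was previously routed through `handle_error`, the crates now emit a severity-specific macro instead. A hedged, hypothetical illustration follows; the event name and field are invented, and the macros rely on the `tracing` crate being available (they are intended primarily for the SDK's own internal diagnostics).

```rust
// Hypothetical call site, for illustration only.
fn report_export_failure() {
    // Before (removed by this PR):
    // opentelemetry::global::handle_error(MetricError::Other("export failed".into()));

    // After: emit an internal log at the chosen severity. The event name and
    // field below are invented for this sketch.
    opentelemetry::otel_warn!(name: "MetricExporter.ExportFailed", reason = "export failed");
}
```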
2 changes: 1 addition & 1 deletion opentelemetry/src/global/mod.rs
@@ -138,8 +138,8 @@ mod metrics;
mod propagation;
#[cfg(feature = "trace")]
mod trace;
pub use error_handler::Error;

pub use error_handler::{handle_error, set_error_handler, Error};
#[cfg(feature = "metrics")]
#[cfg_attr(docsrs, doc(cfg(feature = "metrics")))]
pub use metrics::*;