Module tower_http::trace [−][src]
Available on crate feature trace only.
Expand description
Middleware that adds high level tracing to a Service.
Example
Adding tracing to your service can be as simple as:
use http::{Request, Response}; use hyper::Body; use tower::{ServiceBuilder, ServiceExt, Service}; use tower_http::trace::TraceLayer; use std::convert::Infallible; async fn handle(request: Request<Body>) -> Result<Response<Body>, Infallible> { Ok(Response::new(Body::from("foo"))) } // Setup tracing tracing_subscriber::fmt::init(); let mut service = ServiceBuilder::new() .layer(TraceLayer::new_for_http()) .service_fn(handle); let request = Request::new(Body::from("foo")); let response = service .ready() .await? .call(request) .await?;
If you run this application with RUST_LOG=tower_http=trace cargo run
you should see logs like:
Mar 05 20:50:28.523 DEBUG request{method=GET path="/foo"}: tower_http::trace::on_request: started processing request
Mar 05 20:50:28.524 DEBUG request{method=GET path="/foo"}: tower_http::trace::on_response: finished processing request latency=1 ms status=200
Customization
Trace
comes with good defaults but also supports customizing many aspects of the output.
The default behaviour supports some customization:
use http::{Request, Response, HeaderMap, StatusCode}; use hyper::Body; use bytes::Bytes; use tower::ServiceBuilder; use tracing::Level; use tower_http::{ LatencyUnit, trace::{TraceLayer, DefaultMakeSpan, DefaultOnRequest, DefaultOnResponse}, }; use std::time::Duration; let service = ServiceBuilder::new() .layer( TraceLayer::new_for_http() .make_span_with( DefaultMakeSpan::new().include_headers(true) ) .on_request( DefaultOnRequest::new().level(Level::INFO) ) .on_response( DefaultOnResponse::new() .level(Level::INFO) .latency_unit(LatencyUnit::Micros) ) // on so on for `on_eos`, `on_body_chunk`, and `on_failure` ) .service_fn(handle);
However for maximum control you can provide callbacks:
use http::{Request, Response, HeaderMap, StatusCode}; use hyper::Body; use bytes::Bytes; use tower::ServiceBuilder; use tower_http::{classify::ServerErrorsFailureClass, trace::TraceLayer}; use std::time::Duration; use tracing::Span; let service = ServiceBuilder::new() .layer( TraceLayer::new_for_http() .make_span_with(|request: &Request<Body>| { tracing::debug_span!("http-request") }) .on_request(|request: &Request<Body>, _span: &Span| { tracing::debug!("started {} {}", request.method(), request.uri().path()) }) .on_response(|response: &Response<Body>, latency: Duration, _span: &Span| { tracing::debug!("response generated in {:?}", latency) }) .on_body_chunk(|chunk: &Bytes, latency: Duration, _span: &Span| { tracing::debug!("sending {} bytes", chunk.len()) }) .on_eos(|trailers: Option<&HeaderMap>, stream_duration: Duration, _span: &Span| { tracing::debug!("stream closed after {:?}", stream_duration) }) .on_failure(|error: ServerErrorsFailureClass, latency: Duration, _span: &Span| { tracing::debug!("something went wrong") }) ) .service_fn(handle);
Disabling something
Setting the behaviour to ()
will disable that particular step:
use http::StatusCode; use tower::ServiceBuilder; use tower_http::{classify::ServerErrorsFailureClass, trace::TraceLayer}; use std::time::Duration; use tracing::Span; let service = ServiceBuilder::new() .layer( // This configuration will only emit events on failures TraceLayer::new_for_http() .on_request(()) .on_response(()) .on_body_chunk(()) .on_eos(()) .on_failure(|error: ServerErrorsFailureClass, latency: Duration, _span: &Span| { tracing::debug!("something went wrong") }) ) .service_fn(handle);
Recording fields on the span
All callbacks receive a reference to the tracing Span
, corresponding to this request,
produced by the closure passed to TraceLayer::make_span_with
. It can be used to record
field values that weren’t known when the span was created.
use http::{Request, Response, HeaderMap, StatusCode}; use hyper::Body; use bytes::Bytes; use tower::ServiceBuilder; use tower_http::trace::TraceLayer; use tracing::Span; use std::time::Duration; let service = ServiceBuilder::new() .layer( TraceLayer::new_for_http() .make_span_with(|request: &Request<Body>| { tracing::debug_span!( "http-request", status_code = tracing::field::Empty, ) }) .on_response(|response: &Response<Body>, _latency: Duration, span: &Span| { span.record("status_code", &tracing::field::display(response.status())); tracing::debug!("response generated") }) ) .service_fn(handle);
Providing classifiers
Tracing requires determining if a response is a success or failure. MakeClassifier
is used
to create a classifier for the incoming request. See the docs for MakeClassifier
and
ClassifyResponse
for more details on classification.
A MakeClassifier
can be provided when creating a TraceLayer
:
use http::{Request, Response}; use hyper::Body; use tower::ServiceBuilder; use tower_http::{ trace::TraceLayer, classify::{ MakeClassifier, ClassifyResponse, ClassifiedResponse, NeverClassifyEos, SharedClassifier, }, }; use std::convert::Infallible; // Our `MakeClassifier` that always creates `MyClassifier` classifiers. #[derive(Copy, Clone)] struct MyMakeClassify; impl MakeClassifier for MyMakeClassify { type Classifier = MyClassifier; type FailureClass = &'static str; type ClassifyEos = NeverClassifyEos<&'static str>; fn make_classifier<B>(&self, req: &Request<B>) -> Self::Classifier { MyClassifier } } // A classifier that classifies failures as `"something went wrong..."`. #[derive(Copy, Clone)] struct MyClassifier; impl ClassifyResponse for MyClassifier { type FailureClass = &'static str; type ClassifyEos = NeverClassifyEos<&'static str>; fn classify_response<B>( self, res: &Response<B> ) -> ClassifiedResponse<Self::FailureClass, Self::ClassifyEos> { // Classify based on the status code. if res.status().is_server_error() { ClassifiedResponse::Ready(Err("something went wrong...")) } else { ClassifiedResponse::Ready(Ok(())) } } fn classify_error<E>(self, error: &E) -> Self::FailureClass where E: std::fmt::Display + 'static, { "something went wrong..." } } let service = ServiceBuilder::new() // Create a trace layer that uses our classifier. .layer(TraceLayer::new(MyMakeClassify)) .service_fn(handle); // Since `MyClassifier` is `Clone` we can also use `SharedClassifier` // to avoid having to define a separate `MakeClassifier`. let service = ServiceBuilder::new() .layer(TraceLayer::new(SharedClassifier::new(MyClassifier))) .service_fn(handle);
TraceLayer
comes with convenience methods for using common classifiers:
TraceLayer::new_for_http
classifies based on the status code. It doesn’t consider streaming responses.
TraceLayer::new_for_grpc
classifies based on the gRPC protocol and supports streaming responses.
Structs
DefaultMakeSpan | The default MakeSpan implementation used by Trace |
DefaultOnBodyChunk | The default OnBodyChunk implementation used by Trace |
DefaultOnEos | The default OnEos implementation used by Trace |
DefaultOnFailure | The default OnFailure implementation used by Trace |
DefaultOnRequest | The default OnRequest implementation used by Trace |
DefaultOnResponse | The default OnResponse implementation used by Trace |
ResponseBody | Response body for Trace |
ResponseFuture | Response future for Trace |
Trace | Middleware that adds high level tracing to a Service |
TraceLayer | Layer that adds high level tracing to a Service |
Traits
MakeSpan | Trait used to generate the tracing Span that each request is recorded in |
OnBodyChunk | Trait used to tell Trace what to do when a body chunk has been sent |
OnEos | Trait used to tell Trace what to do when a stream closes |
OnFailure | Trait used to tell Trace what to do when a request fails |
OnRequest | Trait used to tell Trace what to do when a request is received |
OnResponse | Trait used to tell Trace what to do when a response has been produced |