From 854b9da2cd579059ddc9525d3476b15c729eb4ac Mon Sep 17 00:00:00 2001
From: Thomas Orozco <torozco@fb.com>
Date: Thu, 12 Sep 2019 02:31:23 -0700
Subject: [PATCH] mononoke/lfs_server: log error causes

Summary:
This updates our error reporting to actually log the chained causes we
bothered to put on the errors. It also adds a few more chained causes.

Reviewed By: StanislavGlebik

Differential Revision: D17315689

fbshipit-source-id: d3b83f73fa06b56b863e23f2f76e78f699af8e36
---
 lfs_server/src/batch.rs    |  5 ++++-
 lfs_server/src/download.rs |  6 +++++-
 lfs_server/src/errors.rs   |  8 ++++++++
 lfs_server/src/main.rs     | 17 ++++++++---------
 lfs_server/src/upload.rs   |  3 +++
 5 files changed, 28 insertions(+), 11 deletions(-)

diff --git a/lfs_server/src/batch.rs b/lfs_server/src/batch.rs
index d3998d6dc8..0054b4321e 100644
--- a/lfs_server/src/batch.rs
+++ b/lfs_server/src/batch.rs
@@ -288,10 +288,13 @@ pub async fn batch(state: &mut State) -> Result<(Body, Mime), HttpError> {
         .compat()
         .try_concat()
         .await
+        .chain_err(ErrorKind::ClientCancelled)
         .map_err(HttpError::e400)?
         .into_bytes();
 
-    let request_batch = serde_json::from_slice::<RequestBatch>(&body).map_err(HttpError::e400)?;
+    let request_batch = serde_json::from_slice::<RequestBatch>(&body)
+        .chain_err(ErrorKind::InvalidBatch)
+        .map_err(HttpError::e400)?;
 
     let res = match request_batch.operation {
         Operation::Upload => batch_upload(&ctx, request_batch).await,
diff --git a/lfs_server/src/download.rs b/lfs_server/src/download.rs
index 70a1af0ced..e91e00f881 100644
--- a/lfs_server/src/download.rs
+++ b/lfs_server/src/download.rs
@@ -4,6 +4,7 @@
 // This software may be used and distributed according to the terms of the
 // GNU General Public License version 2 or any later version.
 
+use failure_ext::chain::ChainExt;
 use futures_preview::compat::Future01CompatExt;
 use gotham::state::State;
 use gotham_derive::{StateData, StaticResponseExtender};
@@ -31,7 +32,9 @@ pub async fn download(state: &mut State) -> Result<(Body, mime::Mime), HttpError
 
     let ctx = RequestContext::instantiate(state, repository.clone()).map_err(HttpError::e400)?;
 
-    let content_id = ContentId::from_str(&content_id).map_err(HttpError::e400)?;
+    let content_id = ContentId::from_str(&content_id)
+        .chain_err(ErrorKind::InvalidContentId)
+        .map_err(HttpError::e400)?;
 
     // Query a stream out of the Filestore
     let stream = filestore::fetch(
@@ -41,6 +44,7 @@ pub async fn download(state: &mut State) -> Result<(Body, mime::Mime), HttpError
     )
     .compat()
     .await
+    .chain_err(ErrorKind::FilestoreReadFailure)
     .map_err(HttpError::e500)?;
 
     // Return a 404 if the stream doesn't exist.
diff --git a/lfs_server/src/errors.rs b/lfs_server/src/errors.rs
index d176056090..c11dc1be12 100644
--- a/lfs_server/src/errors.rs
+++ b/lfs_server/src/errors.rs
@@ -61,4 +61,12 @@ pub enum ErrorKind {
     GenerateDownloadUrisError,
     #[fail(display = "Could not generate upload URIs")]
     GenerateUploadUrisError,
+    #[fail(display = "Could not parse Request Batch")]
+    InvalidBatch,
+    #[fail(display = "Could not parse Content ID")]
+    InvalidContentId,
+    #[fail(display = "Could not access Filestore for reads")]
+    FilestoreReadFailure,
+    #[fail(display = "Could not access Filestore for writes")]
+    FilestoreWriteFailure,
 }
diff --git a/lfs_server/src/main.rs b/lfs_server/src/main.rs
index 3697babdbc..626a4f8b0f 100644
--- a/lfs_server/src/main.rs
+++ b/lfs_server/src/main.rs
@@ -26,10 +26,12 @@ use gotham::{
     state::{request_id, State},
 };
 use hyper::{header::HeaderValue, Body, Response, StatusCode};
+use itertools::Itertools;
 use mime::Mime;
 use scuba::ScubaSampleBuilder;
 use slog::warn;
 use std::collections::HashMap;
+use std::iter;
 use std::net::ToSocketAddrs;
 use tokio::net::TcpListener;
 use tokio_openssl::SslAcceptorExt;
@@ -57,13 +59,6 @@ mod upload;
 #[macro_use]
 mod http;
 
-// TODO: left to do here:
-// - HTTPS
-// - Logging
-// - VIP-level routing (won't happen in this code base, though)
-// - Verify that we are talking HTTP/2 to upstream
-// - Make upstream optional for tests?
-
 const ARG_SELF_URL: &str = "self-url";
 const ARG_UPSTREAM_URL: &str = "upstream-url";
 const ARG_LISTEN_HOST: &str = "listen-host";
@@ -83,14 +78,18 @@ async fn build_response(
         Err(error) => {
             let HttpError { error, status_code } = error;
 
+            let error_message = iter::once(error.to_string())
+                .chain(error.iter_causes().map(|c| c.to_string()))
+                .join(": ");
+
             let res = ResponseError {
-                message: error.to_string(),
+                message: error_message.clone(),
                 documentation_url: None,
                 request_id: Some(request_id(&state).to_string()),
             };
 
             if let Some(log_ctx) = state.try_borrow_mut::<LoggingContext>() {
-                log_ctx.set_error_msg(error.to_string());
+                log_ctx.set_error_msg(error_message);
             }
 
             // Bail if we can't convert the response to json.
diff --git a/lfs_server/src/upload.rs b/lfs_server/src/upload.rs
index 424ca4bb09..f3fc2ad582 100644
--- a/lfs_server/src/upload.rs
+++ b/lfs_server/src/upload.rs
@@ -6,6 +6,7 @@
 
 use bytes::Bytes;
 use failure::Error;
+use futures::Future;
 use futures_preview::{
     channel::mpsc::channel, compat::Future01CompatExt, compat::Stream01CompatExt, future::ready,
     SinkExt, Stream, StreamExt, TryStreamExt,
@@ -154,6 +155,8 @@ pub async fn upload(state: &mut State) -> Result<(Body, Mime), HttpError> {
         &StoreRequest::with_sha256(size, oid),
         internal_recv.compat(),
     )
+    .chain_err(ErrorKind::FilestoreWriteFailure)
+    .map_err(Error::from)
    .compat();
 
     let upstream_upload = upstream_upload(&ctx, oid, size, upstream_recv);