From 47703694d17f191e170d3112d8768fe29c70fe0b Mon Sep 17 00:00:00 2001
From: Syfaro
Date: Wed, 21 Apr 2021 21:46:10 -0400
Subject: [PATCH] More logging updates.

---
 Cargo.lock                                 | 23 ++++++-----
 fuzzysearch-common/Cargo.toml              |  1 +
 fuzzysearch-common/src/lib.rs              | 13 +++++++
 fuzzysearch-ingest-e621/Cargo.toml         |  2 +-
 fuzzysearch-ingest-e621/src/main.rs        | 45 ++++++++++------------
 fuzzysearch-ingest-furaffinity/Cargo.toml  |  1 -
 fuzzysearch-ingest-furaffinity/src/main.rs |  9 +----
 fuzzysearch-ingest-weasyl/Cargo.toml       |  2 +-
 fuzzysearch-ingest-weasyl/src/main.rs      | 36 ++++++++---------
 fuzzysearch-webhook/Cargo.toml             |  1 -
 fuzzysearch-webhook/src/main.rs            |  9 +----
 11 files changed, 65 insertions(+), 77 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 2ec2fc9..071ecd9 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -131,9 +131,9 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
 
 [[package]]
 name = "backtrace"
-version = "0.3.56"
+version = "0.3.57"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d117600f438b1707d4e4ae15d3595657288f8235a0eb593e80ecc98ab34e1bc"
+checksum = "78ed203b9ba68b242c62b3fb7480f589dd49829be1edb3fe8fc8b4ffda2dcb8d"
 dependencies = [
  "addr2line",
  "cfg-if 1.0.0",
@@ -923,6 +923,7 @@ dependencies = [
  "tempfile",
  "tokio",
  "tracing",
+ "tracing-subscriber",
 ]
 
 [[package]]
@@ -944,7 +945,7 @@ dependencies = [
  "sqlx",
  "tokio",
  "tracing",
- "tracing-subscriber",
+ "tracing-unwrap",
 ]
 
 [[package]]
@@ -969,7 +970,6 @@ dependencies = [
  "tokio",
  "tokio-postgres",
  "tracing",
- "tracing-subscriber",
  "tracing-unwrap",
 ]
 
@@ -988,7 +988,7 @@ dependencies = [
  "sqlx",
  "tokio",
  "tracing",
- "tracing-subscriber",
+ "tracing-unwrap",
 ]
 
 [[package]]
@@ -1004,7 +1004,6 @@ dependencies = [
  "serde_json",
  "thiserror",
  "tracing",
- "tracing-subscriber",
  "tracing-unwrap",
 ]
 
@@ -1257,15 +1256,15 @@ checksum = "4a1ce40d6fc9764887c2fdc7305c3dcc429ba11ff981c1509416afd5697e4437"
 
 [[package]]
 name = "httpdate"
-version = "0.3.2"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47"
+checksum = "05842d0d43232b23ccb7060ecb0f0626922c21f30012e97b767b30afd4a5d4b9"
 
 [[package]]
 name = "hyper"
-version = "0.14.5"
+version = "0.14.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8bf09f61b52cfcf4c00de50df88ae423d6c02354e385a86341133b5338630ad1"
+checksum = "5f006b8784cfb01fe7aa9c46f5f5cd4cf5c85a8c612a0653ec97642979062665"
 dependencies = [
  "bytes",
  "futures-channel",
@@ -2947,9 +2946,9 @@ checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2"
 
 [[package]]
 name = "syn"
-version = "1.0.69"
+version = "1.0.70"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48fe99c6bd8b1cc636890bcc071842de909d902c81ac7dab53ba33c421ab8ffb"
+checksum = "b9505f307c872bab8eb46f77ae357c8eba1fdacead58ee5a850116b1d7f82883"
 dependencies = [
  "proc-macro2",
  "quote",
diff --git a/fuzzysearch-common/Cargo.toml b/fuzzysearch-common/Cargo.toml
index 59e3528..32021b8 100644
--- a/fuzzysearch-common/Cargo.toml
+++ b/fuzzysearch-common/Cargo.toml
@@ -13,6 +13,7 @@ queue = ["faktory", "tokio", "serde_json"]
 [dependencies]
 anyhow = "1"
 tracing = "0.1"
+tracing-subscriber = "0.2"
 serde = { version = "1", features = ["derive"] }
 base64 = "0.13"
 
diff --git a/fuzzysearch-common/src/lib.rs b/fuzzysearch-common/src/lib.rs
index 208e145..e0ec059 100644
--- a/fuzzysearch-common/src/lib.rs
+++ b/fuzzysearch-common/src/lib.rs
@@ -14,3 +14,16 @@ pub fn get_hasher() -> img_hash::Hasher<[u8; 8]> {
         .preproc_dct()
         .to_hasher()
 }
+
+/// Initialize the logger. This should only be called by the running binary.
+pub fn init_logger() {
+    if matches!(std::env::var("LOG_FMT").as_deref(), Ok("json")) {
+        tracing_subscriber::fmt::Subscriber::builder()
+            .json()
+            .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
+            .with_timer(tracing_subscriber::fmt::time::ChronoUtc::rfc3339())
+            .init();
+    } else {
+        tracing_subscriber::fmt::init();
+    }
+}
diff --git a/fuzzysearch-ingest-e621/Cargo.toml b/fuzzysearch-ingest-e621/Cargo.toml
index fb6ddfe..fc4aed4 100644
--- a/fuzzysearch-ingest-e621/Cargo.toml
+++ b/fuzzysearch-ingest-e621/Cargo.toml
@@ -21,7 +21,7 @@ img_hash = "3"
 sha2 = "0.9"
 
 tracing = "0.1"
-tracing-subscriber = "0.2"
+tracing-unwrap = "0.9"
 
 anyhow = "1"
diff --git a/fuzzysearch-ingest-e621/src/main.rs b/fuzzysearch-ingest-e621/src/main.rs
index 608f4f0..19b1349 100644
--- a/fuzzysearch-ingest-e621/src/main.rs
+++ b/fuzzysearch-ingest-e621/src/main.rs
@@ -2,6 +2,7 @@ use anyhow::Context;
 use lazy_static::lazy_static;
 use prometheus::{register_histogram, register_int_gauge, Histogram, IntGauge};
 use sqlx::Connection;
+use tracing_unwrap::ResultExt;
 
 use fuzzysearch_common::faktory::FaktoryClient;
 
@@ -12,50 +13,44 @@ lazy_static! {
         "fuzzysearch_watcher_e621_submission_backlog",
         "Number of submissions behind the latest ID"
     )
-    .unwrap();
+    .unwrap_or_log();
     static ref INDEX_DURATION: Histogram = register_histogram!(
         "fuzzysearch_watcher_e621_index_duration",
         "Duration to load an index of submissions"
     )
-    .unwrap();
+    .unwrap_or_log();
     static ref SUBMISSION_DURATION: Histogram = register_histogram!(
         "fuzzysearch_watcher_e621_submission_duration",
         "Duration to ingest a submission"
    )
-    .unwrap();
+    .unwrap_or_log();
 }
 
 type Auth = (String, Option<String>);
 
 #[tokio::main]
 async fn main() -> anyhow::Result<()> {
-    if matches!(std::env::var("LOG_FMT").as_deref(), Ok("json")) {
-        tracing_subscriber::fmt::Subscriber::builder()
-            .json()
-            .with_timer(tracing_subscriber::fmt::time::ChronoUtc::rfc3339())
-            .init();
-    } else {
-        tracing_subscriber::fmt::init();
-    }
+    fuzzysearch_common::init_logger();
 
     create_metrics_server().await;
 
-    let login = std::env::var("E621_LOGIN").expect("Missing E621_LOGIN");
-    let api_key = std::env::var("E621_API_KEY").expect("Missing E621_API_KEY");
+    let login = std::env::var("E621_LOGIN").expect_or_log("Missing E621_LOGIN");
+    let api_key = std::env::var("E621_API_KEY").expect_or_log("Missing E621_API_KEY");
     let auth = (login, Some(api_key));
 
     let client = reqwest::ClientBuilder::default()
         .user_agent(USER_AGENT)
         .build()?;
 
-    let mut conn =
-        sqlx::PgConnection::connect(&std::env::var("DATABASE_URL").expect("Missing DATABASE_URL"))
-            .await?;
+    let mut conn = sqlx::PgConnection::connect(
+        &std::env::var("DATABASE_URL").expect_or_log("Missing DATABASE_URL"),
+    )
+    .await?;
 
-    let faktory_dsn = std::env::var("FAKTORY_URL").expect("Missing FAKTORY_URL");
+    let faktory_dsn = std::env::var("FAKTORY_URL").expect_or_log("Missing FAKTORY_URL");
     let faktory = FaktoryClient::connect(faktory_dsn)
         .await
-        .expect("Unable to connect to Faktory");
+        .expect_or_log("Unable to connect to Faktory");
     let max_id: i32 = sqlx::query!("SELECT max(id) max FROM e621")
         .fetch_one(&mut conn)
         .await
@@ -79,7 +74,7 @@ async fn main() -> anyhow::Result<()> {
         let _hist = INDEX_DURATION.start_timer();
         let lid = get_latest_id(&client, &auth)
             .await
-            .expect("Unable to get latest ID");
+            .expect_or_log("Unable to get latest ID");
         drop(_hist);
 
         latest_id = Some(lid);
@@ -100,7 +95,7 @@ async fn main() -> anyhow::Result<()> {
         min_id = match post_ids.iter().max() {
             Some(id) => *id,
             None => {
-                tracing::warn!("Found no new posts, sleeping");
+                tracing::info!("Found no new posts, sleeping");
                 tokio::time::sleep(std::time::Duration::from_secs(60 * 5)).await;
                 continue;
             }
@@ -359,7 +354,9 @@ async fn provide_metrics(
 
     let encoder = TextEncoder::new();
     let metric_families = prometheus::gather();
-    encoder.encode(&metric_families, &mut buffer).unwrap();
+    encoder
+        .encode(&metric_families, &mut buffer)
+        .unwrap_or_log();
 
     Ok(Response::new(Body::from(buffer)))
 }
@@ -376,11 +373,11 @@ async fn create_metrics_server() {
         make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(provide_metrics)) });
 
     let addr: SocketAddr = std::env::var("METRICS_HOST")
-        .expect("Missing METRICS_HOST")
+        .expect_or_log("Missing METRICS_HOST")
         .parse()
-        .expect("Invalid METRICS_HOST");
+        .expect_or_log("Invalid METRICS_HOST");
 
     let server = Server::bind(&addr).serve(make_svc);
 
-    tokio::spawn(async move { server.await.expect("Metrics server error") });
+    tokio::spawn(async move { server.await.expect_or_log("Metrics server error") });
 }
diff --git a/fuzzysearch-ingest-furaffinity/Cargo.toml b/fuzzysearch-ingest-furaffinity/Cargo.toml
index 7500531..febd753 100644
--- a/fuzzysearch-ingest-furaffinity/Cargo.toml
+++ b/fuzzysearch-ingest-furaffinity/Cargo.toml
@@ -17,7 +17,6 @@ prometheus = { version = "0.12", features = ["process"] }
 lazy_static = "1"
 futures-retry = "0.6"
 tracing = "0.1"
-tracing-subscriber = "0.2"
 tracing-unwrap = "0.9"
 faktory = "0.11"
 anyhow = "1"
diff --git a/fuzzysearch-ingest-furaffinity/src/main.rs b/fuzzysearch-ingest-furaffinity/src/main.rs
index 9eab1cf..44f1a2d 100644
--- a/fuzzysearch-ingest-furaffinity/src/main.rs
+++ b/fuzzysearch-ingest-furaffinity/src/main.rs
@@ -270,14 +270,7 @@ async fn process_submission(
 
 #[tokio::main]
 async fn main() {
-    if matches!(std::env::var("LOG_FMT").as_deref(), Ok("json")) {
-        tracing_subscriber::fmt::Subscriber::builder()
-            .json()
-            .with_timer(tracing_subscriber::fmt::time::ChronoUtc::rfc3339())
-            .init();
-    } else {
-        tracing_subscriber::fmt::init();
-    }
+    fuzzysearch_common::init_logger();
 
     let (cookie_a, cookie_b) = (
         std::env::var("FA_A").expect_or_log("Missing FA_A"),
diff --git a/fuzzysearch-ingest-weasyl/Cargo.toml b/fuzzysearch-ingest-weasyl/Cargo.toml
index 52e90ae..f4832c4 100644
--- a/fuzzysearch-ingest-weasyl/Cargo.toml
+++ b/fuzzysearch-ingest-weasyl/Cargo.toml
@@ -8,7 +8,7 @@ edition = "2018"
 anyhow = "1"
 
 tracing = "0.1"
-tracing-subscriber = "0.2"
+tracing-unwrap = "0.9"
 
 reqwest = { version = "0.11", features = ["json"] }
 tokio = { version = "1", features = ["full"] }
diff --git a/fuzzysearch-ingest-weasyl/src/main.rs b/fuzzysearch-ingest-weasyl/src/main.rs
index 7bee1bd..c07903f 100644
--- a/fuzzysearch-ingest-weasyl/src/main.rs
+++ b/fuzzysearch-ingest-weasyl/src/main.rs
@@ -1,5 +1,6 @@
 use serde::{Deserialize, Serialize};
 use sha2::{Digest, Sha256};
+use tracing_unwrap::{OptionExt, ResultExt};
 
 use fuzzysearch_common::faktory::FaktoryClient;
 
@@ -130,7 +131,7 @@ async fn process_submission(
     tracing::debug!("Processing submission");
 
     let data = client
-        .get(&sub.media.submission.first().unwrap().url)
+        .get(&sub.media.submission.first().unwrap_or_log().url)
         .send()
         .await?
         .bytes()
@@ -158,7 +159,7 @@ async fn process_submission(
             site: fuzzysearch_common::types::Site::Weasyl,
             site_id: sub.id,
             artist: sub.owner_login.clone(),
-            file_url: sub.media.submission.first().unwrap().url.clone(),
+            file_url: sub.media.submission.first().unwrap_or_log().url.clone(),
             file_sha256: Some(result.to_vec()),
             hash: num.map(|hash| hash.to_be_bytes()),
         })
@@ -195,39 +196,32 @@ async fn insert_null(
 
 #[tokio::main]
 async fn main() {
-    if matches!(std::env::var("LOG_FMT").as_deref(), Ok("json")) {
-        tracing_subscriber::fmt::Subscriber::builder()
-            .json()
-            .with_timer(tracing_subscriber::fmt::time::ChronoUtc::rfc3339())
-            .init();
-    } else {
-        tracing_subscriber::fmt::init();
-    }
+    fuzzysearch_common::init_logger();
 
-    let api_key = std::env::var("WEASYL_APIKEY").unwrap();
+    let api_key = std::env::var("WEASYL_APIKEY").unwrap_or_log();
 
     let pool = sqlx::postgres::PgPoolOptions::new()
         .max_connections(2)
-        .connect(&std::env::var("DATABASE_URL").unwrap())
+        .connect(&std::env::var("DATABASE_URL").unwrap_or_log())
         .await
-        .unwrap();
+        .unwrap_or_log();
 
     let client = reqwest::Client::new();
 
-    let faktory_dsn = std::env::var("FAKTORY_URL").expect("Missing FAKTORY_URL");
+    let faktory_dsn = std::env::var("FAKTORY_URL").expect_or_log("Missing FAKTORY_URL");
     let faktory = FaktoryClient::connect(faktory_dsn)
         .await
-        .expect("Unable to connect to Faktory");
+        .expect_or_log("Unable to connect to Faktory");
 
     loop {
         let min = sqlx::query!("SELECT max(id) id FROM weasyl")
             .fetch_one(&pool)
             .await
-            .unwrap()
+            .unwrap_or_log()
             .id
             .unwrap_or_default();
 
-        let max = load_frontpage(&client, &api_key).await.unwrap();
+        let max = load_frontpage(&client, &api_key).await.unwrap_or_log();
 
         tracing::info!(min, max, "Calculated range of submissions to check");
 
@@ -235,16 +229,16 @@ async fn main() {
             let row: Option<_> = sqlx::query!("SELECT id FROM weasyl WHERE id = $1", id)
                 .fetch_optional(&pool)
                 .await
-                .unwrap();
+                .unwrap_or_log();
 
             if row.is_some() {
                 continue;
             }
-            match load_submission(&client, &api_key, id).await.unwrap() {
+            match load_submission(&client, &api_key, id).await.unwrap_or_log() {
                 (Some(sub), json) => process_submission(&pool, &client, &faktory, json, sub)
                     .await
-                    .unwrap(),
-                (None, body) => insert_null(&pool, body, id).await.unwrap(),
+                    .unwrap_or_log(),
+                (None, body) => insert_null(&pool, body, id).await.unwrap_or_log(),
             }
         }
 
diff --git a/fuzzysearch-webhook/Cargo.toml b/fuzzysearch-webhook/Cargo.toml
index d5b0205..fd90146 100644
--- a/fuzzysearch-webhook/Cargo.toml
+++ b/fuzzysearch-webhook/Cargo.toml
@@ -6,7 +6,6 @@ edition = "2018"
 
 [dependencies]
 tracing = "0.1"
-tracing-subscriber = "0.2"
 tracing-unwrap = "0.9"
 
 thiserror = "1"
diff --git a/fuzzysearch-webhook/src/main.rs b/fuzzysearch-webhook/src/main.rs
index 4a09f55..353e611 100644
--- a/fuzzysearch-webhook/src/main.rs
+++ b/fuzzysearch-webhook/src/main.rs
@@ -25,14 +25,7 @@ pub enum WebhookError {
 }
 
 fn main() {
-    if matches!(std::env::var("LOG_FMT").as_deref(), Ok("json")) {
-        tracing_subscriber::fmt::Subscriber::builder()
-            .json()
-            .with_timer(tracing_subscriber::fmt::time::ChronoUtc::rfc3339())
-            .init();
-    } else {
-        tracing_subscriber::fmt::init();
-    }
+    fuzzysearch_common::init_logger();
 
     tracing::info!("Starting...");