From 903ab146006e61ab08c56822f5a300c9187c455c Mon Sep 17 00:00:00 2001
From: Mingwei Zhang
Date: Thu, 4 Sep 2025 12:45:06 -0700
Subject: [PATCH 1/3] refactor: replace unwrap/expect calls with proper error
 handling

- Added clippy lints to deny unwrap_used and expect_used
- Updated configuration loading in src/config.rs to use Result types
- Fixed database operations in src/database.rs and src/datasets/as2org.rs
- Improved RPKI validator error handling in src/datasets/rpki/
- Enhanced time parsing with fallback values in src/time.rs
- Updated CI workflow to include formatting and clippy checks
- Core library now passes clippy --lib with no warnings
- Binary retains some unwraps for CLI error handling, but the core library is free of them
---
 .github/workflows/rust.yml     |  22 +++--
 src/bin/monocle.rs             |  62 +++++++++---
 src/config.rs                  |  47 ++++++---
 src/database.rs                |  88 ++++++++---------
 src/datasets/as2org.rs         | 169 +++++++++++++++------------------
 src/datasets/rpki/roa.rs       |  10 +-
 src/datasets/rpki/validator.rs |  39 +++++---
 src/filters/parse.rs           |   5 +-
 src/lib.rs                     |   3 +
 src/time.rs                    |   7 +-
 10 files changed, 256 insertions(+), 196 deletions(-)

diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 4fe4d3e..151cdf4 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -2,13 +2,13 @@ name: Rust
 
 on:
   push:
-    branches: [ main ]
+    branches: [main]
     paths-ignore:
-      - '**.md'
+      - "**.md"
   pull_request:
-    branches: [ main ]
+    branches: [main]
     paths-ignore:
-      - '**.md'
+      - "**.md"
 
 env:
   CARGO_TERM_COLOR: always
@@ -17,8 +17,12 @@ jobs:
   build:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v4
-    - name: Build
-      run: cargo build --verbose
-    - name: Run tests
-      run: cargo test --verbose
+      - uses: actions/checkout@v4
+      - name: Check formatting
+        run: cargo fmt --check
+      - name: Build
+        run: cargo build --verbose
+      - name: Run tests
+        run: cargo test --verbose
+      - name: Run clippy
+        run: cargo clippy -- -D warnings

diff --git a/src/bin/monocle.rs b/src/bin/monocle.rs
index 60d7fb2..9f0db5f 100644
--- a/src/bin/monocle.rs
+++ b/src/bin/monocle.rs
@@ -1,4 +1,6 @@
 #![allow(clippy::type_complexity)]
+#![deny(clippy::unwrap_used)]
+#![deny(clippy::expect_used)]
 
 use std::collections::{HashMap, HashSet};
 use std::io::Write;
@@ -249,19 +251,19 @@ enum RadarCommands {
     },
 }
 
-fn elem_to_string(elem: &BgpElem, json: bool, pretty: bool, collector: &str) -> String {
+fn elem_to_string(elem: &BgpElem, json: bool, pretty: bool, collector: &str) -> Result<String> {
     if json {
         let mut val = json!(elem);
         val.as_object_mut()
-            .unwrap()
+            .ok_or_else(|| anyhow::anyhow!("Expected JSON object"))?
             .insert("collector".to_string(), collector.into());
         if pretty {
-            serde_json::to_string_pretty(&val).unwrap()
+            Ok(serde_json::to_string_pretty(&val)?)
         } else {
-            val.to_string()
+            Ok(val.to_string())
         }
     } else {
-        format!("{}|{}", elem, collector)
+        Ok(format!("{}|{}", elem, collector))
     }
 }
 
@@ -269,7 +271,13 @@ fn main() {
     dotenvy::dotenv().ok();
 
     let cli = Cli::parse();
 
-    let config = MonocleConfig::new(&cli.config);
+    let config = match MonocleConfig::new(&cli.config) {
+        Ok(c) => c,
+        Err(e) => {
+            eprintln!("Failed to load configuration: {}", e);
+            std::process::exit(1);
+        }
+    };
 
     if cli.debug {
         tracing_subscriber::fmt()
@@ -303,7 +311,13 @@ fn main() {
             None => {
                 for elem in parser {
                     // output to stdout
-                    let output_str = elem_to_string(&elem, json, pretty, "");
+                    let output_str = match elem_to_string(&elem, json, pretty, "") {
+                        Ok(s) => s,
+                        Err(e) => {
+                            eprintln!("Failed to format element: {}", e);
+                            continue;
+                        }
+                    };
                     if let Err(e) = writeln!(stdout, "{}", &output_str) {
                         if e.kind() != std::io::ErrorKind::BrokenPipe {
                             eprintln!("ERROR: {e}");
@@ -348,11 +362,21 @@ fn main() {
             }
 
             let mut sqlite_path_str = "".to_string();
-            let sqlite_db = sqlite_path.map(|p| {
-                sqlite_path_str = p.to_str().unwrap().to_string();
-                MsgStore::new(&Some(sqlite_path_str.clone()), sqlite_reset)
+            let sqlite_db = sqlite_path.and_then(|p| {
+                p.to_str()
+                    .map(|s| {
+                        sqlite_path_str = s.to_string();
+                        match MsgStore::new(&Some(sqlite_path_str.clone()), sqlite_reset) {
+                            Ok(store) => Some(store),
+                            Err(e) => {
+                                eprintln!("Failed to create SQLite store: {}", e);
+                                std::process::exit(1);
+                            }
+                        }
+                    })
+                    .flatten()
             });
-            let mrt_path = mrt_path.map(|p| p.to_str().unwrap().to_string());
+            let mrt_path = mrt_path.and_then(|p| p.to_str().map(|s| s.to_string()));
             let show_progress = sqlite_db.is_some() || mrt_path.is_some();
 
             // it's fine to unwrap as the filters.validate() function has already checked for issues
@@ -409,7 +433,13 @@ fn main() {
                     msg_count += 1;
 
                     if display_stdout {
-                        let output_str = elem_to_string(&elem, json, pretty, collector.as_str());
+                        let output_str = match elem_to_string(&elem, json, pretty, collector.as_str()) {
+                            Ok(s) => s,
+                            Err(e) => {
+                                eprintln!("Failed to format element: {}", e);
+                                continue;
+                            }
+                        };
                         println!("{output_str}");
                         continue;
                     }
@@ -417,7 +447,9 @@ fn main() {
                     msg_cache.push((elem, collector));
                     if msg_cache.len() >= 100000 {
                         if let Some(db) = &sqlite_db {
-                            db.insert_elems(&msg_cache);
+                            if let Err(e) = db.insert_elems(&msg_cache) {
+                                eprintln!("Failed to insert elements to database: {}", e);
+                            }
                         }
                         if let Some((encoder, _writer)) = &mut mrt_writer {
                             for (elem, _) in &msg_cache {
@@ -430,7 +462,9 @@ fn main() {
 
             if !msg_cache.is_empty() {
                 if let Some(db) = &sqlite_db {
-                    db.insert_elems(&msg_cache);
+                    if let Err(e) = db.insert_elems(&msg_cache) {
+                        eprintln!("Failed to insert elements to database: {}", e);
+                    }
                 }
                 if let Some((encoder, _writer)) = &mut mrt_writer {
                     for (elem, _) in &msg_cache {
diff --git a/src/config.rs b/src/config.rs
index 002a02c..8585a80 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -1,3 +1,4 @@
+use anyhow::{anyhow, Result};
 use config::Config;
 use std::collections::HashMap;
 use std::path::Path;
@@ -15,10 +16,14 @@ const EMPTY_CONFIG: &str = r#"### monocle configuration file
 
 impl MonocleConfig {
     /// function to create and initialize a new configuration
-    pub fn new(path: &Option<String>) -> MonocleConfig {
+    pub fn new(path: &Option<String>) -> Result<MonocleConfig> {
         let mut builder = Config::builder();
         // by default use $HOME/.monocle.toml as the configuration file path
-        let home_dir = dirs::home_dir().unwrap().to_str().unwrap().to_owned();
+        let home_dir = dirs::home_dir()
+            .ok_or_else(|| anyhow!("Could not find home directory"))?
+            .to_str()
+            .ok_or_else(|| anyhow!("Could not convert home directory path to string"))?
+            .to_owned();
 
         // config dir
         let monocle_dir = format!("{}/.monocle", home_dir.as_str());
@@ -27,19 +32,25 @@ impl MonocleConfig {
             Some(p) => {
                 let path = Path::new(p.as_str());
                 if path.exists() {
-                    builder = builder.add_source(config::File::with_name(path.to_str().unwrap()));
+                    let path_str = path
+                        .to_str()
+                        .ok_or_else(|| anyhow!("Could not convert path to string"))?;
+                    builder = builder.add_source(config::File::with_name(path_str));
                 } else {
-                    std::fs::write(p.as_str(), EMPTY_CONFIG).expect("Unable to create config file");
+                    std::fs::write(p.as_str(), EMPTY_CONFIG)
+                        .map_err(|e| anyhow!("Unable to create config file: {}", e))?;
                 }
             }
             None => {
-                std::fs::create_dir_all(monocle_dir.as_str()).unwrap();
+                std::fs::create_dir_all(monocle_dir.as_str())
+                    .map_err(|e| anyhow!("Unable to create monocle directory: {}", e))?;
                 let p = format!("{}/monocle.toml", monocle_dir.as_str());
                 if Path::new(p.as_str()).exists() {
                     builder = builder.add_source(config::File::with_name(p.as_str()));
                 } else {
-                    std::fs::write(p.as_str(), EMPTY_CONFIG)
-                        .unwrap_or_else(|_| panic!("Unable to create config file {}", p.as_str()));
+                    std::fs::write(p.as_str(), EMPTY_CONFIG).map_err(|e| {
+                        anyhow!("Unable to create config file {}: {}", p.as_str(), e)
+                    })?;
                 }
             }
         }
@@ -47,24 +58,34 @@ impl MonocleConfig {
         // Eg.. `MONOCLE_DEBUG=1 ./target/app` would set the `debug` key
         builder = builder.add_source(config::Environment::with_prefix("MONOCLE"));
 
-        let settings = builder.build().unwrap();
+        let settings = builder
+            .build()
+            .map_err(|e| anyhow!("Failed to build configuration: {}", e))?;
         let config = settings
             .try_deserialize::<HashMap<String, String>>()
-            .unwrap();
+            .map_err(|e| anyhow!("Failed to deserialize configuration: {}", e))?;
 
         // check data directory config
         let data_dir = match config.get("data_dir") {
             Some(p) => {
                 let path = Path::new(p);
-                path.to_str().unwrap().to_string()
+                path.to_str()
+                    .ok_or_else(|| anyhow!("Could not convert data_dir path to string"))?
+                    .to_string()
             }
             None => {
-                let dir = format!("{}/.monocle/", dirs::home_dir().unwrap().to_str().unwrap());
-                std::fs::create_dir_all(dir.as_str()).unwrap();
+                let home =
+                    dirs::home_dir().ok_or_else(|| anyhow!("Could not find home directory"))?;
+                let home_str = home
+                    .to_str()
+                    .ok_or_else(|| anyhow!("Could not convert home directory path to string"))?;
+                let dir = format!("{}/.monocle/", home_str);
+                std::fs::create_dir_all(dir.as_str())
+                    .map_err(|e| anyhow!("Unable to create data directory: {}", e))?;
                 dir
             }
         };
 
-        MonocleConfig { data_dir }
+        Ok(MonocleConfig { data_dir })
     }
 }
diff --git a/src/database.rs b/src/database.rs
index f7f166d..8e9b32a 100644
--- a/src/database.rs
+++ b/src/database.rs
@@ -1,4 +1,4 @@
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use bgpkit_parser::models::ElemType;
 use bgpkit_parser::BgpElem;
 use itertools::Itertools;
@@ -23,19 +23,18 @@ pub struct MsgStore {
 }
 
 impl MsgStore {
-    pub fn new(db_path: &Option<String>, reset: bool) -> MsgStore {
-        let mut db = MonocleDatabase::new(db_path).unwrap();
-        Self::initialize_msgs_db(&mut db, reset);
-        MsgStore { db }
+    pub fn new(db_path: &Option<String>, reset: bool) -> Result<MsgStore> {
+        let mut db = MonocleDatabase::new(db_path)?;
+        Self::initialize_msgs_db(&mut db, reset)?;
+        Ok(MsgStore { db })
     }
 
-    fn initialize_msgs_db(db: &mut MonocleDatabase, reset: bool) {
+    fn initialize_msgs_db(db: &mut MonocleDatabase, reset: bool) -> Result<()> {
         if reset {
-            db.conn.execute("drop table if exists elems", []).unwrap();
+            db.conn.execute("drop table if exists elems", [])?;
         }
-        db.conn
-            .execute(
-                r#"
+        db.conn.execute(
+            r#"
         create table if not exists elems (
             timestamp INTEGER,
             elem_type TEXT,
@@ -55,52 +54,43 @@ impl MsgStore {
             aggr_ip TEXT
         );
         "#,
-                [],
-            )
-            .unwrap();
+            [],
+        )?;
 
         // Add indexes for common query patterns
+        db.conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_timestamp ON elems(timestamp)",
+            [],
+        )?;
+        db.conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_peer_asn ON elems(peer_asn)",
+            [],
+        )?;
         db.conn
-            .execute(
-                "CREATE INDEX IF NOT EXISTS idx_timestamp ON elems(timestamp)",
-                [],
-            )
-            .unwrap();
-        db.conn
-            .execute(
-                "CREATE INDEX IF NOT EXISTS idx_peer_asn ON elems(peer_asn)",
-                [],
-            )
-            .unwrap();
-        db.conn
-            .execute("CREATE INDEX IF NOT EXISTS idx_prefix ON elems(prefix)", [])
-            .unwrap();
-        db.conn
-            .execute(
-                "CREATE INDEX IF NOT EXISTS idx_collector ON elems(collector)",
-                [],
-            )
-            .unwrap();
-        db.conn
-            .execute(
-                "CREATE INDEX IF NOT EXISTS idx_elem_type ON elems(elem_type)",
-                [],
-            )
-            .unwrap();
+            .execute("CREATE INDEX IF NOT EXISTS idx_prefix ON elems(prefix)", [])?;
+        db.conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_collector ON elems(collector)",
+            [],
+        )?;
+        db.conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_elem_type ON elems(elem_type)",
+            [],
+        )?;
 
         // Enable SQLite performance optimizations
-        db.conn.execute("PRAGMA journal_mode=WAL", []).unwrap();
-        db.conn.execute("PRAGMA synchronous=NORMAL", []).unwrap();
-        db.conn.execute("PRAGMA cache_size=100000", []).unwrap();
-        db.conn.execute("PRAGMA temp_store=MEMORY", []).unwrap();
+        db.conn.execute("PRAGMA journal_mode=WAL", [])?;
+        db.conn.execute("PRAGMA synchronous=NORMAL", [])?;
+        db.conn.execute("PRAGMA cache_size=100000", [])?;
+        db.conn.execute("PRAGMA temp_store=MEMORY", [])?;
+        Ok(())
     }
 
-    pub fn insert_elems(&self, elems: &[(BgpElem, String)]) {
+    pub fn insert_elems(&self, elems: &[(BgpElem, String)]) -> Result<()> {
         const BATCH_SIZE: usize = 50000;
         for batch in elems.chunks(BATCH_SIZE) {
             // Use a transaction for batch inserts
-            let tx = self.db.conn.unchecked_transaction().unwrap();
+            let tx = self.db.conn.unchecked_transaction()?;
 
             {
                 // Use prepared statement for better performance
@@ -109,7 +99,7 @@ impl MsgStore {
                     prefix, next_hop, as_path, origin_asns, origin, local_pref, med, communities, atomic, aggr_asn, aggr_ip) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16)"
-                ).unwrap();
+                ).map_err(|e| anyhow!("Failed to prepare statement: {}", e))?;
 
                 for (elem, collector) in batch {
                     let t = match elem.elem_type {
@@ -141,11 +131,13 @@ impl MsgStore {
                         elem.aggr_asn.map(|asn| asn.to_u32()),
                         elem.aggr_ip.as_ref().map(|v| v.to_string()),
                     ])
-                    .unwrap();
+                    .map_err(|e| anyhow!("Failed to execute statement: {}", e))?;
                 }
             } // stmt is dropped here
 
-            tx.commit().unwrap();
+            tx.commit()
+                .map_err(|e| anyhow!("Failed to commit transaction: {}", e))?;
         }
+        Ok(())
     }
 }
diff --git a/src/datasets/as2org.rs b/src/datasets/as2org.rs
index fd2b1d2..dabbfb5 100644
--- a/src/datasets/as2org.rs
+++ b/src/datasets/as2org.rs
@@ -123,7 +123,7 @@ pub struct SearchResultConcise {
 impl As2org {
     pub fn new(db_path: &Option<String>) -> Result<As2org> {
         let mut db = MonocleDatabase::new(db_path)?;
-        As2org::initialize_db(&mut db);
+        As2org::initialize_db(&mut db)?;
         let country_lookup = CountryLookup::new();
         Ok(As2org { db, country_lookup })
     }
@@ -162,14 +162,13 @@ impl As2org {
             .db
             .conn
             .query_row("select count(*) from as2org_as", [], |row| row.get(0))
-            .unwrap();
+            .unwrap_or(0);
         count == 0
     }
 
-    fn initialize_db(db: &mut MonocleDatabase) {
-        db.conn
-            .execute(
-                r#"
+    fn initialize_db(db: &mut MonocleDatabase) -> Result<()> {
+        db.conn.execute(
+            r#"
         create table if not exists as2org_as (
             asn INTEGER PRIMARY KEY,
             name TEXT,
@@ -177,12 +176,10 @@ impl As2org {
             source TEXT
         );
         "#,
-                [],
-            )
-            .unwrap();
-        db.conn
-            .execute(
-                r#"
+            [],
+        )?;
+        db.conn.execute(
+            r#"
         create table if not exists as2org_org (
             org_id TEXT PRIMARY KEY,
             name TEXT,
@@ -190,110 +187,90 @@ impl As2org {
             source TEXT
         );
         "#,
-                [],
-            )
-            .unwrap();
+            [],
+        )?;
 
         // Add indexes for better query performance
-        db.conn
-            .execute(
-                "CREATE INDEX IF NOT EXISTS idx_as_org_id ON as2org_as(org_id)",
-                [],
-            )
-            .unwrap();
-        db.conn
-            .execute(
-                "CREATE INDEX IF NOT EXISTS idx_as_name ON as2org_as(name)",
-                [],
-            )
-            .unwrap();
-        db.conn
-            .execute(
-                "CREATE INDEX IF NOT EXISTS idx_org_name ON as2org_org(name)",
-                [],
-            )
-            .unwrap();
-        db.conn
-            .execute(
-                "CREATE INDEX IF NOT EXISTS idx_org_country ON as2org_org(country)",
-                [],
-            )
-            .unwrap();
+        db.conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_as_org_id ON as2org_as(org_id)",
+            [],
+        )?;
+        db.conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_as_name ON as2org_as(name)",
+            [],
+        )?;
+        db.conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_org_name ON as2org_org(name)",
+            [],
+        )?;
+        db.conn.execute(
+            "CREATE INDEX IF NOT EXISTS idx_org_country ON as2org_org(country)",
+            [],
+        )?;
 
         // Enable SQLite performance optimizations
         let _: String = db
            .conn
-            .query_row("PRAGMA journal_mode=WAL", [], |row| row.get(0))
-            .unwrap();
-        db.conn.execute("PRAGMA synchronous=NORMAL", []).unwrap();
-        db.conn.execute("PRAGMA cache_size=100000", []).unwrap();
-        db.conn.execute("PRAGMA temp_store=MEMORY", []).unwrap();
+            .query_row("PRAGMA journal_mode=WAL", [], |row| row.get(0))?;
+        db.conn.execute("PRAGMA synchronous=NORMAL", [])?;
+        db.conn.execute("PRAGMA cache_size=100000", [])?;
+        db.conn.execute("PRAGMA temp_store=MEMORY", [])?;
 
         // views
-        db.conn
-            .execute(
-                r#"
+        db.conn.execute(
+            r#"
         create view if not exists as2org_both as
        select a.asn, a.name as 'as_name', b.name as 'org_name', b.org_id, b.country
        from as2org_as as a join as2org_org as b on a.org_id = b.org_id
        ;
        "#,
-                [],
-            )
-            .unwrap();
+            [],
+        )?;
 
-        db.conn
-            .execute(
-                r#"
+        db.conn.execute(
+            r#"
        create view if not exists as2org_count as
        select org_id, org_name, count(*) as count from as2org_both
        group by org_name order by count desc;
        "#,
-                [],
-            )
-            .unwrap();
+            [],
+        )?;
 
-        db.conn
-            .execute(
-                r#"
+        db.conn.execute(
+            r#"
        create view if not exists as2org_all as
        select a.*, b.count from as2org_both as a
            join as2org_count as b on a.org_id = b.org_id;
        "#,
-                [],
-            )
-            .unwrap();
+            [],
+        )?;
+        Ok(())
     }
 
-    pub fn clear_db(&self) {
-        self.db
-            .conn
-            .execute(
-                r#"
+    pub fn clear_db(&self) -> Result<()> {
+        self.db.conn.execute(
+            r#"
        DELETE FROM as2org_as
        "#,
-                [],
-            )
-            .unwrap();
-        self.db
-            .conn
-            .execute(
-                r#"
+            [],
+        )?;
+        self.db.conn.execute(
+            r#"
        DELETE FROM as2org_org
        "#,
-                [],
-            )
-            .unwrap();
+            [],
+        )?;
+        Ok(())
     }
 
     /// parse as2org data and insert into monocle sqlite database
     pub fn parse_insert_as2org(&self, url: Option<&str>) -> Result<()> {
-        self.clear_db();
+        self.clear_db()?;
         let url = match url {
             Some(u) => u.to_string(),
-            None => As2org::get_most_recent_data(),
+            None => As2org::get_most_recent_data()?,
         };
         info!("start parsing as2org file at {}", url.as_str());
         let entries = As2org::parse_as2org_file(url.as_str())?;
@@ -322,8 +299,12 @@ impl As2org {
                 ))?;
             }
             DataEntry::As(e) => {
+                let asn = e
+                    .asn
+                    .parse::<u32>()
+                    .map_err(|_| anyhow!("Failed to parse ASN: {}", e.asn))?;
                 stmt_as.execute((
-                    e.asn.parse::<u32>().unwrap(),
+                    asn,
                     e.name.as_str(),
                     e.org_id.as_str(),
                     e.source.as_str(),
@@ -382,9 +363,12 @@ impl As2org {
             ));
         }
 
+        let first_country = countries
+            .first()
+            .ok_or_else(|| anyhow!("No country found"))?;
         let mut stmt = self.db.conn.prepare(
             format!(
-                "SELECT asn, as_name, org_name, org_id, country, count FROM as2org_all where LOWER(country)='{}' order by count desc", countries.first().unwrap().code.to_lowercase()).as_str()
+                "SELECT asn, as_name, org_name, org_id, country, count FROM as2org_all where LOWER(country)='{}' order by count desc", first_country.code.to_lowercase()).as_str()
         )?;
         res = self.stmt_to_results(&mut stmt, full_country_name)?;
     }
@@ -412,7 +396,7 @@ impl As2org {
             true => {
                 let new_res = match query_type {
                     QueryType::ASN => SearchResult {
-                        asn: query.parse::<u32>().unwrap(),
+                        asn: query.parse::<u32>().unwrap_or(0),
                         as_name: "?".to_string(),
                         org_name: "?".to_string(),
                         org_id: "?".to_string(),
@@ -475,20 +459,23 @@ impl As2org {
         Ok(res)
     }
 
-    pub fn get_most_recent_data() -> String {
-        let data_link: Regex = Regex::new(r".*(........\.as-org2info\.jsonl\.gz).*").unwrap();
+    pub fn get_most_recent_data() -> Result<String> {
+        let data_link: Regex = Regex::new(r".*(........\.as-org2info\.jsonl\.gz).*")
+            .map_err(|e| anyhow!("Failed to create regex: {}", e))?;
         let content = ureq::get("https://publicdata.caida.org/datasets/as-organizations/")
             .call()
-            .unwrap()
+            .map_err(|e| anyhow!("Failed to fetch data: {}", e))?
             .into_string()
-            .unwrap();
+            .map_err(|e| anyhow!("Failed to parse response: {}", e))?;
 
         let res: Vec<String> = data_link
             .captures_iter(content.as_str())
             .map(|cap| cap[1].to_owned())
             .collect();
-        let file = res.last().unwrap().to_string();
+        let file = res.last().ok_or_else(|| anyhow!("No data files found"))?;
 
-        format!("https://publicdata.caida.org/datasets/as-organizations/{file}")
+        Ok(format!(
+            "https://publicdata.caida.org/datasets/as-organizations/{file}"
+        ))
     }
 }
 
@@ -517,13 +504,13 @@ mod tests {
 
         // approximately one minute insert time
         let _res = as2org.parse_insert_as2org(Some("tests/test-as2org.jsonl.gz"));
-        as2org.clear_db();
+        as2org.clear_db().unwrap();
     }
 
     #[test]
     fn test_search() {
         let as2org = As2org::new(&Some("./test.sqlite3".to_string())).unwrap();
-        as2org.clear_db();
+        as2org.clear_db().unwrap();
         assert!(as2org.is_db_empty());
         as2org
             .parse_insert_as2org(Some("tests/test-as2org.jsonl.gz"))
@@ -564,7 +551,7 @@ mod tests {
         assert_eq!(data[0].org_country, "US");
         assert_eq!(data[0].org_size, 1);
 
-        as2org.clear_db();
+        as2org.clear_db().unwrap();
     }
 
     #[test]
diff --git a/src/datasets/rpki/roa.rs b/src/datasets/rpki/roa.rs
index ac856a3..e24d7f3 100644
--- a/src/datasets/rpki/roa.rs
+++ b/src/datasets/rpki/roa.rs
@@ -17,21 +17,23 @@ pub fn read_roa(file_path: &str) -> Result<Vec<RoaObject>> {
     reader.read_to_end(&mut data)?;
     let roa = Roa::decode(data.as_ref(), true)?;
     let asn: u32 = roa.content().as_id().into_u32();
-    let objects = roa
+    let objects: Result<Vec<RoaObject>, anyhow::Error> = roa
         .content()
         .iter()
         .map(|addr| {
             let prefix_str = addr.to_string();
             let fields = prefix_str.as_str().split('/').collect::<Vec<&str>>();
             let p = format!("{}/{}", fields[0], fields[1]);
-            let prefix = IpNet::from_str(p.as_str()).unwrap();
+            let prefix = IpNet::from_str(p.as_str())
+                .map_err(|e| anyhow::anyhow!("Invalid prefix {}: {}", p, e))?;
             let max_len = addr.max_length();
-            RoaObject {
+            Ok(RoaObject {
                 asn,
                 prefix,
                 max_len,
-            }
+            })
         })
         .collect();
+    let objects = objects?;
     Ok(objects)
 }
diff --git a/src/datasets/rpki/validator.rs b/src/datasets/rpki/validator.rs
index 0b8fc1a..4fbdf51 100644
--- a/src/datasets/rpki/validator.rs
+++ b/src/datasets/rpki/validator.rs
@@ -148,9 +148,9 @@ pub fn validate(asn: u32, prefix_str: &str) -> Result<(RpkiValidity, Vec<RoaResource>)> {
 
     let validation_res: ValidationResult = serde_json::from_value(
         res.get("data")
-            .unwrap()
+            .ok_or_else(|| anyhow::anyhow!("No 'data' field in response"))?
             .get("validation")
-            .unwrap()
+            .ok_or_else(|| anyhow::anyhow!("No 'validation' field in response data"))?
             .to_owned(),
     )?;
 
@@ -184,17 +184,20 @@ pub fn list_by_prefix(prefix: &IpNet) -> Result<Vec<RoaResource>> {
        "#,
        &prefix, &prefix
    );
 
-    let res = ureq::post(CLOUDFLARE_RPKI_GRAPHQL)
+    let response = ureq::post(CLOUDFLARE_RPKI_GRAPHQL)
         .set("Content-Type", "application/json")
         .send_json(ureq::json!({ "query": query_string }))?
-        .into_json::<serde_json::Value>()?
+        .into_json::<serde_json::Value>()?;
+
+    let res = response
         .get("data")
-        .unwrap()
+        .ok_or_else(|| anyhow::anyhow!("No 'data' field in response"))?
         .get("roas")
-        .unwrap()
+        .ok_or_else(|| anyhow::anyhow!("No 'roas' field in response data"))?
         .to_owned();
 
-    let resources: Vec<RoaResource> = serde_json::from_value(res).unwrap();
+    let resources: Vec<RoaResource> = serde_json::from_value(res)
+        .map_err(|e| anyhow::anyhow!("Failed to parse ROA resources: {}", e))?;
 
     Ok(resources)
 }
@@ -219,17 +222,20 @@ pub fn list_by_asn(asn: u32) -> Result<Vec<RoaResource>> {
        asn
    );
 
-    let res = ureq::post(CLOUDFLARE_RPKI_GRAPHQL)
+    let response = ureq::post(CLOUDFLARE_RPKI_GRAPHQL)
         .set("Content-Type", "application/json")
         .send_json(ureq::json!({ "query": query_string }))?
-        .into_json::<serde_json::Value>()?
+        .into_json::<serde_json::Value>()?;
+
+    let res = response
         .get("data")
-        .unwrap()
+        .ok_or_else(|| anyhow::anyhow!("No 'data' field in response"))?
         .get("roas")
-        .unwrap()
+        .ok_or_else(|| anyhow::anyhow!("No 'roas' field in response data"))?
         .to_owned();
 
-    let resources: Vec<RoaResource> = serde_json::from_value(res).unwrap();
+    let resources: Vec<RoaResource> = serde_json::from_value(res)
+        .map_err(|e| anyhow::anyhow!("Failed to parse ROA resources: {}", e))?;
 
     Ok(resources)
 }
@@ -266,8 +272,13 @@ pub fn list_routed_by_state(asn: u32, state: ValidationState) -> Result<Vec<BgpEntry>> {
         .into_json::<serde_json::Value>()?;
 
-    let bgp_res: Vec<BgpEntry> =
-        serde_json::from_value(res.get("data").unwrap().get("bgp").unwrap().to_owned())?;
+    let bgp_res: Vec<BgpEntry> = serde_json::from_value(
+        res.get("data")
+            .ok_or_else(|| anyhow::anyhow!("No 'data' field in response"))?
+            .get("bgp")
+            .ok_or_else(|| anyhow::anyhow!("No 'bgp' field in response data"))?
+            .to_owned(),
+    )?;
 
     Ok(bgp_res)
 }
diff --git a/src/filters/parse.rs b/src/filters/parse.rs
index c49a41b..27096a5 100644
--- a/src/filters/parse.rs
+++ b/src/filters/parse.rs
@@ -112,7 +112,10 @@ impl ParseFilters {
             }
         } else {
             // this case is start_ts AND end_ts
-            return Ok((start_ts.unwrap().timestamp(), end_ts.unwrap().timestamp()));
+            match (start_ts, end_ts) {
+                (Some(start), Some(end)) => return Ok((start.timestamp(), end.timestamp())),
+                _ => return Err(anyhow!("Both start_ts and end_ts must be provided when duration is not set")),
+            }
         }
 
         Err(anyhow!("unexpected time-string parsing result"))
diff --git a/src/lib.rs b/src/lib.rs
index e1eff2b..b6e9e0f 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,3 +1,6 @@
+#![deny(clippy::unwrap_used)]
+#![deny(clippy::expect_used)]
+
 mod config;
 mod database;
 mod datasets;
diff --git a/src/time.rs b/src/time.rs
index 647ae4b..81fa37e 100644
--- a/src/time.rs
+++ b/src/time.rs
@@ -15,7 +15,8 @@ pub fn string_to_time(time_string: &str) -> anyhow::Result<DateTime<Utc>> {
     let ts = match dateparser::parse_with(
         time_string,
         &Utc,
-        chrono::NaiveTime::from_hms_opt(0, 0, 0).unwrap(),
+        chrono::NaiveTime::from_hms_opt(0, 0, 0)
+            .ok_or_else(|| anyhow!("Failed to create time"))?,
     ) {
         Ok(ts) => ts,
         Err(_) => {
@@ -60,7 +61,9 @@ pub fn time_to_table(time_vec: &[String]) -> anyhow::Result<String> {
         let ht = HumanTime::from(chrono::Local::now() - chrono::Duration::seconds(now_ts - ts));
         let human = ht.to_string();
         let rfc3339 = Utc
-            .from_utc_datetime(&DateTime::from_timestamp(ts, 0).unwrap().naive_utc())
+            .from_utc_datetime(&DateTime::from_timestamp(ts, 0)
+                .unwrap_or_default()
+                .naive_utc())
             .to_rfc3339();
         BgpTime {
             unix: ts,

From 5ff4e86ca2ae7ed13dd3e79d50cf6d2e14f4f434 Mon Sep 17 00:00:00 2001
From: Mingwei Zhang
Date: Thu, 4 Sep 2025 12:49:57 -0700
Subject: [PATCH 2/3] fix: handle network errors gracefully in multi-file
 processing

- Fixed a panic when a remote file download fails during search operations
- Replaced unwrap() with proper error handling in filters.to_parser() calls
- Failed files now log an error and processing continues with the remaining files in the batch
- Parallel processing continues uninterrupted when individual files fail
- Resolves a thread panic that halted the entire search process
---
 src/bin/monocle.rs | 37 ++++++++++++++++++++++++++++++-----
 1 file changed, 32 insertions(+), 5 deletions(-)

diff --git a/src/bin/monocle.rs b/src/bin/monocle.rs
index 9f0db5f..f257ac5 100644
--- a/src/bin/monocle.rs
+++ b/src/bin/monocle.rs
@@ -302,8 +302,20 @@ fn main() {
                 return;
             }
 
-            let file_path = file_path.to_str().unwrap();
-            let parser = filters.to_parser(file_path).unwrap();
+            let file_path = match file_path.to_str() {
+                Some(path) => path,
+                None => {
+                    eprintln!("Invalid file path");
+                    std::process::exit(1);
+                }
+            };
+            let parser = match filters.to_parser(file_path) {
+                Ok(p) => p,
+                Err(e) => {
+                    eprintln!("Failed to create parser for {}: {}", file_path, e);
+                    std::process::exit(1);
+                }
+            };
 
             let mut stdout = std::io::stdout();
 
@@ -380,7 +392,13 @@ fn main() {
             let show_progress = sqlite_db.is_some() || mrt_path.is_some();
 
             // it's fine to unwrap as the filters.validate() function has already checked for issues
-            let items = filters.to_broker_items().unwrap();
+            let items = match filters.to_broker_items() {
+                Ok(items) => items,
+                Err(e) => {
+                    eprintln!("Failed to convert filters to broker items: {}", e);
+                    std::process::exit(1);
+                }
+            };
 
             let total_items = items.len();
 
@@ -510,7 +528,13 @@ fn main() {
                 let url = item.url;
                 let collector = item.collector_id;
                 info!("start parsing {}", url.as_str());
-                let parser = filters.to_parser(url.as_str()).unwrap();
+                let parser = match filters.to_parser(url.as_str()) {
+                    Ok(p) => p,
+                    Err(e) => {
+                        eprintln!("Failed to parse {}: {}", url.as_str(), e);
+                        return;
+                    }
+                };
 
                 let mut elems_count = 0;
                 for elem in parser {
@@ -544,7 +568,10 @@ fn main() {
 
             if update {
                 // if the update flag is set, clear existing as2org data and re-download later
-                as2org.clear_db();
+                if let Err(e) = as2org.clear_db() {
+                    eprintln!("Failed to clear database: {}", e);
+                    std::process::exit(1);
+                }
             }
 
             if as2org.is_db_empty() {

From adda53847ac4966aa74a18af43d73e66608858ab Mon Sep 17 00:00:00 2001
From: Mingwei Zhang
Date: Thu, 4 Sep 2025 12:55:51 -0700
Subject: [PATCH 3/3] style: apply cargo fmt formatting

---
 src/bin/monocle.rs             | 22 ++++++++++++--------
 src/datasets/rpki/validator.rs |  4 ++--
 src/filters/parse.rs           |  6 +++++-
 src/time.rs                    | 11 ++++++-----
 4 files changed, 27 insertions(+), 16 deletions(-)

diff --git a/src/bin/monocle.rs b/src/bin/monocle.rs
index f257ac5..ceb0f72 100644
--- a/src/bin/monocle.rs
+++ b/src/bin/monocle.rs
@@ -251,7 +251,12 @@ enum RadarCommands {
     },
 }
 
-fn elem_to_string(elem: &BgpElem, json: bool, pretty: bool, collector: &str) -> Result<String> {
+fn elem_to_string(
+    elem: &BgpElem,
+    json: bool,
+    pretty: bool,
+    collector: &str,
+) -> Result<String> {
     if json {
         let mut val = json!(elem);
         val.as_object_mut()
@@ -456,13 +461,14 @@ fn main() {
                     msg_count += 1;
 
                     if display_stdout {
-                        let output_str = match elem_to_string(&elem, json, pretty, collector.as_str()) {
-                            Ok(s) => s,
-                            Err(e) => {
-                                eprintln!("Failed to format element: {}", e);
-                                continue;
-                            }
-                        };
+                        let output_str =
+                            match elem_to_string(&elem, json, pretty, collector.as_str()) {
+                                Ok(s) => s,
+                                Err(e) => {
+                                    eprintln!("Failed to format element: {}", e);
+                                    continue;
+                                }
+                            };
                         println!("{output_str}");
                         continue;
                     }
diff --git a/src/datasets/rpki/validator.rs b/src/datasets/rpki/validator.rs
index 4fbdf51..87f3d27 100644
--- a/src/datasets/rpki/validator.rs
+++ b/src/datasets/rpki/validator.rs
@@ -188,7 +188,7 @@ pub fn list_by_prefix(prefix: &IpNet) -> Result<Vec<RoaResource>> {
         .set("Content-Type", "application/json")
         .send_json(ureq::json!({ "query": query_string }))?
         .into_json::<serde_json::Value>()?;
-    
+
     let res = response
         .get("data")
         .ok_or_else(|| anyhow::anyhow!("No 'data' field in response"))?
@@ -226,7 +226,7 @@ pub fn list_by_asn(asn: u32) -> Result<Vec<RoaResource>> {
         .set("Content-Type", "application/json")
         .send_json(ureq::json!({ "query": query_string }))?
         .into_json::<serde_json::Value>()?;
-    
+
     let res = response
         .get("data")
         .ok_or_else(|| anyhow::anyhow!("No 'data' field in response"))?
diff --git a/src/filters/parse.rs b/src/filters/parse.rs
index 27096a5..712207e 100644
--- a/src/filters/parse.rs
+++ b/src/filters/parse.rs
@@ -114,7 +114,11 @@ impl ParseFilters {
             // this case is start_ts AND end_ts
             match (start_ts, end_ts) {
                 (Some(start), Some(end)) => return Ok((start.timestamp(), end.timestamp())),
-                _ => return Err(anyhow!("Both start_ts and end_ts must be provided when duration is not set")),
+                _ => {
+                    return Err(anyhow!(
+                        "Both start_ts and end_ts must be provided when duration is not set"
+                    ))
+                }
             }
         }
 
diff --git a/src/time.rs b/src/time.rs
index 81fa37e..6f85be6 100644
--- a/src/time.rs
+++ b/src/time.rs
@@ -15,8 +15,7 @@ pub fn string_to_time(time_string: &str) -> anyhow::Result<DateTime<Utc>> {
     let ts = match dateparser::parse_with(
         time_string,
         &Utc,
-        chrono::NaiveTime::from_hms_opt(0, 0, 0)
-            .ok_or_else(|| anyhow!("Failed to create time"))?,
+        chrono::NaiveTime::from_hms_opt(0, 0, 0).ok_or_else(|| anyhow!("Failed to create time"))?,
     ) {
         Ok(ts) => ts,
         Err(_) => {
@@ -61,9 +60,11 @@ pub fn time_to_table(time_vec: &[String]) -> anyhow::Result<String> {
         let ht = HumanTime::from(chrono::Local::now() - chrono::Duration::seconds(now_ts - ts));
         let human = ht.to_string();
         let rfc3339 = Utc
-            .from_utc_datetime(&DateTime::from_timestamp(ts, 0)
-                .unwrap_or_default()
-                .naive_utc())
+            .from_utc_datetime(
+                &DateTime::from_timestamp(ts, 0)
+                    .unwrap_or_default()
+                    .naive_utc(),
+            )
             .to_rfc3339();
         BgpTime {
             unix: ts,
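
---

Usage note (not part of the patches above): a minimal caller sketch showing how
the Result-returning constructors from patch 1 compose with `?`. The function
name, the `msgs.sqlite3` filename, and direct access to `config.data_dir` are
illustrative assumptions, not code from this series:

    use anyhow::Result;

    // Hypothetical helper: open the message store under the configured
    // data directory, propagating any failure to the caller instead of
    // panicking as the old unwrap()-based constructors did.
    fn open_store(config_path: &Option<String>) -> Result<MsgStore> {
        let config = MonocleConfig::new(config_path)?;
        let db_path = format!("{}/msgs.sqlite3", config.data_dir);
        MsgStore::new(&Some(db_path), false)
    }

With both constructors fallible, a binary can keep a single `match`/`exit`
site in main() (as src/bin/monocle.rs does) while library code stays free of
unwrap/expect under the new clippy lints.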