diff --git a/butcher/src/main.rs b/butcher/src/main.rs
index fe04e77..724b451 100644
--- a/butcher/src/main.rs
+++ b/butcher/src/main.rs
@@ -1,6 +1,5 @@
 use std::{
     sync::Arc,
-    ops::Deref,
 };
 use futures::StreamExt;
 use cave::{
@@ -74,7 +73,7 @@ async fn main() {
             let post = Arc::new(post);
             store.save_post_tags(&post, is_profane(&profanity, &post).await).await;

-            let update_set = UpdateSet::from(post.deref());
+            let update_set = UpdateSet::from(&*post);
             if ! update_set.is_empty() {
                 trend_setter_tx.send(update_set).await.unwrap();
             }
diff --git a/butcher/src/trend_setter.rs b/butcher/src/trend_setter.rs
index 86b48aa..208d9db 100644
--- a/butcher/src/trend_setter.rs
+++ b/butcher/src/trend_setter.rs
@@ -62,7 +62,7 @@ pub fn start(mut store: Store) -> Tx {
                 }
                 std::collections::hash_map::Entry::Occupied(mut entry) => {
                     // merge into buffered
-                    for tag in tags.into_iter() {
+                    for tag in tags {
                         entry.get_mut().insert(tag);
                     }
                 }
@@ -70,7 +70,7 @@ pub fn start(mut store: Store) -> Tx {
             }

             loop {
-                let mut next_run = queue.keys().cloned().next();
+                let mut next_run = queue.keys().copied().next();
                 if let Some(next_run_) = next_run {
                     let now = Instant::now();
                     if next_run_ <= now {
@@ -80,14 +80,14 @@ pub fn start(mut store: Store) -> Tx {
                         run(&language, buffered.clone(), &mut store).await.unwrap();

                        // update with next in queue
-                        next_run = queue.keys().cloned().next();
+                        next_run = queue.keys().copied().next();
                     } else {
                         tracing::trace!("next_run in {:?}", next_run_ - now);
                     }
                 } else {
                     let languages = store.get_languages().await.unwrap();
                     tracing::info!("queue empty, filling from {} languages", languages.len());
-                    for language in languages.into_iter() {
+                    for language in languages {
                         enqueue(Some(language.clone()), &mut queue, &mut buffer, HashSet::new());
                     }
                 }
diff --git a/buzzback/src/main.rs b/buzzback/src/main.rs
index 2226783..5dff3ea 100644
--- a/buzzback/src/main.rs
+++ b/buzzback/src/main.rs
@@ -84,7 +84,7 @@ async fn main() {
         } else if relay.ends_with("/actor") {
             (relay.clone(), relay.replace("/actor", "/inbox"))
         } else {
-            panic!("Not sure how to deal with relay {}", relay);
+            panic!("Not sure how to deal with relay {relay}");
         };
         tracing::trace!("Following {}", &id);
         let follow = db::Follow {
diff --git a/cave/src/activitypub/digest.rs b/cave/src/activitypub/digest.rs
index 32d6eab..877cfe2 100644
--- a/cave/src/activitypub/digest.rs
+++ b/cave/src/activitypub/digest.rs
@@ -3,7 +3,7 @@ use http_digest_headers::{DigestHeader, DigestMethod};
 pub fn generate_header(body: &[u8]) -> Result {
     let mut digest_header = DigestHeader::new()
         .with_method(DigestMethod::SHA256, body)
-        .map(|h| format!("{}", h))
+        .map(|h| format!("{h}"))
         .map_err(|_| ())?;

     // mastodon expects uppercase algo name
diff --git a/cave/src/activitypub/mod.rs b/cave/src/activitypub/mod.rs
index f885453..532dfe0 100644
--- a/cave/src/activitypub/mod.rs
+++ b/cave/src/activitypub/mod.rs
@@ -49,7 +49,7 @@ pub struct Action {
 }

 impl Action {
-    pub fn object_id(&self) -> Option<&str> {
+    #[must_use] pub fn object_id(&self) -> Option<&str> {
         if let Some(id) = self.object.as_str() {
             Some(id)
         } else if let Some(object) = self.object.as_object() {
@@ -95,15 +95,15 @@ pub struct Post {
 }

 impl Post {
-    pub fn language(&self) -> Option<&str> {
+    #[must_use] pub fn language(&self) -> Option<&str> {
         self.content_map.keys()
-            .next().map(|s| s.as_str())
+            .next().map(std::string::String::as_str)
     }

-    /// Translate ActivityPub post to Mastodon client API post format
-    pub fn to_feed_post(self, actor: Actor) -> super::feed::Post {
+    /// Translate `ActivityPub` post to Mastodon client API post format
+    #[must_use] pub fn to_feed_post(self, actor: Actor) -> super::feed::Post {
         let language = self.language()
-            .map(|s| s.to_string());
+            .map(std::string::ToString::to_string);
         super::feed::Post {
             created_at: self.published,
             url: self.url.unwrap_or(self.id),
diff --git a/cave/src/config.rs b/cave/src/config.rs
index efe67d4..ce49c6d 100644
--- a/cave/src/config.rs
+++ b/cave/src/config.rs
@@ -6,7 +6,7 @@ impl serde::Deserialize<'a>> LoadConfig for T {
     fn load() -> Self {
         let path = std::env::args().nth(1)
             .expect("Call with config.yaml");
-        crate::systemd::status(&format!("Loading config file {}", path));
+        crate::systemd::status(&format!("Loading config file {path}"));
         let config_file = std::fs::read_to_string(path)
             .expect("read config");

diff --git a/cave/src/feed.rs b/cave/src/feed.rs
index 6c390c7..a103821 100644
--- a/cave/src/feed.rs
+++ b/cave/src/feed.rs
@@ -4,12 +4,12 @@ use futures::{Stream, StreamExt};
 use eventsource_stream::Eventsource;
 use reqwest::StatusCode;

-pub fn url_host(url: &str) -> Option {
+#[must_use] pub fn url_host(url: &str) -> Option {
     reqwest::Url::parse(url)
         .map_err(|e| tracing::warn!("Cannot parse url {:?}: {}", url, e))
         .ok()
         .and_then(|url| url.domain()
-            .map(|host| host.to_lowercase())
+            .map(str::to_lowercase)
         )
 }

@@ -25,7 +25,7 @@ pub struct Account {
 }

 impl Account {
-    pub fn host(&self) -> Option {
+    #[must_use] pub fn host(&self) -> Option {
         url_host(&self.url)
     }
 }
@@ -49,7 +49,7 @@ pub struct Mention {
 }

 impl Mention {
-    pub fn host(&self) -> Option {
+    #[must_use] pub fn host(&self) -> Option {
         url_host(&self.url)
     }
 }
@@ -82,27 +82,27 @@ pub struct Post {
 }

 impl Post {
-    pub fn url_host(&self) -> Option {
+    #[must_use] pub fn url_host(&self) -> Option {
         reqwest::Url::parse(&self.url)
             .ok()
             .and_then(|url| url.domain()
-                .map(|host| host.to_owned())
+                .map(std::borrow::ToOwned::to_owned)
             )
     }

-    pub fn user_id(&self) -> Option {
+    #[must_use] pub fn user_id(&self) -> Option {
         let username = self.account.username.to_lowercase();
         let host = self.url_host()?;
-        Some(format!("{}@{}", username, host))
+        Some(format!("{username}@{host}"))
     }

-    pub fn timestamp(&self) -> Option> {
+    #[must_use] pub fn timestamp(&self) -> Option> {
         DateTime::parse_from_rfc3339(&self.created_at)
             .ok()
     }

     /// clip "en-us" to "en"
-    pub fn lang(&self) -> Option {
+    #[must_use] pub fn lang(&self) -> Option {
         let language = match &self.language {
             Some(language) => language,
             None => return None,
@@ -214,7 +214,7 @@ pub struct Feed {
 impl Feed {
     /// Analyze time intervals between posts to estimate when to fetch
     /// next
-    pub fn mean_post_interval(&self) -> Option {
+    #[must_use] pub fn mean_post_interval(&self) -> Option {
         let mut timestamps = self.posts.iter()
             .filter_map(|post| post.timestamp())
             .collect::>();
@@ -285,11 +285,11 @@ impl std::fmt::Display for StreamError {
     fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
         match self {
             StreamError::HttpStatus(code) =>
-                write!(fmt, "HTTP/{}", code),
+                write!(fmt, "HTTP/{code}"),
             StreamError::Http(e) =>
                 e.fmt(fmt),
             StreamError::InvalidContentType(ct) =>
-                write!(fmt, "Invalid Content-Type: {}", ct),
+                write!(fmt, "Invalid Content-Type: {ct}"),
         }
     }
 }
diff --git a/cave/src/firehose.rs b/cave/src/firehose.rs
index 396a0c6..79b2ce0 100644
--- a/cave/src/firehose.rs
+++ b/cave/src/firehose.rs
@@ -8,7 +8,7 @@ pub struct FirehoseFactory {
 }

 impl FirehoseFactory {
-    pub fn new(redis_url: String, redis_password_file: String) -> Self {
+    #[must_use] pub fn new(redis_url: String, redis_password_file: String) -> Self {
         let redis_password = std::fs::read_to_string(redis_password_file)
             .expect("redis_password_file");
         let mut redis_url = Url::parse(&redis_url)
diff --git a/cave/src/lib.rs b/cave/src/lib.rs
index 74b710b..f973ea2 100644
--- a/cave/src/lib.rs
+++ b/cave/src/lib.rs
@@ -18,6 +18,6 @@ pub const PERIODS: &[u64] = &[4, 24, 7 * 24];
 /// compare the selected period against how many multiples of preceding periods
 pub const PERIOD_COMPARE_WINDOW: u64 = 3;

-pub fn current_hour() -> u64 {
+#[must_use] pub fn current_hour() -> u64 {
     chrono::offset::Utc::now().timestamp() as u64 / 3600
 }
diff --git a/cave/src/live_file.rs b/cave/src/live_file.rs
index 74b97ca..14970eb 100644
--- a/cave/src/live_file.rs
+++ b/cave/src/live_file.rs
@@ -1,4 +1,4 @@
-use std::ops::Deref;
+
 use std::sync::Arc;
 use futures::{Future, StreamExt};
 use tokio::fs::File;
@@ -18,8 +18,7 @@ where
     let lock_ = lock.clone();
     let path = std::env::current_dir()
         .unwrap()
-        .join(path)
-        .to_path_buf();
+        .join(path).clone();
     let dir = path.parent().unwrap().to_path_buf();
     let path = Arc::new(RwLock::new(path));

@@ -28,7 +27,7 @@ where
     tokio::spawn(async move {
         let inotify = Inotify::init()
             .unwrap();
-        inotify.watches().add(dir.read().await.deref(), WatchMask::MODIFY | WatchMask::CREATE | WatchMask::MOVED_TO)
+        inotify.watches().add(&*dir.read().await, WatchMask::MODIFY | WatchMask::CREATE | WatchMask::MOVED_TO)
             .unwrap();
         tracing::debug!("Watching directory {:?}", &dir);
         inotify.into_event_stream([0; 1024])
@@ -45,7 +44,7 @@ where
                 let dir = dir.read().await;
                 let path = path.read().await;
                 if dir.join(name) == *path {
-                    match File::open(path.deref()).await {
+                    match File::open(&*path).await {
                         Ok(file) => {
                             let t = f(file).await;
                             *lock.write().await = t;
diff --git a/cave/src/posts_cache.rs b/cave/src/posts_cache.rs
index f38a8e0..e513f55 100644
--- a/cave/src/posts_cache.rs
+++ b/cave/src/posts_cache.rs
@@ -17,7 +17,7 @@ struct PostsCacheInner {
 }

 impl PostsCache {
-    pub fn new(size: usize) -> Self {
+    #[must_use] pub fn new(size: usize) -> Self {
         PostsCache {
             inner: Arc::new(Mutex::new(PostsCacheInner {
                 cache: HashSet::new(),
@@ -28,7 +28,7 @@ impl PostsCache {
     }

     // returns true if already exists
-    pub fn insert(&self, k: String) -> bool {
+    #[must_use] pub fn insert(&self, k: String) -> bool {
         let k = Arc::new(k);

         let mut inner = self.inner.lock().expect("lock");
@@ -45,7 +45,7 @@ impl PostsCache {
         inner.cache.insert(k);

         while inner.cache.len() > inner.size {
-            let oldest = inner.ages.keys().cloned().next().expect("ages first");
+            let oldest = inner.ages.keys().copied().next().expect("ages first");
             let oldest_k = inner.ages.remove(&oldest).expect("remove oldest");
             inner.cache.remove(&oldest_k);
         }
diff --git a/cave/src/store.rs b/cave/src/store.rs
index cc71878..26438f2 100644
--- a/cave/src/store.rs
+++ b/cave/src/store.rs
@@ -21,7 +21,7 @@ pub const IMAGES_PER_TAG: usize = 8;

 pub type Error = RedisError;

-/// wrapper so we can impl ManageConnection
+/// wrapper so we can impl `ManageConnection`
 struct RedisPool {
     redis_url: Url,
 }
@@ -197,14 +197,14 @@ impl Store {
         for spelling in spellings {
             cmd.hincr(
                 &tag_key,
-                format!("s:{}", spelling),
+                format!("s:{spelling}"),
                 1
             ).ignore();
         }
         // by instance
         cmd.hincr(
             tag_key,
-            format!("h:{}", host),
+            format!("h:{host}"),
             1
         ).ignore();
         if let Some(user_id) = &user_id {
@@ -228,24 +228,24 @@ impl Store {
             vec![]
         };
         let mut image_keys = vec![];
-        for (name, spellings) in tags.into_iter() {
+        for (name, spellings) in tags {
             // global
             store_tags(&mut cmd, spellings.clone(),
-                       format!("g:{}", name),
-                       format!("u::{}:{}", hour, name),
+                       format!("g:{name}"),
+                       format!("u::{hour}:{name}"),
             );

             // by language
             if let Some(language) = &language {
                 store_tags(&mut cmd, spellings,
-                           format!("l:{}:{}", language, name),
-                           format!("u:{}:{}:{}", language, hour, name),
+                           format!("l:{language}:{name}"),
+                           format!("u:{language}:{hour}:{name}"),
                 );
             }

             for image in &images {
-                let image_key = format!("i:{}", name);
+                let image_key = format!("i:{name}");
                 cmd.sadd(&image_key, image)
                     .ignore()
                     .expire(&image_key, TAG_EXPIRE as usize * 3600)
@@ -275,7 +275,7 @@ impl Store {
     }

     pub async fn save_host(&mut self, host: &str) -> Result<(), RedisError> {
-        let key = format!("h:{}", host);
+        let key = format!("h:{host}");
         redis::pipe()
             .set(&key, "1")
             .ignore()
@@ -286,7 +286,7 @@ impl Store {
     }

     pub async fn remove_host(&mut self, host: &str) -> Result<(), RedisError> {
-        redis::Cmd::del(format!("h:{}", host))
+        redis::Cmd::del(format!("h:{host}"))
             .query_async::<_, ()>(self)
             .await
     }
@@ -303,7 +303,7 @@ impl Store {
     }

     pub async fn get_tag_images(&mut self, tag: &str) -> Result, RedisError> {
-        redis::Cmd::smembers(format!("i:{}", tag))
+        redis::Cmd::smembers(format!("i:{tag}"))
             .query_async(self)
             .await
     }
@@ -332,7 +332,7 @@ impl Store {
     }

     pub async fn scan_prefix<'a>(&'a mut self, prefix: &'a str) -> Result + '_, RedisError> {
-        let keys = self.scan(&format!("{}*", prefix))
+        let keys = self.scan(&format!("{prefix}*"))
             .await?
             .map(|key| key[prefix.len()..].to_string());
         Ok(keys)
     }
@@ -388,7 +388,7 @@ impl Store {
             .into_iter();

         let mut results = Vec::with_capacity(names.len());
-        for name in names.into_iter() {
+        for name in names {
             let hash_values = if let Some(Value::Bulk(hash_values)) = values.next() {
                 hash_values
             } else {
@@ -417,7 +417,7 @@ impl Store {
         }
         let sets: Vec> = cmd.query_async(self)
             .await?;
-        let results = periods.iter().cloned()
+        let results = periods.iter().copied()
             .zip(sets.into_iter())
             .collect();
         Ok(results)
     }
@@ -472,8 +472,8 @@ impl Store {
         tag: &str,
     ) -> Result<(), RedisError> {
         let key = match language {
-            Some(language) => format!("l:{}:{}", language, tag),
-            None => format!("g:{}", tag),
+            Some(language) => format!("l:{language}:{tag}"),
+            None => format!("g:{tag}"),
         };
         redis::Cmd::del(key)
             .query_async(self)
             .await
@@ -483,16 +483,16 @@ impl Store {
 fn tag_key(language: &Option, name: &str) -> String {
     match language {
-        Some(language) => format!("l:{}:{}", language, name),
-        None => format!("g:{}", name),
+        Some(language) => format!("l:{language}:{name}"),
+        None => format!("g:{name}"),
     }
 }

 fn pool_key(language: &Option, period: u64) -> String {
     match language {
         Some(language) =>
-            format!("q:{}:{}", period, language),
+            format!("q:{period}:{language}"),
         None =>
-            format!("q:{}", period),
+            format!("q:{period}"),
     }
 }
diff --git a/cave/src/systemd.rs b/cave/src/systemd.rs
index d824def..447606d 100644
--- a/cave/src/systemd.rs
+++ b/cave/src/systemd.rs
@@ -4,7 +4,7 @@ pub fn status(text: &str) {
 }

 pub fn extend_timeout(usec: u64) {
-    systemd::daemon::notify(false, [(systemd::daemon::STATE_EXTEND_TIMEOUT_USEC, format!("{}", usec))].iter())
+    systemd::daemon::notify(false, [(systemd::daemon::STATE_EXTEND_TIMEOUT_USEC, format!("{usec}"))].iter())
         .unwrap();
 }
diff --git a/cave/src/trend_tag.rs b/cave/src/trend_tag.rs
index 9238d0f..da6552a 100644
--- a/cave/src/trend_tag.rs
+++ b/cave/src/trend_tag.rs
@@ -19,7 +19,7 @@ impl TrendTag {
         let mut other = Vec::with_capacity(hash_values.len() / 2);

         let mut key: Option = None;
-        for value in hash_values.into_iter() {
+        for value in hash_values {
             if let Some(key) = key.take() {
                 if let Ok(value) = str::parse(&value) {
                     other.push((key, value));
@@ -36,7 +36,7 @@ impl TrendTag {
         }
     }

-    pub fn score(&self, period: u64, until: u64) -> f64 {
+    #[must_use] pub fn score(&self, period: u64, until: u64) -> f64 {
         // ignore spam that comes from only 1 instance
         if self.hosts().nth(1).is_none() {
             return -1.;
         }
@@ -48,7 +48,7 @@ impl TrendTag {
         let mut before_hours = 0;
         let mut after_mentions = 0;

-        for (hour, mentions) in self.hour_users.iter().cloned() {
+        for (hour, mentions) in self.hour_users.iter().copied() {
             if hour > from {
                 if mentions > 1 {
                     after_mentions += mentions;
@@ -66,22 +66,22 @@ impl TrendTag {
         }

         let before = if before_hours > 0 && before_mentions > 0 {
-            (before_mentions as f64) / (before_hours as f64)
+            (before_mentions as f64) / f64::from(before_hours)
         } else { 0.1 };
         let after = (after_mentions as f64) / (period as f64);
         after / before
     }

-    pub fn hour_scores_data(&self, period: u64) -> String {
+    #[must_use] pub fn hour_scores_data(&self, period: u64) -> String {
         let offset = self.hour_users.len().saturating_sub(period as usize);
         self.hour_users[offset..]
             .iter()
             .map(|(_, count)| *count)
             .enumerate()
             .map(|(i, count)| if i == 0 {
-                format!("{}", count)
+                format!("{count}")
             } else {
-                format!(" {}", count)
+                format!(" {count}")
             })
             .collect()
     }
@@ -101,10 +101,10 @@ impl TrendTag {
             })
     }

-    pub fn spelling(&self) -> &str {
+    #[must_use] pub fn spelling(&self) -> &str {
         self.spellings()
             .map(|(count, spelling)| {
-                if spelling.chars().any(|c| c.is_uppercase()) {
+                if spelling.chars().any(char::is_uppercase) {
                     // favor captialized spelling
                     (10 * count, spelling)
                 } else {
@@ -112,8 +112,7 @@ impl TrendTag {
                 }
             })
             .max()
-            .map(|(_count, spelling)| spelling)
-            .unwrap_or(&self.name)
+            .map_or(&self.name, |(_count, spelling)| spelling)
     }

     pub fn hosts(&self) -> impl Iterator {
@@ -132,7 +131,7 @@ impl TrendTag {
     }

     /// ordered by count
-    pub fn hosts_set(&self) -> BTreeSet<(usize, &str)> {
+    #[must_use] pub fn hosts_set(&self) -> BTreeSet<(usize, &str)> {
         self.hosts().collect()
     }
 }
diff --git a/gatherer/src/html_template.rs b/gatherer/src/html_template.rs
index ab137cf..b7b3351 100644
--- a/gatherer/src/html_template.rs
+++ b/gatherer/src/html_template.rs
@@ -15,7 +15,7 @@ where
         Ok(html) => Html(html).into_response(),
         Err(err) => (
             StatusCode::INTERNAL_SERVER_ERROR,
-            format!("Failed to render template. Error: {}", err),
+            format!("Failed to render template. Error: {err}"),
         )
             .into_response(),
     }
diff --git a/gatherer/src/http_server.rs b/gatherer/src/http_server.rs
index ac607bf..bf04dd9 100644
--- a/gatherer/src/http_server.rs
+++ b/gatherer/src/http_server.rs
@@ -68,7 +68,7 @@ impl ServerState {
             .into_iter()
             .enumerate()
             .flat_map(|(i, url)| if i == 0 {
-                ["".to_owned(), url]
+                [String::new(), url]
             } else {
                 [" ".to_owned(), url]
             })
@@ -127,12 +127,7 @@ impl TrendsPage {
         // service is very much alive:
         systemd::watchdog();

-        TrendsPage {
-            results,
-            language,
-            languages,
-            tag_images,
-        }
+        TrendsPage { language, languages, results, tag_images }
     }

     fn template(self) -> HtmlTemplate {
diff --git a/gatherer/src/http_server/token_collect.rs b/gatherer/src/http_server/token_collect.rs
index 72c0bcc..6ff08c5 100644
--- a/gatherer/src/http_server/token_collect.rs
+++ b/gatherer/src/http_server/token_collect.rs
@@ -18,7 +18,7 @@ async fn collect_token(
 ) -> Result<(), String> {
     // try a few registered apps until one works
     for (client_id, client_secret) in db.get_apps(host).await
-        .map_err(|e| format!("{}", e))?
+        .map_err(|e| format!("{e}"))?
     {
         let app = oauth::Application {
             client_id,
             client_secret,
@@ -39,7 +39,7 @@ async fn collect_token(
         }
     }

-    Err(format!("No registered app found for instance {}", host))
+    Err(format!("No registered app found for instance {host}"))
 }

 #[derive(serde::Deserialize)]
diff --git a/gatherer/src/http_server/token_donate.rs b/gatherer/src/http_server/token_donate.rs
index deb87d4..e59a6ad 100644
--- a/gatherer/src/http_server/token_donate.rs
+++ b/gatherer/src/http_server/token_donate.rs
@@ -60,7 +60,7 @@ pub async fn post_token_donate(
         Ok(app) => app,
         Err(e) => {
             tracing::error!("Canont register OAuth app: {}", e);
-            return PostTokenDonateResult::Error(format!("{}", e));
+            return PostTokenDonateResult::Error(format!("{e}"));
         }
     };
     db.add_app(&form.instance, &app.client_id, &app.client_secret).await
diff --git a/gatherer/src/oauth.rs b/gatherer/src/oauth.rs
index 45144c2..3d62d1a 100644
--- a/gatherer/src/oauth.rs
+++ b/gatherer/src/oauth.rs
@@ -32,7 +32,7 @@ const SCOPES: &str = "read:statuses";

 impl Application {
     pub async fn register(client: &reqwest::Client, host: &str) -> Result {
-        let url = format!("https://{}/api/v1/apps", host);
+        let url = format!("https://{host}/api/v1/apps");
         let form = AppRegistration {
             client_name: "#FediBuzz".to_string(),
             website: "https://fedi.buzz/".to_string(),
@@ -49,7 +49,7 @@ impl Application {
     }

     pub fn generate_redirect_url(host: &str) -> String {
-        format!("https://fedi.buzz/token/collect/{}", host)
+        format!("https://fedi.buzz/token/collect/{host}")
     }

     pub fn generate_auth_url(&self, host: &str) -> String {
@@ -63,7 +63,7 @@ impl Application {
     }

     pub async fn obtain_token(&self, client: &reqwest::Client, host: &str, code: String) -> Result {
-        let url = format!("https://{}/oauth/token", host);
+        let url = format!("https://{host}/oauth/token");
         let form = TokenRequest {
             grant_type: "authorization_code".to_string(),
             scope: SCOPES.to_string(),
diff --git a/gatherer/src/trends.rs b/gatherer/src/trends.rs
index 239371e..3e7ba98 100644
--- a/gatherer/src/trends.rs
+++ b/gatherer/src/trends.rs
@@ -54,7 +54,7 @@ impl TrendAnalyzer {
         let until = current_hour();

         let mut analyzers: Vec = periods.iter()
-            .cloned()
+            .copied()
             .map(|period| TrendAnalyzer {
                 period,
                 until,
@@ -75,7 +75,7 @@ impl TrendAnalyzer {
         metrics::histogram!("trends_page_time", t2 - t1, "step" => "get_trend_pools", "lang" => lang);
         metrics::histogram!("trends_page_time", t3 - t2, "step" => "get_trend_tags", "lang" => lang);
         metrics::histogram!("trends_page_tags", tags_len as f64, "lang" => lang);
-        for trend_tag in trend_tags.into_iter() {
+        for trend_tag in trend_tags {
             let trend_tag = Arc::new(trend_tag);
             let name = Arc::new(trend_tag.name.clone());
             for analyzer in &mut analyzers {
diff --git a/hunter/src/main.rs b/hunter/src/main.rs
index d6741b3..9272647 100644
--- a/hunter/src/main.rs
+++ b/hunter/src/main.rs
@@ -50,7 +50,7 @@ async fn run() {
     cave::systemd::status("Starting scheduler");
     let mut scheduler = scheduler::Scheduler::new(block_list.clone());
     cave::systemd::status("Loading known hosts from config");
-    for host in config.hosts.into_iter() {
+    for host in config.hosts {
         scheduler.introduce(InstanceHost::just_host(host)).await;
     }
     #[cfg(not(dev))]
diff --git a/hunter/src/scheduler.rs b/hunter/src/scheduler.rs
index eebb814..c6c858c 100644
--- a/hunter/src/scheduler.rs
+++ b/hunter/src/scheduler.rs
@@ -136,7 +136,7 @@ impl Scheduler {
     pub fn dequeue(&mut self) -> Result {
         let now = Instant::now();

-        if let Some(time) = self.queue.keys().next().cloned() {
+        if let Some(time) = self.queue.keys().next().copied() {
             if time <= now {
                 self.queue.remove(&time)
                     .ok_or(Duration::from_secs(1))
diff --git a/hunter/src/worker.rs b/hunter/src/worker.rs
index 3874255..e274738 100644
--- a/hunter/src/worker.rs
+++ b/hunter/src/worker.rs
@@ -24,7 +24,7 @@ impl RobotsTxt {
         client: &reqwest::Client,
         host: &Host,
     ) -> Self {
-        let url = format!("https://{}/robots.txt", host);
+        let url = format!("https://{host}/robots.txt");
         metrics::increment_gauge!("hunter_requests", 1.0, "type" => "robotstxt");
         let robot = async {
             let body = client.get(url)
@@ -172,7 +172,7 @@ async fn fetch_timeline(
     robots_txt: RobotsTxt,
     host: &Host,
 ) -> Result<(Option, Option), reqwest::Error> {
-    let url = format!("https://{}/api/v1/timelines/public?limit=40", host);
+    let url = format!("https://{host}/api/v1/timelines/public?limit=40");
     if ! robots_txt.allowed(&url) {
         tracing::warn!("Timeline of {} forbidden by robots.txt", host);
         return Ok((None, None));
     }
@@ -192,7 +192,7 @@ async fn fetch_timeline(
     let mean_interval = feed.mean_post_interval();

     let (new_post_ratio, introduce_hosts) =
         process_posts(&mut store, posts_cache, block_list, host, feed.posts.into_iter()).await;
-    for introduce_host in introduce_hosts.into_iter() {
+    for introduce_host in introduce_hosts {
         message_tx.send(Message::IntroduceHost(introduce_host)).unwrap();
     }
@@ -250,7 +250,7 @@ async fn process_posts(
             }

             let t2 = Instant::now();
-            metrics::histogram!("hunter_post_process_seconds", t2 - t1)
+            metrics::histogram!("hunter_post_process_seconds", t2 - t1);
         }
     }
     tracing::trace!("{}: {}/{} new posts", host, new_posts, posts_len);
@@ -299,9 +299,9 @@ async fn open_stream(
     robots_txt: RobotsTxt,
     host: Host,
 ) -> Result<(&'static str, impl Future), String> {
-    let url = format!("https://{}/api/v1/streaming/public", host);
+    let url = format!("https://{host}/api/v1/streaming/public");
     if ! robots_txt.allowed(&url) {
-        return Err(format!("Streaming of {} forbidden by robots.txt", host));
+        return Err(format!("Streaming of {host} forbidden by robots.txt"));
     }
     // free as early as possible
     drop(robots_txt);
@@ -346,7 +346,7 @@ async fn open_stream(
     }

     let stream = stream.map_err(|e| {
-        format!("Stream error for {}: {}", host, e)
+        format!("Stream error for {host}: {e}")
     })?;

     Ok((stats_key, stream.fold(0, move |post_count, post| {
@@ -358,7 +358,7 @@ async fn open_stream(
         async move {
             let (_, introduce_hosts) =
                 process_posts(&mut store, &posts_cache, block_list, &host, [post].into_iter()).await;
-            for introduce_host in introduce_hosts.into_iter() {
+            for introduce_host in introduce_hosts {
                 message_tx.send(Message::IntroduceHost(introduce_host)).unwrap();
             }
             post_count + 1
diff --git a/smokestack/src/main.rs b/smokestack/src/main.rs
index 77a34fb..f53f952 100644
--- a/smokestack/src/main.rs
+++ b/smokestack/src/main.rs
@@ -5,7 +5,7 @@ use std::{
         RwLock,
     },
 };
-use ansi_term::Colour::{self, *};
+use ansi_term::Colour::{self, Black, RGB, Red, Yellow};
 use futures::{Stream, StreamExt};
 use tokio::{
     io::AsyncWriteExt,
@@ -216,7 +216,7 @@ async fn main() {
         tokio::spawn(async move {
             while let Some(msg) = pipe.rx.recv().await {
                 match socket.write_all(&msg[..]).await {
-                    Ok(_) => {}
+                    Ok(()) => {}
                     Err(_) => break,
                 }
             }