commit 295a7791be
parent b06c0e48c5
Author: Astro
Date:   2024-03-27 02:49:15 +01:00

24 changed files with 92 additions and 100 deletions

View File

@@ -1,6 +1,5 @@
 use std::{
     sync::Arc,
-    ops::Deref,
 };
 use futures::StreamExt;
 use cave::{
@@ -74,7 +73,7 @@ async fn main() {
         let post = Arc::new(post);
         store.save_post_tags(&post, is_profane(&profanity, &post).await).await;
-        let update_set = UpdateSet::from(post.deref());
+        let update_set = UpdateSet::from(&*post);
         if ! update_set.is_empty() {
             trend_setter_tx.send(update_set).await.unwrap();
         }
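Note: the hunk above trades an explicit Deref::deref call for a &* reborrow, which presumably satisfies clippy's explicit_deref_methods lint; both yield a &Post from the Arc<Post>, and the std::ops::Deref import becomes unnecessary. A minimal sketch of the pattern, with hypothetical Post and UpdateSet types standing in for the crate's own:

    use std::sync::Arc;

    struct Post { text: String }
    struct UpdateSet { len: usize }

    // From<&Post> is assumed here so that both call styles below type-check.
    impl From<&Post> for UpdateSet {
        fn from(post: &Post) -> Self {
            UpdateSet { len: post.text.len() }
        }
    }

    fn main() {
        let post = Arc::new(Post { text: "hello".into() });

        // Old style: call the trait method directly (requires `use std::ops::Deref`).
        // let update_set = UpdateSet::from(post.deref());

        // New style: `&*post` dereferences the Arc and reborrows, yielding &Post
        // without importing the Deref trait.
        let update_set = UpdateSet::from(&*post);
        assert_eq!(update_set.len, 5);
    }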

View File

@@ -62,7 +62,7 @@ pub fn start(mut store: Store) -> Tx {
             }
             std::collections::hash_map::Entry::Occupied(mut entry) => {
                 // merge into buffered
-                for tag in tags.into_iter() {
+                for tag in tags {
                     entry.get_mut().insert(tag);
                 }
             }
@@ -70,7 +70,7 @@ pub fn start(mut store: Store) -> Tx {
         }
         loop {
-            let mut next_run = queue.keys().cloned().next();
+            let mut next_run = queue.keys().copied().next();
             if let Some(next_run_) = next_run {
                 let now = Instant::now();
                 if next_run_ <= now {
@@ -80,14 +80,14 @@ pub fn start(mut store: Store) -> Tx {
                     run(&language, buffered.clone(), &mut store).await.unwrap();
                     // update with next in queue
-                    next_run = queue.keys().cloned().next();
+                    next_run = queue.keys().copied().next();
                 } else {
                     tracing::trace!("next_run in {:?}", next_run_ - now);
                 }
             } else {
                 let languages = store.get_languages().await.unwrap();
                 tracing::info!("queue empty, filling from {} languages", languages.len());
-                for language in languages.into_iter() {
+                for language in languages {
                     enqueue(Some(language.clone()), &mut queue, &mut buffer, HashSet::new());
                 }
             }
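Note: two substitutions recur throughout this commit and first appear here: Iterator::copied instead of cloned when the items are Copy (likely clippy's cloned_instead_of_copied), and iterating a collection directly rather than calling .into_iter() in a for loop (explicit_into_iter_loop). A small standalone sketch using a plain BTreeMap<u64, &str> instead of the scheduler's Instant-keyed queue:

    use std::collections::BTreeMap;

    fn main() {
        let mut queue: BTreeMap<u64, &str> = BTreeMap::new();
        queue.insert(10, "first");
        queue.insert(20, "second");

        // `keys()` yields &u64; `copied()` turns that into u64 because u64: Copy.
        // `cloned()` would compile too, but `copied()` states the intent exactly.
        let next_run: Option<u64> = queue.keys().copied().next();
        assert_eq!(next_run, Some(10));

        let languages = vec!["en", "de", "ja"];
        // `for x in languages` already consumes the Vec; the explicit
        // `.into_iter()` the old code used was redundant.
        for language in languages {
            let _ = language;
        }
    }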

View File

@@ -84,7 +84,7 @@ async fn main() {
         } else if relay.ends_with("/actor") {
             (relay.clone(), relay.replace("/actor", "/inbox"))
         } else {
-            panic!("Not sure how to deal with relay {}", relay);
+            panic!("Not sure how to deal with relay {relay}");
         };
         tracing::trace!("Following {}", &id);
         let follow = db::Follow {
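Note: the panic! change is the inlined-format-arguments style (likely clippy's uninlined_format_args), applied across the commit to format!, panic!, write!, and tracing macros whose arguments are plain identifiers. A quick illustration with made-up values:

    fn main() {
        let relay = "https://relay.example/actor";

        // Old style: positional argument.
        let old = format!("Not sure how to deal with relay {}", relay);
        // New style: the identifier is captured directly in the format string.
        let new = format!("Not sure how to deal with relay {relay}");
        assert_eq!(old, new);

        // Captured identifiers also take format specs, e.g. {relay:?};
        // expressions like `relay.len()` still need the positional form.
        println!("{relay:?} has length {}", relay.len());
    }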

View File

@@ -3,7 +3,7 @@ use http_digest_headers::{DigestHeader, DigestMethod};
 pub fn generate_header(body: &[u8]) -> Result<String, ()> {
     let mut digest_header = DigestHeader::new()
         .with_method(DigestMethod::SHA256, body)
-        .map(|h| format!("{}", h))
+        .map(|h| format!("{h}"))
         .map_err(|_| ())?;
     // mastodon expects uppercase algo name

View File

@@ -49,7 +49,7 @@ pub struct Action<O> {
 }
 impl Action<serde_json::Value> {
-    pub fn object_id(&self) -> Option<&str> {
+    #[must_use] pub fn object_id(&self) -> Option<&str> {
         if let Some(id) = self.object.as_str() {
             Some(id)
         } else if let Some(object) = self.object.as_object() {
@@ -95,15 +95,15 @@ pub struct Post {
 }
 impl Post {
-    pub fn language(&self) -> Option<&str> {
+    #[must_use] pub fn language(&self) -> Option<&str> {
         self.content_map.keys()
-            .next().map(|s| s.as_str())
+            .next().map(std::string::String::as_str)
     }
-    /// Translate ActivityPub post to Mastodon client API post format
-    pub fn to_feed_post(self, actor: Actor) -> super::feed::Post {
+    /// Translate `ActivityPub` post to Mastodon client API post format
+    #[must_use] pub fn to_feed_post(self, actor: Actor) -> super::feed::Post {
         let language = self.language()
-            .map(|s| s.to_string());
+            .map(std::string::ToString::to_string);
         super::feed::Post {
             created_at: self.published,
             url: self.url.unwrap_or(self.id),
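Note: most remaining hunks add #[must_use] to side-effect-free getters and put backticks around words like ActivityPub in doc comments; that pattern matches clippy's must_use_candidate and doc_markdown lints, though the lint names are an inference, not stated in the commit. A sketch of what #[must_use] buys, with a cut-down Post type:

    struct Post {
        language: Option<String>,
    }

    impl Post {
        /// Returns the post's language, as recorded in the `ActivityPub` content map.
        /// The caller gets a compiler warning if it ignores the return value.
        #[must_use]
        pub fn language(&self) -> Option<&str> {
            self.language.as_deref()
        }
    }

    fn main() {
        let post = Post { language: Some("en".to_string()) };

        // Would warn: unused return value of `Post::language` that must be used.
        // post.language();

        // Fine: the value is consumed.
        if let Some(lang) = post.language() {
            println!("language: {lang}");
        }
    }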

View File

@@ -6,7 +6,7 @@ impl<T: Sized + for<'a> serde::Deserialize<'a>> LoadConfig for T {
     fn load() -> Self {
         let path = std::env::args().nth(1)
             .expect("Call with config.yaml");
-        crate::systemd::status(&format!("Loading config file {}", path));
+        crate::systemd::status(&format!("Loading config file {path}"));
         let config_file = std::fs::read_to_string(path)
             .expect("read config");

View File

@@ -4,12 +4,12 @@ use futures::{Stream, StreamExt};
 use eventsource_stream::Eventsource;
 use reqwest::StatusCode;
-pub fn url_host(url: &str) -> Option<String> {
+#[must_use] pub fn url_host(url: &str) -> Option<String> {
     reqwest::Url::parse(url)
         .map_err(|e| tracing::warn!("Cannot parse url {:?}: {}", url, e))
         .ok()
         .and_then(|url| url.domain()
-            .map(|host| host.to_lowercase())
+            .map(str::to_lowercase)
         )
 }
@@ -25,7 +25,7 @@ pub struct Account {
 }
 impl Account {
-    pub fn host(&self) -> Option<String> {
+    #[must_use] pub fn host(&self) -> Option<String> {
         url_host(&self.url)
     }
 }
@@ -49,7 +49,7 @@ pub struct Mention {
 }
 impl Mention {
-    pub fn host(&self) -> Option<String> {
+    #[must_use] pub fn host(&self) -> Option<String> {
         url_host(&self.url)
     }
 }
@@ -82,27 +82,27 @@ pub struct Post {
 }
 impl Post {
-    pub fn url_host(&self) -> Option<String> {
+    #[must_use] pub fn url_host(&self) -> Option<String> {
         reqwest::Url::parse(&self.url)
             .ok()
             .and_then(|url| url.domain()
-                .map(|host| host.to_owned())
+                .map(std::borrow::ToOwned::to_owned)
             )
     }
-    pub fn user_id(&self) -> Option<String> {
+    #[must_use] pub fn user_id(&self) -> Option<String> {
         let username = self.account.username.to_lowercase();
         let host = self.url_host()?;
-        Some(format!("{}@{}", username, host))
+        Some(format!("{username}@{host}"))
     }
-    pub fn timestamp(&self) -> Option<DateTime<FixedOffset>> {
+    #[must_use] pub fn timestamp(&self) -> Option<DateTime<FixedOffset>> {
         DateTime::parse_from_rfc3339(&self.created_at)
             .ok()
     }
     /// clip "en-us" to "en"
-    pub fn lang(&self) -> Option<String> {
+    #[must_use] pub fn lang(&self) -> Option<String> {
         let language = match &self.language {
             Some(language) => language,
             None => return None,
@@ -214,7 +214,7 @@ pub struct Feed {
 impl Feed {
     /// Analyze time intervals between posts to estimate when to fetch
     /// next
-    pub fn mean_post_interval(&self) -> Option<Duration> {
+    #[must_use] pub fn mean_post_interval(&self) -> Option<Duration> {
         let mut timestamps = self.posts.iter()
             .filter_map(|post| post.timestamp())
             .collect::<Vec<_>>();
@@ -285,11 +285,11 @@ impl std::fmt::Display for StreamError {
     fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
         match self {
             StreamError::HttpStatus(code) =>
-                write!(fmt, "HTTP/{}", code),
+                write!(fmt, "HTTP/{code}"),
             StreamError::Http(e) =>
                 e.fmt(fmt),
             StreamError::InvalidContentType(ct) =>
-                write!(fmt, "Invalid Content-Type: {}", ct),
+                write!(fmt, "Invalid Content-Type: {ct}"),
         }
     }
 }
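Note: several closures in this file were replaced by paths to the method they wrapped (likely clippy's redundant_closure_for_method_calls): |host| host.to_lowercase() becomes str::to_lowercase, |s| s.to_string() becomes std::string::ToString::to_string, and so on. A standalone sketch of the equivalence:

    fn main() {
        let url = "https://Example.COM/path";

        // Closure form and method-path form produce the same result; the path
        // form drops one layer of indirection in the source.
        let a: Option<String> = Some(url).map(|s| s.to_lowercase());
        let b: Option<String> = Some(url).map(str::to_lowercase);
        assert_eq!(a, b);

        // The same works for ToOwned and ToString, as in the diff above.
        let owned: Option<String> = Some("en").map(std::borrow::ToOwned::to_owned);
        assert_eq!(owned.as_deref(), Some("en"));
    }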

View File

@@ -8,7 +8,7 @@ pub struct FirehoseFactory {
 }
 impl FirehoseFactory {
-    pub fn new(redis_url: String, redis_password_file: String) -> Self {
+    #[must_use] pub fn new(redis_url: String, redis_password_file: String) -> Self {
         let redis_password = std::fs::read_to_string(redis_password_file)
             .expect("redis_password_file");
         let mut redis_url = Url::parse(&redis_url)

View File

@@ -18,6 +18,6 @@ pub const PERIODS: &[u64] = &[4, 24, 7 * 24];
 /// compare the selected period against how many multiples of preceding periods
 pub const PERIOD_COMPARE_WINDOW: u64 = 3;
-pub fn current_hour() -> u64 {
+#[must_use] pub fn current_hour() -> u64 {
     chrono::offset::Utc::now().timestamp() as u64 / 3600
 }

View File

@@ -1,4 +1,4 @@
-use std::ops::Deref;
 use std::sync::Arc;
 use futures::{Future, StreamExt};
 use tokio::fs::File;
@@ -18,8 +18,7 @@ where
     let lock_ = lock.clone();
     let path = std::env::current_dir()
         .unwrap()
-        .join(path)
-        .to_path_buf();
+        .join(path).clone();
     let dir = path.parent().unwrap().to_path_buf();
     let path = Arc::new(RwLock::new(path));
@@ -28,7 +27,7 @@ where
     tokio::spawn(async move {
         let inotify = Inotify::init()
             .unwrap();
-        inotify.watches().add(dir.read().await.deref(), WatchMask::MODIFY | WatchMask::CREATE | WatchMask::MOVED_TO)
+        inotify.watches().add(&*dir.read().await, WatchMask::MODIFY | WatchMask::CREATE | WatchMask::MOVED_TO)
             .unwrap();
         tracing::debug!("Watching directory {:?}", &dir);
         inotify.into_event_stream([0; 1024])
@@ -45,7 +44,7 @@ where
         let dir = dir.read().await;
         let path = path.read().await;
         if dir.join(name) == *path {
-            match File::open(path.deref()).await {
+            match File::open(&*path).await {
                 Ok(file) => {
                     let t = f(file).await;
                     *lock.write().await = t;

View File

@@ -17,7 +17,7 @@ struct PostsCacheInner {
 }
 impl PostsCache {
-    pub fn new(size: usize) -> Self {
+    #[must_use] pub fn new(size: usize) -> Self {
         PostsCache {
             inner: Arc::new(Mutex::new(PostsCacheInner {
                 cache: HashSet::new(),
@@ -28,7 +28,7 @@ impl PostsCache {
     }
     // returns true if already exists
-    pub fn insert(&self, k: String) -> bool {
+    #[must_use] pub fn insert(&self, k: String) -> bool {
         let k = Arc::new(k);
         let mut inner = self.inner.lock().expect("lock");
@@ -45,7 +45,7 @@ impl PostsCache {
         inner.cache.insert(k);
         while inner.cache.len() > inner.size {
-            let oldest = inner.ages.keys().cloned().next().expect("ages first");
+            let oldest = inner.ages.keys().copied().next().expect("ages first");
            let oldest_k = inner.ages.remove(&oldest).expect("remove oldest");
            inner.cache.remove(&oldest_k);
         }

View File

@@ -21,7 +21,7 @@ pub const IMAGES_PER_TAG: usize = 8;
 pub type Error = RedisError;
-/// wrapper so we can impl ManageConnection
+/// wrapper so we can impl `ManageConnection`
 struct RedisPool {
     redis_url: Url,
 }
@@ -197,14 +197,14 @@ impl Store {
         for spelling in spellings {
             cmd.hincr(
                 &tag_key,
-                format!("s:{}", spelling),
+                format!("s:{spelling}"),
                 1
             ).ignore();
         }
         // by instance
         cmd.hincr(
             tag_key,
-            format!("h:{}", host),
+            format!("h:{host}"),
             1
         ).ignore();
         if let Some(user_id) = &user_id {
@@ -228,24 +228,24 @@ impl Store {
             vec![]
         };
         let mut image_keys = vec![];
-        for (name, spellings) in tags.into_iter() {
+        for (name, spellings) in tags {
             // global
             store_tags(&mut cmd,
                 spellings.clone(),
-                format!("g:{}", name),
-                format!("u::{}:{}", hour, name),
+                format!("g:{name}"),
+                format!("u::{hour}:{name}"),
             );
             // by language
             if let Some(language) = &language {
                 store_tags(&mut cmd,
                     spellings,
-                    format!("l:{}:{}", language, name),
-                    format!("u:{}:{}:{}", language, hour, name),
+                    format!("l:{language}:{name}"),
+                    format!("u:{language}:{hour}:{name}"),
                 );
             }
             for image in &images {
-                let image_key = format!("i:{}", name);
+                let image_key = format!("i:{name}");
                 cmd.sadd(&image_key, image)
                     .ignore()
                     .expire(&image_key, TAG_EXPIRE as usize * 3600)
@@ -275,7 +275,7 @@ impl Store {
     }
     pub async fn save_host(&mut self, host: &str) -> Result<(), RedisError> {
-        let key = format!("h:{}", host);
+        let key = format!("h:{host}");
         redis::pipe()
             .set(&key, "1")
             .ignore()
@@ -286,7 +286,7 @@ impl Store {
     }
     pub async fn remove_host(&mut self, host: &str) -> Result<(), RedisError> {
-        redis::Cmd::del(format!("h:{}", host))
+        redis::Cmd::del(format!("h:{host}"))
             .query_async::<_, ()>(self)
             .await
     }
@@ -303,7 +303,7 @@ impl Store {
     }
     pub async fn get_tag_images(&mut self, tag: &str) -> Result<Vec<String>, RedisError> {
-        redis::Cmd::smembers(format!("i:{}", tag))
+        redis::Cmd::smembers(format!("i:{tag}"))
             .query_async(self)
             .await
     }
@@ -332,7 +332,7 @@ impl Store {
     }
     pub async fn scan_prefix<'a>(&'a mut self, prefix: &'a str) -> Result<impl Stream<Item = String> + '_, RedisError> {
-        let keys = self.scan(&format!("{}*", prefix))
+        let keys = self.scan(&format!("{prefix}*"))
             .await?
             .map(|key| key[prefix.len()..].to_string());
         Ok(keys)
@@ -388,7 +388,7 @@ impl Store {
             .into_iter();
         let mut results = Vec::with_capacity(names.len());
-        for name in names.into_iter() {
+        for name in names {
             let hash_values = if let Some(Value::Bulk(hash_values)) = values.next() {
                 hash_values
             } else {
@@ -417,7 +417,7 @@ impl Store {
         }
         let sets: Vec<Vec<String>> = cmd.query_async(self)
             .await?;
-        let results = periods.iter().cloned()
+        let results = periods.iter().copied()
             .zip(sets.into_iter())
             .collect();
         Ok(results)
@@ -472,8 +472,8 @@ impl Store {
         tag: &str,
     ) -> Result<(), RedisError> {
         let key = match language {
-            Some(language) => format!("l:{}:{}", language, tag),
-            None => format!("g:{}", tag),
+            Some(language) => format!("l:{language}:{tag}"),
+            None => format!("g:{tag}"),
         };
         redis::Cmd::del(key)
             .query_async(self)
@@ -483,16 +483,16 @@ impl Store {
 fn tag_key(language: &Option<String>, name: &str) -> String {
     match language {
-        Some(language) => format!("l:{}:{}", language, name),
-        None => format!("g:{}", name),
+        Some(language) => format!("l:{language}:{name}"),
+        None => format!("g:{name}"),
     }
 }
 fn pool_key(language: &Option<String>, period: u64) -> String {
     match language {
         Some(language) =>
-            format!("q:{}:{}", period, language),
+            format!("q:{period}:{language}"),
         None =>
-            format!("q:{}", period),
+            format!("q:{period}"),
     }
 }

View File

@@ -4,7 +4,7 @@ pub fn status(text: &str) {
 }
 pub fn extend_timeout(usec: u64) {
-    systemd::daemon::notify(false, [(systemd::daemon::STATE_EXTEND_TIMEOUT_USEC, format!("{}", usec))].iter())
+    systemd::daemon::notify(false, [(systemd::daemon::STATE_EXTEND_TIMEOUT_USEC, format!("{usec}"))].iter())
         .unwrap();
 }

View File

@@ -19,7 +19,7 @@ impl TrendTag {
         let mut other = Vec::with_capacity(hash_values.len() / 2);
         let mut key: Option<String> = None;
-        for value in hash_values.into_iter() {
+        for value in hash_values {
             if let Some(key) = key.take() {
                 if let Ok(value) = str::parse(&value) {
                     other.push((key, value));
@@ -36,7 +36,7 @@ impl TrendTag {
         }
     }
-    pub fn score(&self, period: u64, until: u64) -> f64 {
+    #[must_use] pub fn score(&self, period: u64, until: u64) -> f64 {
         // ignore spam that comes from only 1 instance
         if self.hosts().nth(1).is_none() {
             return -1.;
@@ -48,7 +48,7 @@ impl TrendTag {
         let mut before_hours = 0;
         let mut after_mentions = 0;
-        for (hour, mentions) in self.hour_users.iter().cloned() {
+        for (hour, mentions) in self.hour_users.iter().copied() {
             if hour > from {
                 if mentions > 1 {
                     after_mentions += mentions;
@@ -66,22 +66,22 @@ impl TrendTag {
         }
         let before = if before_hours > 0 && before_mentions > 0 {
-            (before_mentions as f64) / (before_hours as f64)
+            (before_mentions as f64) / f64::from(before_hours)
         } else { 0.1 };
         let after = (after_mentions as f64) / (period as f64);
         after / before
     }
-    pub fn hour_scores_data(&self, period: u64) -> String {
+    #[must_use] pub fn hour_scores_data(&self, period: u64) -> String {
         let offset = self.hour_users.len().saturating_sub(period as usize);
         self.hour_users[offset..]
             .iter()
             .map(|(_, count)| *count)
             .enumerate()
             .map(|(i, count)| if i == 0 {
-                format!("{}", count)
+                format!("{count}")
             } else {
-                format!(" {}", count)
+                format!(" {count}")
             })
             .collect()
     }
@@ -101,10 +101,10 @@ impl TrendTag {
         })
     }
-    pub fn spelling(&self) -> &str {
+    #[must_use] pub fn spelling(&self) -> &str {
         self.spellings()
             .map(|(count, spelling)| {
-                if spelling.chars().any(|c| c.is_uppercase()) {
+                if spelling.chars().any(char::is_uppercase) {
                     // favor captialized spelling
                     (10 * count, spelling)
                 } else {
@@ -112,8 +112,7 @@ impl TrendTag {
                 }
             })
             .max()
-            .map(|(_count, spelling)| spelling)
-            .unwrap_or(&self.name)
+            .map_or(&self.name, |(_count, spelling)| spelling)
     }
     pub fn hosts(&self) -> impl Iterator<Item = (usize, &str)> {
@@ -132,7 +131,7 @@ impl TrendTag {
     }
     /// ordered by count
-    pub fn hosts_set(&self) -> BTreeSet<(usize, &str)> {
+    #[must_use] pub fn hosts_set(&self) -> BTreeSet<(usize, &str)> {
         self.hosts().collect()
     }
 }
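Note: two further rewrites show up in this file: .map(..).unwrap_or(default) collapsed into .map_or(default, ..) (likely clippy's map_unwrap_or), and an `as f64` cast replaced by the lossless f64::from where the source type is at most 32 bits wide (cast_lossless). A small sketch, assuming a u32 counter like the one the division hints at:

    fn main() {
        // map_or takes the default first and the mapping second.
        let spellings: Vec<(usize, &str)> = vec![(3, "rust"), (5, "Rust")];
        let name = "rust";
        let spelling: &str = spellings
            .iter()
            .copied()
            .max()
            .map_or(name, |(_count, spelling)| spelling);
        assert_eq!(spelling, "Rust");

        // f64::from is only implemented for types that convert losslessly
        // (here u32), so it can never silently truncate the way `as` can.
        let before_mentions: u64 = 12;
        let before_hours: u32 = 4;
        let rate = (before_mentions as f64) / f64::from(before_hours);
        assert!((rate - 3.0).abs() < f64::EPSILON);
    }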

View File

@@ -15,7 +15,7 @@ where
         Ok(html) => Html(html).into_response(),
         Err(err) => (
             StatusCode::INTERNAL_SERVER_ERROR,
-            format!("Failed to render template. Error: {}", err),
+            format!("Failed to render template. Error: {err}"),
         )
             .into_response(),
     }

View File

@@ -68,7 +68,7 @@ impl ServerState {
         .into_iter()
         .enumerate()
         .flat_map(|(i, url)| if i == 0 {
-            ["".to_owned(), url]
+            [String::new(), url]
         } else {
             [" ".to_owned(), url]
         })
@@ -127,12 +127,7 @@ impl TrendsPage {
         // service is very much alive:
         systemd::watchdog();
-        TrendsPage {
-            results,
-            language,
-            languages,
-            tag_images,
-        }
+        TrendsPage { language, languages, results, tag_images }
     }
     fn template(self) -> HtmlTemplate<Self> {
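Note: the TrendsPage literal was collapsed to one line with its fields in declaration order (likely clippy's inconsistent_struct_constructor), and "".to_owned() became String::new(), which states "empty string" more directly. A tiny sketch with a stand-in struct:

    struct TrendsPage {
        language: Option<String>,
        languages: Vec<String>,
        results: Vec<String>,
    }

    fn build(language: Option<String>, languages: Vec<String>, results: Vec<String>) -> TrendsPage {
        // Field-init shorthand, fields listed in the same order as the declaration.
        TrendsPage { language, languages, results }
    }

    fn main() {
        // String::new() expresses the intent of an empty string without
        // going through a &str -> String conversion call.
        let page = build(None, vec!["en".to_string()], vec![String::new()]);
        assert!(page.language.is_none());
        assert_eq!(page.languages.len(), 1);
        assert!(page.results[0].is_empty());
    }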

View File

@@ -18,7 +18,7 @@ async fn collect_token(
 ) -> Result<(), String> {
     // try a few registered apps until one works
     for (client_id, client_secret) in db.get_apps(host).await
-        .map_err(|e| format!("{}", e))?
+        .map_err(|e| format!("{e}"))?
     {
         let app = oauth::Application {
             client_id,
@@ -39,7 +39,7 @@ async fn collect_token(
         }
     }
-    Err(format!("No registered app found for instance {}", host))
+    Err(format!("No registered app found for instance {host}"))
 }
 #[derive(serde::Deserialize)]

View File

@@ -60,7 +60,7 @@ pub async fn post_token_donate(
         Ok(app) => app,
         Err(e) => {
             tracing::error!("Canont register OAuth app: {}", e);
-            return PostTokenDonateResult::Error(format!("{}", e));
+            return PostTokenDonateResult::Error(format!("{e}"));
         }
     };
     db.add_app(&form.instance, &app.client_id, &app.client_secret).await

View File

@@ -32,7 +32,7 @@ const SCOPES: &str = "read:statuses";
 impl Application {
     pub async fn register(client: &reqwest::Client, host: &str) -> Result<Self, reqwest::Error> {
-        let url = format!("https://{}/api/v1/apps", host);
+        let url = format!("https://{host}/api/v1/apps");
         let form = AppRegistration {
             client_name: "#FediBuzz".to_string(),
             website: "https://fedi.buzz/".to_string(),
@@ -49,7 +49,7 @@ impl Application {
     }
     pub fn generate_redirect_url(host: &str) -> String {
-        format!("https://fedi.buzz/token/collect/{}", host)
+        format!("https://fedi.buzz/token/collect/{host}")
     }
     pub fn generate_auth_url(&self, host: &str) -> String {
@@ -63,7 +63,7 @@ impl Application {
     }
     pub async fn obtain_token(&self, client: &reqwest::Client, host: &str, code: String) -> Result<String, reqwest::Error> {
-        let url = format!("https://{}/oauth/token", host);
+        let url = format!("https://{host}/oauth/token");
         let form = TokenRequest {
             grant_type: "authorization_code".to_string(),
             scope: SCOPES.to_string(),

View File

@@ -54,7 +54,7 @@ impl TrendAnalyzer {
         let until = current_hour();
         let mut analyzers: Vec<TrendAnalyzer> = periods.iter()
-            .cloned()
+            .copied()
             .map(|period| TrendAnalyzer {
                 period,
                 until,
@@ -75,7 +75,7 @@ impl TrendAnalyzer {
         metrics::histogram!("trends_page_time", t2 - t1, "step" => "get_trend_pools", "lang" => lang);
         metrics::histogram!("trends_page_time", t3 - t2, "step" => "get_trend_tags", "lang" => lang);
         metrics::histogram!("trends_page_tags", tags_len as f64, "lang" => lang);
-        for trend_tag in trend_tags.into_iter() {
+        for trend_tag in trend_tags {
             let trend_tag = Arc::new(trend_tag);
             let name = Arc::new(trend_tag.name.clone());
             for analyzer in &mut analyzers {

View File

@@ -50,7 +50,7 @@ async fn run() {
     cave::systemd::status("Starting scheduler");
     let mut scheduler = scheduler::Scheduler::new(block_list.clone());
     cave::systemd::status("Loading known hosts from config");
-    for host in config.hosts.into_iter() {
+    for host in config.hosts {
         scheduler.introduce(InstanceHost::just_host(host)).await;
     }
     #[cfg(not(dev))]

View File

@@ -136,7 +136,7 @@ impl Scheduler {
     pub fn dequeue(&mut self) -> Result<InstanceHost, Duration> {
         let now = Instant::now();
-        if let Some(time) = self.queue.keys().next().cloned() {
+        if let Some(time) = self.queue.keys().next().copied() {
             if time <= now {
                 self.queue.remove(&time)
                     .ok_or(Duration::from_secs(1))

View File

@@ -24,7 +24,7 @@ impl RobotsTxt {
         client: &reqwest::Client,
         host: &Host,
     ) -> Self {
-        let url = format!("https://{}/robots.txt", host);
+        let url = format!("https://{host}/robots.txt");
         metrics::increment_gauge!("hunter_requests", 1.0, "type" => "robotstxt");
         let robot = async {
             let body = client.get(url)
@@ -172,7 +172,7 @@ async fn fetch_timeline(
     robots_txt: RobotsTxt,
     host: &Host,
 ) -> Result<(Option<f64>, Option<Duration>), reqwest::Error> {
-    let url = format!("https://{}/api/v1/timelines/public?limit=40", host);
+    let url = format!("https://{host}/api/v1/timelines/public?limit=40");
     if ! robots_txt.allowed(&url) {
         tracing::warn!("Timeline of {} forbidden by robots.txt", host);
         return Ok((None, None));
@@ -192,7 +192,7 @@ async fn fetch_timeline(
     let mean_interval = feed.mean_post_interval();
     let (new_post_ratio, introduce_hosts) = process_posts(&mut store, posts_cache, block_list, host, feed.posts.into_iter()).await;
-    for introduce_host in introduce_hosts.into_iter() {
+    for introduce_host in introduce_hosts {
         message_tx.send(Message::IntroduceHost(introduce_host)).unwrap();
     }
@@ -250,7 +250,7 @@ async fn process_posts(
             }
             let t2 = Instant::now();
-            metrics::histogram!("hunter_post_process_seconds", t2 - t1)
+            metrics::histogram!("hunter_post_process_seconds", t2 - t1);
         }
     }
     tracing::trace!("{}: {}/{} new posts", host, new_posts, posts_len);
@@ -299,9 +299,9 @@ async fn open_stream(
     robots_txt: RobotsTxt,
     host: Host,
 ) -> Result<(&'static str, impl Future<Output = usize>), String> {
-    let url = format!("https://{}/api/v1/streaming/public", host);
+    let url = format!("https://{host}/api/v1/streaming/public");
     if ! robots_txt.allowed(&url) {
-        return Err(format!("Streaming of {} forbidden by robots.txt", host));
+        return Err(format!("Streaming of {host} forbidden by robots.txt"));
     }
     // free as early as possible
     drop(robots_txt);
@@ -346,7 +346,7 @@ async fn open_stream(
     }
     let stream = stream.map_err(|e| {
-        format!("Stream error for {}: {}", host, e)
+        format!("Stream error for {host}: {e}")
     })?;
     Ok((stats_key, stream.fold(0, move |post_count, post| {
@@ -358,7 +358,7 @@ async fn open_stream(
         async move {
             let (_, introduce_hosts) =
                 process_posts(&mut store, &posts_cache, block_list, &host, [post].into_iter()).await;
-            for introduce_host in introduce_hosts.into_iter() {
+            for introduce_host in introduce_hosts {
                 message_tx.send(Message::IntroduceHost(introduce_host)).unwrap();
             }
             post_count + 1

View File

@@ -5,7 +5,7 @@ use std::{
         RwLock,
     },
 };
-use ansi_term::Colour::{self, *};
+use ansi_term::Colour::{self, Black, RGB, Red, Yellow};
 use futures::{Stream, StreamExt};
 use tokio::{
     io::AsyncWriteExt,
@@ -216,7 +216,7 @@ async fn main() {
     tokio::spawn(async move {
         while let Some(msg) = pipe.rx.recv().await {
             match socket.write_all(&msg[..]).await {
-                Ok(_) => {}
+                Ok(()) => {}
                 Err(_) => break,
             }
         }
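Note: the final hunks replace a glob import of ansi_term::Colour::* with the variants actually used (likely clippy's enum_glob_use) and match Ok(()) rather than Ok(_) when the success value is the unit type. Glob-importing enum variants can shadow other names; listing them keeps the scope explicit. A sketch with a local enum standing in for ansi_term:

    // Instead of `use Colour::*;`, import only the variants that are used.
    #[derive(Debug)]
    enum Colour {
        Red,
        Yellow,
        Rgb(u8, u8, u8),
    }
    use crate::Colour::{Red, Yellow};

    fn write_all(buf: &[u8]) -> std::io::Result<()> {
        // stand-in for socket.write_all(); always succeeds here
        let _ = buf;
        Ok(())
    }

    fn main() {
        let colours = [Red, Yellow, Colour::Rgb(0x30, 0x30, 0x30)];
        println!("{colours:?}");

        // Matching Ok(()) instead of Ok(_) makes it explicit that the success
        // value carries no data.
        match write_all(b"post") {
            Ok(()) => {}
            Err(_) => eprintln!("write failed"),
        }
    }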