Commit 295a7791be by Astro, 2024-03-27 02:49:15 +01:00 (parent b06c0e48c5)
24 changed files with 92 additions and 100 deletions

View File

@ -1,6 +1,5 @@
use std::{
sync::Arc,
ops::Deref,
};
use futures::StreamExt;
use cave::{
@ -74,7 +73,7 @@ async fn main() {
let post = Arc::new(post);
store.save_post_tags(&post, is_profane(&profanity, &post).await).await;
let update_set = UpdateSet::from(post.deref());
let update_set = UpdateSet::from(&*post);
if ! update_set.is_empty() {
trend_setter_tx.send(update_set).await.unwrap();
}
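The hunk above drops the std::ops::Deref import and reborrows the Arc with &*post instead of calling post.deref() explicitly (the pattern clippy's explicit_deref_methods lint points at). A minimal sketch of the same move, with Post and UpdateSet as stand-ins rather than the crate's own types:

// Both forms below produce a &Post from an Arc<Post>.
use std::sync::Arc;

struct Post { text: String }
struct UpdateSet { len: usize }

impl From<&Post> for UpdateSet {
    fn from(post: &Post) -> Self {
        UpdateSet { len: post.text.len() }
    }
}

fn main() {
    let post = Arc::new(Post { text: "hello".into() });
    // &*post dereferences the Arc and reborrows; no Deref import needed,
    // and it reads as ordinary borrowing rather than a trait method call.
    let update_set = UpdateSet::from(&*post);
    assert_eq!(update_set.len, 5);
}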

View File

@ -62,7 +62,7 @@ pub fn start(mut store: Store) -> Tx {
}
std::collections::hash_map::Entry::Occupied(mut entry) => {
// merge into buffered
for tag in tags.into_iter() {
for tag in tags {
entry.get_mut().insert(tag);
}
}
@ -70,7 +70,7 @@ pub fn start(mut store: Store) -> Tx {
}
loop {
let mut next_run = queue.keys().cloned().next();
let mut next_run = queue.keys().copied().next();
if let Some(next_run_) = next_run {
let now = Instant::now();
if next_run_ <= now {
@ -80,14 +80,14 @@ pub fn start(mut store: Store) -> Tx {
run(&language, buffered.clone(), &mut store).await.unwrap();
// update with next in queue
next_run = queue.keys().cloned().next();
next_run = queue.keys().copied().next();
} else {
tracing::trace!("next_run in {:?}", next_run_ - now);
}
} else {
let languages = store.get_languages().await.unwrap();
tracing::info!("queue empty, filling from {} languages", languages.len());
for language in languages.into_iter() {
for language in languages {
enqueue(Some(language.clone()), &mut queue, &mut buffer, HashSet::new());
}
}
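Instant is Copy, so the queue lookups above can use copied() instead of cloned(); this is what clippy's cloned_instead_of_copied lint suggests. A minimal sketch with a placeholder queue:

// For Copy types, Iterator::copied() says the same thing as cloned()
// but only compiles for cheap bitwise copies.
use std::collections::BTreeMap;
use std::time::Instant;

fn main() {
    let mut queue: BTreeMap<Instant, &str> = BTreeMap::new();
    queue.insert(Instant::now(), "job");

    // keys() yields &Instant; copied() turns that into Instant by value.
    let next_run: Option<Instant> = queue.keys().copied().next();
    // equivalent, but cloned() would also accept expensive Clone types:
    let next_run_cloned: Option<Instant> = queue.keys().cloned().next();
    assert_eq!(next_run, next_run_cloned);
}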

View File

@ -84,7 +84,7 @@ async fn main() {
} else if relay.ends_with("/actor") {
(relay.clone(), relay.replace("/actor", "/inbox"))
} else {
panic!("Not sure how to deal with relay {}", relay);
panic!("Not sure how to deal with relay {relay}");
};
tracing::trace!("Following {}", &id);
let follow = db::Follow {
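The panic! message above now captures relay directly in the format string. This inline-format-argument style (stable since Rust 1.58, flagged by clippy's uninlined_format_args lint) recurs throughout the commit; a minimal sketch with a placeholder URL:

fn main() {
    let relay = "https://relay.example/actor";
    // old style: positional argument
    let a = format!("Not sure how to deal with relay {}", relay);
    // new style: the identifier is captured inside the braces
    let b = format!("Not sure how to deal with relay {relay}");
    assert_eq!(a, b);
}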

View File

@ -3,7 +3,7 @@ use http_digest_headers::{DigestHeader, DigestMethod};
pub fn generate_header(body: &[u8]) -> Result<String, ()> {
let mut digest_header = DigestHeader::new()
.with_method(DigestMethod::SHA256, body)
.map(|h| format!("{}", h))
.map(|h| format!("{h}"))
.map_err(|_| ())?;
// mastodon expects uppercase algo name

View File

@ -49,7 +49,7 @@ pub struct Action<O> {
}
impl Action<serde_json::Value> {
pub fn object_id(&self) -> Option<&str> {
#[must_use] pub fn object_id(&self) -> Option<&str> {
if let Some(id) = self.object.as_str() {
Some(id)
} else if let Some(object) = self.object.as_object() {
@ -95,15 +95,15 @@ pub struct Post {
}
impl Post {
pub fn language(&self) -> Option<&str> {
#[must_use] pub fn language(&self) -> Option<&str> {
self.content_map.keys()
.next().map(|s| s.as_str())
.next().map(std::string::String::as_str)
}
/// Translate ActivityPub post to Mastodon client API post format
pub fn to_feed_post(self, actor: Actor) -> super::feed::Post {
/// Translate `ActivityPub` post to Mastodon client API post format
#[must_use] pub fn to_feed_post(self, actor: Actor) -> super::feed::Post {
let language = self.language()
.map(|s| s.to_string());
.map(std::string::ToString::to_string);
super::feed::Post {
created_at: self.published,
url: self.url.unwrap_or(self.id),
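The getters in this file now carry #[must_use], so the compiler warns if a caller discards the returned value (the pattern clippy's must_use_candidate lint suggests). A minimal sketch with a simplified Post:

struct Post { language: Option<String> }

impl Post {
    #[must_use]
    pub fn language(&self) -> Option<&str> {
        self.language.as_deref()
    }
}

fn main() {
    let post = Post { language: Some("en".to_string()) };
    // Calling the method and ignoring the result would now trigger an
    // unused_must_use warning; binding or matching the value does not.
    if let Some(lang) = post.language() {
        println!("language: {lang}");
    }
}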

View File

@ -6,7 +6,7 @@ impl<T: Sized + for<'a> serde::Deserialize<'a>> LoadConfig for T {
fn load() -> Self {
let path = std::env::args().nth(1)
.expect("Call with config.yaml");
crate::systemd::status(&format!("Loading config file {}", path));
crate::systemd::status(&format!("Loading config file {path}"));
let config_file = std::fs::read_to_string(path)
.expect("read config");

View File

@ -4,12 +4,12 @@ use futures::{Stream, StreamExt};
use eventsource_stream::Eventsource;
use reqwest::StatusCode;
pub fn url_host(url: &str) -> Option<String> {
#[must_use] pub fn url_host(url: &str) -> Option<String> {
reqwest::Url::parse(url)
.map_err(|e| tracing::warn!("Cannot parse url {:?}: {}", url, e))
.ok()
.and_then(|url| url.domain()
.map(|host| host.to_lowercase())
.map(str::to_lowercase)
)
}
@ -25,7 +25,7 @@ pub struct Account {
}
impl Account {
pub fn host(&self) -> Option<String> {
#[must_use] pub fn host(&self) -> Option<String> {
url_host(&self.url)
}
}
@ -49,7 +49,7 @@ pub struct Mention {
}
impl Mention {
pub fn host(&self) -> Option<String> {
#[must_use] pub fn host(&self) -> Option<String> {
url_host(&self.url)
}
}
@ -82,27 +82,27 @@ pub struct Post {
}
impl Post {
pub fn url_host(&self) -> Option<String> {
#[must_use] pub fn url_host(&self) -> Option<String> {
reqwest::Url::parse(&self.url)
.ok()
.and_then(|url| url.domain()
.map(|host| host.to_owned())
.map(std::borrow::ToOwned::to_owned)
)
}
pub fn user_id(&self) -> Option<String> {
#[must_use] pub fn user_id(&self) -> Option<String> {
let username = self.account.username.to_lowercase();
let host = self.url_host()?;
Some(format!("{}@{}", username, host))
Some(format!("{username}@{host}"))
}
pub fn timestamp(&self) -> Option<DateTime<FixedOffset>> {
#[must_use] pub fn timestamp(&self) -> Option<DateTime<FixedOffset>> {
DateTime::parse_from_rfc3339(&self.created_at)
.ok()
}
/// clip "en-us" to "en"
pub fn lang(&self) -> Option<String> {
#[must_use] pub fn lang(&self) -> Option<String> {
let language = match &self.language {
Some(language) => language,
None => return None,
@ -214,7 +214,7 @@ pub struct Feed {
impl Feed {
/// Analyze time intervals between posts to estimate when to fetch
/// next
pub fn mean_post_interval(&self) -> Option<Duration> {
#[must_use] pub fn mean_post_interval(&self) -> Option<Duration> {
let mut timestamps = self.posts.iter()
.filter_map(|post| post.timestamp())
.collect::<Vec<_>>();
@ -285,11 +285,11 @@ impl std::fmt::Display for StreamError {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
match self {
StreamError::HttpStatus(code) =>
write!(fmt, "HTTP/{}", code),
write!(fmt, "HTTP/{code}"),
StreamError::Http(e) =>
e.fmt(fmt),
StreamError::InvalidContentType(ct) =>
write!(fmt, "Invalid Content-Type: {}", ct),
write!(fmt, "Invalid Content-Type: {ct}"),
}
}
}
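Several closures in this file that only forwarded to a method are replaced by the method path itself (str::to_lowercase, std::borrow::ToOwned::to_owned, and elsewhere std::string::String::as_str and char::is_uppercase), as clippy's redundant_closure_for_method_calls lint recommends. A minimal sketch of the equivalence, with a placeholder domain:

fn main() {
    let domain: Option<&str> = Some("Example.ORG");

    // closure form
    let a: Option<String> = domain.map(|host| host.to_lowercase());
    // method-path form, same behaviour
    let b: Option<String> = domain.map(str::to_lowercase);

    assert_eq!(a, b);
}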

View File

@ -8,7 +8,7 @@ pub struct FirehoseFactory {
}
impl FirehoseFactory {
pub fn new(redis_url: String, redis_password_file: String) -> Self {
#[must_use] pub fn new(redis_url: String, redis_password_file: String) -> Self {
let redis_password = std::fs::read_to_string(redis_password_file)
.expect("redis_password_file");
let mut redis_url = Url::parse(&redis_url)

View File

@ -18,6 +18,6 @@ pub const PERIODS: &[u64] = &[4, 24, 7 * 24];
/// compare the selected period against how many multiples of preceding periods
pub const PERIOD_COMPARE_WINDOW: u64 = 3;
pub fn current_hour() -> u64 {
#[must_use] pub fn current_hour() -> u64 {
chrono::offset::Utc::now().timestamp() as u64 / 3600
}

View File

@ -1,4 +1,4 @@
use std::ops::Deref;
use std::sync::Arc;
use futures::{Future, StreamExt};
use tokio::fs::File;
@ -18,8 +18,7 @@ where
let lock_ = lock.clone();
let path = std::env::current_dir()
.unwrap()
.join(path)
.to_path_buf();
.join(path).clone();
let dir = path.parent().unwrap().to_path_buf();
let path = Arc::new(RwLock::new(path));
@ -28,7 +27,7 @@ where
tokio::spawn(async move {
let inotify = Inotify::init()
.unwrap();
inotify.watches().add(dir.read().await.deref(), WatchMask::MODIFY | WatchMask::CREATE | WatchMask::MOVED_TO)
inotify.watches().add(&*dir.read().await, WatchMask::MODIFY | WatchMask::CREATE | WatchMask::MOVED_TO)
.unwrap();
tracing::debug!("Watching directory {:?}", &dir);
inotify.into_event_stream([0; 1024])
@ -45,7 +44,7 @@ where
let dir = dir.read().await;
let path = path.read().await;
if dir.join(name) == *path {
match File::open(path.deref()).await {
match File::open(&*path).await {
Ok(file) => {
let t = f(file).await;
*lock.write().await = t;

View File

@ -17,7 +17,7 @@ struct PostsCacheInner {
}
impl PostsCache {
pub fn new(size: usize) -> Self {
#[must_use] pub fn new(size: usize) -> Self {
PostsCache {
inner: Arc::new(Mutex::new(PostsCacheInner {
cache: HashSet::new(),
@ -28,7 +28,7 @@ impl PostsCache {
}
// returns true if already exists
pub fn insert(&self, k: String) -> bool {
#[must_use] pub fn insert(&self, k: String) -> bool {
let k = Arc::new(k);
let mut inner = self.inner.lock().expect("lock");
@ -45,7 +45,7 @@ impl PostsCache {
inner.cache.insert(k);
while inner.cache.len() > inner.size {
let oldest = inner.ages.keys().cloned().next().expect("ages first");
let oldest = inner.ages.keys().copied().next().expect("ages first");
let oldest_k = inner.ages.remove(&oldest).expect("remove oldest");
inner.cache.remove(&oldest_k);
}

View File

@ -21,7 +21,7 @@ pub const IMAGES_PER_TAG: usize = 8;
pub type Error = RedisError;
/// wrapper so we can impl ManageConnection
/// wrapper so we can impl `ManageConnection`
struct RedisPool {
redis_url: Url,
}
@ -197,14 +197,14 @@ impl Store {
for spelling in spellings {
cmd.hincr(
&tag_key,
format!("s:{}", spelling),
format!("s:{spelling}"),
1
).ignore();
}
// by instance
cmd.hincr(
tag_key,
format!("h:{}", host),
format!("h:{host}"),
1
).ignore();
if let Some(user_id) = &user_id {
@ -228,24 +228,24 @@ impl Store {
vec![]
};
let mut image_keys = vec![];
for (name, spellings) in tags.into_iter() {
for (name, spellings) in tags {
// global
store_tags(&mut cmd,
spellings.clone(),
format!("g:{}", name),
format!("u::{}:{}", hour, name),
format!("g:{name}"),
format!("u::{hour}:{name}"),
);
// by language
if let Some(language) = &language {
store_tags(&mut cmd,
spellings,
format!("l:{}:{}", language, name),
format!("u:{}:{}:{}", language, hour, name),
format!("l:{language}:{name}"),
format!("u:{language}:{hour}:{name}"),
);
}
for image in &images {
let image_key = format!("i:{}", name);
let image_key = format!("i:{name}");
cmd.sadd(&image_key, image)
.ignore()
.expire(&image_key, TAG_EXPIRE as usize * 3600)
@ -275,7 +275,7 @@ impl Store {
}
pub async fn save_host(&mut self, host: &str) -> Result<(), RedisError> {
let key = format!("h:{}", host);
let key = format!("h:{host}");
redis::pipe()
.set(&key, "1")
.ignore()
@ -286,7 +286,7 @@ impl Store {
}
pub async fn remove_host(&mut self, host: &str) -> Result<(), RedisError> {
redis::Cmd::del(format!("h:{}", host))
redis::Cmd::del(format!("h:{host}"))
.query_async::<_, ()>(self)
.await
}
@ -303,7 +303,7 @@ impl Store {
}
pub async fn get_tag_images(&mut self, tag: &str) -> Result<Vec<String>, RedisError> {
redis::Cmd::smembers(format!("i:{}", tag))
redis::Cmd::smembers(format!("i:{tag}"))
.query_async(self)
.await
}
@ -332,7 +332,7 @@ impl Store {
}
pub async fn scan_prefix<'a>(&'a mut self, prefix: &'a str) -> Result<impl Stream<Item = String> + '_, RedisError> {
let keys = self.scan(&format!("{}*", prefix))
let keys = self.scan(&format!("{prefix}*"))
.await?
.map(|key| key[prefix.len()..].to_string());
Ok(keys)
@ -388,7 +388,7 @@ impl Store {
.into_iter();
let mut results = Vec::with_capacity(names.len());
for name in names.into_iter() {
for name in names {
let hash_values = if let Some(Value::Bulk(hash_values)) = values.next() {
hash_values
} else {
@ -417,7 +417,7 @@ impl Store {
}
let sets: Vec<Vec<String>> = cmd.query_async(self)
.await?;
let results = periods.iter().cloned()
let results = periods.iter().copied()
.zip(sets.into_iter())
.collect();
Ok(results)
@ -472,8 +472,8 @@ impl Store {
tag: &str,
) -> Result<(), RedisError> {
let key = match language {
Some(language) => format!("l:{}:{}", language, tag),
None => format!("g:{}", tag),
Some(language) => format!("l:{language}:{tag}"),
None => format!("g:{tag}"),
};
redis::Cmd::del(key)
.query_async(self)
@ -483,16 +483,16 @@ impl Store {
fn tag_key(language: &Option<String>, name: &str) -> String {
match language {
Some(language) => format!("l:{}:{}", language, name),
None => format!("g:{}", name),
Some(language) => format!("l:{language}:{name}"),
None => format!("g:{name}"),
}
}
fn pool_key(language: &Option<String>, period: u64) -> String {
match language {
Some(language) =>
format!("q:{}:{}", period, language),
format!("q:{period}:{language}"),
None =>
format!("q:{}", period),
format!("q:{period}"),
}
}
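Loops like the for (name, spellings) in tags change above drop the explicit .into_iter(): a for loop already consumes its right-hand side via IntoIterator, which is what clippy's explicit_into_iter_loop lint points out. A minimal sketch with placeholder data:

use std::collections::HashMap;

fn main() {
    let mut tags: HashMap<String, Vec<String>> = HashMap::new();
    tags.insert("rust".into(), vec!["Rust".into(), "RUST".into()]);

    // `for (name, spellings) in tags.into_iter()` desugars to exactly this:
    for (name, spellings) in tags {
        println!("{name}: {} spellings", spellings.len());
    }
}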

View File

@ -4,7 +4,7 @@ pub fn status(text: &str) {
}
pub fn extend_timeout(usec: u64) {
systemd::daemon::notify(false, [(systemd::daemon::STATE_EXTEND_TIMEOUT_USEC, format!("{}", usec))].iter())
systemd::daemon::notify(false, [(systemd::daemon::STATE_EXTEND_TIMEOUT_USEC, format!("{usec}"))].iter())
.unwrap();
}

View File

@ -19,7 +19,7 @@ impl TrendTag {
let mut other = Vec::with_capacity(hash_values.len() / 2);
let mut key: Option<String> = None;
for value in hash_values.into_iter() {
for value in hash_values {
if let Some(key) = key.take() {
if let Ok(value) = str::parse(&value) {
other.push((key, value));
@ -36,7 +36,7 @@ impl TrendTag {
}
}
pub fn score(&self, period: u64, until: u64) -> f64 {
#[must_use] pub fn score(&self, period: u64, until: u64) -> f64 {
// ignore spam that comes from only 1 instance
if self.hosts().nth(1).is_none() {
return -1.;
@ -48,7 +48,7 @@ impl TrendTag {
let mut before_hours = 0;
let mut after_mentions = 0;
for (hour, mentions) in self.hour_users.iter().cloned() {
for (hour, mentions) in self.hour_users.iter().copied() {
if hour > from {
if mentions > 1 {
after_mentions += mentions;
@ -66,22 +66,22 @@ impl TrendTag {
}
let before = if before_hours > 0 && before_mentions > 0 {
(before_mentions as f64) / (before_hours as f64)
(before_mentions as f64) / f64::from(before_hours)
} else { 0.1 };
let after = (after_mentions as f64) / (period as f64);
after / before
}
pub fn hour_scores_data(&self, period: u64) -> String {
#[must_use] pub fn hour_scores_data(&self, period: u64) -> String {
let offset = self.hour_users.len().saturating_sub(period as usize);
self.hour_users[offset..]
.iter()
.map(|(_, count)| *count)
.enumerate()
.map(|(i, count)| if i == 0 {
format!("{}", count)
format!("{count}")
} else {
format!(" {}", count)
format!(" {count}")
})
.collect()
}
@ -101,10 +101,10 @@ impl TrendTag {
})
}
pub fn spelling(&self) -> &str {
#[must_use] pub fn spelling(&self) -> &str {
self.spellings()
.map(|(count, spelling)| {
if spelling.chars().any(|c| c.is_uppercase()) {
if spelling.chars().any(char::is_uppercase) {
// favor capitalized spelling
(10 * count, spelling)
} else {
@ -112,8 +112,7 @@ impl TrendTag {
}
})
.max()
.map(|(_count, spelling)| spelling)
.unwrap_or(&self.name)
.map_or(&self.name, |(_count, spelling)| spelling)
}
pub fn hosts(&self) -> impl Iterator<Item = (usize, &str)> {
@ -132,7 +131,7 @@ impl TrendTag {
}
/// ordered by count
pub fn hosts_set(&self) -> BTreeSet<(usize, &str)> {
#[must_use] pub fn hosts_set(&self) -> BTreeSet<(usize, &str)> {
self.hosts().collect()
}
}
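The spelling() change above folds .map(...).unwrap_or(&self.name) into a single map_or call, as clippy's map_unwrap_or lint suggests. A minimal sketch with placeholder values:

fn main() {
    let best: Option<(usize, &str)> = Some((10, "FediBuzz"));
    let fallback = "fedibuzz";

    // two steps: map to the spelling, then fall back
    let a = best.map(|(_count, spelling)| spelling).unwrap_or(fallback);
    // one step: map_or takes the default first, then the mapping closure
    let b = best.map_or(fallback, |(_count, spelling)| spelling);

    assert_eq!(a, b);
}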

View File

@ -15,7 +15,7 @@ where
Ok(html) => Html(html).into_response(),
Err(err) => (
StatusCode::INTERNAL_SERVER_ERROR,
format!("Failed to render template. Error: {}", err),
format!("Failed to render template. Error: {err}"),
)
.into_response(),
}

View File

@ -68,7 +68,7 @@ impl ServerState {
.into_iter()
.enumerate()
.flat_map(|(i, url)| if i == 0 {
["".to_owned(), url]
[String::new(), url]
} else {
[" ".to_owned(), url]
})
@ -127,12 +127,7 @@ impl TrendsPage {
// service is very much alive:
systemd::watchdog();
TrendsPage {
results,
language,
languages,
tag_images,
}
TrendsPage { language, languages, results, tag_images }
}
fn template(self) -> HtmlTemplate<Self> {
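The TrendsPage hunk also swaps "".to_owned() for String::new(), which names the intent (an empty string) directly. A tiny sketch of the equivalence:

fn main() {
    let a: String = "".to_owned();
    let b: String = String::new();
    assert_eq!(a, b);
    // Neither form allocates: an empty String has zero capacity.
    assert_eq!(b.capacity(), 0);
}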

View File

@ -18,7 +18,7 @@ async fn collect_token(
) -> Result<(), String> {
// try a few registered apps until one works
for (client_id, client_secret) in db.get_apps(host).await
.map_err(|e| format!("{}", e))?
.map_err(|e| format!("{e}"))?
{
let app = oauth::Application {
client_id,
@ -39,7 +39,7 @@ async fn collect_token(
}
}
Err(format!("No registered app found for instance {}", host))
Err(format!("No registered app found for instance {host}"))
}
#[derive(serde::Deserialize)]

View File

@ -60,7 +60,7 @@ pub async fn post_token_donate(
Ok(app) => app,
Err(e) => {
tracing::error!("Canont register OAuth app: {}", e);
return PostTokenDonateResult::Error(format!("{}", e));
return PostTokenDonateResult::Error(format!("{e}"));
}
};
db.add_app(&form.instance, &app.client_id, &app.client_secret).await

View File

@ -32,7 +32,7 @@ const SCOPES: &str = "read:statuses";
impl Application {
pub async fn register(client: &reqwest::Client, host: &str) -> Result<Self, reqwest::Error> {
let url = format!("https://{}/api/v1/apps", host);
let url = format!("https://{host}/api/v1/apps");
let form = AppRegistration {
client_name: "#FediBuzz".to_string(),
website: "https://fedi.buzz/".to_string(),
@ -49,7 +49,7 @@ impl Application {
}
pub fn generate_redirect_url(host: &str) -> String {
format!("https://fedi.buzz/token/collect/{}", host)
format!("https://fedi.buzz/token/collect/{host}")
}
pub fn generate_auth_url(&self, host: &str) -> String {
@ -63,7 +63,7 @@ impl Application {
}
pub async fn obtain_token(&self, client: &reqwest::Client, host: &str, code: String) -> Result<String, reqwest::Error> {
let url = format!("https://{}/oauth/token", host);
let url = format!("https://{host}/oauth/token");
let form = TokenRequest {
grant_type: "authorization_code".to_string(),
scope: SCOPES.to_string(),

View File

@ -54,7 +54,7 @@ impl TrendAnalyzer {
let until = current_hour();
let mut analyzers: Vec<TrendAnalyzer> = periods.iter()
.cloned()
.copied()
.map(|period| TrendAnalyzer {
period,
until,
@ -75,7 +75,7 @@ impl TrendAnalyzer {
metrics::histogram!("trends_page_time", t2 - t1, "step" => "get_trend_pools", "lang" => lang);
metrics::histogram!("trends_page_time", t3 - t2, "step" => "get_trend_tags", "lang" => lang);
metrics::histogram!("trends_page_tags", tags_len as f64, "lang" => lang);
for trend_tag in trend_tags.into_iter() {
for trend_tag in trend_tags {
let trend_tag = Arc::new(trend_tag);
let name = Arc::new(trend_tag.name.clone());
for analyzer in &mut analyzers {

View File

@ -50,7 +50,7 @@ async fn run() {
cave::systemd::status("Starting scheduler");
let mut scheduler = scheduler::Scheduler::new(block_list.clone());
cave::systemd::status("Loading known hosts from config");
for host in config.hosts.into_iter() {
for host in config.hosts {
scheduler.introduce(InstanceHost::just_host(host)).await;
}
#[cfg(not(dev))]

View File

@ -136,7 +136,7 @@ impl Scheduler {
pub fn dequeue(&mut self) -> Result<InstanceHost, Duration> {
let now = Instant::now();
if let Some(time) = self.queue.keys().next().cloned() {
if let Some(time) = self.queue.keys().next().copied() {
if time <= now {
self.queue.remove(&time)
.ok_or(Duration::from_secs(1))

View File

@ -24,7 +24,7 @@ impl RobotsTxt {
client: &reqwest::Client,
host: &Host,
) -> Self {
let url = format!("https://{}/robots.txt", host);
let url = format!("https://{host}/robots.txt");
metrics::increment_gauge!("hunter_requests", 1.0, "type" => "robotstxt");
let robot = async {
let body = client.get(url)
@ -172,7 +172,7 @@ async fn fetch_timeline(
robots_txt: RobotsTxt,
host: &Host,
) -> Result<(Option<f64>, Option<Duration>), reqwest::Error> {
let url = format!("https://{}/api/v1/timelines/public?limit=40", host);
let url = format!("https://{host}/api/v1/timelines/public?limit=40");
if ! robots_txt.allowed(&url) {
tracing::warn!("Timeline of {} forbidden by robots.txt", host);
return Ok((None, None));
@ -192,7 +192,7 @@ async fn fetch_timeline(
let mean_interval = feed.mean_post_interval();
let (new_post_ratio, introduce_hosts) = process_posts(&mut store, posts_cache, block_list, host, feed.posts.into_iter()).await;
for introduce_host in introduce_hosts.into_iter() {
for introduce_host in introduce_hosts {
message_tx.send(Message::IntroduceHost(introduce_host)).unwrap();
}
@ -250,7 +250,7 @@ async fn process_posts(
}
let t2 = Instant::now();
metrics::histogram!("hunter_post_process_seconds", t2 - t1)
metrics::histogram!("hunter_post_process_seconds", t2 - t1);
}
}
tracing::trace!("{}: {}/{} new posts", host, new_posts, posts_len);
@ -299,9 +299,9 @@ async fn open_stream(
robots_txt: RobotsTxt,
host: Host,
) -> Result<(&'static str, impl Future<Output = usize>), String> {
let url = format!("https://{}/api/v1/streaming/public", host);
let url = format!("https://{host}/api/v1/streaming/public");
if ! robots_txt.allowed(&url) {
return Err(format!("Streaming of {} forbidden by robots.txt", host));
return Err(format!("Streaming of {host} forbidden by robots.txt"));
}
// free as early as possible
drop(robots_txt);
@ -346,7 +346,7 @@ async fn open_stream(
}
let stream = stream.map_err(|e| {
format!("Stream error for {}: {}", host, e)
format!("Stream error for {host}: {e}")
})?;
Ok((stats_key, stream.fold(0, move |post_count, post| {
@ -358,7 +358,7 @@ async fn open_stream(
async move {
let (_, introduce_hosts) =
process_posts(&mut store, &posts_cache, block_list, &host, [post].into_iter()).await;
for introduce_host in introduce_hosts.into_iter() {
for introduce_host in introduce_hosts {
message_tx.send(Message::IntroduceHost(introduce_host)).unwrap();
}
post_count + 1

View File

@ -5,7 +5,7 @@ use std::{
RwLock,
},
};
use ansi_term::Colour::{self, *};
use ansi_term::Colour::{self, Black, RGB, Red, Yellow};
use futures::{Stream, StreamExt};
use tokio::{
io::AsyncWriteExt,
@ -216,7 +216,7 @@ async fn main() {
tokio::spawn(async move {
while let Some(msg) = pipe.rx.recv().await {
match socket.write_all(&msg[..]).await {
Ok(_) => {}
Ok(()) => {}
Err(_) => break,
}
}
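The final hunk matches Ok(()) instead of Ok(_): socket.write_all returns io::Result<()>, so the success value is the unit type and can be named explicitly. A minimal sketch with write_all_stub as a hypothetical stand-in for the socket write:

fn write_all_stub(ok: bool) -> Result<(), std::io::Error> {
    if ok {
        Ok(())
    } else {
        Err(std::io::Error::new(std::io::ErrorKind::Other, "connection closed"))
    }
}

fn main() {
    match write_all_stub(true) {
        Ok(()) => {}       // explicit: the Ok variant carries nothing
        Err(_) => return,  // mirrors the `Err(_) => break` in the loop above
    }
}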