From 996b161c1e07459f5349e576d34c5ba0dafc15b3 Mon Sep 17 00:00:00 2001
From: Kitaiti Makoto
Date: Thu, 27 Jan 2022 11:45:35 +0900
Subject: [PATCH] Satisfy clippy

---
 plume-models/src/comments.rs         | 3 +--
 plume-models/src/search/tokenizer.rs | 2 +-
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/plume-models/src/comments.rs b/plume-models/src/comments.rs
index 0809aab5..09f26a35 100644
--- a/plume-models/src/comments.rs
+++ b/plume-models/src/comments.rs
@@ -304,14 +304,13 @@ impl FromId for Comment {
             .chain(bcc)
             .collect::<HashSet<_>>() // remove duplicates (don't do a query more than once)
             .into_iter()
-            .map(|v| {
+            .flat_map(|v| {
                 if let Ok(user) = User::from_id(conn, &v, None, CONFIG.proxy()) {
                     vec![user]
                 } else {
                     vec![] // TODO try to fetch collection
                 }
             })
-            .flatten()
             .filter(|u| u.get_instance(conn).map(|i| i.local).unwrap_or(false))
             .collect::<HashSet<User>>(); //remove duplicates (prevent db error)
diff --git a/plume-models/src/search/tokenizer.rs b/plume-models/src/search/tokenizer.rs
index 52cd62b2..83228e67 100644
--- a/plume-models/src/search/tokenizer.rs
+++ b/plume-models/src/search/tokenizer.rs
@@ -57,7 +57,7 @@ impl<'a> WhitespaceTokenStream<'a> {
             .filter(|&(_, ref c)| c.is_whitespace())
             .map(|(offset, _)| offset)
             .next()
-            .unwrap_or_else(|| self.text.len())
+            .unwrap_or(self.text.len())
     }
 }
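
Editor's note (not part of the patch): the two hunks above apply two common clippy suggestions, most likely the `map_flatten` and `unnecessary_lazy_evaluations` lints. The following is a minimal, self-contained Rust sketch of both rewrites; the data and names are illustrative and do not come from the Plume codebase.

    fn main() {
        let ids = vec![1, 2, 3, 4];

        // Before: .map(..).flatten() -- clippy suggests collapsing this
        // chain into a single adaptor.
        let evens_mapped: Vec<i32> = ids
            .iter()
            .map(|&n| if n % 2 == 0 { vec![n] } else { vec![] })
            .flatten()
            .collect();

        // After: one .flat_map(..) does the same work with one fewer adaptor.
        let evens_flat: Vec<i32> = ids
            .iter()
            .flat_map(|&n| if n % 2 == 0 { vec![n] } else { vec![] })
            .collect();
        assert_eq!(evens_mapped, evens_flat);

        // Before: unwrap_or_else with a closure whose body is a cheap,
        // side-effect-free expression -- the lazy closure buys nothing here.
        let text = "hello world";
        let lazy = text.find(char::is_whitespace).unwrap_or_else(|| text.len());

        // After: pass the fallback value directly.
        let eager = text.find(char::is_whitespace).unwrap_or(text.len());
        assert_eq!(lazy, eager);
    }

Note that `unwrap_or_else` is still the right choice when the fallback is expensive to compute or has side effects, since `unwrap_or` always evaluates its argument; in the tokenizer hunk the fallback is just `self.text.len()`, so the eager form is fine.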