From b945d1f602aba50d2cfdc77eba0742f6e707632b Mon Sep 17 00:00:00 2001
From: Atul Bhosale
Date: Wed, 20 Mar 2019 22:26:17 +0530
Subject: [PATCH] Run 'cargo fmt' to format code (#489)

---
 Cargo.lock | 2 +
 build.rs | 29 +-
 plume-api/src/lib.rs | 2 +-
 plume-cli/src/instance.rs | 51 +--
 plume-cli/src/main.rs | 22 +-
 plume-cli/src/search.rs | 108 +++---
 plume-cli/src/users.rs | 182 ++++----
 plume-common/src/activity_pub/mod.rs | 18 +-
 plume-common/src/activity_pub/request.rs | 19 +-
 plume-common/src/activity_pub/sign.rs | 18 +-
 plume-common/src/lib.rs | 2 +-
 plume-common/src/utils.rs | 290 +++++++------
 plume-front/src/editor.rs | 88 +++--
 plume-front/src/main.rs | 47 ++-
 plume-models/src/api_tokens.rs | 16 +-
 plume-models/src/apps.rs | 5 +-
 plume-models/src/blogs.rs | 342 ++++++++----------
 plume-models/src/comment_seers.rs | 3 +-
 plume-models/src/comments.rs | 188 +++++-----
 plume-models/src/db_conn.rs | 13 +-
 plume-models/src/follows.rs | 109 +++---
 plume-models/src/instance.rs | 70 ++--
 plume-models/src/lib.rs | 12 +-
 plume-models/src/likes.rs | 46 +--
 plume-models/src/medias.rs | 123 ++++---
 plume-models/src/mentions.rs | 23 +-
 plume-models/src/notifications.rs | 42 ++-
 plume-models/src/posts.rs | 438 +++++++++++++----------
 plume-models/src/reshares.rs | 48 +--
 plume-models/src/safe_string.rs | 16 +-
 plume-models/src/search/mod.rs | 86 +++--
 plume-models/src/search/query.rs | 144 +++++---
 plume-models/src/search/searcher.rs | 91 +++--
 plume-models/src/tags.rs | 7 +-
 plume-models/src/users.rs | 312 +++++++---------
 src/api/apps.rs | 6 +-
 src/api/authorization.rs | 27 +-
 src/api/mod.rs | 37 +-
 src/api/posts.rs | 86 +++--
 src/inbox.rs | 83 +++--
 src/mail.rs | 35 +-
 src/main.rs | 299 +++++++-------
 src/routes/blogs.rs | 113 +++---
 src/routes/comments.rs | 141 +++++---
 src/routes/errors.rs | 68 ++--
 src/routes/instance.rs | 139 ++++---
 src/routes/likes.rs | 40 ++-
 src/routes/medias.rs | 108 +++---
 src/routes/mod.rs | 74 ++--
 src/routes/notifications.rs | 20 +-
 src/routes/posts.rs | 309 +++++++++++-----
 src/routes/reshares.rs | 36 +-
 src/routes/search.rs | 19 +-
 src/routes/session.rs | 163 +++++----
 src/routes/tags.rs | 16 +-
 src/routes/user.rs | 239 ++++++++-----
 src/routes/well_known.rs | 69 ++--
 src/template_utils.rs | 216 +++++++----
 58 files changed, 3160 insertions(+), 2195 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 18f6a9ae..770e06ed 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,3 +1,5 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
 [[package]]
 name = "MacTypes-sys"
 version = "2.1.0"
diff --git a/build.rs b/build.rs
index a9d14f61..6832254d 100644
--- a/build.rs
+++ b/build.rs
@@ -1,8 +1,8 @@
-extern crate ructe;
 extern crate rsass;
+extern crate ructe;
 use ructe::*;
-use std::{env, fs::*, io::Write, path::PathBuf};
 use std::process::{Command, Stdio};
+use std::{env, fs::*, io::Write, path::PathBuf};
 
 fn compute_static_hash() -> String {
     //"find static/ -type f !
-path 'static/media/*' | sort | xargs stat --printf='%n %Y\n' | sha256sum" @@ -34,25 +34,36 @@ fn compute_static_hash() -> String { String::from_utf8(sha.stdout).unwrap() } - fn main() { let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); - let in_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()) - .join("templates"); + let in_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()).join("templates"); compile_templates(&in_dir, &out_dir).expect("compile templates"); println!("cargo:rerun-if-changed=static/css"); let mut out = File::create("static/css/main.css").expect("Couldn't create main.css"); out.write_all( - &rsass::compile_scss_file("static/css/main.scss".as_ref(), rsass::OutputStyle::Compressed) - .expect("Error during SCSS compilation") - ).expect("Couldn't write CSS output"); + &rsass::compile_scss_file( + "static/css/main.scss".as_ref(), + rsass::OutputStyle::Compressed, + ) + .expect("Error during SCSS compilation"), + ) + .expect("Couldn't write CSS output"); let cache_id = &compute_static_hash()[..8]; println!("cargo:rerun-if-changed=target/deploy/plume-front.wasm"); copy("target/deploy/plume-front.wasm", "static/plume-front.wasm") .and_then(|_| read_to_string("target/deploy/plume-front.js")) - .and_then(|js| write("static/plume-front.js", js.replace("\"plume-front.wasm\"", &format!("\"/static/cached/{}/plume-front.wasm\"", cache_id)))).ok(); + .and_then(|js| { + write( + "static/plume-front.js", + js.replace( + "\"plume-front.wasm\"", + &format!("\"/static/cached/{}/plume-front.wasm\"", cache_id), + ), + ) + }) + .ok(); println!("cargo:rustc-env=CACHE_ID={}", cache_id) } diff --git a/plume-api/src/lib.rs b/plume-api/src/lib.rs index 3b08158f..129950c2 100644 --- a/plume-api/src/lib.rs +++ b/plume-api/src/lib.rs @@ -21,4 +21,4 @@ pub mod posts; #[derive(Default)] pub struct Api { pub posts: posts::PostEndpoint, -} \ No newline at end of file +} diff --git a/plume-cli/src/instance.rs b/plume-cli/src/instance.rs index 6f399b95..6ffb0430 100644 --- a/plume-cli/src/instance.rs +++ b/plume-cli/src/instance.rs @@ -1,11 +1,7 @@ -use clap::{Arg, ArgMatches, App, SubCommand}; +use clap::{App, Arg, ArgMatches, SubCommand}; +use plume_models::{instance::*, safe_string::SafeString, Connection}; use std::env; -use plume_models::{ - Connection, - instance::*, - safe_string::SafeString, -}; pub fn command<'a, 'b>() -> App<'a, 'b> { SubCommand::with_name("instance") @@ -42,22 +38,33 @@ pub fn run<'a>(args: &ArgMatches<'a>, conn: &Connection) { } fn new<'a>(args: &ArgMatches<'a>, conn: &Connection) { - let domain = args.value_of("domain").map(String::from) - .unwrap_or_else(|| env::var("BASE_URL") - .unwrap_or_else(|_| super::ask_for("Domain name"))); - let name = args.value_of("name").map(String::from).unwrap_or_else(|| super::ask_for("Instance name")); - let license = args.value_of("default-license").map(String::from).unwrap_or_else(|| String::from("CC-BY-SA")); + let domain = args + .value_of("domain") + .map(String::from) + .unwrap_or_else(|| env::var("BASE_URL").unwrap_or_else(|_| super::ask_for("Domain name"))); + let name = args + .value_of("name") + .map(String::from) + .unwrap_or_else(|| super::ask_for("Instance name")); + let license = args + .value_of("default-license") + .map(String::from) + .unwrap_or_else(|| String::from("CC-BY-SA")); let open_reg = !args.is_present("private"); - Instance::insert(conn, NewInstance { - public_domain: domain, - name, - local: true, - long_description: SafeString::new(""), - short_description: SafeString::new(""), - 
default_license: license, - open_registrations: open_reg, - short_description_html: String::new(), - long_description_html: String::new() - }).expect("Couldn't save instance"); + Instance::insert( + conn, + NewInstance { + public_domain: domain, + name, + local: true, + long_description: SafeString::new(""), + short_description: SafeString::new(""), + default_license: license, + open_registrations: open_reg, + short_description_html: String::new(), + long_description_html: String::new(), + }, + ) + .expect("Couldn't save instance"); } diff --git a/plume-cli/src/main.rs b/plume-cli/src/main.rs index ea9c5ccf..28ca7b72 100644 --- a/plume-cli/src/main.rs +++ b/plume-cli/src/main.rs @@ -6,12 +6,12 @@ extern crate rpassword; use clap::App; use diesel::Connection; +use plume_models::{Connection as Conn, DATABASE_URL}; use std::io::{self, prelude::*}; -use plume_models::{DATABASE_URL, Connection as Conn}; mod instance; -mod users; mod search; +mod users; fn main() { let mut app = App::new("Plume CLI") @@ -27,10 +27,16 @@ fn main() { let conn = Conn::establish(DATABASE_URL.as_str()); match matches.subcommand() { - ("instance", Some(args)) => instance::run(args, &conn.expect("Couldn't connect to the database.")), - ("users", Some(args)) => users::run(args, &conn.expect("Couldn't connect to the database.")), - ("search", Some(args)) => search::run(args, &conn.expect("Couldn't connect to the database.")), - _ => app.print_help().expect("Couldn't print help") + ("instance", Some(args)) => { + instance::run(args, &conn.expect("Couldn't connect to the database.")) + } + ("users", Some(args)) => { + users::run(args, &conn.expect("Couldn't connect to the database.")) + } + ("search", Some(args)) => { + search::run(args, &conn.expect("Couldn't connect to the database.")) + } + _ => app.print_help().expect("Couldn't print help"), }; } @@ -38,7 +44,9 @@ pub fn ask_for(something: &str) -> String { print!("{}: ", something); io::stdout().flush().expect("Couldn't flush STDOUT"); let mut input = String::new(); - io::stdin().read_line(&mut input).expect("Unable to read line"); + io::stdin() + .read_line(&mut input) + .expect("Unable to read line"); input.retain(|c| c != '\n'); input } diff --git a/plume-cli/src/search.rs b/plume-cli/src/search.rs index f078fc99..b23692de 100644 --- a/plume-cli/src/search.rs +++ b/plume-cli/src/search.rs @@ -1,47 +1,56 @@ -use clap::{Arg, ArgMatches, App, SubCommand}; +use clap::{App, Arg, ArgMatches, SubCommand}; use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl}; +use plume_models::{posts::Post, schema::posts, search::Searcher, Connection}; use std::fs::{read_dir, remove_file}; use std::io::ErrorKind; use std::path::Path; -use plume_models::{ - Connection, - posts::Post, - schema::posts, - search::Searcher, -}; pub fn command<'a, 'b>() -> App<'a, 'b> { SubCommand::with_name("search") .about("Manage search index") - .subcommand(SubCommand::with_name("init") - .arg(Arg::with_name("path") - .short("p") - .long("path") - .takes_value(true) - .required(false) - .help("Path to Plume's working directory")) - .arg(Arg::with_name("force") - .short("f") - .long("force") - .help("Ignore already using directory") - ).about("Initialize Plume's internal search engine")) - .subcommand(SubCommand::with_name("refill") - .arg(Arg::with_name("path") - .short("p") - .long("path") - .takes_value(true) - .required(false) - .help("Path to Plume's working directory") - ).about("Regenerate Plume's search index")) - .subcommand(SubCommand::with_name("unlock") - .arg(Arg::with_name("path") - 
.short("p") - .long("path") - .takes_value(true) - .required(false) - .help("Path to Plume's working directory") - ).about("Release lock on search directory")) + .subcommand( + SubCommand::with_name("init") + .arg( + Arg::with_name("path") + .short("p") + .long("path") + .takes_value(true) + .required(false) + .help("Path to Plume's working directory"), + ) + .arg( + Arg::with_name("force") + .short("f") + .long("force") + .help("Ignore already using directory"), + ) + .about("Initialize Plume's internal search engine"), + ) + .subcommand( + SubCommand::with_name("refill") + .arg( + Arg::with_name("path") + .short("p") + .long("path") + .takes_value(true) + .required(false) + .help("Path to Plume's working directory"), + ) + .about("Regenerate Plume's search index"), + ) + .subcommand( + SubCommand::with_name("unlock") + .arg( + Arg::with_name("path") + .short("p") + .long("path") + .takes_value(true) + .required(false) + .help("Path to Plume's working directory"), + ) + .about("Release lock on search directory"), + ) } pub fn run<'a>(args: &ArgMatches<'a>, conn: &Connection) { @@ -59,19 +68,25 @@ fn init<'a>(args: &ArgMatches<'a>, conn: &Connection) { let force = args.is_present("force"); let path = Path::new(path).join("search_index"); - let can_do = match read_dir(path.clone()) { // try to read the directory specified - Ok(mut contents) => contents.next().is_none(), - Err(e) => if e.kind() == ErrorKind::NotFound { - true - } else { - panic!("Error while initialising search index : {}", e); + let can_do = match read_dir(path.clone()) { + // try to read the directory specified + Ok(mut contents) => contents.next().is_none(), + Err(e) => { + if e.kind() == ErrorKind::NotFound { + true + } else { + panic!("Error while initialising search index : {}", e); + } } }; if can_do || force { let searcher = Searcher::create(&path).unwrap(); refill(args, conn, Some(searcher)); } else { - eprintln!("Can't create new index, {} exist and is not empty", path.to_str().unwrap()); + eprintln!( + "Can't create new index, {} exist and is not empty", + path.to_str().unwrap() + ); } } @@ -86,15 +101,16 @@ fn refill<'a>(args: &ArgMatches<'a>, conn: &Connection, searcher: Option(args: &ArgMatches<'a>) { let path = args.value_of("path").unwrap_or("."); let path = Path::new(path).join("search_index/.tantivy-indexer.lock"); diff --git a/plume-cli/src/users.rs b/plume-cli/src/users.rs index 2cbc66af..1815dd5b 100644 --- a/plume-cli/src/users.rs +++ b/plume-cli/src/users.rs @@ -1,62 +1,78 @@ -use clap::{Arg, ArgMatches, App, SubCommand}; +use clap::{App, Arg, ArgMatches, SubCommand}; +use plume_models::{instance::Instance, users::*, Connection}; use rpassword; use std::io::{self, Write}; -use plume_models::{ - Connection, - instance::Instance, - users::*, -}; pub fn command<'a, 'b>() -> App<'a, 'b> { SubCommand::with_name("users") .about("Manage users") - .subcommand(SubCommand::with_name("new") - .arg(Arg::with_name("name") - .short("n") - .long("name") - .alias("username") - .takes_value(true) - .help("The username of the new user") - ).arg(Arg::with_name("display-name") - .short("N") - .long("display-name") - .takes_value(true) - .help("The display name of the new user") - ).arg(Arg::with_name("biography") - .short("b") - .long("bio") - .alias("biography") - .takes_value(true) - .help("The biography of the new user") - ).arg(Arg::with_name("email") - .short("e") - .long("email") - .takes_value(true) - .help("Email address of the new user") - ).arg(Arg::with_name("password") - .short("p") - .long("password") - 
.takes_value(true) - .help("The password of the new user") - ).arg(Arg::with_name("admin") - .short("a") - .long("admin") - .help("Makes the user an administrator of the instance") - ).about("Create a new user on this instance")) - .subcommand(SubCommand::with_name("reset-password") - .arg(Arg::with_name("name") - .short("u") - .long("user") - .alias("username") - .takes_value(true) - .help("The username of the user to reset password to") - ).arg(Arg::with_name("password") - .short("p") - .long("password") - .takes_value(true) - .help("The password new for the user") - ).about("Reset user password")) + .subcommand( + SubCommand::with_name("new") + .arg( + Arg::with_name("name") + .short("n") + .long("name") + .alias("username") + .takes_value(true) + .help("The username of the new user"), + ) + .arg( + Arg::with_name("display-name") + .short("N") + .long("display-name") + .takes_value(true) + .help("The display name of the new user"), + ) + .arg( + Arg::with_name("biography") + .short("b") + .long("bio") + .alias("biography") + .takes_value(true) + .help("The biography of the new user"), + ) + .arg( + Arg::with_name("email") + .short("e") + .long("email") + .takes_value(true) + .help("Email address of the new user"), + ) + .arg( + Arg::with_name("password") + .short("p") + .long("password") + .takes_value(true) + .help("The password of the new user"), + ) + .arg( + Arg::with_name("admin") + .short("a") + .long("admin") + .help("Makes the user an administrator of the instance"), + ) + .about("Create a new user on this instance"), + ) + .subcommand( + SubCommand::with_name("reset-password") + .arg( + Arg::with_name("name") + .short("u") + .long("user") + .alias("username") + .takes_value(true) + .help("The username of the user to reset password to"), + ) + .arg( + Arg::with_name("password") + .short("p") + .long("password") + .takes_value(true) + .help("The password new for the user"), + ) + .about("Reset user password"), + ) } pub fn run<'a>(args: &ArgMatches<'a>, conn: &Connection) { @@ -69,16 +85,28 @@ pub fn run<'a>(args: &ArgMatches<'a>, conn: &Connection) { } fn new<'a>(args: &ArgMatches<'a>, conn: &Connection) { - let username = args.value_of("name").map(String::from).unwrap_or_else(|| super::ask_for("Username")); - let display_name = args.value_of("display-name").map(String::from).unwrap_or_else(|| super::ask_for("Display name")); + let username = args + .value_of("name") + .map(String::from) + .unwrap_or_else(|| super::ask_for("Username")); + let display_name = args + .value_of("display-name") + .map(String::from) + .unwrap_or_else(|| super::ask_for("Display name")); let admin = args.is_present("admin"); let bio = args.value_of("biography").unwrap_or("").to_string(); - let email = args.value_of("email").map(String::from).unwrap_or_else(|| super::ask_for("Email address")); - let password = args.value_of("password").map(String::from).unwrap_or_else(|| { - print!("Password: "); - io::stdout().flush().expect("Couldn't flush STDOUT"); - rpassword::read_password().expect("Couldn't read your password.") - }); + let email = args + .value_of("email") + .map(String::from) + .unwrap_or_else(|| super::ask_for("Email address")); + let password = args + .value_of("password") + .map(String::from) + .unwrap_or_else(|| { + print!("Password: "); + io::stdout().flush().expect("Couldn't flush STDOUT"); + rpassword::read_password().expect("Couldn't read your password.") + }); NewUser::new_local( conn, @@ -88,17 +116,31 @@ fn new<'a>(args: &ArgMatches<'a>, conn: &Connection) { &bio, email, 
User::hash_pass(&password).expect("Couldn't hash password"), - ).expect("Couldn't save new user"); + ) + .expect("Couldn't save new user"); } fn reset_password<'a>(args: &ArgMatches<'a>, conn: &Connection) { - let username = args.value_of("name").map(String::from).unwrap_or_else(|| super::ask_for("Username")); - let user = User::find_by_name(conn, &username, Instance::get_local(conn).expect("Failed to get local instance").id) - .expect("Failed to get user"); - let password = args.value_of("password").map(String::from).unwrap_or_else(|| { - print!("Password: "); - io::stdout().flush().expect("Couldn't flush STDOUT"); - rpassword::read_password().expect("Couldn't read your password.") - }); - user.reset_password(conn, &password).expect("Failed to reset password"); + let username = args + .value_of("name") + .map(String::from) + .unwrap_or_else(|| super::ask_for("Username")); + let user = User::find_by_name( + conn, + &username, + Instance::get_local(conn) + .expect("Failed to get local instance") + .id, + ) + .expect("Failed to get user"); + let password = args + .value_of("password") + .map(String::from) + .unwrap_or_else(|| { + print!("Password: "); + io::stdout().flush().expect("Couldn't flush STDOUT"); + rpassword::read_password().expect("Couldn't read your password.") + }); + user.reset_password(conn, &password) + .expect("Failed to reset password"); } diff --git a/plume-common/src/activity_pub/mod.rs b/plume-common/src/activity_pub/mod.rs index c15a14bd..32c30f98 100644 --- a/plume-common/src/activity_pub/mod.rs +++ b/plume-common/src/activity_pub/mod.rs @@ -18,7 +18,8 @@ pub mod sign; pub const CONTEXT_URL: &str = "https://www.w3.org/ns/activitystreams"; pub const PUBLIC_VISIBILTY: &str = "https://www.w3.org/ns/activitystreams#Public"; -pub const AP_CONTENT_TYPE: &str = r#"application/ld+json; profile="https://www.w3.org/ns/activitystreams""#; +pub const AP_CONTENT_TYPE: &str = + r#"application/ld+json; profile="https://www.w3.org/ns/activitystreams""#; pub fn ap_accept_header() -> Vec<&'static str> { vec![ @@ -114,13 +115,18 @@ pub fn broadcast( let boxes = to .into_iter() .filter(|u| !u.is_local()) - .map(|u| u.get_shared_inbox_url().unwrap_or_else(|| u.get_inbox_url())) + .map(|u| { + u.get_shared_inbox_url() + .unwrap_or_else(|| u.get_inbox_url()) + }) .collect::>() .unique(); let mut act = serde_json::to_value(act).expect("activity_pub::broadcast: serialization error"); act["@context"] = context(); - let signed = act.sign(sender).expect("activity_pub::broadcast: signature error"); + let signed = act + .sign(sender) + .expect("activity_pub::broadcast: signature error"); for inbox in boxes { // TODO: run it in Sidekiq or something like that @@ -130,7 +136,11 @@ pub fn broadcast( let res = Client::new() .post(&inbox) .headers(headers.clone()) - .header("Signature", request::signature(sender, &headers).expect("activity_pub::broadcast: request signature error")) + .header( + "Signature", + request::signature(sender, &headers) + .expect("activity_pub::broadcast: request signature error"), + ) .body(body) .send(); match res { diff --git a/plume-common/src/activity_pub/request.rs b/plume-common/src/activity_pub/request.rs index 78059dd3..60e36b8d 100644 --- a/plume-common/src/activity_pub/request.rs +++ b/plume-common/src/activity_pub/request.rs @@ -1,12 +1,12 @@ use base64; use chrono::{offset::Utc, DateTime}; use openssl::hash::{Hasher, MessageDigest}; -use reqwest::header::{ACCEPT, CONTENT_TYPE, DATE, HeaderMap, HeaderValue, USER_AGENT}; +use reqwest::header::{HeaderMap, HeaderValue, 
ACCEPT, CONTENT_TYPE, DATE, USER_AGENT}; use std::ops::Deref; use std::time::SystemTime; -use activity_pub::{AP_CONTENT_TYPE, ap_accept_header}; use activity_pub::sign::Signer; +use activity_pub::{ap_accept_header, AP_CONTENT_TYPE}; const PLUME_USER_AGENT: &str = concat!("Plume/", env!("CARGO_PKG_VERSION")); @@ -42,7 +42,7 @@ impl Digest { } pub fn verify_header(&self, other: &Digest) -> bool { - self.value()==other.value() + self.value() == other.value() } pub fn algorithm(&self) -> &str { @@ -57,7 +57,8 @@ impl Digest { let pos = self .0 .find('=') - .expect("Digest::value: invalid header error") + 1; + .expect("Digest::value: invalid header error") + + 1; base64::decode(&self.0[pos..]).expect("Digest::value: invalid encoding error") } @@ -75,8 +76,11 @@ impl Digest { } pub fn from_body(body: &str) -> Self { - let mut hasher = Hasher::new(MessageDigest::sha256()).expect("Digest::digest: initialization error"); - hasher.update(body.as_bytes()).expect("Digest::digest: content insertion error"); + let mut hasher = + Hasher::new(MessageDigest::sha256()).expect("Digest::digest: initialization error"); + hasher + .update(body.as_bytes()) + .expect("Digest::digest: content insertion error"); let res = base64::encode(&hasher.finish().expect("Digest::digest: finalizing error")); Digest(format!("SHA-256={}", res)) } @@ -99,7 +103,8 @@ pub fn headers() -> HeaderMap { .into_iter() .collect::>() .join(", "), - ).expect("request::headers: accept error"), + ) + .expect("request::headers: accept error"), ); headers.insert(CONTENT_TYPE, HeaderValue::from_static(AP_CONTENT_TYPE)); headers diff --git a/plume-common/src/activity_pub/sign.rs b/plume-common/src/activity_pub/sign.rs index 9d00a2e1..83834b9a 100644 --- a/plume-common/src/activity_pub/sign.rs +++ b/plume-common/src/activity_pub/sign.rs @@ -1,7 +1,6 @@ use super::request; use base64; -use chrono::{DateTime, Duration, - naive::NaiveDateTime, Utc}; +use chrono::{naive::NaiveDateTime, DateTime, Duration, Utc}; use hex; use openssl::{pkey::PKey, rsa::Rsa, sha::sha256}; use rocket::http::HeaderMap; @@ -57,9 +56,10 @@ impl Signable for serde_json::Value { let options_hash = Self::hash( &json!({ - "@context": "https://w3id.org/identity/v1", - "created": creation_date - }).to_string(), + "@context": "https://w3id.org/identity/v1", + "created": creation_date + }) + .to_string(), ); let document_hash = Self::hash(&self.to_string()); let to_be_signed = options_hash + &document_hash; @@ -91,7 +91,8 @@ impl Signable for serde_json::Value { &json!({ "@context": "https://w3id.org/identity/v1", "created": creation_date - }).to_string(), + }) + .to_string(), ); let creation_date = creation_date.as_str(); if creation_date.is_none() { @@ -169,7 +170,10 @@ pub fn verify_http_headers( .collect::>() .join("\n"); - if !sender.verify(&h, &base64::decode(signature).unwrap_or_default()).unwrap_or(false) { + if !sender + .verify(&h, &base64::decode(signature).unwrap_or_default()) + .unwrap_or(false) + { return SignatureValidity::Invalid; } if !headers.contains(&"digest") { diff --git a/plume-common/src/lib.rs b/plume-common/src/lib.rs index a3c267fb..30c5a356 100644 --- a/plume-common/src/lib.rs +++ b/plume-common/src/lib.rs @@ -10,8 +10,8 @@ extern crate chrono; extern crate failure; #[macro_use] extern crate failure_derive; -extern crate hex; extern crate heck; +extern crate hex; extern crate openssl; extern crate pulldown_cmark; extern crate reqwest; diff --git a/plume-common/src/utils.rs b/plume-common/src/utils.rs index 409b9d07..23a2c3c0 100644 --- 
a/plume-common/src/utils.rs +++ b/plume-common/src/utils.rs @@ -1,18 +1,20 @@ use heck::CamelCase; use openssl::rand::rand_bytes; -use pulldown_cmark::{Event, Parser, Options, Tag, html}; +use pulldown_cmark::{html, Event, Options, Parser, Tag}; use rocket::{ http::uri::Uri, - response::{Redirect, Flash} + response::{Flash, Redirect}, }; use std::borrow::Cow; use std::collections::HashSet; /// Generates an hexadecimal representation of 32 bytes of random data pub fn random_hex() -> String { - let mut bytes = [0; 32]; + let mut bytes = [0; 32]; rand_bytes(&mut bytes).expect("Error while generating client id"); - bytes.iter().fold(String::new(), |res, byte| format!("{}{:x}", res, byte)) + bytes + .iter() + .fold(String::new(), |res, byte| format!("{}{:x}", res, byte)) } /// Remove non alphanumeric characters and CamelCase a string @@ -29,7 +31,11 @@ pub fn make_actor_id(name: &str) -> String { * Note that the message should be translated before passed to this function. */ pub fn requires_login>>(message: &str, url: T) -> Flash { - Flash::new(Redirect::to(format!("/login?m={}", Uri::percent_encode(message))), "callback", url.into().to_string()) + Flash::new( + Redirect::to(format!("/login?m={}", Uri::percent_encode(message))), + "callback", + url.into().to_string(), + ) } #[derive(Debug)] @@ -45,117 +51,161 @@ pub fn md_to_html(md: &str, base_url: &str) -> (String, HashSet, HashSet let parser = Parser::new_ext(md, Options::all()); let (parser, mentions, hashtags): (Vec, Vec, Vec) = parser - .scan(None, |state: &mut Option, evt|{ - let (s, res) = match evt { - Event::Text(txt) => match state.take() { - Some(mut prev_txt) => { - prev_txt.push_str(&txt); - (Some(prev_txt), vec![]) - }, - None => { - (Some(txt.into_owned()), vec![]) - } - }, - e => match state.take() { - Some(prev) => (None, vec![Event::Text(Cow::Owned(prev)), e]), - None => (None, vec![e]), - } - }; - *state = s; - Some(res) - }) - .flat_map(|v| v.into_iter()) - .map(|evt| match evt { - Event::Text(txt) => { - let (evts, _, _, _, new_mentions, new_hashtags) = txt.chars().fold((vec![], State::Ready, String::new(), 0, vec![], vec![]), |(mut events, state, mut text_acc, n, mut mentions, mut hashtags), c| { - match state { - State::Mention => { - let char_matches = c.is_alphanumeric() || "@.-_".contains(c); - if char_matches && (n < (txt.chars().count() - 1)) { - text_acc.push(c); - (events, State::Mention, text_acc, n + 1, mentions, hashtags) - } else { - if char_matches { - text_acc.push(c) - } - let mention = text_acc; - let short_mention = mention.splitn(1, '@').nth(0).unwrap_or(""); - let link = Tag::Link(format!("//{}/@/{}/", base_url, &mention).into(), short_mention.to_owned().into()); - - mentions.push(mention.clone()); - events.push(Event::Start(link.clone())); - events.push(Event::Text(format!("@{}", &short_mention).into())); - events.push(Event::End(link)); - - (events, State::Ready, c.to_string(), n + 1, mentions, hashtags) - } + .scan(None, |state: &mut Option, evt| { + let (s, res) = match evt { + Event::Text(txt) => match state.take() { + Some(mut prev_txt) => { + prev_txt.push_str(&txt); + (Some(prev_txt), vec![]) } - State::Hashtag => { - let char_matches = c.is_alphanumeric() || "-_".contains(c); - if char_matches && (n < (txt.chars().count() -1)) { - text_acc.push(c); - (events, State::Hashtag, text_acc, n+1, mentions, hashtags) - } else { - if char_matches { + None => (Some(txt.into_owned()), vec![]), + }, + e => match state.take() { + Some(prev) => (None, vec![Event::Text(Cow::Owned(prev)), e]), + None => (None, 
vec![e]), + }, + }; + *state = s; + Some(res) + }) + .flat_map(|v| v.into_iter()) + .map(|evt| match evt { + Event::Text(txt) => { + let (evts, _, _, _, new_mentions, new_hashtags) = txt.chars().fold( + (vec![], State::Ready, String::new(), 0, vec![], vec![]), + |(mut events, state, mut text_acc, n, mut mentions, mut hashtags), c| { + match state { + State::Mention => { + let char_matches = c.is_alphanumeric() || "@.-_".contains(c); + if char_matches && (n < (txt.chars().count() - 1)) { + text_acc.push(c); + (events, State::Mention, text_acc, n + 1, mentions, hashtags) + } else { + if char_matches { + text_acc.push(c) + } + let mention = text_acc; + let short_mention = mention.splitn(1, '@').nth(0).unwrap_or(""); + let link = Tag::Link( + format!("//{}/@/{}/", base_url, &mention).into(), + short_mention.to_owned().into(), + ); + + mentions.push(mention.clone()); + events.push(Event::Start(link.clone())); + events.push(Event::Text(format!("@{}", &short_mention).into())); + events.push(Event::End(link)); + + ( + events, + State::Ready, + c.to_string(), + n + 1, + mentions, + hashtags, + ) + } + } + State::Hashtag => { + let char_matches = c.is_alphanumeric() || "-_".contains(c); + if char_matches && (n < (txt.chars().count() - 1)) { + text_acc.push(c); + (events, State::Hashtag, text_acc, n + 1, mentions, hashtags) + } else { + if char_matches { + text_acc.push(c); + } + let hashtag = text_acc; + let link = Tag::Link( + format!("//{}/tag/{}", base_url, &hashtag.to_camel_case()) + .into(), + hashtag.to_owned().into(), + ); + + hashtags.push(hashtag.clone()); + events.push(Event::Start(link.clone())); + events.push(Event::Text(format!("#{}", &hashtag).into())); + events.push(Event::End(link)); + + ( + events, + State::Ready, + c.to_string(), + n + 1, + mentions, + hashtags, + ) + } + } + State::Ready => { + if c == '@' { + events.push(Event::Text(text_acc.into())); + ( + events, + State::Mention, + String::new(), + n + 1, + mentions, + hashtags, + ) + } else if c == '#' { + events.push(Event::Text(text_acc.into())); + ( + events, + State::Hashtag, + String::new(), + n + 1, + mentions, + hashtags, + ) + } else if c.is_alphanumeric() { + text_acc.push(c); + if n >= (txt.chars().count() - 1) { + // Add the text after at the end, even if it is not followed by a mention. + events.push(Event::Text(text_acc.clone().into())) + } + (events, State::Word, text_acc, n + 1, mentions, hashtags) + } else { + text_acc.push(c); + if n >= (txt.chars().count() - 1) { + // Add the text after at the end, even if it is not followed by a mention. + events.push(Event::Text(text_acc.clone().into())) + } + (events, State::Ready, text_acc, n + 1, mentions, hashtags) + } + } + State::Word => { text_acc.push(c); + if c.is_alphanumeric() { + if n >= (txt.chars().count() - 1) { + // Add the text after at the end, even if it is not followed by a mention. + events.push(Event::Text(text_acc.clone().into())) + } + (events, State::Word, text_acc, n + 1, mentions, hashtags) + } else { + if n >= (txt.chars().count() - 1) { + // Add the text after at the end, even if it is not followed by a mention. 
+ events.push(Event::Text(text_acc.clone().into())) + } + (events, State::Ready, text_acc, n + 1, mentions, hashtags) + } } - let hashtag = text_acc; - let link = Tag::Link(format!("//{}/tag/{}", base_url, &hashtag.to_camel_case()).into(), hashtag.to_owned().into()); - - hashtags.push(hashtag.clone()); - events.push(Event::Start(link.clone())); - events.push(Event::Text(format!("#{}", &hashtag).into())); - events.push(Event::End(link)); - - (events, State::Ready, c.to_string(), n + 1, mentions, hashtags) } - } - State::Ready => { - if c == '@' { - events.push(Event::Text(text_acc.into())); - (events, State::Mention, String::new(), n + 1, mentions, hashtags) - } else if c == '#' { - events.push(Event::Text(text_acc.into())); - (events, State::Hashtag, String::new(), n + 1, mentions, hashtags) - } else if c.is_alphanumeric() { - text_acc.push(c); - if n >= (txt.chars().count() - 1) { // Add the text after at the end, even if it is not followed by a mention. - events.push(Event::Text(text_acc.clone().into())) - } - (events, State::Word, text_acc, n + 1, mentions, hashtags) - } else { - text_acc.push(c); - if n >= (txt.chars().count() - 1) { // Add the text after at the end, even if it is not followed by a mention. - events.push(Event::Text(text_acc.clone().into())) - } - (events, State::Ready, text_acc, n + 1, mentions, hashtags) - } - } - State::Word => { - text_acc.push(c); - if c.is_alphanumeric() { - if n >= (txt.chars().count() - 1) { // Add the text after at the end, even if it is not followed by a mention. - events.push(Event::Text(text_acc.clone().into())) - } - (events, State::Word, text_acc, n + 1, mentions, hashtags) - } else { - if n >= (txt.chars().count() - 1) { // Add the text after at the end, even if it is not followed by a mention. - events.push(Event::Text(text_acc.clone().into())) - } - (events, State::Ready, text_acc, n + 1, mentions, hashtags) - } - } - } - }); - (evts, new_mentions, new_hashtags) - }, - _ => (vec![evt], vec![], vec![]) - }).fold((vec![],vec![],vec![]), |(mut parser, mut mention, mut hashtag), (mut p, mut m, mut h)| { - parser.append(&mut p); - mention.append(&mut m); - hashtag.append(&mut h); - (parser, mention, hashtag) - }); + }, + ); + (evts, new_mentions, new_hashtags) + } + _ => (vec![evt], vec![], vec![]), + }) + .fold( + (vec![], vec![], vec![]), + |(mut parser, mut mention, mut hashtag), (mut p, mut m, mut h)| { + parser.append(&mut p); + mention.append(&mut m); + hashtag.append(&mut h); + (parser, mention, hashtag) + }, + ); let parser = parser.into_iter(); let mentions = mentions.into_iter().map(|m| String::from(m.trim())); let hashtags = hashtags.into_iter().map(|h| String::from(h.trim())); @@ -188,7 +238,13 @@ mod tests { ]; for (md, mentions) in tests { - assert_eq!(md_to_html(md, "").1, mentions.into_iter().map(|s| s.to_string()).collect::>()); + assert_eq!( + md_to_html(md, "").1, + mentions + .into_iter() + .map(|s| s.to_string()) + .collect::>() + ); } } @@ -207,7 +263,13 @@ mod tests { ]; for (md, mentions) in tests { - assert_eq!(md_to_html(md, "").2, mentions.into_iter().map(|s| s.to_string()).collect::>()); + assert_eq!( + md_to_html(md, "").2, + mentions + .into_iter() + .map(|s| s.to_string()) + .collect::>() + ); } } } diff --git a/plume-front/src/editor.rs b/plume-front/src/editor.rs index 8abfab36..eef77683 100644 --- a/plume-front/src/editor.rs +++ b/plume-front/src/editor.rs @@ -1,4 +1,7 @@ -use stdweb::{unstable::{TryInto, TryFrom}, web::{*, html_element::*, event::*}}; +use stdweb::{ + unstable::{TryFrom, TryInto}, + 
web::{event::*, html_element::*, *}, +}; use CATALOG; macro_rules! mv { @@ -14,7 +17,8 @@ fn get_elt_value(id: &'static str) -> String { let elt = document().get_element_by_id(id).unwrap(); let inp: Result = elt.clone().try_into(); let textarea: Result = elt.try_into(); - inp.map(|i| i.raw_value()).unwrap_or_else(|_| textarea.unwrap().value()) + inp.map(|i| i.raw_value()) + .unwrap_or_else(|_| textarea.unwrap().value()) } fn set_value>(id: &'static str, val: S) { @@ -64,7 +68,7 @@ fn init_widget( tag: &'static str, placeholder_text: String, content: String, - disable_return: bool + disable_return: bool, ) -> Result { let widget = placeholder(make_editable(tag).try_into()?, &placeholder_text); if !content.is_empty() { @@ -86,7 +90,7 @@ fn init_widget( pub fn init() -> Result<(), EditorError> { if let Some(ed) = document().get_element_by_id("plume-editor") { // Show the editor - js!{ @{&ed}.style.display = "block"; }; + js! { @{&ed}.style.display = "block"; }; // And hide the HTML-only fallback let old_ed = document().get_element_by_id("plume-fallback-editor")?; let old_title = document().get_element_by_id("plume-editor-title")?; @@ -101,8 +105,20 @@ pub fn init() -> Result<(), EditorError> { let content_val = get_elt_value("editor-content"); // And pre-fill the new editor with this values let title = init_widget(&ed, "h1", i18n!(CATALOG, "Title"), title_val, true)?; - let subtitle = init_widget(&ed, "h2", i18n!(CATALOG, "Subtitle or summary"), subtitle_val, true)?; - let content = init_widget(&ed, "article", i18n!(CATALOG, "Write your article here. Markdown is supported."), content_val.clone(), true)?; + let subtitle = init_widget( + &ed, + "h2", + i18n!(CATALOG, "Subtitle or summary"), + subtitle_val, + true, + )?; + let content = init_widget( + &ed, + "article", + i18n!(CATALOG, "Write your article here. Markdown is supported."), + content_val.clone(), + true, + )?; js! 
{ @{&content}.innerHTML = @{content_val}; }; // character counter @@ -118,27 +134,38 @@ pub fn init() -> Result<(), EditorError> { }), 0); })); - document().get_element_by_id("publish")?.add_event_listener(mv!(title, subtitle, content, old_ed => move |_: ClickEvent| { - let popup = document().get_element_by_id("publish-popup").or_else(|| - init_popup(&title, &subtitle, &content, &old_ed).ok() - ).unwrap(); - let bg = document().get_element_by_id("popup-bg").or_else(|| - init_popup_bg().ok() - ).unwrap(); + document().get_element_by_id("publish")?.add_event_listener( + mv!(title, subtitle, content, old_ed => move |_: ClickEvent| { + let popup = document().get_element_by_id("publish-popup").or_else(|| + init_popup(&title, &subtitle, &content, &old_ed).ok() + ).unwrap(); + let bg = document().get_element_by_id("popup-bg").or_else(|| + init_popup_bg().ok() + ).unwrap(); - popup.class_list().add("show").unwrap(); - bg.class_list().add("show").unwrap(); - })); + popup.class_list().add("show").unwrap(); + bg.class_list().add("show").unwrap(); + }), + ); } Ok(()) } -fn init_popup(title: &HtmlElement, subtitle: &HtmlElement, content: &HtmlElement, old_ed: &Element) -> Result { +fn init_popup( + title: &HtmlElement, + subtitle: &HtmlElement, + content: &HtmlElement, + old_ed: &Element, +) -> Result { let popup = document().create_element("div")?; popup.class_list().add("popup")?; popup.set_attribute("id", "publish-popup")?; - let tags = get_elt_value("tags").split(',').map(str::trim).map(str::to_string).collect::>(); + let tags = get_elt_value("tags") + .split(',') + .map(str::trim) + .map(str::to_string) + .collect::>(); let license = get_elt_value("license"); make_input(&i18n!(CATALOG, "Tags"), "popup-tags", &popup).set_raw_value(&tags.join(", ")); make_input(&i18n!(CATALOG, "License"), "popup-license", &popup).set_raw_value(&license); @@ -152,7 +179,7 @@ fn init_popup(title: &HtmlElement, subtitle: &HtmlElement, content: &HtmlElement popup.append_child(&cover); let button = document().create_element("input")?; - js!{ + js! { @{&button}.type = "submit"; @{&button}.value = @{i18n!(CATALOG, "Publish")}; }; @@ -189,7 +216,10 @@ fn init_popup_bg() -> Result { fn chars_left(selector: &str, content: &HtmlElement) -> Option { match document().query_selector(selector) { Ok(Some(form)) => HtmlElement::try_from(form).ok().and_then(|form| { - if let Some(len) = form.get_attribute("content-size").and_then(|s| s.parse::().ok()) { + if let Some(len) = form + .get_attribute("content-size") + .and_then(|s| s.parse::().ok()) + { (js! 
{ let x = encodeURIComponent(@{content}.innerHTML) .replace(/%20/g, "+") @@ -198,7 +228,10 @@ fn chars_left(selector: &str, content: &HtmlElement) -> Option { .length + 2; console.log(x); return x; - }).try_into().map(|c: i32| len - c).ok() + }) + .try_into() + .map(|c: i32| len - c) + .ok() } else { None } @@ -218,7 +251,11 @@ fn make_input(label_text: &str, name: &'static str, form: &Element) -> InputElem label.append_child(&document().create_text_node(label_text)); label.set_attribute("for", name).unwrap(); - let inp: InputElement = document().create_element("input").unwrap().try_into().unwrap(); + let inp: InputElement = document() + .create_element("input") + .unwrap() + .try_into() + .unwrap(); inp.set_attribute("name", name).unwrap(); inp.set_attribute("id", name).unwrap(); @@ -228,8 +265,11 @@ fn make_input(label_text: &str, name: &'static str, form: &Element) -> InputElem } fn make_editable(tag: &'static str) -> Element { - let elt = document().create_element(tag).expect("Couldn't create editable element"); - elt.set_attribute("contenteditable", "true").expect("Couldn't make element editable"); + let elt = document() + .create_element(tag) + .expect("Couldn't create editable element"); + elt.set_attribute("contenteditable", "true") + .expect("Couldn't make element editable"); elt } diff --git a/plume-front/src/main.rs b/plume-front/src/main.rs index 96f1d5be..c6be18aa 100644 --- a/plume-front/src/main.rs +++ b/plume-front/src/main.rs @@ -1,4 +1,4 @@ -#![recursion_limit="128"] +#![recursion_limit = "128"] #![feature(decl_macro, proc_macro_hygiene, try_trait)] extern crate gettext; @@ -9,7 +9,7 @@ extern crate lazy_static; #[macro_use] extern crate stdweb; -use stdweb::{web::{*, event::*}}; +use stdweb::web::{event::*, *}; init_i18n!("plume-front", en, fr); @@ -20,9 +20,14 @@ compile_i18n!(); lazy_static! { static ref CATALOG: gettext::Catalog = { let catalogs = include_i18n!(); - let lang = js!{ return navigator.language }.into_string().unwrap(); + let lang = js! { return navigator.language }.into_string().unwrap(); let lang = lang.splitn(2, '-').next().unwrap_or("en"); - catalogs.iter().find(|(l, _)| l == &lang).unwrap_or(&catalogs[0]).clone().1 + catalogs + .iter() + .find(|(l, _)| l == &lang) + .unwrap_or(&catalogs[0]) + .clone() + .1 }; } @@ -30,7 +35,8 @@ fn main() { menu(); search(); editor::init() - .map_err(|e| console!(error, format!("Editor error: {:?}", e))).ok(); + .map_err(|e| console!(error, format!("Editor error: {:?}", e))) + .ok(); } /// Toggle menu on mobile device @@ -41,10 +47,14 @@ fn menu() { if let Some(button) = document().get_element_by_id("menu") { if let Some(menu) = document().get_element_by_id("content") { button.add_event_listener(|_: ClickEvent| { - document().get_element_by_id("menu").map(|menu| menu.class_list().add("show")); + document() + .get_element_by_id("menu") + .map(|menu| menu.class_list().add("show")); }); menu.add_event_listener(|_: ClickEvent| { - document().get_element_by_id("menu").map(|menu| menu.class_list().remove("show")); + document() + .get_element_by_id("menu") + .map(|menu| menu.class_list().remove("show")); }); } } @@ -54,18 +64,21 @@ fn menu() { fn search() { if let Some(form) = document().get_element_by_id("form") { form.add_event_listener(|_: SubmitEvent| { - document().query_selector_all("#form input").map(|inputs| { - for input in inputs { - js! 
{ - if (@{&input}.name === "") { - @{&input}.name = @{&input}.id - } - if (@{&input}.name && !@{&input}.value) { - @{&input}.name = ""; + document() + .query_selector_all("#form input") + .map(|inputs| { + for input in inputs { + js! { + if (@{&input}.name === "") { + @{&input}.name = @{&input}.id + } + if (@{&input}.name && !@{&input}.value) { + @{&input}.name = ""; + } } } - } - }).ok(); + }) + .ok(); }); } } diff --git a/plume-models/src/api_tokens.rs b/plume-models/src/api_tokens.rs index 3ec9ed91..cc876fbc 100644 --- a/plume-models/src/api_tokens.rs +++ b/plume-models/src/api_tokens.rs @@ -89,13 +89,19 @@ impl<'a, 'r> FromRequest<'a, 'r> for ApiToken { } let mut parsed_header = headers[0].split(' '); - let auth_type = parsed_header.next() - .map_or_else(|| Outcome::Failure((Status::BadRequest, TokenError::NoType)), Outcome::Success)?; - let val = parsed_header.next() - .map_or_else(|| Outcome::Failure((Status::BadRequest, TokenError::NoValue)), Outcome::Success)?; + let auth_type = parsed_header.next().map_or_else( + || Outcome::Failure((Status::BadRequest, TokenError::NoType)), + Outcome::Success, + )?; + let val = parsed_header.next().map_or_else( + || Outcome::Failure((Status::BadRequest, TokenError::NoValue)), + Outcome::Success, + )?; if auth_type == "Bearer" { - let conn = request.guard::().map_failure(|_| (Status::InternalServerError, TokenError::DbError))?; + let conn = request + .guard::() + .map_failure(|_| (Status::InternalServerError, TokenError::DbError))?; if let Ok(token) = ApiToken::find_by_value(&*conn, val) { return Outcome::Success(token); } diff --git a/plume-models/src/apps.rs b/plume-models/src/apps.rs index aae3af96..f245c0a7 100644 --- a/plume-models/src/apps.rs +++ b/plume-models/src/apps.rs @@ -5,7 +5,7 @@ use diesel::{self, ExpressionMethods, QueryDsl, RunQueryDsl}; use plume_api::apps::AppEndpoint; use plume_common::utils::random_hex; use schema::apps; -use {Connection, Error, Result, ApiResult}; +use {ApiResult, Connection, Error, Result}; #[derive(Clone, Queryable)] pub struct App { @@ -52,7 +52,8 @@ impl Provider for App { redirect_uri: data.redirect_uri, website: data.website, }, - ).map_err(|_| ApiError::NotFound("Couldn't register app".into()))?; + ) + .map_err(|_| ApiError::NotFound("Couldn't register app".into()))?; Ok(AppEndpoint { id: Some(app.id), diff --git a/plume-models/src/blogs.rs b/plume-models/src/blogs.rs index 2f61e628..ff49bd0c 100644 --- a/plume-models/src/blogs.rs +++ b/plume-models/src/blogs.rs @@ -26,7 +26,7 @@ use safe_string::SafeString; use schema::blogs; use search::Searcher; use users::User; -use {Connection, BASE_URL, USE_HTTPS, Error, Result}; +use {Connection, Error, Result, BASE_URL, USE_HTTPS}; pub type CustomGroup = CustomObject; @@ -66,27 +66,15 @@ impl Blog { insert!(blogs, NewBlog, |inserted, conn| { let instance = inserted.get_instance(conn)?; if inserted.outbox_url.is_empty() { - inserted.outbox_url = instance.compute_box( - BLOG_PREFIX, - &inserted.actor_id, - "outbox", - ); + inserted.outbox_url = instance.compute_box(BLOG_PREFIX, &inserted.actor_id, "outbox"); } if inserted.inbox_url.is_empty() { - inserted.inbox_url = instance.compute_box( - BLOG_PREFIX, - &inserted.actor_id, - "inbox", - ); + inserted.inbox_url = instance.compute_box(BLOG_PREFIX, &inserted.actor_id, "inbox"); } if inserted.ap_url.is_empty() { - inserted.ap_url = instance.compute_box( - BLOG_PREFIX, - &inserted.actor_id, - "", - ); + inserted.ap_url = instance.compute_box(BLOG_PREFIX, &inserted.actor_id, ""); } if inserted.fqn.is_empty() { @@ 
-154,16 +142,12 @@ impl Blog { } fn fetch_from_webfinger(conn: &Connection, acct: &str) -> Result { - resolve(acct.to_owned(), *USE_HTTPS)?.links + resolve(acct.to_owned(), *USE_HTTPS)? + .links .into_iter() .find(|l| l.mime_type == Some(String::from("application/activity+json"))) .ok_or(Error::Webfinger) - .and_then(|l| { - Blog::fetch_from_url( - conn, - &l.href? - ) - }) + .and_then(|l| Blog::fetch_from_url(conn, &l.href?)) } fn fetch_from_url(conn: &Connection, url: &str) -> Result { @@ -181,20 +165,14 @@ impl Blog { .send()?; let text = &res.text()?; - let ap_sign: ApSignature = - serde_json::from_str(text)?; - let mut json: CustomGroup = - serde_json::from_str(text)?; + let ap_sign: ApSignature = serde_json::from_str(text)?; + let mut json: CustomGroup = serde_json::from_str(text)?; json.custom_props = ap_sign; // without this workaround, publicKey is not correctly deserialized - Blog::from_activity( - conn, - &json, - Url::parse(url)?.host_str()?, - ) + Blog::from_activity(conn, &json, Url::parse(url)?.host_str()?) } fn from_activity(conn: &Connection, acct: &CustomGroup, inst: &str) -> Result { - let instance = Instance::find_by_domain(conn, inst).or_else(|_| + let instance = Instance::find_by_domain(conn, inst).or_else(|_| { Instance::insert( conn, NewInstance { @@ -210,35 +188,17 @@ impl Blog { long_description_html: String::new(), }, ) - )?; + })?; Blog::insert( conn, NewBlog { - actor_id: acct - .object - .ap_actor_props - .preferred_username_string()?, - title: acct - .object - .object_props - .name_string()?, - outbox_url: acct - .object - .ap_actor_props - .outbox_string()?, - inbox_url: acct - .object - .ap_actor_props - .inbox_string()?, - summary: acct - .object - .object_props - .summary_string()?, + actor_id: acct.object.ap_actor_props.preferred_username_string()?, + title: acct.object.object_props.name_string()?, + outbox_url: acct.object.ap_actor_props.outbox_string()?, + inbox_url: acct.object.ap_actor_props.inbox_string()?, + summary: acct.object.object_props.summary_string()?, instance_id: instance.id, - ap_url: acct - .object - .object_props - .id_string()?, + ap_url: acct.object.object_props.id_string()?, public_key: acct .custom_props .public_key_publickey()? 
@@ -252,27 +212,20 @@ impl Blog { let mut blog = Group::default(); blog.ap_actor_props .set_preferred_username_string(self.actor_id.clone())?; - blog.object_props - .set_name_string(self.title.clone())?; + blog.object_props.set_name_string(self.title.clone())?; blog.ap_actor_props .set_outbox_string(self.outbox_url.clone())?; blog.ap_actor_props .set_inbox_string(self.inbox_url.clone())?; - blog.object_props - .set_summary_string(self.summary.clone())?; - blog.object_props - .set_id_string(self.ap_url.clone())?; + blog.object_props.set_summary_string(self.summary.clone())?; + blog.object_props.set_id_string(self.ap_url.clone())?; let mut public_key = PublicKey::default(); - public_key - .set_id_string(format!("{}#main-key", self.ap_url))?; - public_key - .set_owner_string(self.ap_url.clone())?; - public_key - .set_public_key_pem_string(self.public_key.clone())?; + public_key.set_id_string(format!("{}#main-key", self.ap_url))?; + public_key.set_owner_string(self.ap_url.clone())?; + public_key.set_public_key_pem_string(self.public_key.clone())?; let mut ap_signature = ApSignature::default(); - ap_signature - .set_public_key_publickey(public_key)?; + ap_signature.set_public_key_publickey(public_key)?; Ok(CustomGroup::new(blog, ap_signature)) } @@ -290,13 +243,10 @@ impl Blog { } pub fn get_keypair(&self) -> Result> { - PKey::from_rsa( - Rsa::private_key_from_pem( - self.private_key - .clone()? - .as_ref(), - )?, - ).map_err(Error::from) + PKey::from_rsa(Rsa::private_key_from_pem( + self.private_key.clone()?.as_ref(), + )?) + .map_err(Error::from) } pub fn webfinger(&self, conn: &Connection) -> Result { @@ -386,25 +336,16 @@ impl sign::Signer for Blog { fn sign(&self, to_sign: &str) -> Result> { let key = self.get_keypair()?; - let mut signer = - Signer::new(MessageDigest::sha256(), &key)?; - signer - .update(to_sign.as_bytes())?; - signer - .sign_to_vec() - .map_err(Error::from) + let mut signer = Signer::new(MessageDigest::sha256(), &key)?; + signer.update(to_sign.as_bytes())?; + signer.sign_to_vec().map_err(Error::from) } fn verify(&self, data: &str, signature: &[u8]) -> Result { - let key = PKey::from_rsa( - Rsa::public_key_from_pem(self.public_key.as_ref())? 
- )?; + let key = PKey::from_rsa(Rsa::public_key_from_pem(self.public_key.as_ref())?)?; let mut verifier = Verifier::new(MessageDigest::sha256(), &key)?; - verifier - .update(data.as_bytes())?; - verifier - .verify(&signature) - .map_err(Error::from) + verifier.update(data.as_bytes())?; + verifier.verify(&signature).map_err(Error::from) } } @@ -434,32 +375,47 @@ pub(crate) mod tests { use blog_authors::*; use diesel::Connection; use instance::tests as instance_tests; + use search::tests::get_searcher; use tests::db; use users::tests as usersTests; - use search::tests::get_searcher; use Connection as Conn; pub(crate) fn fill_database(conn: &Conn) -> (Vec, Vec) { instance_tests::fill_database(conn); let users = usersTests::fill_database(conn); - let blog1 = Blog::insert(conn, NewBlog::new_local( - "BlogName".to_owned(), - "Blog name".to_owned(), - "This is a small blog".to_owned(), - Instance::get_local(conn).unwrap().id - ).unwrap()).unwrap(); - let blog2 = Blog::insert(conn, NewBlog::new_local( + let blog1 = Blog::insert( + conn, + NewBlog::new_local( + "BlogName".to_owned(), + "Blog name".to_owned(), + "This is a small blog".to_owned(), + Instance::get_local(conn).unwrap().id, + ) + .unwrap(), + ) + .unwrap(); + let blog2 = Blog::insert( + conn, + NewBlog::new_local( "MyBlog".to_owned(), "My blog".to_owned(), "Welcome to my blog".to_owned(), - Instance::get_local(conn).unwrap().id - ).unwrap()).unwrap(); - let blog3 = Blog::insert(conn, NewBlog::new_local( + Instance::get_local(conn).unwrap().id, + ) + .unwrap(), + ) + .unwrap(); + let blog3 = Blog::insert( + conn, + NewBlog::new_local( "WhyILikePlume".to_owned(), "Why I like Plume".to_owned(), "In this blog I will explay you why I like Plume so much".to_owned(), - Instance::get_local(conn).unwrap().id - ).unwrap()).unwrap(); + Instance::get_local(conn).unwrap().id, + ) + .unwrap(), + ) + .unwrap(); BlogAuthor::insert( conn, @@ -468,7 +424,8 @@ pub(crate) mod tests { author_id: users[0].id, is_owner: true, }, - ).unwrap(); + ) + .unwrap(); BlogAuthor::insert( conn, @@ -477,7 +434,8 @@ pub(crate) mod tests { author_id: users[1].id, is_owner: false, }, - ).unwrap(); + ) + .unwrap(); BlogAuthor::insert( conn, @@ -486,7 +444,8 @@ pub(crate) mod tests { author_id: users[1].id, is_owner: true, }, - ).unwrap(); + ) + .unwrap(); BlogAuthor::insert( conn, @@ -495,8 +454,9 @@ pub(crate) mod tests { author_id: users[2].id, is_owner: true, }, - ).unwrap(); - (users, vec![ blog1, blog2, blog3 ]) + ) + .unwrap(); + (users, vec![blog1, blog2, blog3]) } #[test] @@ -511,11 +471,16 @@ pub(crate) mod tests { "SomeName".to_owned(), "Some name".to_owned(), "This is some blog".to_owned(), - Instance::get_local(conn).unwrap().id - ).unwrap(), - ).unwrap(); + Instance::get_local(conn).unwrap().id, + ) + .unwrap(), + ) + .unwrap(); - assert_eq!(blog.get_instance(conn).unwrap().id, Instance::get_local(conn).unwrap().id); + assert_eq!( + blog.get_instance(conn).unwrap().id, + Instance::get_local(conn).unwrap().id + ); // TODO add tests for remote instance Ok(()) @@ -535,18 +500,22 @@ pub(crate) mod tests { "Some name".to_owned(), "This is some blog".to_owned(), Instance::get_local(conn).unwrap().id, - ).unwrap(), - ).unwrap(); + ) + .unwrap(), + ) + .unwrap(); let b2 = Blog::insert( conn, NewBlog::new_local( "Blog".to_owned(), "Blog".to_owned(), "I've named my blog Blog".to_owned(), - Instance::get_local(conn).unwrap().id - ).unwrap(), - ).unwrap(); - let blog = vec![ b1, b2 ]; + Instance::get_local(conn).unwrap().id, + ) + .unwrap(), + ) + .unwrap(); + let blog = 
vec![b1, b2]; BlogAuthor::insert( conn, @@ -555,7 +524,8 @@ pub(crate) mod tests { author_id: user[0].id, is_owner: true, }, - ).unwrap(); + ) + .unwrap(); BlogAuthor::insert( conn, @@ -564,7 +534,8 @@ pub(crate) mod tests { author_id: user[1].id, is_owner: false, }, - ).unwrap(); + ) + .unwrap(); BlogAuthor::insert( conn, @@ -573,53 +544,46 @@ pub(crate) mod tests { author_id: user[0].id, is_owner: true, }, - ).unwrap(); + ) + .unwrap(); - assert!( - blog[0] - .list_authors(conn).unwrap() - .iter() - .any(|a| a.id == user[0].id) - ); - assert!( - blog[0] - .list_authors(conn).unwrap() - .iter() - .any(|a| a.id == user[1].id) - ); - assert!( - blog[1] - .list_authors(conn).unwrap() - .iter() - .any(|a| a.id == user[0].id) - ); - assert!( - !blog[1] - .list_authors(conn).unwrap() - .iter() - .any(|a| a.id == user[1].id) - ); + assert!(blog[0] + .list_authors(conn) + .unwrap() + .iter() + .any(|a| a.id == user[0].id)); + assert!(blog[0] + .list_authors(conn) + .unwrap() + .iter() + .any(|a| a.id == user[1].id)); + assert!(blog[1] + .list_authors(conn) + .unwrap() + .iter() + .any(|a| a.id == user[0].id)); + assert!(!blog[1] + .list_authors(conn) + .unwrap() + .iter() + .any(|a| a.id == user[1].id)); - assert!( - Blog::find_for_author(conn, &user[0]).unwrap() - .iter() - .any(|b| b.id == blog[0].id) - ); - assert!( - Blog::find_for_author(conn, &user[1]).unwrap() - .iter() - .any(|b| b.id == blog[0].id) - ); - assert!( - Blog::find_for_author(conn, &user[0]).unwrap() - .iter() - .any(|b| b.id == blog[1].id) - ); - assert!( - !Blog::find_for_author(conn, &user[1]).unwrap() - .iter() - .any(|b| b.id == blog[1].id) - ); + assert!(Blog::find_for_author(conn, &user[0]) + .unwrap() + .iter() + .any(|b| b.id == blog[0].id)); + assert!(Blog::find_for_author(conn, &user[1]) + .unwrap() + .iter() + .any(|b| b.id == blog[0].id)); + assert!(Blog::find_for_author(conn, &user[0]) + .unwrap() + .iter() + .any(|b| b.id == blog[1].id)); + assert!(!Blog::find_for_author(conn, &user[1]) + .unwrap() + .iter() + .any(|b| b.id == blog[1].id)); Ok(()) }); @@ -638,13 +602,12 @@ pub(crate) mod tests { "Some name".to_owned(), "This is some blog".to_owned(), Instance::get_local(conn).unwrap().id, - ).unwrap(), - ).unwrap(); + ) + .unwrap(), + ) + .unwrap(); - assert_eq!( - Blog::find_by_fqn(conn, "SomeName").unwrap().id, - blog.id - ); + assert_eq!(Blog::find_by_fqn(conn, "SomeName").unwrap().id, blog.id); Ok(()) }); @@ -663,8 +626,10 @@ pub(crate) mod tests { "Some name".to_owned(), "This is some blog".to_owned(), Instance::get_local(conn).unwrap().id, - ).unwrap(), - ).unwrap(); + ) + .unwrap(), + ) + .unwrap(); assert_eq!(blog.fqn, "SomeName"); @@ -699,8 +664,10 @@ pub(crate) mod tests { "Some name".to_owned(), "This is some blog".to_owned(), Instance::get_local(conn).unwrap().id, - ).unwrap(), - ).unwrap(); + ) + .unwrap(), + ) + .unwrap(); let b2 = Blog::insert( conn, NewBlog::new_local( @@ -708,9 +675,11 @@ pub(crate) mod tests { "Blog".to_owned(), "I've named my blog Blog".to_owned(), Instance::get_local(conn).unwrap().id, - ).unwrap(), - ).unwrap(); - let blog = vec![ b1, b2 ]; + ) + .unwrap(), + ) + .unwrap(); + let blog = vec![b1, b2]; BlogAuthor::insert( conn, @@ -719,7 +688,8 @@ pub(crate) mod tests { author_id: user[0].id, is_owner: true, }, - ).unwrap(); + ) + .unwrap(); BlogAuthor::insert( conn, @@ -728,7 +698,8 @@ pub(crate) mod tests { author_id: user[1].id, is_owner: false, }, - ).unwrap(); + ) + .unwrap(); BlogAuthor::insert( conn, @@ -737,7 +708,8 @@ pub(crate) mod tests { author_id: user[0].id, 
is_owner: true, }, - ).unwrap(); + ) + .unwrap(); user[0].delete(conn, &searcher).unwrap(); assert!(Blog::get(conn, blog[0].id).is_ok()); diff --git a/plume-models/src/comment_seers.rs b/plume-models/src/comment_seers.rs index 7d5a87f4..d26d622d 100644 --- a/plume-models/src/comment_seers.rs +++ b/plume-models/src/comment_seers.rs @@ -23,7 +23,8 @@ impl CommentSeers { insert!(comment_seers, NewCommentSeers); pub fn can_see(conn: &Connection, c: &Comment, u: &User) -> Result { - comment_seers::table.filter(comment_seers::comment_id.eq(c.id)) + comment_seers::table + .filter(comment_seers::comment_id.eq(c.id)) .filter(comment_seers::user_id.eq(u.id)) .load::(conn) .map_err(Error::from) diff --git a/plume-models/src/comments.rs b/plume-models/src/comments.rs index 44ea8e40..4dec943f 100644 --- a/plume-models/src/comments.rs +++ b/plume-models/src/comments.rs @@ -1,19 +1,23 @@ -use activitypub::{activity::{Create, Delete}, link, object::{Note, Tombstone}}; +use activitypub::{ + activity::{Create, Delete}, + link, + object::{Note, Tombstone}, +}; use chrono::{self, NaiveDateTime}; use diesel::{self, ExpressionMethods, QueryDsl, RunQueryDsl, SaveChangesDsl}; use serde_json; use std::collections::HashSet; +use comment_seers::{CommentSeers, NewCommentSeers}; use instance::Instance; use mentions::Mention; use notifications::*; use plume_common::activity_pub::{ - inbox::{FromActivity, Notify, Deletable}, + inbox::{Deletable, FromActivity, Notify}, Id, IntoId, PUBLIC_VISIBILTY, }; use plume_common::utils; -use comment_seers::{CommentSeers, NewCommentSeers}; use posts::Post; use safe_string::SafeString; use schema::comments; @@ -50,7 +54,11 @@ pub struct NewComment { impl Comment { insert!(comments, NewComment, |inserted, conn| { if inserted.ap_url.is_none() { - inserted.ap_url = Some(format!("{}comment/{}", inserted.get_post(conn)?.ap_url, inserted.id)); + inserted.ap_url = Some(format!( + "{}comment/{}", + inserted.get_post(conn)?.ap_url, + inserted.id + )); let _: Comment = inserted.save_changes(conn)?; } Ok(inserted) @@ -80,20 +88,25 @@ impl Comment { } pub fn get_responses(&self, conn: &Connection) -> Result> { - comments::table.filter(comments::in_response_to_id.eq(self.id)) + comments::table + .filter(comments::in_response_to_id.eq(self.id)) .load::(conn) .map_err(Error::from) } pub fn can_see(&self, conn: &Connection, user: Option<&User>) -> bool { - self.public_visibility || - user.as_ref().map(|u| CommentSeers::can_see(conn, self, u).unwrap_or(false)) + self.public_visibility + || user + .as_ref() + .map(|u| CommentSeers::can_see(conn, self, u).unwrap_or(false)) .unwrap_or(false) } pub fn to_activity(&self, conn: &Connection) -> Result { - let (html, mentions, _hashtags) = utils::md_to_html(self.content.get().as_ref(), - &Instance::get_local(conn)?.public_domain); + let (html, mentions, _hashtags) = utils::md_to_html( + self.content.get().as_ref(), + &Instance::get_local(conn)?.public_domain, + ); let author = User::get(conn, self.author_id)?; let mut note = Note::default(); @@ -103,8 +116,7 @@ impl Comment { .set_id_string(self.ap_url.clone().unwrap_or_default())?; note.object_props .set_summary_string(self.spoiler_text.clone())?; - note.object_props - .set_content_string(html)?; + note.object_props.set_content_string(html)?; note.object_props .set_in_reply_to_link(Id::new(self.in_response_to_id.map_or_else( || Ok(Post::get(conn, self.post_id)?.ap_url), @@ -114,41 +126,28 @@ impl Comment { .set_published_string(chrono::Utc::now().to_rfc3339())?; note.object_props 
.set_attributed_to_link(author.clone().into_id())?; - note.object_props - .set_to_link_vec(to.clone())?; - note.object_props - .set_tag_link_vec( - mentions - .into_iter() - .filter_map(|m| Mention::build_activity(conn, &m).ok()) - .collect::>(), - )?; + note.object_props.set_to_link_vec(to.clone())?; + note.object_props.set_tag_link_vec( + mentions + .into_iter() + .filter_map(|m| Mention::build_activity(conn, &m).ok()) + .collect::>(), + )?; Ok(note) } pub fn create_activity(&self, conn: &Connection) -> Result { - let author = - User::get(conn, self.author_id)?; + let author = User::get(conn, self.author_id)?; let note = self.to_activity(conn)?; let mut act = Create::default(); - act.create_props - .set_actor_link(author.into_id())?; - act.create_props - .set_object_object(note.clone())?; + act.create_props.set_actor_link(author.into_id())?; + act.create_props.set_object_object(note.clone())?; act.object_props - .set_id_string(format!( - "{}/activity", - self.ap_url - .clone()?, - ))?; + .set_id_string(format!("{}/activity", self.ap_url.clone()?,))?; act.object_props - .set_to_link_vec( - note.object_props - .to_link_vec::()?, - )?; - act.object_props - .set_cc_link_vec::(vec![])?; + .set_to_link_vec(note.object_props.to_link_vec::()?)?; + act.object_props.set_cc_link_vec::(vec![])?; Ok(act) } } @@ -158,43 +157,39 @@ impl FromActivity for Comment { fn from_activity(conn: &Connection, note: Note, actor: Id) -> Result { let comm = { - let previous_url = note - .object_props - .in_reply_to - .as_ref()? - .as_str()?; + let previous_url = note.object_props.in_reply_to.as_ref()?.as_str()?; let previous_comment = Comment::find_by_ap_url(conn, previous_url); - let is_public = |v: &Option| match v.as_ref().unwrap_or(&serde_json::Value::Null) { - serde_json::Value::Array(v) => v.iter().filter_map(serde_json::Value::as_str).any(|s| s==PUBLIC_VISIBILTY), + let is_public = |v: &Option| match v + .as_ref() + .unwrap_or(&serde_json::Value::Null) + { + serde_json::Value::Array(v) => v + .iter() + .filter_map(serde_json::Value::as_str) + .any(|s| s == PUBLIC_VISIBILTY), serde_json::Value::String(s) => s == PUBLIC_VISIBILTY, _ => false, }; - let public_visibility = is_public(¬e.object_props.to) || - is_public(¬e.object_props.bto) || - is_public(¬e.object_props.cc) || - is_public(¬e.object_props.bcc); + let public_visibility = is_public(¬e.object_props.to) + || is_public(¬e.object_props.bto) + || is_public(¬e.object_props.cc) + || is_public(¬e.object_props.bcc); let comm = Comment::insert( conn, NewComment { - content: SafeString::new( - ¬e - .object_props - .content_string()? 
- ), - spoiler_text: note - .object_props - .summary_string() - .unwrap_or_default(), + content: SafeString::new(¬e.object_props.content_string()?), + spoiler_text: note.object_props.summary_string().unwrap_or_default(), ap_url: note.object_props.id_string().ok(), in_response_to_id: previous_comment.iter().map(|c| c.id).next(), - post_id: previous_comment.map(|c| c.post_id) - .or_else(|_| Ok(Post::find_by_ap_url(conn, previous_url)?.id) as Result)?, + post_id: previous_comment.map(|c| c.post_id).or_else(|_| { + Ok(Post::find_by_ap_url(conn, previous_url)?.id) as Result + })?, author_id: User::from_url(conn, actor.as_ref())?.id, sensitive: false, // "sensitive" is not a standard property, we need to think about how to support it with the activitypub crate - public_visibility + public_visibility, }, )?; @@ -204,13 +199,11 @@ impl FromActivity for Comment { serde_json::from_value::(tag) .map_err(Error::from) .and_then(|m| { - let author = &Post::get(conn, comm.post_id)? - .get_authors(conn)?[0]; - let not_author = m - .link_props - .href_string()? - != author.ap_url.clone(); - Ok(Mention::from_activity(conn, &m, comm.id, false, not_author)?) + let author = &Post::get(conn, comm.post_id)?.get_authors(conn)?[0]; + let not_author = m.link_props.href_string()? != author.ap_url.clone(); + Ok(Mention::from_activity( + conn, &m, comm.id, false, not_author, + )?) }) .ok(); } @@ -218,14 +211,21 @@ impl FromActivity for Comment { comm }; - if !comm.public_visibility { let receivers_ap_url = |v: Option| { - let filter = |e: serde_json::Value| if let serde_json::Value::String(s) = e { Some(s) } else { None }; + let filter = |e: serde_json::Value| { + if let serde_json::Value::String(s) = e { + Some(s) + } else { + None + } + }; match v.unwrap_or(serde_json::Value::Null) { serde_json::Value::Array(v) => v, v => vec![v], - }.into_iter().filter_map(filter) + } + .into_iter() + .filter_map(filter) }; let mut note = note; @@ -235,25 +235,30 @@ impl FromActivity for Comment { let bto = receivers_ap_url(note.object_props.bto.take()); let bcc = receivers_ap_url(note.object_props.bcc.take()); - let receivers_ap_url = to.chain(cc).chain(bto).chain(bcc) - .collect::>()//remove duplicates (don't do a query more than once) + let receivers_ap_url = to + .chain(cc) + .chain(bto) + .chain(bcc) + .collect::>() //remove duplicates (don't do a query more than once) .into_iter() - .map(|v| if let Ok(user) = User::from_url(conn,&v) { - vec![user] - } else { - vec![]// TODO try to fetch collection + .map(|v| { + if let Ok(user) = User::from_url(conn, &v) { + vec![user] + } else { + vec![] // TODO try to fetch collection + } }) .flatten() .filter(|u| u.get_instance(conn).map(|i| i.local).unwrap_or(false)) - .collect::>();//remove duplicates (prevent db error) + .collect::>(); //remove duplicates (prevent db error) for user in &receivers_ap_url { CommentSeers::insert( conn, NewCommentSeers { comment_id: comm.id, - user_id: user.id - } + user_id: user.id, + }, )?; } } @@ -288,7 +293,8 @@ pub struct CommentTree { impl CommentTree { pub fn from_post(conn: &Connection, p: &Post, user: Option<&User>) -> Result> { - Ok(Comment::list_by_post(conn, p.id)?.into_iter() + Ok(Comment::list_by_post(conn, p.id)? 
+ .into_iter() .filter(|c| c.in_response_to_id.is_none()) .filter(|c| c.can_see(conn, user)) .filter_map(|c| Self::from_comment(conn, c, user).ok()) @@ -296,14 +302,13 @@ impl CommentTree { } pub fn from_comment(conn: &Connection, comment: Comment, user: Option<&User>) -> Result { - let responses = comment.get_responses(conn)?.into_iter() + let responses = comment + .get_responses(conn)? + .into_iter() .filter(|c| c.can_see(conn, user)) .filter_map(|c| Self::from_comment(conn, c, user).ok()) .collect(); - Ok(CommentTree { - comment, - responses, - }) + Ok(CommentTree { comment, responses }) } } @@ -316,11 +321,8 @@ impl<'a> Deletable for Comment { .set_actor_link(self.get_author(conn)?.into_id())?; let mut tombstone = Tombstone::default(); - tombstone - .object_props - .set_id_string(self.ap_url.clone()?)?; - act.delete_props - .set_object_object(tombstone)?; + tombstone.object_props.set_id_string(self.ap_url.clone()?)?; + act.delete_props.set_object_object(tombstone)?; act.object_props .set_id_string(format!("{}#delete", self.ap_url.clone().unwrap()))?; @@ -330,11 +332,11 @@ impl<'a> Deletable for Comment { for m in Mention::list_for_comment(&conn, self.id)? { m.delete(conn)?; } - diesel::update(comments::table).filter(comments::in_response_to_id.eq(self.id)) + diesel::update(comments::table) + .filter(comments::in_response_to_id.eq(self.id)) .set(comments::in_response_to_id.eq(self.in_response_to_id)) .execute(conn)?; - diesel::delete(self) - .execute(conn)?; + diesel::delete(self).execute(conn)?; Ok(act) } diff --git a/plume-models/src/db_conn.rs b/plume-models/src/db_conn.rs index 05378a63..dbb7ade1 100644 --- a/plume-models/src/db_conn.rs +++ b/plume-models/src/db_conn.rs @@ -1,4 +1,6 @@ -use diesel::{r2d2::{ConnectionManager, CustomizeConnection, Error as ConnError, Pool, PooledConnection}}; +use diesel::r2d2::{ + ConnectionManager, CustomizeConnection, Error as ConnError, Pool, PooledConnection, +}; #[cfg(feature = "sqlite")] use diesel::{dsl::sql_query, ConnectionError, RunQueryDsl}; use rocket::{ @@ -47,8 +49,13 @@ pub struct PragmaForeignKey; impl CustomizeConnection for PragmaForeignKey { #[cfg(feature = "sqlite")] // will default to an empty function for postgres fn on_acquire(&self, conn: &mut Connection) -> Result<(), ConnError> { - sql_query("PRAGMA foreign_keys = on;").execute(conn) + sql_query("PRAGMA foreign_keys = on;") + .execute(conn) .map(|_| ()) - .map_err(|_| ConnError::ConnectionError(ConnectionError::BadConnection(String::from("PRAGMA foreign_keys = on failed")))) + .map_err(|_| { + ConnError::ConnectionError(ConnectionError::BadConnection(String::from( + "PRAGMA foreign_keys = on failed", + ))) + }) } } diff --git a/plume-models/src/follows.rs b/plume-models/src/follows.rs index c4cf77b4..55cc62aa 100644 --- a/plume-models/src/follows.rs +++ b/plume-models/src/follows.rs @@ -14,7 +14,7 @@ use plume_common::activity_pub::{ }; use schema::follows; use users::User; -use {ap_url, Connection, BASE_URL, Error, Result}; +use {ap_url, Connection, Error, Result, BASE_URL}; #[derive(Clone, Queryable, Identifiable, Associations, AsChangeset)] #[belongs_to(User, foreign_key = "following_id")] @@ -62,12 +62,9 @@ impl Follow { .set_actor_link::(user.clone().into_id())?; act.follow_props .set_object_link::(target.clone().into_id())?; - act.object_props - .set_id_string(self.ap_url.clone())?; - act.object_props - .set_to_link(target.into_id())?; - act.object_props - .set_cc_link_vec::(vec![])?; + act.object_props.set_id_string(self.ap_url.clone())?; + 
act.object_props.set_to_link(target.into_id())?; + act.object_props.set_cc_link_vec::(vec![])?; Ok(act) } @@ -92,21 +89,13 @@ impl Follow { let mut accept = Accept::default(); let accept_id = ap_url(&format!("{}/follow/{}/accept", BASE_URL.as_str(), &res.id)); - accept - .object_props - .set_id_string(accept_id)?; - accept - .object_props - .set_to_link(from.clone().into_id())?; - accept - .object_props - .set_cc_link_vec::(vec![])?; + accept.object_props.set_id_string(accept_id)?; + accept.object_props.set_to_link(from.clone().into_id())?; + accept.object_props.set_cc_link_vec::(vec![])?; accept .accept_props .set_actor_link::(target.clone().into_id())?; - accept - .accept_props - .set_object_object(follow)?; + accept.accept_props.set_object_object(follow)?; broadcast(&*target, accept, vec![from.clone()]); Ok(res) } @@ -120,29 +109,18 @@ impl FromActivity for Follow { .follow_props .actor_link::() .map(|l| l.into()) - .or_else(|_| Ok(follow - .follow_props - .actor_object::()? - .object_props - .id_string()?) as Result)?; - let from = - User::from_url(conn, &from_id)?; - match User::from_url( - conn, - follow - .follow_props - .object - .as_str()?, - ) { + .or_else(|_| { + Ok(follow + .follow_props + .actor_object::()? + .object_props + .id_string()?) as Result + })?; + let from = User::from_url(conn, &from_id)?; + match User::from_url(conn, follow.follow_props.object.as_str()?) { Ok(user) => Follow::accept_follow(conn, &from, &user, follow, from.id, user.id), Err(_) => { - let blog = Blog::from_url( - conn, - follow - .follow_props - .object - .as_str()?, - )?; + let blog = Blog::from_url(conn, follow.follow_props.object.as_str()?)?; Follow::accept_follow(conn, &from, &blog, follow, from.id, blog.id) } } @@ -160,7 +138,8 @@ impl Notify for Follow { object_id: self.id, user_id: self.following_id, }, - ).map(|_| ()) + ) + .map(|_| ()) } } @@ -168,21 +147,16 @@ impl Deletable for Follow { type Error = Error; fn delete(&self, conn: &Connection) -> Result { - diesel::delete(self) - .execute(conn)?; + diesel::delete(self).execute(conn)?; // delete associated notification if any if let Ok(notif) = Notification::find(conn, notification_kind::FOLLOW, self.id) { - diesel::delete(¬if) - .execute(conn)?; + diesel::delete(¬if).execute(conn)?; } let mut undo = Undo::default(); undo.undo_props - .set_actor_link( - User::get(conn, self.follower_id)? 
- .into_id(), - )?; + .set_actor_link(User::get(conn, self.follower_id)?.into_id())?; undo.object_props .set_id_string(format!("{}/undo", self.ap_url))?; undo.undo_props @@ -209,8 +183,8 @@ impl IntoId for Follow { #[cfg(test)] mod tests { - use diesel::Connection; use super::*; + use diesel::Connection; use tests::db; use users::tests as user_tests; @@ -219,20 +193,31 @@ mod tests { let conn = db(); conn.test_transaction::<_, (), _>(|| { let users = user_tests::fill_database(&conn); - let follow = Follow::insert(&conn, NewFollow { - follower_id: users[0].id, - following_id: users[1].id, - ap_url: String::new(), - }).expect("Couldn't insert new follow"); - assert_eq!(follow.ap_url, format!("https://{}/follows/{}", *BASE_URL, follow.id)); + let follow = Follow::insert( + &conn, + NewFollow { + follower_id: users[0].id, + following_id: users[1].id, + ap_url: String::new(), + }, + ) + .expect("Couldn't insert new follow"); + assert_eq!( + follow.ap_url, + format!("https://{}/follows/{}", *BASE_URL, follow.id) + ); - let follow = Follow::insert(&conn, NewFollow { - follower_id: users[1].id, - following_id: users[0].id, - ap_url: String::from("https://some.url/"), - }).expect("Couldn't insert new follow"); + let follow = Follow::insert( + &conn, + NewFollow { + follower_id: users[1].id, + following_id: users[0].id, + ap_url: String::from("https://some.url/"), + }, + ) + .expect("Couldn't insert new follow"); assert_eq!(follow.ap_url, String::from("https://some.url/")); Ok(()) }); } -} \ No newline at end of file +} diff --git a/plume-models/src/instance.rs b/plume-models/src/instance.rs index a921d1f6..09e71684 100644 --- a/plume-models/src/instance.rs +++ b/plume-models/src/instance.rs @@ -20,7 +20,7 @@ pub struct Instance { pub open_registrations: bool, pub short_description: SafeString, pub long_description: SafeString, - pub default_license : String, + pub default_license: String, pub long_description_html: SafeString, pub short_description_html: SafeString, } @@ -46,7 +46,8 @@ impl Instance { .limit(1) .load::(conn)? 
.into_iter() - .nth(0).ok_or(Error::NotFound) + .nth(0) + .ok_or(Error::NotFound) } pub fn get_remotes(conn: &Connection) -> Result> { @@ -109,12 +110,7 @@ impl Instance { .map_err(Error::from) } - pub fn compute_box( - &self, - prefix: &str, - name: &str, - box_name: &str, - ) -> String { + pub fn compute_box(&self, prefix: &str, name: &str, box_name: &str) -> String { ap_url(&format!( "{instance}/{prefix}/{name}/{box_name}", instance = self.public_domain, @@ -209,15 +205,16 @@ pub(crate) mod tests { open_registrations: true, public_domain: "3plu.me".to_string(), }, - ].into_iter() - .map(|inst| { - ( - inst.clone(), - Instance::find_by_domain(conn, &inst.public_domain) - .unwrap_or_else(|_| Instance::insert(conn, inst).unwrap()), - ) - }) - .collect() + ] + .into_iter() + .map(|inst| { + ( + inst.clone(), + Instance::find_by_domain(conn, &inst.public_domain) + .unwrap_or_else(|_| Instance::insert(conn, inst).unwrap()), + ) + }) + .collect() } #[test] @@ -244,8 +241,14 @@ pub(crate) mod tests { public_domain ] ); - assert_eq!(res.long_description_html.get(), &inserted.long_description_html); - assert_eq!(res.short_description_html.get(), &inserted.short_description_html); + assert_eq!( + res.long_description_html.get(), + &inserted.long_description_html + ); + assert_eq!( + res.short_description_html.get(), + &inserted.short_description_html + ); Ok(()) }); @@ -282,8 +285,14 @@ pub(crate) mod tests { public_domain ] ); - assert_eq!(&newinst.long_description_html, inst.long_description_html.get()); - assert_eq!(&newinst.short_description_html, inst.short_description_html.get()); + assert_eq!( + &newinst.long_description_html, + inst.long_description_html.get() + ); + assert_eq!( + &newinst.short_description_html, + inst.short_description_html.get() + ); }); let page = Instance::page(conn, (0, 2)).unwrap(); @@ -292,7 +301,9 @@ pub(crate) mod tests { let page2 = &page[1]; assert!(page1.public_domain <= page2.public_domain); - let mut last_domaine: String = Instance::page(conn, (0, 1)).unwrap()[0].public_domain.clone(); + let mut last_domaine: String = Instance::page(conn, (0, 1)).unwrap()[0] + .public_domain + .clone(); for i in 1..inserted.len() as i32 { let page = Instance::page(conn, (i, i + 1)).unwrap(); assert_eq!(page.len(), 1); @@ -326,11 +337,13 @@ pub(crate) mod tests { 0 ); assert_eq!( - Instance::is_blocked(conn, &format!("https://{}/something", inst.public_domain)).unwrap(), + Instance::is_blocked(conn, &format!("https://{}/something", inst.public_domain)) + .unwrap(), inst.blocked ); assert_eq!( - Instance::is_blocked(conn, &format!("https://{}a/something", inst.public_domain)).unwrap(), + Instance::is_blocked(conn, &format!("https://{}a/something", inst.public_domain)) + .unwrap(), Instance::find_by_domain(conn, &format!("{}a", inst.public_domain)) .map(|inst| inst.blocked) .unwrap_or(false) @@ -340,11 +353,13 @@ pub(crate) mod tests { let inst = Instance::get(conn, inst.id).unwrap(); assert_eq!(inst.blocked, blocked); assert_eq!( - Instance::is_blocked(conn, &format!("https://{}/something", inst.public_domain)).unwrap(), + Instance::is_blocked(conn, &format!("https://{}/something", inst.public_domain)) + .unwrap(), inst.blocked ); assert_eq!( - Instance::is_blocked(conn, &format!("https://{}a/something", inst.public_domain)).unwrap(), + Instance::is_blocked(conn, &format!("https://{}a/something", inst.public_domain)) + .unwrap(), Instance::find_by_domain(conn, &format!("{}a", inst.public_domain)) .map(|inst| inst.blocked) .unwrap_or(false) @@ -375,7 +390,8 @@ pub(crate) mod 
tests { false, SafeString::new("[short](#link)"), SafeString::new("[long_description](/with_link)"), - ).unwrap(); + ) + .unwrap(); let inst = Instance::get(conn, inst.id).unwrap(); assert_eq!(inst.name, "NewName".to_owned()); assert_eq!(inst.open_registrations, false); diff --git a/plume-models/src/lib.rs b/plume-models/src/lib.rs index edf6c617..e606a148 100644 --- a/plume-models/src/lib.rs +++ b/plume-models/src/lib.rs @@ -292,8 +292,8 @@ static DB_NAME: &str = "plume_tests"; #[cfg(all(feature = "postgres", not(feature = "sqlite")))] lazy_static! { - pub static ref DATABASE_URL: String = - env::var("DATABASE_URL").unwrap_or_else(|_| format!("postgres://plume:plume@localhost/{}", DB_NAME)); + pub static ref DATABASE_URL: String = env::var("DATABASE_URL") + .unwrap_or_else(|_| format!("postgres://plume:plume@localhost/{}", DB_NAME)); } #[cfg(all(feature = "sqlite", not(feature = "postgres")))] @@ -336,7 +336,9 @@ mod tests { Conn::establish(&*DATABASE_URL.as_str()).expect("Couldn't connect to the database"); embedded_migrations::run(&conn).expect("Couldn't run migrations"); #[cfg(feature = "sqlite")] - sql_query("PRAGMA foreign_keys = on;").execute(&conn).expect("PRAGMA foreign_keys fail"); + sql_query("PRAGMA foreign_keys = on;") + .execute(&conn) + .expect("PRAGMA foreign_keys fail"); conn } } @@ -346,8 +348,8 @@ pub mod api_tokens; pub mod apps; pub mod blog_authors; pub mod blogs; -pub mod comments; pub mod comment_seers; +pub mod comments; pub mod db_conn; pub mod follows; pub mod headers; @@ -360,7 +362,7 @@ pub mod post_authors; pub mod posts; pub mod reshares; pub mod safe_string; -pub mod search; pub mod schema; +pub mod search; pub mod tags; pub mod users; diff --git a/plume-models/src/likes.rs b/plume-models/src/likes.rs index f501657a..555b8b8f 100644 --- a/plume-models/src/likes.rs +++ b/plume-models/src/likes.rs @@ -38,21 +38,13 @@ impl Like { pub fn to_activity(&self, conn: &Connection) -> Result { let mut act = activity::Like::default(); act.like_props - .set_actor_link( - User::get(conn, self.user_id)? - .into_id(), - )?; + .set_actor_link(User::get(conn, self.user_id)?.into_id())?; act.like_props - .set_object_link( - Post::get(conn, self.post_id)? 
- .into_id(), - )?; + .set_object_link(Post::get(conn, self.post_id)?.into_id())?; act.object_props .set_to_link(Id::new(PUBLIC_VISIBILTY.to_string()))?; - act.object_props - .set_cc_link_vec::(vec![])?; - act.object_props - .set_id_string(self.ap_url.clone())?; + act.object_props.set_cc_link_vec::(vec![])?; + act.object_props.set_id_string(self.ap_url.clone())?; Ok(act) } @@ -62,18 +54,8 @@ impl FromActivity for Like { type Error = Error; fn from_activity(conn: &Connection, like: activity::Like, _actor: Id) -> Result { - let liker = User::from_url( - conn, - like.like_props - .actor - .as_str()?, - )?; - let post = Post::find_by_ap_url( - conn, - like.like_props - .object - .as_str()?, - )?; + let liker = User::from_url(conn, like.like_props.actor.as_str()?)?; + let post = Post::find_by_ap_url(conn, like.like_props.object.as_str()?)?; let res = Like::insert( conn, NewLike { @@ -110,26 +92,22 @@ impl Deletable for Like { type Error = Error; fn delete(&self, conn: &Connection) -> Result { - diesel::delete(self) - .execute(conn)?; + diesel::delete(self).execute(conn)?; // delete associated notification if any if let Ok(notif) = Notification::find(conn, notification_kind::LIKE, self.id) { - diesel::delete(¬if) - .execute(conn)?; + diesel::delete(¬if).execute(conn)?; } let mut act = activity::Undo::default(); act.undo_props - .set_actor_link(User::get(conn, self.user_id)?.into_id(),)?; - act.undo_props - .set_object_object(self.to_activity(conn)?)?; + .set_actor_link(User::get(conn, self.user_id)?.into_id())?; + act.undo_props.set_object_object(self.to_activity(conn)?)?; act.object_props .set_id_string(format!("{}#delete", self.ap_url))?; act.object_props .set_to_link(Id::new(PUBLIC_VISIBILTY.to_string()))?; - act.object_props - .set_cc_link_vec::(vec![])?; + act.object_props.set_cc_link_vec::(vec![])?; Ok(act) } @@ -151,7 +129,7 @@ impl NewLike { NewLike { post_id: p.id, user_id: u.id, - ap_url + ap_url, } } } diff --git a/plume-models/src/medias.rs b/plume-models/src/medias.rs index 37fd1e2b..49962a40 100644 --- a/plume-models/src/medias.rs +++ b/plume-models/src/medias.rs @@ -62,12 +62,14 @@ impl Media { list_by!(medias, for_user, owner_id as i32); pub fn list_all_medias(conn: &Connection) -> Result> { - medias::table - .load::(conn) - .map_err(Error::from) + medias::table.load::(conn).map_err(Error::from) } - pub fn page_for_user(conn: &Connection, user: &User, (min, max): (i32, i32)) -> Result> { + pub fn page_for_user( + conn: &Connection, + user: &User, + (min, max): (i32, i32), + ) -> Result> { medias::table .filter(medias::owner_id.eq(user.id)) .offset(i64::from(min)) @@ -124,7 +126,9 @@ impl Media { pub fn markdown(&self, conn: &Connection) -> Result { let url = self.url(conn)?; Ok(match self.category() { - MediaCategory::Image => SafeString::new(&format!("![{}]({})", escape(&self.alt_text), url)), + MediaCategory::Image => { + SafeString::new(&format!("![{}]({})", escape(&self.alt_text), url)) + } MediaCategory::Audio | MediaCategory::Video => self.html(conn)?, MediaCategory::Unknown => SafeString::new(""), }) @@ -216,7 +220,8 @@ impl Media { .into_iter() .next()? .as_ref(), - )?.id, + )? 
+ .id, }, ) } @@ -249,37 +254,41 @@ pub(crate) mod tests { let f2 = "static/media/2.mp3".to_owned(); fs::write(f1.clone(), []).unwrap(); fs::write(f2.clone(), []).unwrap(); - (users, vec![ - NewMedia { - file_path: f1, - alt_text: "some alt".to_owned(), - is_remote: false, - remote_url: None, - sensitive: false, - content_warning: None, - owner_id: user_one, - }, - NewMedia { - file_path: f2, - alt_text: "alt message".to_owned(), - is_remote: false, - remote_url: None, - sensitive: true, - content_warning: Some("Content warning".to_owned()), - owner_id: user_one, - }, - NewMedia { - file_path: "".to_owned(), - alt_text: "another alt".to_owned(), - is_remote: true, - remote_url: Some("https://example.com/".to_owned()), - sensitive: false, - content_warning: None, - owner_id: user_two, - }, - ].into_iter() + ( + users, + vec![ + NewMedia { + file_path: f1, + alt_text: "some alt".to_owned(), + is_remote: false, + remote_url: None, + sensitive: false, + content_warning: None, + owner_id: user_one, + }, + NewMedia { + file_path: f2, + alt_text: "alt message".to_owned(), + is_remote: false, + remote_url: None, + sensitive: true, + content_warning: Some("Content warning".to_owned()), + owner_id: user_one, + }, + NewMedia { + file_path: "".to_owned(), + alt_text: "another alt".to_owned(), + is_remote: true, + remote_url: Some("https://example.com/".to_owned()), + sensitive: false, + content_warning: None, + owner_id: user_two, + }, + ] + .into_iter() .map(|nm| Media::insert(conn, nm).unwrap()) - .collect()) + .collect(), + ) } pub(crate) fn clean(conn: &Conn) { @@ -311,7 +320,8 @@ pub(crate) mod tests { content_warning: None, owner_id: user, }, - ).unwrap(); + ) + .unwrap(); assert!(Path::new(&path).exists()); media.delete(conn).unwrap(); @@ -346,29 +356,26 @@ pub(crate) mod tests { content_warning: None, owner_id: u1.id, }, - ).unwrap(); + ) + .unwrap(); - assert!( - Media::for_user(conn, u1.id).unwrap() - .iter() - .any(|m| m.id == media.id) - ); - assert!( - !Media::for_user(conn, u2.id).unwrap() - .iter() - .any(|m| m.id == media.id) - ); + assert!(Media::for_user(conn, u1.id) + .unwrap() + .iter() + .any(|m| m.id == media.id)); + assert!(!Media::for_user(conn, u2.id) + .unwrap() + .iter() + .any(|m| m.id == media.id)); media.set_owner(conn, u2).unwrap(); - assert!( - !Media::for_user(conn, u1.id).unwrap() - .iter() - .any(|m| m.id == media.id) - ); - assert!( - Media::for_user(conn, u2.id).unwrap() - .iter() - .any(|m| m.id == media.id) - ); + assert!(!Media::for_user(conn, u1.id) + .unwrap() + .iter() + .any(|m| m.id == media.id)); + assert!(Media::for_user(conn, u2.id) + .unwrap() + .iter() + .any(|m| m.id == media.id)); clean(conn); diff --git a/plume-models/src/mentions.rs b/plume-models/src/mentions.rs index db4d9fd2..7fbbb1c2 100644 --- a/plume-models/src/mentions.rs +++ b/plume-models/src/mentions.rs @@ -37,11 +37,15 @@ impl Mention { } pub fn get_post(&self, conn: &Connection) -> Result { - self.post_id.ok_or(Error::NotFound).and_then(|id| Post::get(conn, id)) + self.post_id + .ok_or(Error::NotFound) + .and_then(|id| Post::get(conn, id)) } pub fn get_comment(&self, conn: &Connection) -> Result { - self.comment_id.ok_or(Error::NotFound).and_then(|id| Comment::get(conn, id)) + self.comment_id + .ok_or(Error::NotFound) + .and_then(|id| Comment::get(conn, id)) } pub fn get_user(&self, conn: &Connection) -> Result { @@ -54,21 +58,15 @@ impl Mention { pub fn build_activity(conn: &Connection, ment: &str) -> Result { let user = User::find_by_fqn(conn, ment)?; let mut mention = 
link::Mention::default(); - mention - .link_props - .set_href_string(user.ap_url)?; - mention - .link_props - .set_name_string(format!("@{}", ment))?; + mention.link_props.set_href_string(user.ap_url)?; + mention.link_props.set_name_string(format!("@{}", ment))?; Ok(mention) } pub fn to_activity(&self, conn: &Connection) -> Result { let user = self.get_mentioned(conn)?; let mut mention = link::Mention::default(); - mention - .link_props - .set_href_string(user.ap_url.clone())?; + mention.link_props.set_href_string(user.ap_url.clone())?; mention .link_props .set_name_string(format!("@{}", user.fqn))?; @@ -141,6 +139,7 @@ impl Notify for Mention { object_id: self.id, user_id: m.id, }, - ).map(|_| ()) + ) + .map(|_| ()) } } diff --git a/plume-models/src/notifications.rs b/plume-models/src/notifications.rs index 8d40294e..f68ccc69 100644 --- a/plume-models/src/notifications.rs +++ b/plume-models/src/notifications.rs @@ -80,24 +80,40 @@ impl Notification { pub fn get_url(&self, conn: &Connection) -> Option { match self.kind.as_ref() { - notification_kind::COMMENT => self.get_post(conn).and_then(|p| Some(format!("{}#comment-{}", p.url(conn).ok()?, self.object_id))), + notification_kind::COMMENT => self + .get_post(conn) + .and_then(|p| Some(format!("{}#comment-{}", p.url(conn).ok()?, self.object_id))), notification_kind::FOLLOW => Some(format!("/@/{}/", self.get_actor(conn).ok()?.fqn)), - notification_kind::MENTION => Mention::get(conn, self.object_id).and_then(|mention| - mention.get_post(conn).and_then(|p| p.url(conn)) - .or_else(|_| { - let comment = mention.get_comment(conn)?; - Ok(format!("{}#comment-{}", comment.get_post(conn)?.url(conn)?, comment.id)) - }) - ).ok(), + notification_kind::MENTION => Mention::get(conn, self.object_id) + .and_then(|mention| { + mention + .get_post(conn) + .and_then(|p| p.url(conn)) + .or_else(|_| { + let comment = mention.get_comment(conn)?; + Ok(format!( + "{}#comment-{}", + comment.get_post(conn)?.url(conn)?, + comment.id + )) + }) + }) + .ok(), _ => None, } } pub fn get_post(&self, conn: &Connection) -> Option { match self.kind.as_ref() { - notification_kind::COMMENT => Comment::get(conn, self.object_id).and_then(|comment| comment.get_post(conn)).ok(), - notification_kind::LIKE => Like::get(conn, self.object_id).and_then(|like| Post::get(conn, like.post_id)).ok(), - notification_kind::RESHARE => Reshare::get(conn, self.object_id).and_then(|reshare| reshare.get_post(conn)).ok(), + notification_kind::COMMENT => Comment::get(conn, self.object_id) + .and_then(|comment| comment.get_post(conn)) + .ok(), + notification_kind::LIKE => Like::get(conn, self.object_id) + .and_then(|like| Post::get(conn, like.post_id)) + .ok(), + notification_kind::RESHARE => Reshare::get(conn, self.object_id) + .and_then(|reshare| reshare.get_post(conn)) + .ok(), _ => None, } } @@ -105,7 +121,9 @@ impl Notification { pub fn get_actor(&self, conn: &Connection) -> Result { Ok(match self.kind.as_ref() { notification_kind::COMMENT => Comment::get(conn, self.object_id)?.get_author(conn)?, - notification_kind::FOLLOW => User::get(conn, Follow::get(conn, self.object_id)?.follower_id)?, + notification_kind::FOLLOW => { + User::get(conn, Follow::get(conn, self.object_id)?.follower_id)? 
+ } notification_kind::LIKE => User::get(conn, Like::get(conn, self.object_id)?.user_id)?, notification_kind::MENTION => Mention::get(conn, self.object_id)?.get_user(conn)?, notification_kind::RESHARE => Reshare::get(conn, self.object_id)?.get_user(conn)?, diff --git a/plume-models/src/posts.rs b/plume-models/src/posts.rs index 6091ca6b..6bfee689 100644 --- a/plume-models/src/posts.rs +++ b/plume-models/src/posts.rs @@ -1,8 +1,8 @@ use activitypub::{ - CustomObject, activity::{Create, Delete, Update}, link, object::{Article, Image, Tombstone}, + CustomObject, }; use canapi::{Error as ApiError, Provider}; use chrono::{NaiveDateTime, TimeZone, Utc}; @@ -12,25 +12,26 @@ use scheduled_thread_pool::ScheduledThreadPool as Worker; use serde_json; use std::collections::HashSet; -use plume_api::posts::PostEndpoint; -use plume_common::{ - activity_pub::{ - inbox::{Deletable, FromActivity}, - broadcast, Hashtag, Id, IntoId, Licensed, Source, PUBLIC_VISIBILTY, - }, - utils::md_to_html, -}; use blogs::Blog; use instance::Instance; use medias::Media; use mentions::Mention; +use plume_api::posts::PostEndpoint; +use plume_common::{ + activity_pub::{ + broadcast, + inbox::{Deletable, FromActivity}, + Hashtag, Id, IntoId, Licensed, Source, PUBLIC_VISIBILTY, + }, + utils::md_to_html, +}; use post_authors::*; use safe_string::SafeString; -use search::Searcher; use schema::posts; +use search::Searcher; use tags::*; use users::User; -use {ap_url, Connection, BASE_URL, Error, Result, ApiResult}; +use {ap_url, ApiResult, Connection, Error, Result, BASE_URL}; pub type LicensedArticle = CustomObject; @@ -75,7 +76,11 @@ impl<'a> Provider<(&'a Connection, &'a Worker, &'a Searcher, Option)> for P id: i32, ) -> ApiResult { if let Ok(post) = Post::get(conn, id) { - if !post.published && !user_id.map(|u| post.is_author(conn, u).unwrap_or(false)).unwrap_or(false) { + if !post.published + && !user_id + .map(|u| post.is_author(conn, u).unwrap_or(false)) + .unwrap_or(false) + { return Err(ApiError::Authorization( "You are not authorized to access this post yet.".to_string(), )); @@ -86,12 +91,23 @@ impl<'a> Provider<(&'a Connection, &'a Worker, &'a Searcher, Option)> for P subtitle: Some(post.subtitle.clone()), content: Some(post.content.get().clone()), source: Some(post.source.clone()), - author: Some(post.get_authors(conn).map_err(|_| ApiError::NotFound("Authors not found".into()))?[0].username.clone()), + author: Some( + post.get_authors(conn) + .map_err(|_| ApiError::NotFound("Authors not found".into()))?[0] + .username + .clone(), + ), blog_id: Some(post.blog_id), published: Some(post.published), creation_date: Some(post.creation_date.format("%Y-%m-%d").to_string()), license: Some(post.license.clone()), - tags: Some(Tag::for_post(conn, post.id).map_err(|_| ApiError::NotFound("Tags not found".into()))?.into_iter().map(|t| t.tag).collect()), + tags: Some( + Tag::for_post(conn, post.id) + .map_err(|_| ApiError::NotFound("Tags not found".into()))? 
+ .into_iter() + .map(|t| t.tag) + .collect(), + ), cover_id: post.cover_id, }) } else { @@ -114,24 +130,39 @@ impl<'a> Provider<(&'a Connection, &'a Worker, &'a Searcher, Option)> for P query = query.filter(posts::content.eq(content)); } - query.get_results::(*conn).map(|ps| ps.into_iter() - .filter(|p| p.published || user_id.map(|u| p.is_author(conn, u).unwrap_or(false)).unwrap_or(false)) - .map(|p| PostEndpoint { - id: Some(p.id), - title: Some(p.title.clone()), - subtitle: Some(p.subtitle.clone()), - content: Some(p.content.get().clone()), - source: Some(p.source.clone()), - author: Some(p.get_authors(conn).unwrap_or_default()[0].username.clone()), - blog_id: Some(p.blog_id), - published: Some(p.published), - creation_date: Some(p.creation_date.format("%Y-%m-%d").to_string()), - license: Some(p.license.clone()), - tags: Some(Tag::for_post(conn, p.id).unwrap_or_else(|_| vec![]).into_iter().map(|t| t.tag).collect()), - cover_id: p.cover_id, + query + .get_results::(*conn) + .map(|ps| { + ps.into_iter() + .filter(|p| { + p.published + || user_id + .map(|u| p.is_author(conn, u).unwrap_or(false)) + .unwrap_or(false) + }) + .map(|p| PostEndpoint { + id: Some(p.id), + title: Some(p.title.clone()), + subtitle: Some(p.subtitle.clone()), + content: Some(p.content.get().clone()), + source: Some(p.source.clone()), + author: Some(p.get_authors(conn).unwrap_or_default()[0].username.clone()), + blog_id: Some(p.blog_id), + published: Some(p.published), + creation_date: Some(p.creation_date.format("%Y-%m-%d").to_string()), + license: Some(p.license.clone()), + tags: Some( + Tag::for_post(conn, p.id) + .unwrap_or_else(|_| vec![]) + .into_iter() + .map(|t| t.tag) + .collect(), + ), + cover_id: p.cover_id, + }) + .collect() }) - .collect() - ).unwrap_or_else(|_| vec![]) + .unwrap_or_else(|_| vec![]) } fn update( @@ -142,11 +173,15 @@ impl<'a> Provider<(&'a Connection, &'a Worker, &'a Searcher, Option)> for P unimplemented!() } - fn delete((conn, _worker, search, user_id): &(&Connection, &Worker, &Searcher, Option), id: i32) { + fn delete( + (conn, _worker, search, user_id): &(&Connection, &Worker, &Searcher, Option), + id: i32, + ) { let user_id = user_id.expect("Post as Provider::delete: not authenticated"); if let Ok(post) = Post::get(conn, id) { if post.is_author(conn, user_id).unwrap_or(false) { - post.delete(&(conn, search)).expect("Post as Provider::delete: delete error"); + post.delete(&(conn, search)) + .expect("Post as Provider::delete: delete error"); } } } @@ -156,84 +191,124 @@ impl<'a> Provider<(&'a Connection, &'a Worker, &'a Searcher, Option)> for P query: PostEndpoint, ) -> ApiResult { if user_id.is_none() { - return Err(ApiError::Authorization("You are not authorized to create new articles.".to_string())); + return Err(ApiError::Authorization( + "You are not authorized to create new articles.".to_string(), + )); } let title = query.title.clone().expect("No title for new post in API"); let slug = query.title.unwrap().to_kebab_case(); - let date = query.creation_date.clone() - .and_then(|d| NaiveDateTime::parse_from_str(format!("{} 00:00:00", d).as_ref(), "%Y-%m-%d %H:%M:%S").ok()); + let date = query.creation_date.clone().and_then(|d| { + NaiveDateTime::parse_from_str(format!("{} 00:00:00", d).as_ref(), "%Y-%m-%d %H:%M:%S") + .ok() + }); let domain = &Instance::get_local(&conn) .map_err(|_| ApiError::NotFound("posts::update: Error getting local instance".into()))? 
.public_domain; - let (content, mentions, hashtags) = md_to_html(query.source.clone().unwrap_or_default().clone().as_ref(), domain); + let (content, mentions, hashtags) = md_to_html( + query.source.clone().unwrap_or_default().clone().as_ref(), + domain, + ); - let author = User::get(conn, user_id.expect("::create: no user_id error")) - .map_err(|_| ApiError::NotFound("Author not found".into()))?; + let author = User::get( + conn, + user_id.expect("::create: no user_id error"), + ) + .map_err(|_| ApiError::NotFound("Author not found".into()))?; let blog = match query.blog_id { Some(x) => x, - None => Blog::find_for_author(conn, &author).map_err(|_| ApiError::NotFound("No default blog".into()))?[0].id + None => { + Blog::find_for_author(conn, &author) + .map_err(|_| ApiError::NotFound("No default blog".into()))?[0] + .id + } }; if Post::find_by_slug(conn, &slug, blog).is_ok() { // Not an actual authorization problem, but we have nothing better for now… // TODO: add another error variant to canapi and add it there - return Err(ApiError::Authorization("A post with the same slug already exists".to_string())); + return Err(ApiError::Authorization( + "A post with the same slug already exists".to_string(), + )); } - let post = Post::insert(conn, NewPost { - blog_id: blog, - slug, - title, - content: SafeString::new(content.as_ref()), - published: query.published.unwrap_or(true), - license: query.license.unwrap_or_else(|| Instance::get_local(conn) - .map(|i| i.default_license) - .unwrap_or_else(|_| String::from("CC-BY-SA"))), - creation_date: date, - ap_url: String::new(), - subtitle: query.subtitle.unwrap_or_default(), - source: query.source.expect("Post API::create: no source error"), - cover_id: query.cover_id, - }, search).map_err(|_| ApiError::NotFound("Creation error".into()))?; + let post = Post::insert( + conn, + NewPost { + blog_id: blog, + slug, + title, + content: SafeString::new(content.as_ref()), + published: query.published.unwrap_or(true), + license: query.license.unwrap_or_else(|| { + Instance::get_local(conn) + .map(|i| i.default_license) + .unwrap_or_else(|_| String::from("CC-BY-SA")) + }), + creation_date: date, + ap_url: String::new(), + subtitle: query.subtitle.unwrap_or_default(), + source: query.source.expect("Post API::create: no source error"), + cover_id: query.cover_id, + }, + search, + ) + .map_err(|_| ApiError::NotFound("Creation error".into()))?; - PostAuthor::insert(conn, NewPostAuthor { - author_id: author.id, - post_id: post.id - }).map_err(|_| ApiError::NotFound("Error saving authors".into()))?; + PostAuthor::insert( + conn, + NewPostAuthor { + author_id: author.id, + post_id: post.id, + }, + ) + .map_err(|_| ApiError::NotFound("Error saving authors".into()))?; if let Some(tags) = query.tags { for tag in tags { - Tag::insert(conn, NewTag { - tag, - is_hashtag: false, - post_id: post.id - }).map_err(|_| ApiError::NotFound("Error saving tags".into()))?; + Tag::insert( + conn, + NewTag { + tag, + is_hashtag: false, + post_id: post.id, + }, + ) + .map_err(|_| ApiError::NotFound("Error saving tags".into()))?; } } for hashtag in hashtags { - Tag::insert(conn, NewTag { - tag: hashtag.to_camel_case(), - is_hashtag: true, - post_id: post.id - }).map_err(|_| ApiError::NotFound("Error saving hashtags".into()))?; + Tag::insert( + conn, + NewTag { + tag: hashtag.to_camel_case(), + is_hashtag: true, + post_id: post.id, + }, + ) + .map_err(|_| ApiError::NotFound("Error saving hashtags".into()))?; } if post.published { for m in mentions.into_iter() { Mention::from_activity( 
&*conn, - &Mention::build_activity(&*conn, &m).map_err(|_| ApiError::NotFound("Couldn't build mentions".into()))?, + &Mention::build_activity(&*conn, &m) + .map_err(|_| ApiError::NotFound("Couldn't build mentions".into()))?, post.id, true, - true - ).map_err(|_| ApiError::NotFound("Error saving mentions".into()))?; + true, + ) + .map_err(|_| ApiError::NotFound("Error saving mentions".into()))?; } - let act = post.create_activity(&*conn).map_err(|_| ApiError::NotFound("Couldn't create activity".into()))?; - let dest = User::one_by_instance(&*conn).map_err(|_| ApiError::NotFound("Couldn't list remote instances".into()))?; + let act = post + .create_activity(&*conn) + .map_err(|_| ApiError::NotFound("Couldn't create activity".into()))?; + let dest = User::one_by_instance(&*conn) + .map_err(|_| ApiError::NotFound("Couldn't list remote instances".into()))?; worker.execute(move || broadcast(&author, act, dest)); } @@ -243,12 +318,23 @@ impl<'a> Provider<(&'a Connection, &'a Worker, &'a Searcher, Option)> for P subtitle: Some(post.subtitle.clone()), content: Some(post.content.get().clone()), source: Some(post.source.clone()), - author: Some(post.get_authors(conn).map_err(|_| ApiError::NotFound("No authors".into()))?[0].username.clone()), + author: Some( + post.get_authors(conn) + .map_err(|_| ApiError::NotFound("No authors".into()))?[0] + .username + .clone(), + ), blog_id: Some(post.blog_id), published: Some(post.published), creation_date: Some(post.creation_date.format("%Y-%m-%d").to_string()), license: Some(post.license.clone()), - tags: Some(Tag::for_post(conn, post.id).map_err(|_| ApiError::NotFound("Tags not found".into()))?.into_iter().map(|t| t.tag).collect()), + tags: Some( + Tag::for_post(conn, post.id) + .map_err(|_| ApiError::NotFound("Tags not found".into()))? + .into_iter() + .map(|t| t.tag) + .collect(), + ), cover_id: post.cover_id, }) } @@ -279,15 +365,17 @@ impl Post { Ok(post) } pub fn update(&self, conn: &Connection, searcher: &Searcher) -> Result { - diesel::update(self) - .set(self) - .execute(conn)?; + diesel::update(self).set(self).execute(conn)?; let post = Self::get(conn, self.id)?; searcher.update_document(conn, &post)?; Ok(post) } - pub fn list_by_tag(conn: &Connection, tag: String, (min, max): (i32, i32)) -> Result> { + pub fn list_by_tag( + conn: &Connection, + tag: String, + (min, max): (i32, i32), + ) -> Result> { use schema::tags; let ids = tags::table.filter(tags::tag.eq(tag)).select(tags::post_id); @@ -349,7 +437,11 @@ impl Post { .map_err(Error::from) } - pub fn get_recents_for_author(conn: &Connection, author: &User, limit: i64) -> Result> { + pub fn get_recents_for_author( + conn: &Connection, + author: &User, + limit: i64, + ) -> Result> { use schema::post_authors; let posts = PostAuthor::belonging_to(author).select(post_authors::post_id); @@ -481,7 +573,8 @@ impl Post { Ok(PostAuthor::belonging_to(self) .filter(post_authors::author_id.eq(author_id)) .count() - .get_result::(conn)? > 0) + .get_result::(conn)? + > 0) } pub fn get_blog(&self, conn: &Connection) -> Result { @@ -529,7 +622,7 @@ impl Post { pub fn to_activity(&self, conn: &Connection) -> Result { let cc = self.get_receivers_urls(conn)?; - let to = vec![PUBLIC_VISIBILTY.to_string()]; + let to = vec![PUBLIC_VISIBILTY.to_string()]; let mut mentions_json = Mention::list_for_post(conn, self.id)? 
.into_iter() @@ -542,12 +635,8 @@ impl Post { mentions_json.append(&mut tags_json); let mut article = Article::default(); - article - .object_props - .set_name_string(self.title.clone())?; - article - .object_props - .set_id_string(self.ap_url.clone())?; + article.object_props.set_name_string(self.title.clone())?; + article.object_props.set_id_string(self.ap_url.clone())?; let mut authors = self .get_authors(conn)? @@ -561,12 +650,10 @@ impl Post { article .object_props .set_content_string(self.content.get().clone())?; - article - .ap_object_props - .set_source_object(Source { - content: self.source.clone(), - media_type: String::from("text/markdown"), - })?; + article.ap_object_props.set_source_object(Source { + content: self.source.clone(), + media_type: String::from("text/markdown"), + })?; article .object_props .set_published_utctime(Utc.from_utc_datetime(&self.creation_date))?; @@ -578,31 +665,20 @@ impl Post { if let Some(media_id) = self.cover_id { let media = Media::get(conn, media_id)?; let mut cover = Image::default(); - cover - .object_props - .set_url_string(media.url(conn)?)?; + cover.object_props.set_url_string(media.url(conn)?)?; if media.sensitive { cover .object_props .set_summary_string(media.content_warning.unwrap_or_default())?; } + cover.object_props.set_content_string(media.alt_text)?; cover .object_props - .set_content_string(media.alt_text)?; - cover - .object_props - .set_attributed_to_link_vec(vec![ - User::get(conn, media.owner_id)? - .into_id(), - ])?; - article - .object_props - .set_icon_object(cover)?; + .set_attributed_to_link_vec(vec![User::get(conn, media.owner_id)?.into_id()])?; + article.object_props.set_icon_object(cover)?; } - article - .object_props - .set_url_string(self.ap_url.clone())?; + article.object_props.set_url_string(self.ap_url.clone())?; article .object_props .set_to_link_vec::(to.into_iter().map(Id::new).collect())?; @@ -620,52 +696,39 @@ impl Post { act.object_props .set_id_string(format!("{}activity", self.ap_url))?; act.object_props - .set_to_link_vec::( - article.object - .object_props - .to_link_vec()?, - )?; + .set_to_link_vec::(article.object.object_props.to_link_vec()?)?; act.object_props - .set_cc_link_vec::( - article.object - .object_props - .cc_link_vec()?, - )?; + .set_cc_link_vec::(article.object.object_props.cc_link_vec()?)?; act.create_props .set_actor_link(Id::new(self.get_authors(conn)?[0].clone().ap_url))?; - act.create_props - .set_object_object(article)?; + act.create_props.set_object_object(article)?; Ok(act) } pub fn update_activity(&self, conn: &Connection) -> Result { let article = self.to_activity(conn)?; let mut act = Update::default(); + act.object_props.set_id_string(format!( + "{}/update-{}", + self.ap_url, + Utc::now().timestamp() + ))?; act.object_props - .set_id_string(format!("{}/update-{}", self.ap_url, Utc::now().timestamp()))?; + .set_to_link_vec::(article.object.object_props.to_link_vec()?)?; act.object_props - .set_to_link_vec::( - article.object - .object_props - .to_link_vec()?, - )?; - act.object_props - .set_cc_link_vec::( - article.object - .object_props - .cc_link_vec()?, - )?; + .set_cc_link_vec::(article.object.object_props.cc_link_vec()?)?; act.update_props .set_actor_link(Id::new(self.get_authors(conn)?[0].clone().ap_url))?; - act.update_props - .set_object_object(article)?; + act.update_props.set_object_object(article)?; Ok(act) } - pub fn handle_update(conn: &Connection, updated: &LicensedArticle, searcher: &Searcher) -> Result<()> { - let id = updated.object - .object_props - 
.id_string()?; + pub fn handle_update( + conn: &Connection, + updated: &LicensedArticle, + searcher: &Searcher, + ) -> Result<()> { + let id = updated.object.object_props.id_string()?; let mut post = Post::find_by_ap_url(conn, &id)?; if let Ok(title) = updated.object.object_props.name_string() { @@ -698,7 +761,9 @@ impl Post { .into_iter() .map(|s| s.to_camel_case()) .collect::>(); - if let Some(serde_json::Value::Array(mention_tags)) = updated.object.object_props.tag.clone() { + if let Some(serde_json::Value::Array(mention_tags)) = + updated.object.object_props.tag.clone() + { let mut mentions = vec![]; let mut tags = vec![]; let mut hashtags = vec![]; @@ -710,8 +775,7 @@ impl Post { serde_json::from_value::(tag.clone()) .map_err(Error::from) .and_then(|t| { - let tag_name = t - .name_string()?; + let tag_name = t.name_string()?; if txt_hashtags.remove(&tag_name) { hashtags.push(t); } else { @@ -854,20 +918,25 @@ impl Post { } pub fn cover_url(&self, conn: &Connection) -> Option { - self.cover_id.and_then(|i| Media::get(conn, i).ok()).and_then(|c| c.url(conn).ok()) + self.cover_id + .and_then(|i| Media::get(conn, i).ok()) + .and_then(|c| c.url(conn).ok()) } } impl<'a> FromActivity for Post { type Error = Error; - fn from_activity((conn, searcher): &(&'a Connection, &'a Searcher), article: LicensedArticle, _actor: Id) -> Result { + fn from_activity( + (conn, searcher): &(&'a Connection, &'a Searcher), + article: LicensedArticle, + _actor: Id, + ) -> Result { let license = article.custom_props.license_string().unwrap_or_default(); let article = article.object; - if let Ok(post) = Post::find_by_ap_url( - conn, - &article.object_props.id_string().unwrap_or_default(), - ) { + if let Ok(post) = + Post::find_by_ap_url(conn, &article.object_props.id_string().unwrap_or_default()) + { Ok(post) } else { let (blog, authors) = article @@ -880,10 +949,8 @@ impl<'a> FromActivity for Post Ok(u) => { authors.push(u); (blog, authors) - }, - Err(_) => { - (blog.or_else(|| Blog::from_url(conn, &url).ok()), authors) - }, + } + Err(_) => (blog.or_else(|| Blog::from_url(conn, &url).ok()), authors), } }); @@ -893,41 +960,24 @@ impl<'a> FromActivity for Post .ok() .and_then(|img| Media::from_activity(conn, &img).ok().map(|m| m.id)); - let title = article - .object_props - .name_string()?; + let title = article.object_props.name_string()?; let post = Post::insert( conn, NewPost { blog_id: blog?.id, slug: title.to_kebab_case(), title, - content: SafeString::new( - &article - .object_props - .content_string()?, - ), + content: SafeString::new(&article.object_props.content_string()?), published: true, license, // FIXME: This is wrong: with this logic, we may use the display URL as the AP ID. We need two different fields - ap_url: article.object_props.url_string().or_else(|_| - article - .object_props - .id_string() - )?, - creation_date: Some( - article - .object_props - .published_utctime()? - .naive_utc(), - ), - subtitle: article + ap_url: article .object_props - .summary_string()?, - source: article - .ap_object_props - .source_object::()? 
- .content, + .url_string() + .or_else(|_| article.object_props.id_string())?, + creation_date: Some(article.object_props.published_utctime()?.naive_utc()), + subtitle: article.object_props.summary_string()?, + source: article.ap_object_props.source_object::()?.content, cover_id: cover, }, searcher, @@ -959,7 +1009,12 @@ impl<'a> FromActivity for Post .map_err(Error::from) .and_then(|t| { let tag_name = t.name_string()?; - Ok(Tag::from_activity(conn, &t, post.id, hashtags.remove(&tag_name))) + Ok(Tag::from_activity( + conn, + &t, + post.id, + hashtags.remove(&tag_name), + )) }) .ok(); } @@ -978,11 +1033,8 @@ impl<'a> Deletable<(&'a Connection, &'a Searcher), Delete> for Post { .set_actor_link(self.get_authors(conn)?[0].clone().into_id())?; let mut tombstone = Tombstone::default(); - tombstone - .object_props - .set_id_string(self.ap_url.clone())?; - act.delete_props - .set_object_object(tombstone)?; + tombstone.object_props.set_id_string(self.ap_url.clone())?; + act.delete_props.set_object_object(tombstone)?; act.object_props .set_id_string(format!("{}#delete", self.ap_url))?; @@ -992,16 +1044,22 @@ impl<'a> Deletable<(&'a Connection, &'a Searcher), Delete> for Post { for m in Mention::list_for_post(&conn, self.id)? { m.delete(conn)?; } - diesel::delete(self) - .execute(*conn)?; + diesel::delete(self).execute(*conn)?; searcher.delete_document(self); Ok(act) } - fn delete_id(id: &str, actor_id: &str, (conn, searcher): &(&Connection, &Searcher)) -> Result { + fn delete_id( + id: &str, + actor_id: &str, + (conn, searcher): &(&Connection, &Searcher), + ) -> Result { let actor = User::find_by_ap_url(conn, actor_id)?; let post = Post::find_by_ap_url(conn, id)?; - let can_delete = post.get_authors(conn)?.into_iter().any(|a| actor.id == a.id); + let can_delete = post + .get_authors(conn)? + .into_iter() + .any(|a| actor.id == a.id); if can_delete { post.delete(&(conn, searcher)) } else { diff --git a/plume-models/src/reshares.rs b/plume-models/src/reshares.rs index 829b37ec..cdf72d1e 100644 --- a/plume-models/src/reshares.rs +++ b/plume-models/src/reshares.rs @@ -40,7 +40,11 @@ impl Reshare { post_id as i32 ); - pub fn get_recents_for_author(conn: &Connection, user: &User, limit: i64) -> Result> { + pub fn get_recents_for_author( + conn: &Connection, + user: &User, + limit: i64, + ) -> Result> { reshares::table .filter(reshares::user_id.eq(user.id)) .order(reshares::creation_date.desc()) @@ -63,12 +67,10 @@ impl Reshare { .set_actor_link(User::get(conn, self.user_id)?.into_id())?; act.announce_props .set_object_link(Post::get(conn, self.post_id)?.into_id())?; - act.object_props - .set_id_string(self.ap_url.clone())?; + act.object_props.set_id_string(self.ap_url.clone())?; act.object_props .set_to_link(Id::new(PUBLIC_VISIBILTY.to_string()))?; - act.object_props - .set_cc_link_vec::(vec![])?; + act.object_props.set_cc_link_vec::(vec![])?; Ok(act) } @@ -78,29 +80,15 @@ impl FromActivity for Reshare { type Error = Error; fn from_activity(conn: &Connection, announce: Announce, _actor: Id) -> Result { - let user = User::from_url( - conn, - announce - .announce_props - .actor_link::()? - .as_ref(), - )?; - let post = Post::find_by_ap_url( - conn, - announce - .announce_props - .object_link::()? 
- .as_ref(), - )?; + let user = User::from_url(conn, announce.announce_props.actor_link::()?.as_ref())?; + let post = + Post::find_by_ap_url(conn, announce.announce_props.object_link::()?.as_ref())?; let reshare = Reshare::insert( conn, NewReshare { post_id: post.id, user_id: user.id, - ap_url: announce - .object_props - .id_string() - .unwrap_or_default(), + ap_url: announce.object_props.id_string().unwrap_or_default(), }, )?; reshare.notify(conn)?; @@ -131,26 +119,22 @@ impl Deletable for Reshare { type Error = Error; fn delete(&self, conn: &Connection) -> Result { - diesel::delete(self) - .execute(conn)?; + diesel::delete(self).execute(conn)?; // delete associated notification if any if let Ok(notif) = Notification::find(conn, notification_kind::RESHARE, self.id) { - diesel::delete(¬if) - .execute(conn)?; + diesel::delete(¬if).execute(conn)?; } let mut act = Undo::default(); act.undo_props .set_actor_link(User::get(conn, self.user_id)?.into_id())?; - act.undo_props - .set_object_object(self.to_activity(conn)?)?; + act.undo_props.set_object_object(self.to_activity(conn)?)?; act.object_props .set_id_string(format!("{}#delete", self.ap_url))?; act.object_props .set_to_link(Id::new(PUBLIC_VISIBILTY.to_string()))?; - act.object_props - .set_cc_link_vec::(vec![])?; + act.object_props.set_cc_link_vec::(vec![])?; Ok(act) } @@ -172,7 +156,7 @@ impl NewReshare { NewReshare { post_id: p.id, user_id: u.id, - ap_url + ap_url, } } } diff --git a/plume-models/src/safe_string.rs b/plume-models/src/safe_string.rs index 2f6fcdea..6527b86b 100644 --- a/plume-models/src/safe_string.rs +++ b/plume-models/src/safe_string.rs @@ -19,21 +19,15 @@ lazy_static! { static ref CLEAN: Builder<'static> = { let mut b = Builder::new(); b.add_generic_attributes(iter::once("id")) - .add_tags(&[ "iframe", "video", "audio" ]) + .add_tags(&["iframe", "video", "audio"]) .id_prefix(Some("postcontent-")) .url_relative(UrlRelative::Custom(Box::new(url_add_prefix))) .add_tag_attributes( "iframe", - [ "width", "height", "src", "frameborder" ].iter().cloned(), + ["width", "height", "src", "frameborder"].iter().cloned(), ) - .add_tag_attributes( - "video", - [ "src", "title", "controls" ].iter(), - ) - .add_tag_attributes( - "audio", - [ "src", "title", "controls" ].iter(), - ); + .add_tag_attributes("video", ["src", "title", "controls"].iter()) + .add_tag_attributes("audio", ["src", "title", "controls"].iter()); b }; } @@ -69,7 +63,7 @@ impl SafeString { /// Prefer `SafeString::new` as much as possible. 
pub fn trusted(value: impl AsRef) -> Self { SafeString { - value: value.as_ref().to_string() + value: value.as_ref().to_string(), } } diff --git a/plume-models/src/search/mod.rs b/plume-models/src/search/mod.rs index 83515e76..48e0dc1b 100644 --- a/plume-models/src/search/mod.rs +++ b/plume-models/src/search/mod.rs @@ -1,33 +1,32 @@ -mod searcher; mod query; +mod searcher; mod tokenizer; -pub use self::searcher::*; pub use self::query::PlumeQuery as Query; - +pub use self::searcher::*; #[cfg(test)] pub(crate) mod tests { use super::{Query, Searcher}; + use diesel::Connection; use std::env::temp_dir; use std::str::FromStr; - use diesel::Connection; + use blogs::tests::fill_database; use plume_common::activity_pub::inbox::Deletable; use plume_common::utils::random_hex; - use blogs::tests::fill_database; - use posts::{NewPost, Post}; use post_authors::*; + use posts::{NewPost, Post}; use safe_string::SafeString; use tests::db; - pub(crate) fn get_searcher() -> Searcher { let dir = temp_dir().join("plume-test"); if dir.exists() { Searcher::open(&dir) } else { Searcher::create(&dir) - }.unwrap() + } + .unwrap() } #[test] @@ -98,7 +97,9 @@ pub(crate) mod tests { #[test] fn open() { - {get_searcher()};//make sure $tmp/plume-test-tantivy exist + { + get_searcher() + }; //make sure $tmp/plume-test-tantivy exist let dir = temp_dir().join("plume-test"); Searcher::open(&dir).unwrap(); @@ -109,8 +110,10 @@ pub(crate) mod tests { let dir = temp_dir().join(format!("plume-test-{}", random_hex())); assert!(Searcher::open(&dir).is_err()); - {Searcher::create(&dir).unwrap();} - Searcher::open(&dir).unwrap();//verify it's well created + { + Searcher::create(&dir).unwrap(); + } + Searcher::open(&dir).unwrap(); //verify it's well created } #[test] @@ -123,37 +126,56 @@ pub(crate) mod tests { let title = random_hex()[..8].to_owned(); - let mut post = Post::insert(conn, NewPost { - blog_id: blog.id, - slug: title.clone(), - title: title.clone(), - content: SafeString::new(""), - published: true, - license: "CC-BY-SA".to_owned(), - ap_url: "".to_owned(), - creation_date: None, - subtitle: "".to_owned(), - source: "".to_owned(), - cover_id: None, - }, &searcher).unwrap(); - PostAuthor::insert(conn, NewPostAuthor { - post_id: post.id, - author_id: author.id, - }).unwrap(); + let mut post = Post::insert( + conn, + NewPost { + blog_id: blog.id, + slug: title.clone(), + title: title.clone(), + content: SafeString::new(""), + published: true, + license: "CC-BY-SA".to_owned(), + ap_url: "".to_owned(), + creation_date: None, + subtitle: "".to_owned(), + source: "".to_owned(), + cover_id: None, + }, + &searcher, + ) + .unwrap(); + PostAuthor::insert( + conn, + NewPostAuthor { + post_id: post.id, + author_id: author.id, + }, + ) + .unwrap(); searcher.commit(); - assert_eq!(searcher.search_document(conn, Query::from_str(&title).unwrap(), (0,1))[0].id, post.id); + assert_eq!( + searcher.search_document(conn, Query::from_str(&title).unwrap(), (0, 1))[0].id, + post.id + ); let newtitle = random_hex()[..8].to_owned(); post.title = newtitle.clone(); post.update(conn, &searcher).unwrap(); searcher.commit(); - assert_eq!(searcher.search_document(conn, Query::from_str(&newtitle).unwrap(), (0,1))[0].id, post.id); - assert!(searcher.search_document(conn, Query::from_str(&title).unwrap(), (0,1)).is_empty()); + assert_eq!( + searcher.search_document(conn, Query::from_str(&newtitle).unwrap(), (0, 1))[0].id, + post.id + ); + assert!(searcher + .search_document(conn, Query::from_str(&title).unwrap(), (0, 1)) + .is_empty()); 
post.delete(&(conn, &searcher)).unwrap(); searcher.commit(); - assert!(searcher.search_document(conn, Query::from_str(&newtitle).unwrap(), (0,1)).is_empty()); + assert!(searcher + .search_document(conn, Query::from_str(&newtitle).unwrap(), (0, 1)) + .is_empty()); Ok(()) }); diff --git a/plume-models/src/search/query.rs b/plume-models/src/search/query.rs index 2984ae6c..8c815c27 100644 --- a/plume-models/src/search/query.rs +++ b/plume-models/src/search/query.rs @@ -1,8 +1,7 @@ -use chrono::{Datelike, naive::NaiveDate, offset::Utc}; -use tantivy::{query::*, schema::*, Term}; -use std::{cmp,ops::Bound}; +use chrono::{naive::NaiveDate, offset::Utc, Datelike}; use search::searcher::Searcher; - +use std::{cmp, ops::Bound}; +use tantivy::{query::*, schema::*, Term}; //Generate functions for advanced search macro_rules! gen_func { @@ -142,13 +141,11 @@ pub struct PlumeQuery { } impl PlumeQuery { - /// Create a new empty Query pub fn new() -> Self { Default::default() } - /// Parse a query string into this Query pub fn parse_query(&mut self, query: &str) -> &mut Self { self.from_str_req(&query.trim()) @@ -160,9 +157,11 @@ impl PlumeQuery { gen_to_query!(self, result; normal: title, subtitle, content, tag; oneoff: instance, author, blog, lang, license); - for (occur, token) in self.text { // text entries need to be added as multiple Terms + for (occur, token) in self.text { + // text entries need to be added as multiple Terms match occur { - Occur::Must => { // a Must mean this must be in one of title subtitle or content, not in all 3 + Occur::Must => { + // a Must mean this must be in one of title subtitle or content, not in all 3 let subresult = vec![ (Occur::Should, Self::token_to_query(&token, "title")), (Occur::Should, Self::token_to_query(&token, "subtitle")), @@ -170,20 +169,26 @@ impl PlumeQuery { ]; result.push((Occur::Must, Box::new(BooleanQuery::from(subresult)))); - }, + } occur => { result.push((occur, Self::token_to_query(&token, "title"))); result.push((occur, Self::token_to_query(&token, "subtitle"))); result.push((occur, Self::token_to_query(&token, "content"))); - }, + } } } - if self.before.is_some() || self.after.is_some() { // if at least one range bound is provided - let after = self.after.unwrap_or_else(|| i64::from(NaiveDate::from_ymd(2000, 1, 1).num_days_from_ce())); - let before = self.before.unwrap_or_else(|| i64::from(Utc::today().num_days_from_ce())); + if self.before.is_some() || self.after.is_some() { + // if at least one range bound is provided + let after = self + .after + .unwrap_or_else(|| i64::from(NaiveDate::from_ymd(2000, 1, 1).num_days_from_ce())); + let before = self + .before + .unwrap_or_else(|| i64::from(Utc::today().num_days_from_ce())); let field = Searcher::schema().get_field("creation_date").unwrap(); - let range = RangeQuery::new_i64_bounds(field, Bound::Included(after), Bound::Included(before)); + let range = + RangeQuery::new_i64_bounds(field, Bound::Included(after), Bound::Included(before)); result.push((Occur::Must, Box::new(range))); } @@ -195,14 +200,18 @@ impl PlumeQuery { // documents newer than the provided date will be ignored pub fn before(&mut self, date: &D) -> &mut Self { - let before = self.before.unwrap_or_else(|| i64::from(Utc::today().num_days_from_ce())); + let before = self + .before + .unwrap_or_else(|| i64::from(Utc::today().num_days_from_ce())); self.before = Some(cmp::min(before, i64::from(date.num_days_from_ce()))); self } // documents older than the provided date will be ignored pub fn after(&mut self, date: &D) -> &mut Self 
{ - let after = self.after.unwrap_or_else(|| i64::from(NaiveDate::from_ymd(2000, 1, 1).num_days_from_ce())); + let after = self + .after + .unwrap_or_else(|| i64::from(NaiveDate::from_ymd(2000, 1, 1).num_days_from_ce())); self.after = Some(cmp::max(after, i64::from(date.num_days_from_ce()))); self } @@ -212,18 +221,22 @@ impl PlumeQuery { query = query.trim(); if query.is_empty() { ("", "") - } else if query.get(0..1).map(|v| v=="\"").unwrap_or(false) { - if let Some(index) = query[1..].find('"') { - query.split_at(index+2) - } else { - (query, "") - } - } else if query.get(0..2).map(|v| v=="+\"" || v=="-\"").unwrap_or(false) { - if let Some(index) = query[2..].find('"') { - query.split_at(index+3) - } else { - (query, "") - } + } else if query.get(0..1).map(|v| v == "\"").unwrap_or(false) { + if let Some(index) = query[1..].find('"') { + query.split_at(index + 2) + } else { + (query, "") + } + } else if query + .get(0..2) + .map(|v| v == "+\"" || v == "-\"") + .unwrap_or(false) + { + if let Some(index) = query[2..].find('"') { + query.split_at(index + 3) + } else { + (query, "") + } } else if let Some(index) = query.find(' ') { query.split_at(index) } else { @@ -247,13 +260,13 @@ impl PlumeQuery { fn from_str_req(&mut self, mut query: &str) -> &mut Self { query = query.trim_left(); if query.is_empty() { - return self + return self; } - let occur = if query.get(0..1).map(|v| v=="+").unwrap_or(false) { + let occur = if query.get(0..1).map(|v| v == "+").unwrap_or(false) { query = &query[1..]; Occur::Must - } else if query.get(0..1).map(|v| v=="-").unwrap_or(false) { + } else if query.get(0..1).map(|v| v == "-").unwrap_or(false) { query = &query[1..]; Occur::MustNot } else { @@ -270,31 +283,59 @@ impl PlumeQuery { let token = token.to_lowercase(); let token = token.as_str(); let field = Searcher::schema().get_field(field_name).unwrap(); - if token.contains('@') && (field_name=="author" || field_name=="blog") { + if token.contains('@') && (field_name == "author" || field_name == "blog") { let pos = token.find('@').unwrap(); let user_term = Term::from_field_text(field, &token[..pos]); - let instance_term = Term::from_field_text(Searcher::schema().get_field("instance").unwrap(), &token[pos+1..]); + let instance_term = Term::from_field_text( + Searcher::schema().get_field("instance").unwrap(), + &token[pos + 1..], + ); Box::new(BooleanQuery::from(vec![ - (Occur::Must, Box::new(TermQuery::new(user_term, if field_name=="author" { IndexRecordOption::Basic } - else { IndexRecordOption::WithFreqsAndPositions } - )) as Box), - (Occur::Must, Box::new(TermQuery::new(instance_term, IndexRecordOption::Basic))), + ( + Occur::Must, + Box::new(TermQuery::new( + user_term, + if field_name == "author" { + IndexRecordOption::Basic + } else { + IndexRecordOption::WithFreqsAndPositions + }, + )) as Box, + ), + ( + Occur::Must, + Box::new(TermQuery::new(instance_term, IndexRecordOption::Basic)), + ), ])) - } else if token.contains(' ') { // phrase query + } else if token.contains(' ') { + // phrase query match field_name { - "instance" | "author" | "tag" => // phrase query are not available on these fields, treat it as multiple Term queries - Box::new(BooleanQuery::from(token.split_whitespace() - .map(|token| { - let term = Term::from_field_text(field, token); - (Occur::Should, Box::new(TermQuery::new(term, IndexRecordOption::Basic)) - as Box) - }) - .collect::>())), - _ => Box::new(PhraseQuery::new(token.split_whitespace() - .map(|token| Term::from_field_text(field, token)) - .collect())) + "instance" | "author" | 
"tag" => + // phrase query are not available on these fields, treat it as multiple Term queries + { + Box::new(BooleanQuery::from( + token + .split_whitespace() + .map(|token| { + let term = Term::from_field_text(field, token); + ( + Occur::Should, + Box::new(TermQuery::new(term, IndexRecordOption::Basic)) + as Box, + ) + }) + .collect::>(), + )) + } + _ => Box::new(PhraseQuery::new( + token + .split_whitespace() + .map(|token| Term::from_field_text(field, token)) + .collect(), + )), } - } else { // Term Query + } else { + // Term Query let term = Term::from_field_text(field, token); let index_option = match field_name { "instance" | "author" | "tag" => IndexRecordOption::Basic, @@ -306,7 +347,6 @@ impl PlumeQuery { } impl std::str::FromStr for PlumeQuery { - type Err = !; /// Create a new Query from &str @@ -340,7 +380,7 @@ impl ToString for PlumeQuery { instance, author, blog, lang, license; date: before, after); - result.pop();// remove trailing ' ' + result.pop(); // remove trailing ' ' result } } diff --git a/plume-models/src/search/searcher.rs b/plume-models/src/search/searcher.rs index 295626e4..2fa6fc22 100644 --- a/plume-models/src/search/searcher.rs +++ b/plume-models/src/search/searcher.rs @@ -5,15 +5,14 @@ use Connection; use chrono::Datelike; use itertools::Itertools; +use std::{cmp, fs::create_dir_all, path::Path, sync::Mutex}; use tantivy::{ - collector::TopDocs, directory::MmapDirectory, - schema::*, tokenizer::*, Index, IndexWriter, Term + collector::TopDocs, directory::MmapDirectory, schema::*, tokenizer::*, Index, IndexWriter, Term, }; use whatlang::{detect as detect_lang, Lang}; -use std::{cmp, fs::create_dir_all, path::Path, sync::Mutex}; -use search::query::PlumeQuery; use super::tokenizer; +use search::query::PlumeQuery; use Result; #[derive(Debug)] @@ -31,20 +30,23 @@ pub struct Searcher { impl Searcher { pub fn schema() -> Schema { - let tag_indexing = TextOptions::default() - .set_indexing_options(TextFieldIndexing::default() - .set_tokenizer("whitespace_tokenizer") - .set_index_option(IndexRecordOption::Basic)); + let tag_indexing = TextOptions::default().set_indexing_options( + TextFieldIndexing::default() + .set_tokenizer("whitespace_tokenizer") + .set_index_option(IndexRecordOption::Basic), + ); - let content_indexing = TextOptions::default() - .set_indexing_options(TextFieldIndexing::default() - .set_tokenizer("content_tokenizer") - .set_index_option(IndexRecordOption::WithFreqsAndPositions)); + let content_indexing = TextOptions::default().set_indexing_options( + TextFieldIndexing::default() + .set_tokenizer("content_tokenizer") + .set_index_option(IndexRecordOption::WithFreqsAndPositions), + ); - let property_indexing = TextOptions::default() - .set_indexing_options(TextFieldIndexing::default() - .set_tokenizer("property_tokenizer") - .set_index_option(IndexRecordOption::WithFreqsAndPositions)); + let property_indexing = TextOptions::default().set_indexing_options( + TextFieldIndexing::default() + .set_tokenizer("property_tokenizer") + .set_index_option(IndexRecordOption::WithFreqsAndPositions), + ); let mut schema_builder = SchemaBuilder::default(); @@ -66,56 +68,65 @@ impl Searcher { schema_builder.build() } - pub fn create(path: &AsRef) -> Result { - let whitespace_tokenizer = tokenizer::WhitespaceTokenizer - .filter(LowerCaser); + let whitespace_tokenizer = tokenizer::WhitespaceTokenizer.filter(LowerCaser); let content_tokenizer = SimpleTokenizer .filter(RemoveLongFilter::limit(40)) .filter(LowerCaser); - let property_tokenizer = NgramTokenizer::new(2, 
8, false) - .filter(LowerCaser); + let property_tokenizer = NgramTokenizer::new(2, 8, false).filter(LowerCaser); let schema = Self::schema(); create_dir_all(path).map_err(|_| SearcherError::IndexCreationError)?; - let index = Index::create(MmapDirectory::open(path).map_err(|_| SearcherError::IndexCreationError)?, schema).map_err(|_| SearcherError::IndexCreationError)?; + let index = Index::create( + MmapDirectory::open(path).map_err(|_| SearcherError::IndexCreationError)?, + schema, + ) + .map_err(|_| SearcherError::IndexCreationError)?; { let tokenizer_manager = index.tokenizers(); tokenizer_manager.register("whitespace_tokenizer", whitespace_tokenizer); tokenizer_manager.register("content_tokenizer", content_tokenizer); tokenizer_manager.register("property_tokenizer", property_tokenizer); - }//to please the borrow checker + } //to please the borrow checker Ok(Self { - writer: Mutex::new(Some(index.writer(50_000_000).map_err(|_| SearcherError::WriteLockAcquisitionError)?)), - index + writer: Mutex::new(Some( + index + .writer(50_000_000) + .map_err(|_| SearcherError::WriteLockAcquisitionError)?, + )), + index, }) } pub fn open(path: &AsRef) -> Result { - let whitespace_tokenizer = tokenizer::WhitespaceTokenizer - .filter(LowerCaser); + let whitespace_tokenizer = tokenizer::WhitespaceTokenizer.filter(LowerCaser); let content_tokenizer = SimpleTokenizer .filter(RemoveLongFilter::limit(40)) .filter(LowerCaser); - let property_tokenizer = NgramTokenizer::new(2, 8, false) - .filter(LowerCaser); + let property_tokenizer = NgramTokenizer::new(2, 8, false).filter(LowerCaser); - let index = Index::open(MmapDirectory::open(path).map_err(|_| SearcherError::IndexOpeningError)?).map_err(|_| SearcherError::IndexOpeningError)?; + let index = + Index::open(MmapDirectory::open(path).map_err(|_| SearcherError::IndexOpeningError)?) + .map_err(|_| SearcherError::IndexOpeningError)?; { let tokenizer_manager = index.tokenizers(); tokenizer_manager.register("whitespace_tokenizer", whitespace_tokenizer); tokenizer_manager.register("content_tokenizer", content_tokenizer); tokenizer_manager.register("property_tokenizer", property_tokenizer); - }//to please the borrow checker - let mut writer = index.writer(50_000_000).map_err(|_| SearcherError::WriteLockAcquisitionError)?; - writer.garbage_collect_files().map_err(|_| SearcherError::IndexEditionError)?; + } //to please the borrow checker + let mut writer = index + .writer(50_000_000) + .map_err(|_| SearcherError::WriteLockAcquisitionError)?; + writer + .garbage_collect_files() + .map_err(|_| SearcherError::IndexEditionError)?; Ok(Self { writer: Mutex::new(Some(writer)), index, @@ -173,18 +184,24 @@ impl Searcher { self.add_document(conn, post) } - pub fn search_document(&self, conn: &Connection, query: PlumeQuery, (min, max): (i32, i32)) -> Vec{ + pub fn search_document( + &self, + conn: &Connection, + query: PlumeQuery, + (min, max): (i32, i32), + ) -> Vec { let schema = self.index.schema(); let post_id = schema.get_field("post_id").unwrap(); - let collector = TopDocs::with_limit(cmp::max(1,max) as usize); + let collector = TopDocs::with_limit(cmp::max(1, max) as usize); let searcher = self.index.searcher(); let res = searcher.search(&query.into_query(), &collector).unwrap(); - res.get(min as usize..).unwrap_or(&[]) + res.get(min as usize..) 
+ .unwrap_or(&[]) .iter() - .filter_map(|(_,doc_add)| { + .filter_map(|(_, doc_add)| { let doc = searcher.doc(*doc_add).ok()?; let id = doc.get_first(post_id)?; Post::get(conn, id.i64_value() as i32).ok() diff --git a/plume-models/src/tags.rs b/plume-models/src/tags.rs index 1b4f412b..5711853c 100644 --- a/plume-models/src/tags.rs +++ b/plume-models/src/tags.rs @@ -38,7 +38,12 @@ impl Tag { Ok(ht) } - pub fn from_activity(conn: &Connection, tag: &Hashtag, post: i32, is_hashtag: bool) -> Result { + pub fn from_activity( + conn: &Connection, + tag: &Hashtag, + post: i32, + is_hashtag: bool, + ) -> Result { Tag::insert( conn, NewTag { diff --git a/plume-models/src/users.rs b/plume-models/src/users.rs index f2510499..b0b5b677 100644 --- a/plume-models/src/users.rs +++ b/plume-models/src/users.rs @@ -1,6 +1,5 @@ use activitypub::{ - actor::Person, collection::OrderedCollection, object::Image, Activity, CustomObject, - Endpoint, + actor::Person, collection::OrderedCollection, object::Image, Activity, CustomObject, Endpoint, }; use bcrypt; use chrono::{NaiveDateTime, Utc}; @@ -27,7 +26,10 @@ use rocket::{ request::{self, FromRequest, Request}, }; use serde_json; -use std::{cmp::PartialEq, hash::{Hash, Hasher}}; +use std::{ + cmp::PartialEq, + hash::{Hash, Hasher}, +}; use url::Url; use webfinger::*; @@ -41,7 +43,7 @@ use posts::Post; use safe_string::SafeString; use schema::users; use search::Searcher; -use {ap_url, Connection, BASE_URL, USE_HTTPS, Error, Result}; +use {ap_url, Connection, Error, Result, BASE_URL, USE_HTTPS}; pub type CustomPerson = CustomObject; @@ -97,42 +99,24 @@ impl User { insert!(users, NewUser, |inserted, conn| { let instance = inserted.get_instance(conn)?; if inserted.outbox_url.is_empty() { - inserted.outbox_url = instance.compute_box( - USER_PREFIX, - &inserted.username, - "outbox", - ); + inserted.outbox_url = instance.compute_box(USER_PREFIX, &inserted.username, "outbox"); } if inserted.inbox_url.is_empty() { - inserted.inbox_url = instance.compute_box( - USER_PREFIX, - &inserted.username, - "inbox", - ); + inserted.inbox_url = instance.compute_box(USER_PREFIX, &inserted.username, "inbox"); } if inserted.ap_url.is_empty() { - inserted.ap_url = instance.compute_box( - USER_PREFIX, - &inserted.username, - "", - ); + inserted.ap_url = instance.compute_box(USER_PREFIX, &inserted.username, ""); } if inserted.shared_inbox_url.is_none() { - inserted.shared_inbox_url = Some(ap_url(&format!( - "{}/inbox", - instance.public_domain - ))); + inserted.shared_inbox_url = Some(ap_url(&format!("{}/inbox", instance.public_domain))); } if inserted.followers_endpoint.is_empty() { - inserted.followers_endpoint = instance.compute_box( - USER_PREFIX, - &inserted.username, - "followers", - ); + inserted.followers_endpoint = + instance.compute_box(USER_PREFIX, &inserted.username, "followers"); } if inserted.fqn.is_empty() { @@ -162,7 +146,8 @@ impl User { for blog in Blog::find_for_author(conn, self)? .iter() - .filter(|b| b.count_authors(conn).map(|c| c <= 1).unwrap_or(false)) { + .filter(|b| b.count_authors(conn).map(|c| c <= 1).unwrap_or(false)) + { blog.delete(conn, searcher)?; } // delete the posts if they is the only author @@ -180,10 +165,10 @@ impl User { .count() .load(conn)? .first() - .unwrap_or(&0) > &0; + .unwrap_or(&0) + > &0; if !has_other_authors { - Post::get(conn, post_id)? 
- .delete(&(conn, searcher))?; + Post::get(conn, post_id)?.delete(&(conn, searcher))?; } } @@ -213,12 +198,18 @@ impl User { .map_err(Error::from) } - pub fn update(&self, conn: &Connection, name: String, email: String, summary: String) -> Result { + pub fn update( + &self, + conn: &Connection, + name: String, + email: String, + summary: String, + ) -> Result { diesel::update(self) .set(( users::display_name.eq(name), users::email.eq(email), - users::summary_html.eq(utils::md_to_html(&summary,"").0), + users::summary_html.eq(utils::md_to_html(&summary, "").0), users::summary.eq(summary), )) .execute(conn)?; @@ -278,16 +269,13 @@ impl User { } pub fn fetch_from_url(conn: &Connection, url: &str) -> Result { - User::fetch(url).and_then(|json| User::from_activity( - conn, - &json, - Url::parse(url)?.host_str()?, - )) + User::fetch(url) + .and_then(|json| User::from_activity(conn, &json, Url::parse(url)?.host_str()?)) } fn from_activity(conn: &Connection, acct: &CustomPerson, inst: &str) -> Result { - let instance = Instance::find_by_domain(conn, inst) - .or_else(|_| Instance::insert( + let instance = Instance::find_by_domain(conn, inst).or_else(|_| { + Instance::insert( conn, NewInstance { name: inst.to_owned(), @@ -301,9 +289,15 @@ impl User { short_description_html: String::new(), long_description_html: String::new(), }, - ))?; + ) + })?; - if acct.object.ap_actor_props.preferred_username_string()?.contains(&['<', '>', '&', '@', '\'', '"'][..]) { + if acct + .object + .ap_actor_props + .preferred_username_string()? + .contains(&['<', '>', '&', '@', '\'', '"'][..]) + { return Err(Error::InvalidValue); } let user = User::insert( @@ -314,20 +308,11 @@ impl User { .ap_actor_props .preferred_username_string() .unwrap(), - display_name: acct - .object - .object_props - .name_string()?, - outbox_url: acct - .object - .ap_actor_props - .outbox_string()?, - inbox_url: acct - .object - .ap_actor_props - .inbox_string()?, + display_name: acct.object.object_props.name_string()?, + outbox_url: acct.object.ap_actor_props.outbox_string()?, + inbox_url: acct.object.ap_actor_props.inbox_string()?, is_admin: false, - summary:acct + summary: acct .object .object_props .summary_string() @@ -342,10 +327,7 @@ impl User { email: None, hashed_password: None, instance_id: instance.id, - ap_url: acct - .object - .object_props - .id_string()?, + ap_url: acct.object.object_props.id_string()?, public_key: acct .custom_props .public_key_publickey()? 
@@ -357,10 +339,7 @@ impl User { .endpoints_endpoint() .and_then(|e| e.shared_inbox_string()) .ok(), - followers_endpoint: acct - .object - .ap_actor_props - .followers_string()?, + followers_endpoint: acct.object.ap_actor_props.followers_string()?, avatar_id: None, }, )?; @@ -392,26 +371,15 @@ impl User { .object_props .url_string()?, &self, - ).ok(); + ) + .ok(); diesel::update(self) .set(( - users::username.eq(json - .object - .ap_actor_props - .preferred_username_string()?), - users::display_name.eq(json - .object - .object_props - .name_string()?), - users::outbox_url.eq(json - .object - .ap_actor_props - .outbox_string()?), - users::inbox_url.eq(json - .object - .ap_actor_props - .inbox_string()?), + users::username.eq(json.object.ap_actor_props.preferred_username_string()?), + users::display_name.eq(json.object.object_props.name_string()?), + users::outbox_url.eq(json.object.ap_actor_props.outbox_string()?), + users::inbox_url.eq(json.object.ap_actor_props.inbox_string()?), users::summary.eq(SafeString::new( &json .object @@ -419,10 +387,7 @@ impl User { .summary_string() .unwrap_or_default(), )), - users::followers_endpoint.eq(json - .object - .ap_actor_props - .followers_string()?), + users::followers_endpoint.eq(json.object.ap_actor_props.followers_string()?), users::avatar_id.eq(avatar.map(|a| a.id)), users::last_fetched_date.eq(Utc::now().naive_utc()), users::public_key.eq(json @@ -441,7 +406,8 @@ impl User { } pub fn auth(&self, pass: &str) -> bool { - self.hashed_password.clone() + self.hashed_password + .clone() .map(|hashed| bcrypt::verify(pass, hashed.as_ref()).unwrap_or(false)) .unwrap_or(false) } @@ -468,8 +434,7 @@ impl User { let n_acts = acts.len(); let mut coll = OrderedCollection::default(); coll.collection_props.items = serde_json::to_value(acts)?; - coll.collection_props - .set_total_items_u64(n_acts as u64)?; + coll.collection_props.set_total_items_u64(n_acts as u64)?; Ok(ActivityStream::new(coll)) } @@ -483,12 +448,11 @@ impl User { .into_iter() .collect::>() .join(", "), - )? + )?, ) .send()?; let text = &res.text()?; - let json: serde_json::Value = - serde_json::from_str(text)?; + let json: serde_json::Value = serde_json::from_str(text)?; Ok(json["items"] .as_array() .unwrap_or(&vec![]) @@ -507,7 +471,7 @@ impl User { .into_iter() .collect::>() .join(", "), - )? + )?, ) .send()?; let text = &res.text()?; @@ -531,7 +495,9 @@ impl User { Ok(posts .into_iter() .filter_map(|p| { - p.create_activity(conn).ok().and_then(|a| serde_json::to_value(a).ok()) + p.create_activity(conn) + .ok() + .and_then(|a| serde_json::to_value(a).ok()) }) .collect::>()) } @@ -555,7 +521,11 @@ impl User { .map_err(Error::from) } - pub fn get_followers_page(&self, conn: &Connection, (min, max): (i32, i32)) -> Result> { + pub fn get_followers_page( + &self, + conn: &Connection, + (min, max): (i32, i32), + ) -> Result> { use schema::follows; let follows = Follow::belonging_to(self).select(follows::follower_id); users::table @@ -584,7 +554,11 @@ impl User { .map_err(Error::from) } - pub fn get_followed_page(&self, conn: &Connection, (min, max): (i32, i32)) -> Result> { + pub fn get_followed_page( + &self, + conn: &Connection, + (min, max): (i32, i32), + ) -> Result> { use schema::follows; let follows = follows::table .filter(follows::follower_id.eq(self.id)) @@ -653,33 +627,32 @@ impl User { } pub fn get_keypair(&self) -> Result> { - PKey::from_rsa( - Rsa::private_key_from_pem( - self.private_key - .clone()? 
- .as_ref(), - )?, - ).map_err(Error::from) + PKey::from_rsa(Rsa::private_key_from_pem( + self.private_key.clone()?.as_ref(), + )?) + .map_err(Error::from) } pub fn rotate_keypair(&self, conn: &Connection) -> Result> { if self.private_key.is_none() { - return Err(Error::InvalidValue) + return Err(Error::InvalidValue); } if (Utc::now().naive_utc() - self.last_fetched_date).num_minutes() < 10 { //rotated recently self.get_keypair() } else { let (public_key, private_key) = gen_keypair(); - let public_key = String::from_utf8(public_key).expect("NewUser::new_local: public key error"); - let private_key = String::from_utf8(private_key).expect("NewUser::new_local: private key error"); - let res = PKey::from_rsa( - Rsa::private_key_from_pem(private_key.as_ref())? - )?; + let public_key = + String::from_utf8(public_key).expect("NewUser::new_local: public key error"); + let private_key = + String::from_utf8(private_key).expect("NewUser::new_local: private key error"); + let res = PKey::from_rsa(Rsa::private_key_from_pem(private_key.as_ref())?)?; diesel::update(self) - .set((users::public_key.eq(public_key), + .set(( + users::public_key.eq(public_key), users::private_key.eq(Some(private_key)), - users::last_fetched_date.eq(Utc::now().naive_utc()))) + users::last_fetched_date.eq(Utc::now().naive_utc()), + )) .execute(conn) .map_err(Error::from) .map(|_| res) @@ -688,18 +661,14 @@ impl User { pub fn to_activity(&self, conn: &Connection) -> Result { let mut actor = Person::default(); - actor - .object_props - .set_id_string(self.ap_url.clone())?; + actor.object_props.set_id_string(self.ap_url.clone())?; actor .object_props .set_name_string(self.display_name.clone())?; actor .object_props .set_summary_string(self.summary_html.get().clone())?; - actor - .object_props - .set_url_string(self.ap_url.clone())?; + actor.object_props.set_url_string(self.ap_url.clone())?; actor .ap_actor_props .set_inbox_string(self.inbox_url.clone())?; @@ -714,42 +683,31 @@ impl User { .set_followers_string(self.followers_endpoint.clone())?; let mut endpoints = Endpoint::default(); - endpoints - .set_shared_inbox_string(ap_url(&format!("{}/inbox/", BASE_URL.as_str())))?; - actor - .ap_actor_props - .set_endpoints_endpoint(endpoints)?; + endpoints.set_shared_inbox_string(ap_url(&format!("{}/inbox/", BASE_URL.as_str())))?; + actor.ap_actor_props.set_endpoints_endpoint(endpoints)?; let mut public_key = PublicKey::default(); - public_key - .set_id_string(format!("{}#main-key", self.ap_url))?; - public_key - .set_owner_string(self.ap_url.clone())?; - public_key - .set_public_key_pem_string(self.public_key.clone())?; + public_key.set_id_string(format!("{}#main-key", self.ap_url))?; + public_key.set_owner_string(self.ap_url.clone())?; + public_key.set_public_key_pem_string(self.public_key.clone())?; let mut ap_signature = ApSignature::default(); - ap_signature - .set_public_key_publickey(public_key)?; + ap_signature.set_public_key_publickey(public_key)?; let mut avatar = Image::default(); - avatar - .object_props - .set_url_string( - self.avatar_id - .and_then(|id| Media::get(conn, id).and_then(|m| m.url(conn)).ok()) - .unwrap_or_default(), - )?; - actor - .object_props - .set_icon_object(avatar)?; + avatar.object_props.set_url_string( + self.avatar_id + .and_then(|id| Media::get(conn, id).and_then(|m| m.url(conn)).ok()) + .unwrap_or_default(), + )?; + actor.object_props.set_icon_object(avatar)?; Ok(CustomPerson::new(actor, ap_signature)) } pub fn avatar_url(&self, conn: &Connection) -> String { - self.avatar_id.and_then(|id| - 
Media::get(conn, id).and_then(|m| m.url(conn)).ok() - ).unwrap_or_else(|| "/static/default-avatar.png".to_string()) + self.avatar_id + .and_then(|id| Media::get(conn, id).and_then(|m| m.url(conn)).ok()) + .unwrap_or_else(|| "/static/default-avatar.png".to_string()) } pub fn webfinger(&self, conn: &Connection) -> Result { @@ -866,21 +824,15 @@ impl Signer for User { fn sign(&self, to_sign: &str) -> Result> { let key = self.get_keypair()?; let mut signer = sign::Signer::new(MessageDigest::sha256(), &key)?; - signer - .update(to_sign.as_bytes())?; - signer - .sign_to_vec() - .map_err(Error::from) + signer.update(to_sign.as_bytes())?; + signer.sign_to_vec().map_err(Error::from) } fn verify(&self, data: &str, signature: &[u8]) -> Result { let key = PKey::from_rsa(Rsa::public_key_from_pem(self.public_key.as_ref())?)?; let mut verifier = sign::Verifier::new(MessageDigest::sha256(), &key)?; - verifier - .update(data.as_bytes())?; - verifier - .verify(&signature) - .map_err(Error::from) + verifier.update(data.as_bytes())?; + verifier.verify(&signature).map_err(Error::from) } } @@ -915,7 +867,7 @@ impl NewUser { display_name, is_admin, summary: summary.to_owned(), - summary_html: SafeString::new(&utils::md_to_html(&summary,"").0), + summary_html: SafeString::new(&utils::md_to_html(&summary, "").0), email: Some(email), hashed_password: Some(password), instance_id: Instance::get_local(conn)?.id, @@ -947,7 +899,8 @@ pub(crate) mod tests { "Hello there, I'm the admin", "admin@example.com".to_owned(), "invalid_admin_password".to_owned(), - ).unwrap(); + ) + .unwrap(); let user = NewUser::new_local( conn, "user".to_owned(), @@ -956,7 +909,8 @@ pub(crate) mod tests { "Hello there, I'm no one", "user@example.com".to_owned(), "invalid_user_password".to_owned(), - ).unwrap(); + ) + .unwrap(); let other = NewUser::new_local( conn, "other".to_owned(), @@ -965,8 +919,9 @@ pub(crate) mod tests { "Hello there, I'm someone else", "other@example.com".to_owned(), "invalid_other_password".to_owned(), - ).unwrap(); - vec![ admin, user, other ] + ) + .unwrap(); + vec![admin, user, other] } #[test] @@ -982,7 +937,8 @@ pub(crate) mod tests { "Hello I'm a test", "test@example.com".to_owned(), User::hash_pass("test_password").unwrap(), - ).unwrap(); + ) + .unwrap(); assert_eq!( test_user.id, @@ -996,9 +952,7 @@ pub(crate) mod tests { ); assert_eq!( test_user.id, - User::find_by_email(conn, "test@example.com") - .unwrap() - .id + User::find_by_email(conn, "test@example.com").unwrap().id ); assert_eq!( test_user.id, @@ -1009,8 +963,9 @@ pub(crate) mod tests { Instance::get_local(conn).unwrap().public_domain, "test" ) - ).unwrap() - .id + ) + .unwrap() + .id ); Ok(()) @@ -1040,7 +995,11 @@ pub(crate) mod tests { let mut i = 0; while local_inst.has_admin(conn).unwrap() { assert!(i < 100); //prevent from looping indefinitelly - local_inst.main_admin(conn).unwrap().revoke_admin_rights(conn).unwrap(); + local_inst + .main_admin(conn) + .unwrap() + .revoke_admin_rights(conn) + .unwrap(); i += 1; } inserted[0].grant_admin_rights(conn).unwrap(); @@ -1055,12 +1014,14 @@ pub(crate) mod tests { let conn = &db(); conn.test_transaction::<_, (), _>(|| { let inserted = fill_database(conn); - let updated = inserted[0].update( - conn, - "new name".to_owned(), - "em@il".to_owned(), - "
<p>summary</p>\n
".to_owned(), - ).unwrap(); + let updated = inserted[0] + .update( + conn, + "new name".to_owned(), + "em@il".to_owned(), + "
<p>summary</p>\n
".to_owned(), + ) + .unwrap(); assert_eq!(updated.display_name, "new name"); assert_eq!(updated.email.unwrap(), "em@il"); assert_eq!(updated.summary_html.get(), "
<p>summary</p>\n
"); @@ -1082,7 +1043,8 @@ pub(crate) mod tests { "Hello I'm a test", "test@example.com".to_owned(), User::hash_pass("test_password").unwrap(), - ).unwrap(); + ) + .unwrap(); assert!(test_user.auth("test_password")); assert!(!test_user.auth("other_password")); @@ -1101,7 +1063,9 @@ pub(crate) mod tests { assert_eq!(page.len(), 2); assert!(page[0].username <= page[1].username); - let mut last_username = User::get_local_page(conn, (0, 1)).unwrap()[0].username.clone(); + let mut last_username = User::get_local_page(conn, (0, 1)).unwrap()[0] + .username + .clone(); for i in 1..User::count_local(conn).unwrap() as i32 { let page = User::get_local_page(conn, (i, i + 1)).unwrap(); assert_eq!(page.len(), 1); @@ -1109,7 +1073,9 @@ pub(crate) mod tests { last_username = page[0].username.clone(); } assert_eq!( - User::get_local_page(conn, (0, User::count_local(conn).unwrap() as i32 + 10)).unwrap().len() as i64, + User::get_local_page(conn, (0, User::count_local(conn).unwrap() as i32 + 10)) + .unwrap() + .len() as i64, User::count_local(conn).unwrap() ); diff --git a/src/api/apps.rs b/src/api/apps.rs index 808636f1..3b24fd9c 100644 --- a/src/api/apps.rs +++ b/src/api/apps.rs @@ -3,11 +3,7 @@ use rocket_contrib::json::Json; use serde_json; use plume_api::apps::AppEndpoint; -use plume_models::{ - Connection, - db_conn::DbConn, - apps::App, -}; +use plume_models::{apps::App, db_conn::DbConn, Connection}; #[post("/apps", data = "")] pub fn create(conn: DbConn, data: Json) -> Json { diff --git a/src/api/authorization.rs b/src/api/authorization.rs index 7d5afc0b..8a23ada5 100644 --- a/src/api/authorization.rs +++ b/src/api/authorization.rs @@ -1,10 +1,10 @@ +use plume_models::{self, api_tokens::ApiToken}; use rocket::{ - Outcome, http::Status, - request::{self, FromRequest, Request} + request::{self, FromRequest, Request}, + Outcome, }; use std::marker::PhantomData; -use plume_models::{self, api_tokens::ApiToken}; // Actions pub trait Action { @@ -33,22 +33,25 @@ impl Scope for plume_models::posts::Post { } } -pub struct Authorization (pub ApiToken, PhantomData<(A, S)>); +pub struct Authorization(pub ApiToken, PhantomData<(A, S)>); impl<'a, 'r, A, S> FromRequest<'a, 'r> for Authorization -where A: Action, - S: Scope +where + A: Action, + S: Scope, { type Error = (); fn from_request(request: &'a Request<'r>) -> request::Outcome, ()> { - request.guard::() + request + .guard::() .map_failure(|_| (Status::Unauthorized, ())) - .and_then(|token| if token.can(A::to_str(), S::to_str()) { - Outcome::Success(Authorization(token, PhantomData)) - } else { - Outcome::Failure((Status::Unauthorized, ())) + .and_then(|token| { + if token.can(A::to_str(), S::to_str()) { + Outcome::Success(Authorization(token, PhantomData)) + } else { + Outcome::Failure((Status::Unauthorized, ())) + } }) } } - diff --git a/src/api/mod.rs b/src/api/mod.rs index 25a9d653..bc83ff99 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -1,16 +1,13 @@ #![warn(clippy::too_many_arguments)] -use rocket::{response::{self, Responder}, request::{Form, Request}}; +use rocket::{ + request::{Form, Request}, + response::{self, Responder}, +}; use rocket_contrib::json::Json; use serde_json; use plume_common::utils::random_hex; -use plume_models::{ - Error, - apps::App, - api_tokens::*, - db_conn::DbConn, - users::User, -}; +use plume_models::{api_tokens::*, apps::App, db_conn::DbConn, users::User, Error}; #[derive(Debug)] pub struct ApiError(Error); @@ -26,13 +23,16 @@ impl<'r> Responder<'r> for ApiError { match self.0 { Error::NotFound => Json(json!({ "error": 
"Not found" - })).respond_to(req), + })) + .respond_to(req), Error::Unauthorized => Json(json!({ "error": "You are not authorized to access this resource" - })).respond_to(req), + })) + .respond_to(req), _ => Json(json!({ "error": "Server error" - })).respond_to(req) + })) + .respond_to(req), } } } @@ -52,12 +52,15 @@ pub fn oauth(query: Form, conn: DbConn) -> Result")] -pub fn get(id: i32, conn: DbConn, worker: Worker, auth: Option>, search: Searcher) -> Json { - let post = )>> - ::get(&(&*conn, &worker, &search, auth.map(|a| a.0.user_id)), id).ok(); +pub fn get( + id: i32, + conn: DbConn, + worker: Worker, + auth: Option>, + search: Searcher, +) -> Json { + let post = , + )>>::get(&(&*conn, &worker, &search, auth.map(|a| a.0.user_id)), id) + .ok(); Json(json!(post)) } #[get("/posts")] -pub fn list(conn: DbConn, uri: &Origin, worker: Worker, auth: Option>, search: Searcher) -> Json { - let query: PostEndpoint = serde_qs::from_str(uri.query().unwrap_or("")).expect("api::list: invalid query error"); - let post = )>> - ::list(&(&*conn, &worker, &search, auth.map(|a| a.0.user_id)), query); +pub fn list( + conn: DbConn, + uri: &Origin, + worker: Worker, + auth: Option>, + search: Searcher, +) -> Json { + let query: PostEndpoint = + serde_qs::from_str(uri.query().unwrap_or("")).expect("api::list: invalid query error"); + let post = , + )>>::list( + &(&*conn, &worker, &search, auth.map(|a| a.0.user_id)), + query, + ); Json(json!(post)) } #[post("/posts", data = "")] -pub fn create(conn: DbConn, payload: Json, worker: Worker, auth: Authorization, search: Searcher) -> Json { - let new_post = )>> - ::create(&(&*conn, &worker, &search, Some(auth.0.user_id)), (*payload).clone()); - Json(new_post.map(|p| json!(p)).unwrap_or_else(|e| json!({ - "error": "Invalid data, couldn't create new post", - "details": match e { - ApiError::Fetch(msg) => msg, - ApiError::SerDe(msg) => msg, - ApiError::NotFound(msg) => msg, - ApiError::Authorization(msg) => msg, - } - }))) +pub fn create( + conn: DbConn, + payload: Json, + worker: Worker, + auth: Authorization, + search: Searcher, +) -> Json { + let new_post = , + )>>::create( + &(&*conn, &worker, &search, Some(auth.0.user_id)), + (*payload).clone(), + ); + Json(new_post.map(|p| json!(p)).unwrap_or_else(|e| { + json!({ + "error": "Invalid data, couldn't create new post", + "details": match e { + ApiError::Fetch(msg) => msg, + ApiError::SerDe(msg) => msg, + ApiError::NotFound(msg) => msg, + ApiError::Authorization(msg) => msg, + } + }) + })) } - diff --git a/src/inbox.rs b/src/inbox.rs index 8eb5898b..e61e2801 100644 --- a/src/inbox.rs +++ b/src/inbox.rs @@ -1,23 +1,10 @@ #![warn(clippy::too_many_arguments)] use activitypub::{ - activity::{ - Announce, - Create, - Delete, - Follow as FollowAct, - Like, - Undo, - Update - }, - object::Tombstone + activity::{Announce, Create, Delete, Follow as FollowAct, Like, Undo, Update}, + object::Tombstone, }; use failure::Error; -use rocket::{ - data::*, - http::Status, - Outcome::*, - Request, -}; +use rocket::{data::*, http::Status, Outcome::*, Request}; use rocket_contrib::json::*; use serde::Deserialize; use serde_json; @@ -26,15 +13,21 @@ use std::io::Read; use plume_common::activity_pub::{ inbox::{Deletable, FromActivity, InboxError, Notify}, - Id,request::Digest, + request::Digest, + Id, }; use plume_models::{ comments::Comment, follows::Follow, instance::Instance, likes, posts::Post, reshares::Reshare, - users::User, search::Searcher, Connection, + search::Searcher, users::User, Connection, }; pub trait Inbox { - fn 
received(&self, conn: &Connection, searcher: &Searcher, act: serde_json::Value) -> Result<(), Error> { + fn received( + &self, + conn: &Connection, + searcher: &Searcher, + act: serde_json::Value, + ) -> Result<(), Error> { let actor_id = Id::new(act["actor"].as_str().unwrap_or_else(|| { act["actor"]["id"] .as_str() @@ -66,7 +59,8 @@ pub trait Inbox { .id_string()?, actor_id.as_ref(), &(conn, searcher), - ).ok(); + ) + .ok(); Comment::delete_id( &act.delete_props .object_object::()? @@ -74,12 +68,14 @@ pub trait Inbox { .id_string()?, actor_id.as_ref(), conn, - ).ok(); + ) + .ok(); Ok(()) } "Follow" => { Follow::from_activity(conn, serde_json::from_value(act.clone())?, actor_id) - .and_then(|f| f.notify(conn)).expect("Inbox::received: follow from activity error");; + .and_then(|f| f.notify(conn)) + .expect("Inbox::received: follow from activity error");; Ok(()) } "Like" => { @@ -87,7 +83,8 @@ pub trait Inbox { conn, serde_json::from_value(act.clone())?, actor_id, - ).expect("Inbox::received: like from activity error");; + ) + .expect("Inbox::received: like from activity error");; Ok(()) } "Undo" => { @@ -102,7 +99,8 @@ pub trait Inbox { .id_string()?, actor_id.as_ref(), conn, - ).expect("Inbox::received: undo like fail");; + ) + .expect("Inbox::received: undo like fail");; Ok(()) } "Announce" => { @@ -113,7 +111,8 @@ pub trait Inbox { .id_string()?, actor_id.as_ref(), conn, - ).expect("Inbox::received: undo reshare fail");; + ) + .expect("Inbox::received: undo reshare fail");; Ok(()) } "Follow" => { @@ -124,21 +123,28 @@ pub trait Inbox { .id_string()?, actor_id.as_ref(), conn, - ).expect("Inbox::received: undo follow error");; + ) + .expect("Inbox::received: undo follow error");; Ok(()) } _ => Err(InboxError::CantUndo)?, } } else { - let link = act.undo_props.object.as_str().expect("Inbox::received: undo doesn't contain a type and isn't Link"); + let link = + act.undo_props.object.as_str().expect( + "Inbox::received: undo doesn't contain a type and isn't Link", + ); if let Ok(like) = likes::Like::find_by_ap_url(conn, link) { - likes::Like::delete_id(&like.ap_url, actor_id.as_ref(), conn).expect("Inbox::received: delete Like error"); + likes::Like::delete_id(&like.ap_url, actor_id.as_ref(), conn) + .expect("Inbox::received: delete Like error"); Ok(()) } else if let Ok(reshare) = Reshare::find_by_ap_url(conn, link) { - Reshare::delete_id(&reshare.ap_url, actor_id.as_ref(), conn).expect("Inbox::received: delete Announce error"); + Reshare::delete_id(&reshare.ap_url, actor_id.as_ref(), conn) + .expect("Inbox::received: delete Announce error"); Ok(()) } else if let Ok(follow) = Follow::find_by_ap_url(conn, link) { - Follow::delete_id(&follow.ap_url, actor_id.as_ref(), conn).expect("Inbox::received: delete Follow error"); + Follow::delete_id(&follow.ap_url, actor_id.as_ref(), conn) + .expect("Inbox::received: delete Follow error"); Ok(()) } else { Err(InboxError::NoType)? 
@@ -147,7 +153,8 @@ pub trait Inbox { } "Update" => { let act: Update = serde_json::from_value(act.clone())?; - Post::handle_update(conn, &act.update_props.object_object()?, searcher).expect("Inbox::received: post update error"); + Post::handle_update(conn, &act.update_props.object_object()?, searcher) + .expect("Inbox::received: post update error"); Ok(()) } _ => Err(InboxError::InvalidType)?, @@ -169,19 +176,25 @@ impl<'a, T: Deserialize<'a>> FromData<'a> for SignedJson { type Owned = String; type Borrowed = str; - fn transform(r: &Request, d: Data) -> Transform> { + fn transform( + r: &Request, + d: Data, + ) -> Transform> { let size_limit = r.limits().get("json").unwrap_or(JSON_LIMIT); let mut s = String::with_capacity(512); match d.open().take(size_limit).read_to_string(&mut s) { Ok(_) => Transform::Borrowed(Success(s)), - Err(e) => Transform::Borrowed(Failure((Status::BadRequest, JsonError::Io(e)))) + Err(e) => Transform::Borrowed(Failure((Status::BadRequest, JsonError::Io(e)))), } } - fn from_data(_: &Request, o: Transformed<'a, Self>) -> rocket::data::Outcome { + fn from_data( + _: &Request, + o: Transformed<'a, Self>, + ) -> rocket::data::Outcome { let string = o.borrowed()?; match serde_json::from_str(&string) { - Ok(v) => Success(SignedJson(Digest::from_body(&string),Json(v))), + Ok(v) => Success(SignedJson(Digest::from_body(&string), Json(v))), Err(e) => { if e.is_data() { Failure((Status::UnprocessableEntity, JsonError::Parse(string, e))) diff --git a/src/mail.rs b/src/mail.rs index 893927a6..42e1cebf 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -6,8 +6,8 @@ pub use self::mailer::*; #[cfg(feature = "debug-mailer")] mod mailer { - use lettre::{Transport, SendableEmail}; - use std::{io::Read}; + use lettre::{SendableEmail, Transport}; + use std::io::Read; pub struct DebugTransport; @@ -18,11 +18,18 @@ mod mailer { println!( "{}: from=<{}> to=<{:?}>\n{:#?}", email.message_id().to_string(), - email.envelope().from().map(ToString::to_string).unwrap_or_default(), + email + .envelope() + .from() + .map(ToString::to_string) + .unwrap_or_default(), email.envelope().to().to_vec(), { let mut message = String::new(); - email.message().read_to_string(&mut message).map_err(|_| ())?; + email + .message() + .read_to_string(&mut message) + .map_err(|_| ())?; message }, ); @@ -40,13 +47,12 @@ mod mailer { #[cfg(not(feature = "debug-mailer"))] mod mailer { use lettre::{ - SmtpTransport, - SmtpClient, smtp::{ authentication::{Credentials, Mechanism}, extension::ClientId, ConnectionReuseParameters, }, + SmtpClient, SmtpTransport, }; use std::env; @@ -57,7 +63,8 @@ mod mailer { let helo_name = env::var("MAIL_HELO_NAME").unwrap_or_else(|_| "localhost".to_owned()); let username = env::var("MAIL_USER").ok()?; let password = env::var("MAIL_PASSWORD").ok()?; - let mail = SmtpClient::new_simple(&server).unwrap() + let mail = SmtpClient::new_simple(&server) + .unwrap() .hello_name(ClientId::Domain(helo_name)) .credentials(Credentials::new(username, password)) .smtp_utf8(true) @@ -70,9 +77,17 @@ mod mailer { pub fn build_mail(dest: String, subject: String, body: String) -> Option { Email::builder() - .from(env::var("MAIL_ADDRESS") - .or_else(|_| Ok(format!("{}@{}", env::var("MAIL_USER")?, env::var("MAIL_SERVER")?)) as Result<_, env::VarError>) - .expect("Mail server is not correctly configured")) + .from( + env::var("MAIL_ADDRESS") + .or_else(|_| { + Ok(format!( + "{}@{}", + env::var("MAIL_USER")?, + env::var("MAIL_SERVER")? 
+ )) as Result<_, env::VarError> + }) + .expect("Mail server is not correctly configured"), + ) .to(dest) .subject(subject) .text(body) diff --git a/src/main.rs b/src/main.rs index 16c3a914..2d077cd5 100644 --- a/src/main.rs +++ b/src/main.rs @@ -39,16 +39,13 @@ extern crate validator_derive; extern crate webfinger; use diesel::r2d2::ConnectionManager; -use rocket::{ - Config, State, - config::Limits -}; -use rocket_csrf::CsrfFairingBuilder; use plume_models::{ - DATABASE_URL, Connection, Error, db_conn::{DbPool, PragmaForeignKey}, search::{Searcher as UnmanagedSearcher, SearcherError}, + Connection, Error, DATABASE_URL, }; +use rocket::{config::Limits, Config, State}; +use rocket_csrf::CsrfFairingBuilder; use scheduled_thread_pool::ScheduledThreadPool; use std::env; use std::process::exit; @@ -78,7 +75,8 @@ fn init_pool() -> Option { let manager = ConnectionManager::::new(DATABASE_URL.as_str()); DbPool::builder() .connection_customizer(Box::new(PragmaForeignKey)) - .build(manager).ok() + .build(manager) + .ok() } fn main() { @@ -89,37 +87,58 @@ fn main() { let searcher = match UnmanagedSearcher::open(&"search_index") { Err(Error::Search(e)) => match e { SearcherError::WriteLockAcquisitionError => panic!( -r#"Your search index is locked. Plume can't start. To fix this issue + r#"Your search index is locked. Plume can't start. To fix this issue make sure no other Plume instance is started, and run: plm search unlock Then try to restart Plume. -"#), - e => Err(e).unwrap() +"# + ), + e => Err(e).unwrap(), }, Err(_) => panic!("Unexpected error while opening search index"), - Ok(s) => Arc::new(s) + Ok(s) => Arc::new(s), }; let commiter = searcher.clone(); - workpool.execute_with_fixed_delay(Duration::from_secs(5), Duration::from_secs(60*30), move || commiter.commit()); + workpool.execute_with_fixed_delay( + Duration::from_secs(5), + Duration::from_secs(60 * 30), + move || commiter.commit(), + ); let search_unlocker = searcher.clone(); ctrlc::set_handler(move || { search_unlocker.drop_writer(); exit(0); - }).expect("Error setting Ctrl-c handler"); + }) + .expect("Error setting Ctrl-c handler"); let mut config = Config::active().unwrap(); - config.set_address(env::var("ROCKET_ADDRESS").unwrap_or_else(|_| "localhost".to_owned())).unwrap(); - config.set_port(env::var("ROCKET_PORT").ok().map(|s| s.parse::().unwrap()).unwrap_or(7878)); + config + .set_address(env::var("ROCKET_ADDRESS").unwrap_or_else(|_| "localhost".to_owned())) + .unwrap(); + config.set_port( + env::var("ROCKET_PORT") + .ok() + .map(|s| s.parse::().unwrap()) + .unwrap_or(7878), + ); let _ = env::var("ROCKET_SECRET_KEY").map(|k| config.set_secret_key(k).unwrap()); - let form_size = &env::var("FORM_SIZE").unwrap_or_else(|_| "32".to_owned()).parse::().unwrap(); - let activity_size = &env::var("ACTIVITY_SIZE").unwrap_or_else(|_| "1024".to_owned()).parse::().unwrap(); - config.set_limits(Limits::new() - .limit("forms", form_size * 1024) - .limit("json", activity_size * 1024)); + let form_size = &env::var("FORM_SIZE") + .unwrap_or_else(|_| "32".to_owned()) + .parse::() + .unwrap(); + let activity_size = &env::var("ACTIVITY_SIZE") + .unwrap_or_else(|_| "1024".to_owned()) + .parse::() + .unwrap(); + config.set_limits( + Limits::new() + .limit("forms", form_size * 1024) + .limit("json", activity_size * 1024), + ); let mail = mail::init(); if mail.is_none() && config.environment.is_prod() { @@ -128,110 +147,100 @@ Then try to restart Plume. 
} rocket::custom(config) - .mount("/", routes![ - routes::blogs::details, - routes::blogs::activity_details, - routes::blogs::outbox, - routes::blogs::new, - routes::blogs::new_auth, - routes::blogs::create, - routes::blogs::delete, - routes::blogs::atom_feed, - - routes::comments::create, - routes::comments::delete, - routes::comments::activity_pub, - - routes::instance::index, - routes::instance::local, - routes::instance::feed, - routes::instance::federated, - routes::instance::admin, - routes::instance::admin_instances, - routes::instance::admin_users, - routes::instance::ban, - routes::instance::toggle_block, - routes::instance::update_settings, - routes::instance::shared_inbox, - routes::instance::nodeinfo, - routes::instance::about, - routes::instance::web_manifest, - - routes::likes::create, - routes::likes::create_auth, - - routes::medias::list, - routes::medias::new, - routes::medias::upload, - routes::medias::details, - routes::medias::delete, - routes::medias::set_avatar, - - routes::notifications::notifications, - routes::notifications::notifications_auth, - - routes::posts::details, - routes::posts::activity_details, - routes::posts::edit, - routes::posts::update, - routes::posts::new, - routes::posts::new_auth, - routes::posts::create, - routes::posts::delete, - - routes::reshares::create, - routes::reshares::create_auth, - - routes::search::search, - - routes::session::new, - routes::session::create, - routes::session::delete, - routes::session::password_reset_request_form, - routes::session::password_reset_request, - routes::session::password_reset_form, - routes::session::password_reset, - - routes::plume_static_files, - routes::static_files, - - routes::tags::tag, - - routes::user::me, - routes::user::details, - routes::user::dashboard, - routes::user::dashboard_auth, - routes::user::followers, - routes::user::followed, - routes::user::edit, - routes::user::edit_auth, - routes::user::update, - routes::user::delete, - routes::user::follow, - routes::user::follow_auth, - routes::user::activity_details, - routes::user::outbox, - routes::user::inbox, - routes::user::ap_followers, - routes::user::new, - routes::user::create, - routes::user::atom_feed, - - routes::well_known::host_meta, - routes::well_known::nodeinfo, - routes::well_known::webfinger, - - routes::errors::csrf_violation - ]) - .mount("/api/v1", routes![ - api::oauth, - - api::apps::create, - - api::posts::get, - api::posts::list, - api::posts::create, - ]) + .mount( + "/", + routes![ + routes::blogs::details, + routes::blogs::activity_details, + routes::blogs::outbox, + routes::blogs::new, + routes::blogs::new_auth, + routes::blogs::create, + routes::blogs::delete, + routes::blogs::atom_feed, + routes::comments::create, + routes::comments::delete, + routes::comments::activity_pub, + routes::instance::index, + routes::instance::local, + routes::instance::feed, + routes::instance::federated, + routes::instance::admin, + routes::instance::admin_instances, + routes::instance::admin_users, + routes::instance::ban, + routes::instance::toggle_block, + routes::instance::update_settings, + routes::instance::shared_inbox, + routes::instance::nodeinfo, + routes::instance::about, + routes::instance::web_manifest, + routes::likes::create, + routes::likes::create_auth, + routes::medias::list, + routes::medias::new, + routes::medias::upload, + routes::medias::details, + routes::medias::delete, + routes::medias::set_avatar, + routes::notifications::notifications, + routes::notifications::notifications_auth, + 
routes::posts::details, + routes::posts::activity_details, + routes::posts::edit, + routes::posts::update, + routes::posts::new, + routes::posts::new_auth, + routes::posts::create, + routes::posts::delete, + routes::reshares::create, + routes::reshares::create_auth, + routes::search::search, + routes::session::new, + routes::session::create, + routes::session::delete, + routes::session::password_reset_request_form, + routes::session::password_reset_request, + routes::session::password_reset_form, + routes::session::password_reset, + routes::plume_static_files, + routes::static_files, + routes::tags::tag, + routes::user::me, + routes::user::details, + routes::user::dashboard, + routes::user::dashboard_auth, + routes::user::followers, + routes::user::followed, + routes::user::edit, + routes::user::edit_auth, + routes::user::update, + routes::user::delete, + routes::user::follow, + routes::user::follow_auth, + routes::user::activity_details, + routes::user::outbox, + routes::user::inbox, + routes::user::ap_followers, + routes::user::new, + routes::user::create, + routes::user::atom_feed, + routes::well_known::host_meta, + routes::well_known::nodeinfo, + routes::well_known::webfinger, + routes::errors::csrf_violation + ], + ) + .mount( + "/api/v1", + routes![ + api::oauth, + api::apps::create, + api::posts::get, + api::posts::list, + api::posts::create, + ], + ) .register(catchers![ routes::errors::not_found, routes::errors::unprocessable_entity, @@ -243,15 +252,41 @@ Then try to restart Plume. .manage(workpool) .manage(searcher) .manage(include_i18n!()) - .attach(CsrfFairingBuilder::new() - .set_default_target("/csrf-violation?target=".to_owned(), rocket::http::Method::Post) + .attach( + CsrfFairingBuilder::new() + .set_default_target( + "/csrf-violation?target=".to_owned(), + rocket::http::Method::Post, + ) .add_exceptions(vec![ - ("/inbox".to_owned(), "/inbox".to_owned(), rocket::http::Method::Post), - ("/@//inbox".to_owned(), "/@//inbox".to_owned(), rocket::http::Method::Post), - ("/login".to_owned(), "/login".to_owned(), rocket::http::Method::Post), - ("/users/new".to_owned(), "/users/new".to_owned(), rocket::http::Method::Post), - ("/api/".to_owned(), "/api/".to_owned(), rocket::http::Method::Post) + ( + "/inbox".to_owned(), + "/inbox".to_owned(), + rocket::http::Method::Post, + ), + ( + "/@//inbox".to_owned(), + "/@//inbox".to_owned(), + rocket::http::Method::Post, + ), + ( + "/login".to_owned(), + "/login".to_owned(), + rocket::http::Method::Post, + ), + ( + "/users/new".to_owned(), + "/users/new".to_owned(), + rocket::http::Method::Post, + ), + ( + "/api/".to_owned(), + "/api/".to_owned(), + rocket::http::Method::Post, + ), ]) - .finalize().expect("main: csrf fairing creation error")) + .finalize() + .expect("main: csrf fairing creation error"), + ) .launch(); } diff --git a/src/routes/blogs.rs b/src/routes/blogs.rs index 57fb6b5f..9624a3fb 100644 --- a/src/routes/blogs.rs +++ b/src/routes/blogs.rs @@ -3,22 +3,16 @@ use atom_syndication::{Entry, FeedBuilder}; use rocket::{ http::ContentType, request::LenientForm, - response::{Redirect, Flash, content::Content} + response::{content::Content, Flash, Redirect}, }; use rocket_i18n::I18n; -use std::{collections::HashMap, borrow::Cow}; +use std::{borrow::Cow, collections::HashMap}; use validator::{Validate, ValidationError, ValidationErrors}; use plume_common::activity_pub::{ActivityStream, ApRequest}; use plume_common::utils; -use plume_models::{ - blog_authors::*, - blogs::*, - db_conn::DbConn, - instance::Instance, - posts::Post, -}; 
-use routes::{Page, PlumeRocket, errors::ErrorPage}; +use plume_models::{blog_authors::*, blogs::*, db_conn::DbConn, instance::Instance, posts::Post}; +use routes::{errors::ErrorPage, Page, PlumeRocket}; use template_utils::Ructe; #[get("/~/?", rank = 2)] @@ -39,13 +33,18 @@ pub fn details(name: String, page: Option, rockets: PlumeRocket) -> Result articles_count, page.0, Page::total(articles_count as i32), - user.and_then(|x| x.is_author_in(&*conn, &blog).ok()).unwrap_or(false), + user.and_then(|x| x.is_author_in(&*conn, &blog).ok()) + .unwrap_or(false), posts ))) } #[get("/~/", rank = 1)] -pub fn activity_details(name: String, conn: DbConn, _ap: ApRequest) -> Option> { +pub fn activity_details( + name: String, + conn: DbConn, + _ap: ApRequest, +) -> Option> { let blog = Blog::find_by_fqn(&*conn, &name).ok()?; Some(ActivityStream::new(blog.to_activity(&*conn).ok()?)) } @@ -64,10 +63,13 @@ pub fn new(rockets: PlumeRocket) -> Ructe { } #[get("/blogs/new", rank = 2)] -pub fn new_auth(i18n: I18n) -> Flash{ +pub fn new_auth(i18n: I18n) -> Flash { utils::requires_login( - &i18n!(i18n.catalog, "You need to be logged in order to create a new blog"), - uri!(new) + &i18n!( + i18n.catalog, + "You need to be logged in order to create a new blog" + ), + uri!(new), ) } @@ -95,29 +97,43 @@ pub fn create(form: LenientForm, rockets: PlumeRocket) -> Result ValidationErrors::new(), - Err(e) => e + Err(e) => e, }; if Blog::find_by_fqn(&*conn, &slug).is_ok() { - errors.add("title", ValidationError { - code: Cow::from("existing_slug"), - message: Some(Cow::from("A blog with the same name already exists.")), - params: HashMap::new() - }); + errors.add( + "title", + ValidationError { + code: Cow::from("existing_slug"), + message: Some(Cow::from("A blog with the same name already exists.")), + params: HashMap::new(), + }, + ); } if errors.is_empty() { - let blog = Blog::insert(&*conn, NewBlog::new_local( - slug.clone(), - form.title.to_string(), - String::from(""), - Instance::get_local(&*conn).expect("blog::create: instance error").id - ).expect("blog::create: new local error")).expect("blog::create: error"); + let blog = Blog::insert( + &*conn, + NewBlog::new_local( + slug.clone(), + form.title.to_string(), + String::from(""), + Instance::get_local(&*conn) + .expect("blog::create: instance error") + .id, + ) + .expect("blog::create: new local error"), + ) + .expect("blog::create: error"); - BlogAuthor::insert(&*conn, NewBlogAuthor { - blog_id: blog.id, - author_id: user.id, - is_owner: true - }).expect("blog::create: author error"); + BlogAuthor::insert( + &*conn, + NewBlogAuthor { + blog_id: blog.id, + author_id: user.id, + is_owner: true, + }, + ) + .expect("blog::create: author error"); Ok(Redirect::to(uri!(details: name = slug.clone(), page = _))) } else { @@ -130,15 +146,20 @@ pub fn create(form: LenientForm, rockets: PlumeRocket) -> Result/delete")] -pub fn delete(name: String, rockets: PlumeRocket) -> Result{ +pub fn delete(name: String, rockets: PlumeRocket) -> Result { let conn = rockets.conn; let blog = Blog::find_by_fqn(&*conn, &name).expect("blog::delete: blog not found"); let user = rockets.user; let intl = rockets.intl; let searcher = rockets.searcher; - if user.clone().and_then(|u| u.is_author_in(&*conn, &blog).ok()).unwrap_or(false) { - blog.delete(&conn, &searcher).expect("blog::expect: deletion error"); + if user + .clone() + .and_then(|u| u.is_author_in(&*conn, &blog).ok()) + .unwrap_or(false) + { + blog.delete(&conn, &searcher) + .expect("blog::expect: deletion error"); 
Ok(Redirect::to(uri!(super::instance::index))) } else { // TODO actually return 403 error code @@ -160,12 +181,20 @@ pub fn atom_feed(name: String, conn: DbConn) -> Option> { let blog = Blog::find_by_fqn(&*conn, &name).ok()?; let feed = FeedBuilder::default() .title(blog.title.clone()) - .id(Instance::get_local(&*conn).ok()? + .id(Instance::get_local(&*conn) + .ok()? .compute_box("~", &name, "atom.xml")) - .entries(Post::get_recents_for_blog(&*conn, &blog, 15).ok()? - .into_iter() - .map(|p| super::post_to_atom(p, &*conn)) - .collect::>()) - .build().ok()?; - Some(Content(ContentType::new("application", "atom+xml"), feed.to_string())) + .entries( + Post::get_recents_for_blog(&*conn, &blog, 15) + .ok()? + .into_iter() + .map(|p| super::post_to_atom(p, &*conn)) + .collect::>(), + ) + .build() + .ok()?; + Some(Content( + ContentType::new("application", "atom+xml"), + feed.to_string(), + )) } diff --git a/src/routes/comments.rs b/src/routes/comments.rs index 69ee487a..1d40f95d 100644 --- a/src/routes/comments.rs +++ b/src/routes/comments.rs @@ -1,29 +1,21 @@ use activitypub::object::Note; -use rocket::{ - request::LenientForm, - response::Redirect -}; +use rocket::{request::LenientForm, response::Redirect}; use rocket_i18n::I18n; -use validator::Validate; use template_utils::Ructe; +use validator::Validate; use std::time::Duration; -use plume_common::{utils, activity_pub::{broadcast, ApRequest, - ActivityStream, inbox::Deletable}}; -use plume_models::{ - blogs::Blog, - comments::*, - db_conn::DbConn, - instance::Instance, - mentions::Mention, - posts::Post, - safe_string::SafeString, - tags::Tag, - users::User +use plume_common::{ + activity_pub::{broadcast, inbox::Deletable, ActivityStream, ApRequest}, + utils, +}; +use plume_models::{ + blogs::Blog, comments::*, db_conn::DbConn, instance::Instance, mentions::Mention, posts::Post, + safe_string::SafeString, tags::Tag, users::User, }; -use Worker; use routes::errors::ErrorPage; +use Worker; #[derive(Default, FromForm, Debug, Validate)] pub struct NewCommentForm { @@ -34,37 +26,54 @@ pub struct NewCommentForm { } #[post("/~///comment", data = "
")] -pub fn create(blog_name: String, slug: String, form: LenientForm, user: User, conn: DbConn, worker: Worker, intl: I18n) - -> Result { +pub fn create( + blog_name: String, + slug: String, + form: LenientForm, + user: User, + conn: DbConn, + worker: Worker, + intl: I18n, +) -> Result { let blog = Blog::find_by_fqn(&*conn, &blog_name).expect("comments::create: blog error"); let post = Post::find_by_slug(&*conn, &slug, blog.id).expect("comments::create: post error"); form.validate() .map(|_| { let (html, mentions, _hashtags) = utils::md_to_html( form.content.as_ref(), - &Instance::get_local(&conn).expect("comments::create: local instance error").public_domain + &Instance::get_local(&conn) + .expect("comments::create: local instance error") + .public_domain, ); - let comm = Comment::insert(&*conn, NewComment { - content: SafeString::new(html.as_ref()), - in_response_to_id: form.responding_to, - post_id: post.id, - author_id: user.id, - ap_url: None, - sensitive: !form.warning.is_empty(), - spoiler_text: form.warning.clone(), - public_visibility: true - }).expect("comments::create: insert error"); - let new_comment = comm.create_activity(&*conn).expect("comments::create: activity error"); + let comm = Comment::insert( + &*conn, + NewComment { + content: SafeString::new(html.as_ref()), + in_response_to_id: form.responding_to, + post_id: post.id, + author_id: user.id, + ap_url: None, + sensitive: !form.warning.is_empty(), + spoiler_text: form.warning.clone(), + public_visibility: true, + }, + ) + .expect("comments::create: insert error"); + let new_comment = comm + .create_activity(&*conn) + .expect("comments::create: activity error"); // save mentions for ment in mentions { Mention::from_activity( &*conn, - &Mention::build_activity(&*conn, &ment).expect("comments::create: build mention error"), + &Mention::build_activity(&*conn, &ment) + .expect("comments::create: build mention error"), post.id, true, - true - ).expect("comments::create: mention save error"); + true, + ) + .expect("comments::create: mention save error"); } // federate @@ -72,13 +81,18 @@ pub fn create(blog_name: String, slug: String, form: LenientForm let user_clone = user.clone(); worker.execute(move || broadcast(&user_clone, new_comment, dest)); - Redirect::to(uri!(super::posts::details: blog = blog_name, slug = slug, responding_to = _)) + Redirect::to( + uri!(super::posts::details: blog = blog_name, slug = slug, responding_to = _), + ) }) .map_err(|errors| { // TODO: de-duplicate this code - let comments = CommentTree::from_post(&*conn, &post, Some(&user)).expect("comments::create: comments error"); + let comments = CommentTree::from_post(&*conn, &post, Some(&user)) + .expect("comments::create: comments error"); - let previous = form.responding_to.and_then(|r| Comment::get(&*conn, r).ok()); + let previous = form + .responding_to + .and_then(|r| Comment::get(&*conn, r).ok()); render!(posts::details( &(&*conn, &intl.catalog, Some(user.clone())), @@ -89,33 +103,62 @@ pub fn create(blog_name: String, slug: String, form: LenientForm Tag::for_post(&*conn, post.id).expect("comments::create: tags error"), comments, previous, - post.count_likes(&*conn).expect("comments::create: count likes error"), - post.count_reshares(&*conn).expect("comments::create: count reshares error"), - user.has_liked(&*conn, &post).expect("comments::create: liked error"), - user.has_reshared(&*conn, &post).expect("comments::create: reshared error"), - user.is_following(&*conn, post.get_authors(&*conn).expect("comments::create: authors error")[0].id) - 
.expect("comments::create: following error"), - post.get_authors(&*conn).expect("comments::create: authors error")[0].clone() + post.count_likes(&*conn) + .expect("comments::create: count likes error"), + post.count_reshares(&*conn) + .expect("comments::create: count reshares error"), + user.has_liked(&*conn, &post) + .expect("comments::create: liked error"), + user.has_reshared(&*conn, &post) + .expect("comments::create: reshared error"), + user.is_following( + &*conn, + post.get_authors(&*conn) + .expect("comments::create: authors error")[0] + .id + ) + .expect("comments::create: following error"), + post.get_authors(&*conn) + .expect("comments::create: authors error")[0] + .clone() )) }) } #[post("/~///comment//delete")] -pub fn delete(blog: String, slug: String, id: i32, user: User, conn: DbConn, worker: Worker) -> Result { +pub fn delete( + blog: String, + slug: String, + id: i32, + user: User, + conn: DbConn, + worker: Worker, +) -> Result { if let Ok(comment) = Comment::get(&*conn, id) { if comment.author_id == user.id { let dest = User::one_by_instance(&*conn)?; let delete_activity = comment.delete(&*conn)?; let user_c = user.clone(); worker.execute(move || broadcast(&user_c, delete_activity, dest)); - worker.execute_after(Duration::from_secs(10*60), move || {user.rotate_keypair(&conn).expect("Failed to rotate keypair");}); + worker.execute_after(Duration::from_secs(10 * 60), move || { + user.rotate_keypair(&conn) + .expect("Failed to rotate keypair"); + }); } } - Ok(Redirect::to(uri!(super::posts::details: blog = blog, slug = slug, responding_to = _))) + Ok(Redirect::to( + uri!(super::posts::details: blog = blog, slug = slug, responding_to = _), + )) } #[get("/~/<_blog>/<_slug>/comment/")] -pub fn activity_pub(_blog: String, _slug: String, id: i32, _ap: ApRequest, conn: DbConn) -> Option> { +pub fn activity_pub( + _blog: String, + _slug: String, + id: i32, + _ap: ApRequest, + conn: DbConn, +) -> Option> { Comment::get(&*conn, id) .and_then(|c| c.to_activity(&*conn)) .ok() diff --git a/src/routes/errors.rs b/src/routes/errors.rs index ec2e510c..a088af9e 100644 --- a/src/routes/errors.rs +++ b/src/routes/errors.rs @@ -1,11 +1,11 @@ +use plume_models::users::User; +use plume_models::{db_conn::DbConn, Error}; use rocket::{ - Request, request::FromRequest, response::{self, Responder}, + Request, }; use rocket_i18n::I18n; -use plume_models::{Error, db_conn::DbConn}; -use plume_models::users::User; use template_utils::Ructe; #[derive(Debug)] @@ -24,15 +24,24 @@ impl<'r> Responder<'r> for ErrorPage { let user = User::from_request(req).succeeded(); match self.0 { - Error::NotFound => render!(errors::not_found( - &(&*conn.unwrap(), &intl.unwrap().catalog, user) - )).respond_to(req), - Error::Unauthorized => render!(errors::not_found( - &(&*conn.unwrap(), &intl.unwrap().catalog, user) - )).respond_to(req), - _ => render!(errors::not_found( - &(&*conn.unwrap(), &intl.unwrap().catalog, user) - )).respond_to(req) + Error::NotFound => render!(errors::not_found(&( + &*conn.unwrap(), + &intl.unwrap().catalog, + user + ))) + .respond_to(req), + Error::Unauthorized => render!(errors::not_found(&( + &*conn.unwrap(), + &intl.unwrap().catalog, + user + ))) + .respond_to(req), + _ => render!(errors::not_found(&( + &*conn.unwrap(), + &intl.unwrap().catalog, + user + ))) + .respond_to(req), } } } @@ -42,9 +51,11 @@ pub fn not_found(req: &Request) -> Ructe { let conn = req.guard::().succeeded(); let intl = req.guard::().succeeded(); let user = User::from_request(req).succeeded(); - 
render!(errors::not_found( - &(&*conn.unwrap(), &intl.unwrap().catalog, user) - )) + render!(errors::not_found(&( + &*conn.unwrap(), + &intl.unwrap().catalog, + user + ))) } #[catch(422)] @@ -52,9 +63,11 @@ pub fn unprocessable_entity(req: &Request) -> Ructe { let conn = req.guard::().succeeded(); let intl = req.guard::().succeeded(); let user = User::from_request(req).succeeded(); - render!(errors::unprocessable_entity( - &(&*conn.unwrap(), &intl.unwrap().catalog, user) - )) + render!(errors::unprocessable_entity(&( + &*conn.unwrap(), + &intl.unwrap().catalog, + user + ))) } #[catch(500)] @@ -62,17 +75,22 @@ pub fn server_error(req: &Request) -> Ructe { let conn = req.guard::().succeeded(); let intl = req.guard::().succeeded(); let user = User::from_request(req).succeeded(); - render!(errors::server_error( - &(&*conn.unwrap(), &intl.unwrap().catalog, user) - )) + render!(errors::server_error(&( + &*conn.unwrap(), + &intl.unwrap().catalog, + user + ))) } #[post("/csrf-violation?")] -pub fn csrf_violation(target: Option, conn: DbConn, intl: I18n, user: Option) -> Ructe { +pub fn csrf_violation( + target: Option, + conn: DbConn, + intl: I18n, + user: Option, +) -> Ructe { if let Some(uri) = target { eprintln!("Csrf violation while acceding \"{}\"", uri) } - render!(errors::csrf( - &(&*conn, &intl.catalog, user) - )) + render!(errors::csrf(&(&*conn, &intl.catalog, user))) } diff --git a/src/routes/instance.rs b/src/routes/instance.rs index 5f9121d1..b4e3a470 100644 --- a/src/routes/instance.rs +++ b/src/routes/instance.rs @@ -1,24 +1,19 @@ -use rocket::{request::LenientForm, response::{status, Redirect}}; +use rocket::{ + request::LenientForm, + response::{status, Redirect}, +}; use rocket_contrib::json::Json; use rocket_i18n::I18n; use serde_json; use validator::{Validate, ValidationErrors}; -use plume_common::activity_pub::sign::{Signable, - verify_http_headers}; -use plume_models::{ - admin::Admin, - comments::Comment, - db_conn::DbConn, - Error, - headers::Headers, - posts::Post, - users::User, - safe_string::SafeString, - instance::* -}; use inbox::{Inbox, SignedJson}; -use routes::{Page, errors::ErrorPage}; +use plume_common::activity_pub::sign::{verify_http_headers, Signable}; +use plume_models::{ + admin::Admin, comments::Comment, db_conn::DbConn, headers::Headers, instance::*, posts::Post, + safe_string::SafeString, users::User, Error, +}; +use routes::{errors::ErrorPage, Page}; use template_utils::Ructe; use Searcher; @@ -46,7 +41,12 @@ pub fn index(conn: DbConn, user: Option, intl: I18n) -> Result")] -pub fn local(conn: DbConn, user: Option, page: Option, intl: I18n) -> Result { +pub fn local( + conn: DbConn, + user: Option, + page: Option, + intl: I18n, +) -> Result { let page = page.unwrap_or_default(); let instance = Instance::get_local(&*conn)?; let articles = Post::get_instance_page(&*conn, instance.id, page.limits())?; @@ -75,7 +75,12 @@ pub fn feed(conn: DbConn, user: User, page: Option, intl: I18n) -> Result< } #[get("/federated?")] -pub fn federated(conn: DbConn, user: Option, page: Option, intl: I18n) -> Result { +pub fn federated( + conn: DbConn, + user: Option, + page: Option, + intl: I18n, +) -> Result { let page = page.unwrap_or_default(); let articles = Post::get_recents_page(&*conn, page.limits())?; Ok(render!(instance::federated( @@ -111,23 +116,34 @@ pub struct InstanceSettingsForm { pub short_description: SafeString, pub long_description: SafeString, #[validate(length(min = "1"))] - pub default_license: String + pub default_license: String, } #[post("/admin", 
data = "")] -pub fn update_settings(conn: DbConn, admin: Admin, form: LenientForm, intl: I18n) -> Result { +pub fn update_settings( + conn: DbConn, + admin: Admin, + form: LenientForm, + intl: I18n, +) -> Result { form.validate() .and_then(|_| { - let instance = Instance::get_local(&*conn).expect("instance::update_settings: local instance error"); - instance.update(&*conn, - form.name.clone(), - form.open_registrations, - form.short_description.clone(), - form.long_description.clone()).expect("instance::update_settings: save error"); + let instance = Instance::get_local(&*conn) + .expect("instance::update_settings: local instance error"); + instance + .update( + &*conn, + form.name.clone(), + form.open_registrations, + form.short_description.clone(), + form.long_description.clone(), + ) + .expect("instance::update_settings: save error"); Ok(Redirect::to(uri!(admin))) }) - .or_else(|e| { - let local_inst = Instance::get_local(&*conn).expect("instance::update_settings: local instance error"); + .or_else(|e| { + let local_inst = Instance::get_local(&*conn) + .expect("instance::update_settings: local instance error"); Err(render!(instance::admin( &(&*conn, &intl.catalog, Some(admin.0)), local_inst, @@ -138,7 +154,12 @@ pub fn update_settings(conn: DbConn, admin: Admin, form: LenientForm")] -pub fn admin_instances(admin: Admin, conn: DbConn, page: Option, intl: I18n) -> Result { +pub fn admin_instances( + admin: Admin, + conn: DbConn, + page: Option, + intl: I18n, +) -> Result { let page = page.unwrap_or_default(); let instances = Instance::page(&*conn, page.limits())?; Ok(render!(instance::list( @@ -160,7 +181,12 @@ pub fn toggle_block(_admin: Admin, conn: DbConn, id: i32) -> Result")] -pub fn admin_users(admin: Admin, conn: DbConn, page: Option, intl: I18n) -> Result { +pub fn admin_users( + admin: Admin, + conn: DbConn, + page: Option, + intl: I18n, +) -> Result { let page = page.unwrap_or_default(); Ok(render!(instance::users( &(&*conn, &intl.catalog, Some(admin.0)), @@ -171,7 +197,12 @@ pub fn admin_users(admin: Admin, conn: DbConn, page: Option, intl: I18n) - } #[post("/admin/users//ban")] -pub fn ban(_admin: Admin, conn: DbConn, id: i32, searcher: Searcher) -> Result { +pub fn ban( + _admin: Admin, + conn: DbConn, + id: i32, + searcher: Searcher, +) -> Result { if let Ok(u) = User::get(&*conn, id) { u.delete(&*conn, &searcher)?; } @@ -179,34 +210,50 @@ pub fn ban(_admin: Admin, conn: DbConn, id: i32, searcher: Searcher) -> Result, headers: Headers, searcher: Searcher) -> Result> { +pub fn shared_inbox( + conn: DbConn, + data: SignedJson, + headers: Headers, + searcher: Searcher, +) -> Result> { let act = data.1.into_inner(); let sig = data.0; let activity = act.clone(); - let actor_id = activity["actor"].as_str() - .or_else(|| activity["actor"]["id"].as_str()).ok_or(status::BadRequest(Some("Missing actor id for activity")))?; + let actor_id = activity["actor"] + .as_str() + .or_else(|| activity["actor"]["id"].as_str()) + .ok_or(status::BadRequest(Some("Missing actor id for activity")))?; let actor = User::from_url(&conn, actor_id).expect("instance::shared_inbox: user error"); - if !verify_http_headers(&actor, &headers.0, &sig).is_secure() && - !act.clone().verify(&actor) { + if !verify_http_headers(&actor, &headers.0, &sig).is_secure() && !act.clone().verify(&actor) { // maybe we just know an old key? 
- actor.refetch(&conn).and_then(|_| User::get(&conn, actor.id)) - .and_then(|u| if verify_http_headers(&u, &headers.0, &sig).is_secure() || - act.clone().verify(&u) { - Ok(()) - } else { - Err(Error::Signature) - }) + actor + .refetch(&conn) + .and_then(|_| User::get(&conn, actor.id)) + .and_then(|u| { + if verify_http_headers(&u, &headers.0, &sig).is_secure() || act.clone().verify(&u) { + Ok(()) + } else { + Err(Error::Signature) + } + }) .map_err(|_| { - println!("Rejected invalid activity supposedly from {}, with headers {:?}", actor.username, headers.0); - status::BadRequest(Some("Invalid signature"))})?; + println!( + "Rejected invalid activity supposedly from {}, with headers {:?}", + actor.username, headers.0 + ); + status::BadRequest(Some("Invalid signature")) + })?; } - if Instance::is_blocked(&*conn, actor_id).map_err(|_| status::BadRequest(Some("Can't tell if instance is blocked")))? { + if Instance::is_blocked(&*conn, actor_id) + .map_err(|_| status::BadRequest(Some("Can't tell if instance is blocked")))? + { return Ok(String::new()); } - let instance = Instance::get_local(&*conn).expect("instance::shared_inbox: local instance not found error"); + let instance = Instance::get_local(&*conn) + .expect("instance::shared_inbox: local instance not found error"); Ok(match instance.received(&*conn, &searcher, act) { Ok(_) => String::new(), Err(e) => { diff --git a/src/routes/likes.rs b/src/routes/likes.rs index 8e3a3334..d06dda29 100644 --- a/src/routes/likes.rs +++ b/src/routes/likes.rs @@ -1,25 +1,28 @@ -use rocket::response::{Redirect, Flash}; +use rocket::response::{Flash, Redirect}; use rocket_i18n::I18n; -use plume_common::activity_pub::{broadcast, inbox::{Notify, Deletable}}; -use plume_common::utils; -use plume_models::{ - blogs::Blog, - db_conn::DbConn, - likes, - posts::Post, - users::User +use plume_common::activity_pub::{ + broadcast, + inbox::{Deletable, Notify}, }; -use Worker; +use plume_common::utils; +use plume_models::{blogs::Blog, db_conn::DbConn, likes, posts::Post, users::User}; use routes::errors::ErrorPage; +use Worker; #[post("/~///like")] -pub fn create(blog: String, slug: String, user: User, conn: DbConn, worker: Worker) -> Result { +pub fn create( + blog: String, + slug: String, + user: User, + conn: DbConn, + worker: Worker, +) -> Result { let b = Blog::find_by_fqn(&*conn, &blog)?; let post = Post::find_by_slug(&*conn, &slug, b.id)?; if !user.has_liked(&*conn, &post)? 
{ - let like = likes::Like::insert(&*conn, likes::NewLike::new(&post ,&user))?; + let like = likes::Like::insert(&*conn, likes::NewLike::new(&post, &user))?; like.notify(&*conn)?; let dest = User::one_by_instance(&*conn)?; @@ -32,13 +35,18 @@ pub fn create(blog: String, slug: String, user: User, conn: DbConn, worker: Work worker.execute(move || broadcast(&user, delete_act, dest)); } - Ok(Redirect::to(uri!(super::posts::details: blog = blog, slug = slug, responding_to = _))) + Ok(Redirect::to( + uri!(super::posts::details: blog = blog, slug = slug, responding_to = _), + )) } #[post("/~///like", rank = 2)] -pub fn create_auth(blog: String, slug: String, i18n: I18n) -> Flash{ +pub fn create_auth(blog: String, slug: String, i18n: I18n) -> Flash { utils::requires_login( - &i18n!(i18n.catalog, "You need to be logged in order to like a post"), - uri!(create: blog = blog, slug = slug) + &i18n!( + i18n.catalog, + "You need to be logged in order to like a post" + ), + uri!(create: blog = blog, slug = slug), ) } diff --git a/src/routes/medias.rs b/src/routes/medias.rs index 097b0218..8918c29a 100644 --- a/src/routes/medias.rs +++ b/src/routes/medias.rs @@ -1,11 +1,18 @@ use guid_create::GUID; -use multipart::server::{Multipart, save::{SavedData, SaveResult}}; -use rocket::{Data, http::ContentType, response::{Redirect, status}}; +use multipart::server::{ + save::{SaveResult, SavedData}, + Multipart, +}; +use plume_models::{db_conn::DbConn, medias::*, users::User, Error}; +use rocket::{ + http::ContentType, + response::{status, Redirect}, + Data, +}; use rocket_i18n::I18n; +use routes::{errors::ErrorPage, Page}; use std::fs; -use plume_models::{Error, db_conn::DbConn, medias::*, users::User}; use template_utils::Ructe; -use routes::{Page, errors::ErrorPage}; #[get("/medias?")] pub fn list(user: User, conn: DbConn, intl: I18n, page: Option) -> Result { @@ -21,64 +28,85 @@ pub fn list(user: User, conn: DbConn, intl: I18n, page: Option) -> Result< #[get("/medias/new")] pub fn new(user: User, conn: DbConn, intl: I18n) -> Ructe { - render!(medias::new( - &(&*conn, &intl.catalog, Some(user)) - )) + render!(medias::new(&(&*conn, &intl.catalog, Some(user)))) } #[post("/medias/new", data = "")] -pub fn upload(user: User, data: Data, ct: &ContentType, conn: DbConn) -> Result> { +pub fn upload( + user: User, + data: Data, + ct: &ContentType, + conn: DbConn, +) -> Result> { if ct.is_form_data() { - let (_, boundary) = ct.params().find(|&(k, _)| k == "boundary").ok_or_else(|| status::BadRequest(Some("No boundary")))?; + let (_, boundary) = ct + .params() + .find(|&(k, _)| k == "boundary") + .ok_or_else(|| status::BadRequest(Some("No boundary")))?; match Multipart::with_body(data.open(), boundary).save().temp() { SaveResult::Full(entries) => { let fields = entries.fields; - let filename = fields.get("file").and_then(|v| v.iter().next()) - .ok_or_else(|| status::BadRequest(Some("No file uploaded")))?.headers - .filename.clone(); + let filename = fields + .get("file") + .and_then(|v| v.iter().next()) + .ok_or_else(|| status::BadRequest(Some("No file uploaded")))? 
+ .headers + .filename + .clone(); // Remove extension if it contains something else than just letters and numbers let ext = filename - .and_then(|f| f - .rsplit('.') - .next() - .and_then(|ext| if ext.chars().any(|c| !c.is_alphanumeric()) { - None - } else { - Some(ext.to_lowercase()) - }) - .map(|ext| format!(".{}", ext)) - ).unwrap_or_default(); + .and_then(|f| { + f.rsplit('.') + .next() + .and_then(|ext| { + if ext.chars().any(|c| !c.is_alphanumeric()) { + None + } else { + Some(ext.to_lowercase()) + } + }) + .map(|ext| format!(".{}", ext)) + }) + .unwrap_or_default(); let dest = format!("static/media/{}{}", GUID::rand().to_string(), ext); match fields["file"][0].data { - SavedData::Bytes(ref bytes) => fs::write(&dest, bytes).map_err(|_| status::BadRequest(Some("Couldn't save upload")))?, - SavedData::File(ref path, _) => {fs::copy(path, &dest).map_err(|_| status::BadRequest(Some("Couldn't copy upload")))?;}, + SavedData::Bytes(ref bytes) => fs::write(&dest, bytes) + .map_err(|_| status::BadRequest(Some("Couldn't save upload")))?, + SavedData::File(ref path, _) => { + fs::copy(path, &dest) + .map_err(|_| status::BadRequest(Some("Couldn't copy upload")))?; + } _ => { return Ok(Redirect::to(uri!(new))); } } - let has_cw = !read(&fields["cw"][0].data).map(|cw| cw.is_empty()).unwrap_or(false); - let media = Media::insert(&*conn, NewMedia { - file_path: dest, - alt_text: read(&fields["alt"][0].data)?, - is_remote: false, - remote_url: None, - sensitive: has_cw, - content_warning: if has_cw { - Some(read(&fields["cw"][0].data)?) - } else { - None + let has_cw = !read(&fields["cw"][0].data) + .map(|cw| cw.is_empty()) + .unwrap_or(false); + let media = Media::insert( + &*conn, + NewMedia { + file_path: dest, + alt_text: read(&fields["alt"][0].data)?, + is_remote: false, + remote_url: None, + sensitive: has_cw, + content_warning: if has_cw { + Some(read(&fields["cw"][0].data)?) 
+ } else { + None + }, + owner_id: user.id, }, - owner_id: user.id - }).map_err(|_| status::BadRequest(Some("Error while saving media")))?; + ) + .map_err(|_| status::BadRequest(Some("Error while saving media")))?; Ok(Redirect::to(uri!(details: id = media.id))) - }, - SaveResult::Partial(_, _) | SaveResult::Error(_) => { - Ok(Redirect::to(uri!(new))) } + SaveResult::Partial(_, _) | SaveResult::Error(_) => Ok(Redirect::to(uri!(new))), } } else { Ok(Redirect::to(uri!(new))) diff --git a/src/routes/mod.rs b/src/routes/mod.rs index 0aa402c3..6e17736c 100644 --- a/src/routes/mod.rs +++ b/src/routes/mod.rs @@ -2,25 +2,21 @@ use atom_syndication::{ContentBuilder, Entry, EntryBuilder, LinkBuilder, Person, PersonBuilder}; use rocket::{ http::{ - RawStr, Status, uri::{FromUriParam, Query}, - hyper::header::{CacheControl, CacheDirective} + hyper::header::{CacheControl, CacheDirective}, + uri::{FromUriParam, Query}, + RawStr, Status, }, - Outcome, request::{self, FromFormValue, FromRequest, Request}, response::NamedFile, + Outcome, }; use rocket_i18n::I18n; use std::path::{Path, PathBuf}; -use plume_models::{ - Connection, - users::User, - posts::Post, - db_conn::DbConn, -}; +use plume_models::{db_conn::DbConn, posts::Post, users::User, Connection}; -use Worker; use Searcher; +use Worker; pub struct PlumeRocket<'a> { conn: DbConn, @@ -100,7 +96,6 @@ impl<'a, 'r> FromRequest<'a, 'r> for ContentLen { } } - impl Default for Page { fn default() -> Self { Page(1) @@ -110,20 +105,33 @@ impl Default for Page { pub fn post_to_atom(post: Post, conn: &Connection) -> Entry { EntryBuilder::default() .title(format!("", post.title)) - .content(ContentBuilder::default() - .value(format!("", *post.content.get())) - .src(post.ap_url.clone()) - .content_type("html".to_string()) - .build().expect("Atom feed: content error")) - .authors(post.get_authors(&*conn).expect("Atom feed: author error") - .into_iter() - .map(|a| PersonBuilder::default() - .name(a.display_name) - .uri(a.ap_url) - .build().expect("Atom feed: author error")) - .collect::>()) - .links(vec![LinkBuilder::default().href(post.ap_url).build().expect("Atom feed: link error")]) - .build().expect("Atom feed: entry error") + .content( + ContentBuilder::default() + .value(format!("", *post.content.get())) + .src(post.ap_url.clone()) + .content_type("html".to_string()) + .build() + .expect("Atom feed: content error"), + ) + .authors( + post.get_authors(&*conn) + .expect("Atom feed: author error") + .into_iter() + .map(|a| { + PersonBuilder::default() + .name(a.display_name) + .uri(a.ap_url) + .build() + .expect("Atom feed: author error") + }) + .collect::>(), + ) + .links(vec![LinkBuilder::default() + .href(post.ap_url) + .build() + .expect("Atom feed: link error")]) + .build() + .expect("Atom feed: entry error") } pub mod blogs; @@ -135,17 +143,17 @@ pub mod medias; pub mod notifications; pub mod posts; pub mod reshares; +pub mod search; pub mod session; pub mod tags; pub mod user; -pub mod search; pub mod well_known; #[derive(Responder)] #[response()] pub struct CachedFile { inner: NamedFile, - cache_control: CacheControl + cache_control: CacheControl, } #[get("/static/cached/<_build_id>/", rank = 2)] @@ -155,10 +163,10 @@ pub fn plume_static_files(file: PathBuf, _build_id: &RawStr) -> Option", rank = 3)] pub fn static_files(file: PathBuf) -> Option { - NamedFile::open(Path::new("static/").join(file)).ok() - .map(|f| - CachedFile { - inner: f, - cache_control: CacheControl(vec![CacheDirective::MaxAge(60*60*24*30)]) - }) + 
NamedFile::open(Path::new("static/").join(file)) + .ok() + .map(|f| CachedFile { + inner: f, + cache_control: CacheControl(vec![CacheDirective::MaxAge(60 * 60 * 24 * 30)]), + }) } diff --git a/src/routes/notifications.rs b/src/routes/notifications.rs index 9ce82644..fd279009 100644 --- a/src/routes/notifications.rs +++ b/src/routes/notifications.rs @@ -1,13 +1,18 @@ -use rocket::response::{Redirect, Flash}; +use rocket::response::{Flash, Redirect}; use rocket_i18n::I18n; use plume_common::utils; use plume_models::{db_conn::DbConn, notifications::Notification, users::User}; -use routes::{Page, errors::ErrorPage}; +use routes::{errors::ErrorPage, Page}; use template_utils::Ructe; #[get("/notifications?")] -pub fn notifications(conn: DbConn, user: User, page: Option, intl: I18n) -> Result { +pub fn notifications( + conn: DbConn, + user: User, + page: Option, + intl: I18n, +) -> Result { let page = page.unwrap_or_default(); Ok(render!(notifications::index( &(&*conn, &intl.catalog, Some(user.clone())), @@ -18,9 +23,12 @@ pub fn notifications(conn: DbConn, user: User, page: Option, intl: I18n) - } #[get("/notifications?", rank = 2)] -pub fn notifications_auth(i18n: I18n, page: Option) -> Flash{ +pub fn notifications_auth(i18n: I18n, page: Option) -> Flash { utils::requires_login( - &i18n!(i18n.catalog, "You need to be logged in order to see your notifications"), - uri!(notifications: page = page) + &i18n!( + i18n.catalog, + "You need to be logged in order to see your notifications" + ), + uri!(notifications: page = page), ) } diff --git a/src/routes/posts.rs b/src/routes/posts.rs index 380c65d8..edcb06c0 100644 --- a/src/routes/posts.rs +++ b/src/routes/posts.rs @@ -1,20 +1,21 @@ use chrono::Utc; use heck::{CamelCase, KebabCase}; use rocket::request::LenientForm; -use rocket::response::{Redirect, Flash}; +use rocket::response::{Flash, Redirect}; use rocket_i18n::I18n; use std::{ + borrow::Cow, collections::{HashMap, HashSet}, - borrow::Cow, time::Duration, + time::Duration, }; use validator::{Validate, ValidationError, ValidationErrors}; -use plume_common::activity_pub::{broadcast, ActivityStream, ApRequest, inbox::Deletable}; +use plume_common::activity_pub::{broadcast, inbox::Deletable, ActivityStream, ApRequest}; use plume_common::utils; use plume_models::{ blogs::*, - db_conn::DbConn, comments::{Comment, CommentTree}, + db_conn::DbConn, instance::Instance, medias::Media, mentions::Mention, @@ -22,16 +23,28 @@ use plume_models::{ posts::*, safe_string::SafeString, tags::*, - users::User + users::User, }; -use routes::{PlumeRocket, errors::ErrorPage, comments::NewCommentForm, ContentLen}; +use routes::{comments::NewCommentForm, errors::ErrorPage, ContentLen, PlumeRocket}; use template_utils::Ructe; #[get("/~//?", rank = 4)] -pub fn details(blog: String, slug: String, conn: DbConn, user: Option, responding_to: Option, intl: I18n) -> Result { +pub fn details( + blog: String, + slug: String, + conn: DbConn, + user: Option, + responding_to: Option, + intl: I18n, +) -> Result { let blog = Blog::find_by_fqn(&*conn, &blog)?; let post = Post::find_by_slug(&*conn, &slug, blog.id)?; - if post.published || post.get_authors(&*conn)?.into_iter().any(|a| a.id == user.clone().map(|u| u.id).unwrap_or(0)) { + if post.published + || post + .get_authors(&*conn)? 
+ .into_iter() + .any(|a| a.id == user.clone().map(|u| u.id).unwrap_or(0)) + { let comments = CommentTree::from_post(&*conn, &post, user.as_ref())?; let previous = responding_to.and_then(|r| Comment::get(&*conn, r).ok()); @@ -82,11 +95,19 @@ pub fn details(blog: String, slug: String, conn: DbConn, user: Option, res } #[get("/~//", rank = 3)] -pub fn activity_details(blog: String, slug: String, conn: DbConn, _ap: ApRequest) -> Result, Option> { +pub fn activity_details( + blog: String, + slug: String, + conn: DbConn, + _ap: ApRequest, +) -> Result, Option> { let blog = Blog::find_by_fqn(&*conn, &blog).map_err(|_| None)?; let post = Post::find_by_slug(&*conn, &slug, blog.id).map_err(|_| None)?; if post.published { - Ok(ActivityStream::new(post.to_activity(&*conn).map_err(|_| String::from("Post serialization error"))?)) + Ok(ActivityStream::new( + post.to_activity(&*conn) + .map_err(|_| String::from("Post serialization error"))?, + )) } else { Err(Some(String::from("Not published yet."))) } @@ -95,8 +116,11 @@ pub fn activity_details(blog: String, slug: String, conn: DbConn, _ap: ApRequest #[get("/~//new", rank = 2)] pub fn new_auth(blog: String, i18n: I18n) -> Flash { utils::requires_login( - &i18n!(i18n.catalog, "You need to be logged in order to write a new post"), - uri!(new: blog = blog) + &i18n!( + i18n.catalog, + "You need to be logged in order to write a new post" + ), + uri!(new: blog = blog), ) } @@ -112,7 +136,7 @@ pub fn new(blog: String, cl: ContentLen, rockets: PlumeRocket) -> Result Result//edit")] -pub fn edit(blog: String, slug: String, cl: ContentLen, rockets: PlumeRocket) -> Result { +pub fn edit( + blog: String, + slug: String, + cl: ContentLen, + rockets: PlumeRocket, +) -> Result { let conn = rockets.conn; let intl = rockets.intl; let b = Blog::find_by_fqn(&*conn, &blog)?; @@ -145,10 +174,9 @@ pub fn edit(blog: String, slug: String, cl: ContentLen, rockets: PlumeRocket) -> return Ok(render!(errors::not_authorized( &(&*conn, &intl.catalog, Some(user)), i18n!(intl.catalog, "You are not author in this blog.") - ))) + ))); } - let source = if !post.source.is_empty() { post.source.clone() } else { @@ -168,7 +196,7 @@ pub fn edit(blog: String, slug: String, cl: ContentLen, rockets: PlumeRocket) -> content: source, tags: Tag::for_post(&*conn, post.id)? 
.into_iter() - .filter_map(|t| if !t.is_hashtag {Some(t.tag)} else {None}) + .filter_map(|t| if !t.is_hashtag { Some(t.tag) } else { None }) .collect::>() .join(", "), license: post.license.clone(), @@ -184,11 +212,17 @@ pub fn edit(blog: String, slug: String, cl: ContentLen, rockets: PlumeRocket) -> } #[post("/~///edit", data = "")] -pub fn update(blog: String, slug: String, cl: ContentLen, form: LenientForm, rockets: PlumeRocket) - -> Result { +pub fn update( + blog: String, + slug: String, + cl: ContentLen, + form: LenientForm, + rockets: PlumeRocket, +) -> Result { let conn = rockets.conn; let b = Blog::find_by_fqn(&*conn, &blog).expect("post::update: blog error"); - let mut post = Post::find_by_slug(&*conn, &slug, b.id).expect("post::update: find by slug error"); + let mut post = + Post::find_by_slug(&*conn, &slug, b.id).expect("post::update: find by slug error"); let user = rockets.user.unwrap(); let intl = rockets.intl; @@ -200,23 +234,36 @@ pub fn update(blog: String, slug: String, cl: ContentLen, form: LenientForm ValidationErrors::new(), - Err(e) => e + Err(e) => e, }; if new_slug != slug && Post::find_by_slug(&*conn, &new_slug, b.id).is_ok() { - errors.add("title", ValidationError { - code: Cow::from("existing_slug"), - message: Some(Cow::from("A post with the same title already exists.")), - params: HashMap::new() - }); + errors.add( + "title", + ValidationError { + code: Cow::from("existing_slug"), + message: Some(Cow::from("A post with the same title already exists.")), + params: HashMap::new(), + }, + ); } if errors.is_empty() { - if !user.is_author_in(&*conn, &b).expect("posts::update: is author in error") { + if !user + .is_author_in(&*conn, &b) + .expect("posts::update: is author in error") + { // actually it's not "Ok"… - Ok(Redirect::to(uri!(super::blogs::details: name = blog, page = _))) + Ok(Redirect::to( + uri!(super::blogs::details: name = blog, page = _), + )) } else { - let (content, mentions, hashtags) = utils::md_to_html(form.content.to_string().as_ref(), &Instance::get_local(&conn).expect("posts::update: Error getting local instance").public_domain); + let (content, mentions, hashtags) = utils::md_to_html( + form.content.to_string().as_ref(), + &Instance::get_local(&conn) + .expect("posts::update: Error getting local instance") + .public_domain, + ); // update publication date if when this article is no longer a draft let newly_published = if !post.published && !form.draft { @@ -236,34 +283,61 @@ pub fn update(blog: String, slug: String, cl: ContentLen, form: LenientForm>().into_iter().filter_map(|t| Tag::build_activity(&conn, t).ok()).collect::>(); - post.update_tags(&conn, tags).expect("post::update: tags error"); + let tags = form + .tags + .split(',') + .map(|t| t.trim().to_camel_case()) + .filter(|t| !t.is_empty()) + .collect::>() + .into_iter() + .filter_map(|t| Tag::build_activity(&conn, t).ok()) + .collect::>(); + post.update_tags(&conn, tags) + .expect("post::update: tags error"); - let hashtags = hashtags.into_iter().map(|h| h.to_camel_case()).collect::>() - .into_iter().filter_map(|t| Tag::build_activity(&conn, t).ok()).collect::>(); - post.update_hashtags(&conn, hashtags).expect("post::update: hashtags error"); + let hashtags = hashtags + .into_iter() + .map(|h| h.to_camel_case()) + .collect::>() + .into_iter() + .filter_map(|t| Tag::build_activity(&conn, t).ok()) + .collect::>(); + post.update_hashtags(&conn, hashtags) + .expect("post::update: hashtags error"); if post.published { if newly_published { - let act = 
post.create_activity(&conn).expect("post::update: act error"); + let act = post + .create_activity(&conn) + .expect("post::update: act error"); let dest = User::one_by_instance(&*conn).expect("post::update: dest error"); worker.execute(move || broadcast(&user, act, dest)); } else { - let act = post.update_activity(&*conn).expect("post::update: act error"); + let act = post + .update_activity(&*conn) + .expect("post::update: act error"); let dest = User::one_by_instance(&*conn).expect("posts::update: dest error"); worker.execute(move || broadcast(&user, act, dest)); } } - Ok(Redirect::to(uri!(details: blog = blog, slug = new_slug, responding_to = _))) + Ok(Redirect::to( + uri!(details: blog = blog, slug = new_slug, responding_to = _), + )) } } else { let medias = Media::for_user(&*conn, user.id).expect("posts:update: medias error"); @@ -306,7 +380,12 @@ pub fn valid_slug(title: &str) -> Result<(), ValidationError> { } #[post("/~//new", data = "")] -pub fn create(blog_name: String, form: LenientForm, cl: ContentLen, rockets: PlumeRocket) -> Result> { +pub fn create( + blog_name: String, + form: LenientForm, + cl: ContentLen, + rockets: PlumeRocket, +) -> Result> { let conn = rockets.conn; let blog = Blog::find_by_fqn(&*conn, &blog_name).expect("post::create: blog error");; let slug = form.title.to_string().to_kebab_case(); @@ -314,86 +393,119 @@ pub fn create(blog_name: String, form: LenientForm, cl: ContentLen, let mut errors = match form.validate() { Ok(_) => ValidationErrors::new(), - Err(e) => e + Err(e) => e, }; if Post::find_by_slug(&*conn, &slug, blog.id).is_ok() { - errors.add("title", ValidationError { - code: Cow::from("existing_slug"), - message: Some(Cow::from("A post with the same title already exists.")), - params: HashMap::new() - }); + errors.add( + "title", + ValidationError { + code: Cow::from("existing_slug"), + message: Some(Cow::from("A post with the same title already exists.")), + params: HashMap::new(), + }, + ); } if errors.is_empty() { - if !user.is_author_in(&*conn, &blog).expect("post::create: is author in error") { + if !user + .is_author_in(&*conn, &blog) + .expect("post::create: is author in error") + { // actually it's not "Ok"… - return Ok(Redirect::to(uri!(super::blogs::details: name = blog_name, page = _))) + return Ok(Redirect::to( + uri!(super::blogs::details: name = blog_name, page = _), + )); } let (content, mentions, hashtags) = utils::md_to_html( form.content.to_string().as_ref(), - &Instance::get_local(&conn).expect("post::create: local instance error").public_domain + &Instance::get_local(&conn) + .expect("post::create: local instance error") + .public_domain, ); let searcher = rockets.searcher; - let post = Post::insert(&*conn, NewPost { - blog_id: blog.id, - slug: slug.to_string(), - title: form.title.to_string(), - content: SafeString::new(&content), - published: !form.draft, - license: form.license.clone(), - ap_url: "".to_string(), - creation_date: None, - subtitle: form.subtitle.clone(), - source: form.content.clone(), - cover_id: form.cover, + let post = Post::insert( + &*conn, + NewPost { + blog_id: blog.id, + slug: slug.to_string(), + title: form.title.to_string(), + content: SafeString::new(&content), + published: !form.draft, + license: form.license.clone(), + ap_url: "".to_string(), + creation_date: None, + subtitle: form.subtitle.clone(), + source: form.content.clone(), + cover_id: form.cover, }, &searcher, - ).expect("post::create: post save error"); + ) + .expect("post::create: post save error"); - PostAuthor::insert(&*conn, 
NewPostAuthor { - post_id: post.id, - author_id: user.id - }).expect("post::create: author save error"); + PostAuthor::insert( + &*conn, + NewPostAuthor { + post_id: post.id, + author_id: user.id, + }, + ) + .expect("post::create: author save error"); - let tags = form.tags.split(',') + let tags = form + .tags + .split(',') .map(|t| t.trim().to_camel_case()) .filter(|t| !t.is_empty()) .collect::>(); for tag in tags { - Tag::insert(&*conn, NewTag { - tag, - is_hashtag: false, - post_id: post.id - }).expect("post::create: tags save error"); + Tag::insert( + &*conn, + NewTag { + tag, + is_hashtag: false, + post_id: post.id, + }, + ) + .expect("post::create: tags save error"); } for hashtag in hashtags { - Tag::insert(&*conn, NewTag { - tag: hashtag.to_camel_case(), - is_hashtag: true, - post_id: post.id - }).expect("post::create: hashtags save error"); + Tag::insert( + &*conn, + NewTag { + tag: hashtag.to_camel_case(), + is_hashtag: true, + post_id: post.id, + }, + ) + .expect("post::create: hashtags save error"); } if post.published { for m in mentions { Mention::from_activity( &*conn, - &Mention::build_activity(&*conn, &m).expect("post::create: mention build error"), + &Mention::build_activity(&*conn, &m) + .expect("post::create: mention build error"), post.id, true, - true - ).expect("post::create: mention save error"); + true, + ) + .expect("post::create: mention save error"); } - let act = post.create_activity(&*conn).expect("posts::create: activity error"); + let act = post + .create_activity(&*conn) + .expect("posts::create: activity error"); let dest = User::one_by_instance(&*conn).expect("posts::create: dest error"); let worker = rockets.worker; worker.execute(move || broadcast(&user, act, dest)); } - Ok(Redirect::to(uri!(details: blog = blog_name, slug = slug, responding_to = _))) + Ok(Redirect::to( + uri!(details: blog = blog_name, slug = slug, responding_to = _), + )) } else { let medias = Media::for_user(&*conn, user.id).expect("posts::create: medias error"); let intl = rockets.intl; @@ -413,15 +525,25 @@ pub fn create(blog_name: String, form: LenientForm, cl: ContentLen, } #[post("/~///delete")] -pub fn delete(blog_name: String, slug: String, rockets: PlumeRocket) -> Result { +pub fn delete( + blog_name: String, + slug: String, + rockets: PlumeRocket, +) -> Result { let conn = rockets.conn; let user = rockets.user.unwrap(); let post = Blog::find_by_fqn(&*conn, &blog_name) .and_then(|blog| Post::find_by_slug(&*conn, &slug, blog.id)); if let Ok(post) = post { - if !post.get_authors(&*conn)?.into_iter().any(|a| a.id == user.id) { - return Ok(Redirect::to(uri!(details: blog = blog_name.clone(), slug = slug.clone(), responding_to = _))) + if !post + .get_authors(&*conn)? 
+ .into_iter() + .any(|a| a.id == user.id) + { + return Ok(Redirect::to( + uri!(details: blog = blog_name.clone(), slug = slug.clone(), responding_to = _), + )); } let searcher = rockets.searcher; @@ -432,10 +554,17 @@ pub fn delete(blog_name: String, slug: String, rockets: PlumeRocket) -> Result//reshare")] -pub fn create(blog: String, slug: String, user: User, conn: DbConn, worker: Worker) -> Result { +pub fn create( + blog: String, + slug: String, + user: User, + conn: DbConn, + worker: Worker, +) -> Result { let b = Blog::find_by_fqn(&*conn, &blog)?; let post = Post::find_by_slug(&*conn, &slug, b.id)?; @@ -32,13 +35,18 @@ pub fn create(blog: String, slug: String, user: User, conn: DbConn, worker: Work worker.execute(move || broadcast(&user, delete_act, dest)); } - Ok(Redirect::to(uri!(super::posts::details: blog = blog, slug = slug, responding_to = _))) + Ok(Redirect::to( + uri!(super::posts::details: blog = blog, slug = slug, responding_to = _), + )) } -#[post("/~///reshare", rank=1)] +#[post("/~///reshare", rank = 1)] pub fn create_auth(blog: String, slug: String, i18n: I18n) -> Flash { utils::requires_login( - &i18n!(i18n.catalog, "You need to be logged in order to reshare a post"), - uri!(create: blog = blog, slug = slug) + &i18n!( + i18n.catalog, + "You need to be logged in order to reshare a post" + ), + uri!(create: blog = blog, slug = slug), ) } diff --git a/src/routes/search.rs b/src/routes/search.rs index 7ddafb69..d9d27888 100644 --- a/src/routes/search.rs +++ b/src/routes/search.rs @@ -2,13 +2,11 @@ use chrono::offset::Utc; use rocket::request::Form; use rocket_i18n::I18n; -use plume_models::{ - db_conn::DbConn, users::User, - search::Query}; +use plume_models::{db_conn::DbConn, search::Query, users::User}; use routes::Page; +use std::str::FromStr; use template_utils::Ructe; use Searcher; -use std::str::FromStr; #[derive(Default, FromForm)] pub struct SearchQuery { @@ -53,12 +51,19 @@ macro_rules! 
param_to_query { } } - #[get("/search?")] -pub fn search(query: Option>, conn: DbConn, searcher: Searcher, user: Option, intl: I18n) -> Ructe { +pub fn search( + query: Option>, + conn: DbConn, + searcher: Searcher, + user: Option, + intl: I18n, +) -> Ructe { let query = query.map(|f| f.into_inner()).unwrap_or_default(); let page = query.page.unwrap_or_default(); - let mut parsed_query = Query::from_str(&query.q.as_ref().map(|q| q.as_str()).unwrap_or_default()).unwrap_or_default(); + let mut parsed_query = + Query::from_str(&query.q.as_ref().map(|q| q.as_str()).unwrap_or_default()) + .unwrap_or_default(); param_to_query!(query, parsed_query; normal: title, subtitle, content, tag, instance, author, blog, lang, license; diff --git a/src/routes/session.rs b/src/routes/session.rs index 6157ed8e..c0dc5644 100644 --- a/src/routes/session.rs +++ b/src/routes/session.rs @@ -1,22 +1,26 @@ use lettre::Transport; -use rocket::{ - State, - http::{Cookie, Cookies, SameSite, uri::Uri}, - response::Redirect, - request::{LenientForm, FlashMessage, Form} -}; use rocket::http::ext::IntoOwned; -use rocket_i18n::I18n; -use std::{borrow::Cow, sync::{Arc, Mutex}, time::Instant}; -use validator::{Validate, ValidationError, ValidationErrors}; -use template_utils::Ructe; - -use plume_models::{ - BASE_URL, Error, - db_conn::DbConn, - users::{User, AUTH_COOKIE} +use rocket::{ + http::{uri::Uri, Cookie, Cookies, SameSite}, + request::{FlashMessage, Form, LenientForm}, + response::Redirect, + State, }; +use rocket_i18n::I18n; +use std::{ + borrow::Cow, + sync::{Arc, Mutex}, + time::Instant, +}; +use template_utils::Ructe; +use validator::{Validate, ValidationError, ValidationErrors}; + use mail::{build_mail, Mailer}; +use plume_models::{ + db_conn::DbConn, + users::{User, AUTH_COOKIE}, + Error, BASE_URL, +}; use routes::errors::ErrorPage; #[get("/login?")] @@ -34,16 +38,22 @@ pub struct LoginForm { #[validate(length(min = "1", message = "We need an email or a username to identify you"))] pub email_or_name: String, #[validate(length(min = "1", message = "Your password can't be empty"))] - pub password: String + pub password: String, } #[post("/login", data = "")] -pub fn create(conn: DbConn, form: LenientForm, flash: Option, mut cookies: Cookies, intl: I18n) -> Result { +pub fn create( + conn: DbConn, + form: LenientForm, + flash: Option, + mut cookies: Cookies, + intl: I18n, +) -> Result { let user = User::find_by_email(&*conn, &form.email_or_name) .or_else(|_| User::find_by_fqn(&*conn, &form.email_or_name)); let mut errors = match form.validate() { Ok(_) => ValidationErrors::new(), - Err(e) => e + Err(e) => e, }; let user_id = if let Ok(user) = user { @@ -58,7 +68,9 @@ pub fn create(conn: DbConn, form: LenientForm, flash: Option, flash: Option>>, form: Form, - requests: State>>> + requests: State>>>, ) -> Ructe { let mut requests = requests.lock().unwrap(); // Remove outdated requests (more than 1 day old) to avoid the list to grow too much requests.retain(|r| r.creation_date.elapsed().as_secs() < 24 * 60 * 60); - if User::find_by_email(&*conn, &form.email).is_ok() && !requests.iter().any(|x| x.mail == form.email.clone()) { + if User::find_by_email(&*conn, &form.email).is_ok() + && !requests.iter().any(|x| x.mail == form.email.clone()) + { let id = plume_common::utils::random_hex(); requests.push(ResetRequest { @@ -159,22 +179,35 @@ pub fn password_reset_request( if let Some(message) = build_mail( form.email.clone(), i18n!(intl.catalog, "Password reset"), - i18n!(intl.catalog, "Here is the link to reset your 
password: {0}"; link) + i18n!(intl.catalog, "Here is the link to reset your password: {0}"; link), ) { if let Some(ref mut mail) = *mail.lock().unwrap() { - mail - .send(message.into()) - .map_err(|_| eprintln!("Couldn't send password reset mail")).ok(); } + mail.send(message.into()) + .map_err(|_| eprintln!("Couldn't send password reset mail")) + .ok(); + } } } - render!(session::password_reset_request_ok( - &(&*conn, &intl.catalog, None) - )) + render!(session::password_reset_request_ok(&( + &*conn, + &intl.catalog, + None + ))) } #[get("/password-reset/")] -pub fn password_reset_form(conn: DbConn, intl: I18n, token: String, requests: State>>>) -> Result { - requests.lock().unwrap().iter().find(|x| x.id == token.clone()).ok_or(Error::NotFound)?; +pub fn password_reset_form( + conn: DbConn, + intl: I18n, + token: String, + requests: State>>>, +) -> Result { + requests + .lock() + .unwrap() + .iter() + .find(|x| x.id == token.clone()) + .ok_or(Error::NotFound)?; Ok(render!(session::password_reset( &(&*conn, &intl.catalog, None), &NewPasswordForm::default(), @@ -183,13 +216,11 @@ pub fn password_reset_form(conn: DbConn, intl: I18n, token: String, requests: St } #[derive(FromForm, Default, Validate)] -#[validate( - schema( - function = "passwords_match", - skip_on_field_errors = "false", - message = "Passwords are not matching" - ) -)] +#[validate(schema( + function = "passwords_match", + skip_on_field_errors = "false", + message = "Passwords are not matching" +))] pub struct NewPasswordForm { pub password: String, pub password_confirmation: String, @@ -209,19 +240,28 @@ pub fn password_reset( intl: I18n, token: String, requests: State>>>, - form: Form + form: Form, ) -> Result { form.validate() .and_then(|_| { let mut requests = requests.lock().unwrap(); - let req = requests.iter().find(|x| x.id == token.clone()).ok_or_else(|| to_validation(0))?.clone(); - if req.creation_date.elapsed().as_secs() < 60 * 60 * 2 { // Reset link is only valid for 2 hours + let req = requests + .iter() + .find(|x| x.id == token.clone()) + .ok_or_else(|| to_validation(0))? + .clone(); + if req.creation_date.elapsed().as_secs() < 60 * 60 * 2 { + // Reset link is only valid for 2 hours requests.retain(|r| *r != req); let user = User::find_by_email(&*conn, &req.mail).map_err(to_validation)?; user.reset_password(&*conn, &form.password).ok(); - Ok(Redirect::to(uri!(new: m = i18n!(intl.catalog, "Your password was successfully reset.")))) + Ok(Redirect::to(uri!( + new: m = i18n!(intl.catalog, "Your password was successfully reset.") + ))) } else { - Ok(Redirect::to(uri!(new: m = i18n!(intl.catalog, "Sorry, but the link expired. Try again")))) + Ok(Redirect::to(uri!( + new: m = i18n!(intl.catalog, "Sorry, but the link expired. 
Try again") + ))) } }) .map_err(|err| { @@ -235,10 +275,13 @@ pub fn password_reset( fn to_validation(_: T) -> ValidationErrors { let mut errors = ValidationErrors::new(); - errors.add("", ValidationError { - code: Cow::from("server_error"), - message: Some(Cow::from("An unknown error occured")), - params: std::collections::HashMap::new() - }); + errors.add( + "", + ValidationError { + code: Cow::from("server_error"), + message: Some(Cow::from("An unknown error occured")), + params: std::collections::HashMap::new(), + }, + ); errors } diff --git a/src/routes/tags.rs b/src/routes/tags.rs index 477a0b6e..d406a413 100644 --- a/src/routes/tags.rs +++ b/src/routes/tags.rs @@ -1,15 +1,17 @@ use rocket_i18n::I18n; -use plume_models::{ - db_conn::DbConn, - posts::Post, - users::User, -}; -use routes::{Page, errors::ErrorPage}; +use plume_models::{db_conn::DbConn, posts::Post, users::User}; +use routes::{errors::ErrorPage, Page}; use template_utils::Ructe; #[get("/tag/?")] -pub fn tag(user: Option, conn: DbConn, name: String, page: Option, intl: I18n) -> Result { +pub fn tag( + user: Option, + conn: DbConn, + name: String, + page: Option, + intl: I18n, +) -> Result { let page = page.unwrap_or_default(); let posts = Post::list_by_tag(&*conn, name.clone(), page.limits())?; Ok(render!(tags::index( diff --git a/src/routes/user.rs b/src/routes/user.rs index 018364bc..a6ca6e58 100644 --- a/src/routes/user.rs +++ b/src/routes/user.rs @@ -19,14 +19,20 @@ use plume_common::activity_pub::{ }; use plume_common::utils; use plume_models::{ + blogs::Blog, + db_conn::DbConn, + follows, + headers::Headers, + instance::Instance, + posts::{LicensedArticle, Post}, + reshares::Reshare, + users::*, Error, - blogs::Blog, db_conn::DbConn, follows, headers::Headers, instance::Instance, posts::{LicensedArticle, Post}, - reshares::Reshare, users::*, }; -use routes::{Page, PlumeRocket, errors::ErrorPage}; +use routes::{errors::ErrorPage, Page, PlumeRocket}; use template_utils::Ructe; -use Worker; use Searcher; +use Worker; #[get("/me")] pub fn me(user: Option) -> Result> { @@ -56,19 +62,21 @@ pub fn details( let user_clone = user.clone(); let searcher = searcher.clone(); worker.execute(move || { - for create_act in user_clone.fetch_outbox::().expect("Remote user: outbox couldn't be fetched") { + for create_act in user_clone + .fetch_outbox::() + .expect("Remote user: outbox couldn't be fetched") + { match create_act.create_props.object_object::() { Ok(article) => { Post::from_activity( &(&*fetch_articles_conn, &searcher), article, user_clone.clone().into_id(), - ).expect("Article from remote user couldn't be saved"); + ) + .expect("Article from remote user couldn't be saved"); println!("Fetched article from remote user"); } - Err(e) => { - println!("Error while fetching articles in background: {:?}", e) - } + Err(e) => println!("Error while fetching articles in background: {:?}", e), } } }); @@ -76,8 +84,12 @@ pub fn details( // Fetch followers let user_clone = user.clone(); worker.execute(move || { - for user_id in user_clone.fetch_followers_ids().expect("Remote user: fetching followers error") { - let follower = User::from_url(&*fetch_followers_conn, &user_id).expect("user::details: Couldn't fetch follower"); + for user_id in user_clone + .fetch_followers_ids() + .expect("Remote user: fetching followers error") + { + let follower = User::from_url(&*fetch_followers_conn, &user_id) + .expect("user::details: Couldn't fetch follower"); follows::Follow::insert( &*fetch_followers_conn, follows::NewFollow { @@ -85,7 +97,8 @@ pub 
fn details( following_id: user_clone.id, ap_url: String::new(), }, - ).expect("Couldn't save follower for remote user"); + ) + .expect("Couldn't save follower for remote user"); } }); @@ -93,7 +106,9 @@ pub fn details( let user_clone = user.clone(); if user.needs_update() { worker.execute(move || { - user_clone.refetch(&*update_conn).expect("Couldn't update user info"); + user_clone + .refetch(&*update_conn) + .expect("Couldn't update user info"); }); } } @@ -103,11 +118,16 @@ pub fn details( Ok(render!(users::details( &(&*conn, &intl.catalog, account.clone()), user.clone(), - account.and_then(|x| x.is_following(&*conn, user.id).ok()).unwrap_or(false), + account + .and_then(|x| x.is_following(&*conn, user.id).ok()) + .unwrap_or(false), user.instance_id != Instance::get_local(&*conn)?.id, user.get_instance(&*conn)?.public_domain, recents, - reshares.into_iter().filter_map(|r| r.get_post(&*conn).ok()).collect() + reshares + .into_iter() + .filter_map(|r| r.get_post(&*conn).ok()) + .collect() ))) } @@ -124,19 +144,25 @@ pub fn dashboard(user: User, conn: DbConn, intl: I18n) -> Result Flash { utils::requires_login( - &i18n!(i18n.catalog, "You need to be logged in order to access your dashboard"), + &i18n!( + i18n.catalog, + "You need to be logged in order to access your dashboard" + ), uri!(dashboard), ) } #[post("/@//follow")] -pub fn follow(name: String, conn: DbConn, user: User, worker: Worker) -> Result { +pub fn follow( + name: String, + conn: DbConn, + user: User, + worker: Worker, +) -> Result { let target = User::find_by_fqn(&*conn, &name)?; if let Ok(follow) = follows::Follow::find(&*conn, user.id, target.id) { let delete_act = follow.delete(&*conn)?; - worker.execute(move || { - broadcast(&user, delete_act, vec![target]) - }); + worker.execute(move || broadcast(&user, delete_act, vec![target])); } else { let f = follows::Follow::insert( &*conn, @@ -157,13 +183,22 @@ pub fn follow(name: String, conn: DbConn, user: User, worker: Worker) -> Result< #[post("/@//follow", rank = 2)] pub fn follow_auth(name: String, i18n: I18n) -> Flash { utils::requires_login( - &i18n!(i18n.catalog, "You need to be logged in order to subscribe to someone"), + &i18n!( + i18n.catalog, + "You need to be logged in order to subscribe to someone" + ), uri!(follow: name = name), ) } #[get("/@//followers?", rank = 2)] -pub fn followers(name: String, conn: DbConn, account: Option, page: Option, intl: I18n) -> Result { +pub fn followers( + name: String, + conn: DbConn, + account: Option, + page: Option, + intl: I18n, +) -> Result { let page = page.unwrap_or_default(); let user = User::find_by_fqn(&*conn, &name)?; let followers_count = user.count_followers(&*conn)?; @@ -171,7 +206,9 @@ pub fn followers(name: String, conn: DbConn, account: Option, page: Option Ok(render!(users::followers( &(&*conn, &intl.catalog, account.clone()), user.clone(), - account.and_then(|x| x.is_following(&*conn, user.id).ok()).unwrap_or(false), + account + .and_then(|x| x.is_following(&*conn, user.id).ok()) + .unwrap_or(false), user.instance_id != Instance::get_local(&*conn)?.id, user.get_instance(&*conn)?.public_domain, user.get_followers_page(&*conn, page.limits())?, @@ -181,7 +218,13 @@ pub fn followers(name: String, conn: DbConn, account: Option, page: Option } #[get("/@//followed?", rank = 2)] -pub fn followed(name: String, conn: DbConn, account: Option, page: Option, intl: I18n) -> Result { +pub fn followed( + name: String, + conn: DbConn, + account: Option, + page: Option, + intl: I18n, +) -> Result { let page = 
page.unwrap_or_default(); let user = User::find_by_fqn(&*conn, &name)?; let followed_count = user.count_followed(&*conn)?; @@ -189,7 +232,9 @@ pub fn followed(name: String, conn: DbConn, account: Option, page: Option< Ok(render!(users::followed( &(&*conn, &intl.catalog, account.clone()), user.clone(), - account.and_then(|x| x.is_following(&*conn, user.id).ok()).unwrap_or(false), + account + .and_then(|x| x.is_following(&*conn, user.id).ok()) + .unwrap_or(false), user.instance_id != Instance::get_local(&*conn)?.id, user.get_instance(&*conn)?.public_domain, user.get_followed_page(&*conn, page.limits())?, @@ -238,7 +283,10 @@ pub fn edit(name: String, user: User, conn: DbConn, intl: I18n) -> Result/edit", rank = 2)] pub fn edit_auth(name: String, i18n: I18n) -> Flash { utils::requires_login( - &i18n!(i18n.catalog, "You need to be logged in order to edit your profile"), + &i18n!( + i18n.catalog, + "You need to be logged in order to edit your profile" + ), uri!(edit: name = name), ) } @@ -251,18 +299,41 @@ pub struct UpdateUserForm { } #[put("/@/<_name>/edit", data = "")] -pub fn update(_name: String, conn: DbConn, user: User, form: LenientForm) -> Result { +pub fn update( + _name: String, + conn: DbConn, + user: User, + form: LenientForm, +) -> Result { user.update( &*conn, - if !form.display_name.is_empty() { form.display_name.clone() } else { user.display_name.clone() }, - if !form.email.is_empty() { form.email.clone() } else { user.email.clone().unwrap_or_default() }, - if !form.summary.is_empty() { form.summary.clone() } else { user.summary.to_string() }, + if !form.display_name.is_empty() { + form.display_name.clone() + } else { + user.display_name.clone() + }, + if !form.email.is_empty() { + form.email.clone() + } else { + user.email.clone().unwrap_or_default() + }, + if !form.summary.is_empty() { + form.summary.clone() + } else { + user.summary.to_string() + }, )?; Ok(Redirect::to(uri!(me))) } #[post("/@//delete")] -pub fn delete(name: String, conn: DbConn, user: User, mut cookies: Cookies, searcher: Searcher) -> Result { +pub fn delete( + name: String, + conn: DbConn, + user: User, + mut cookies: Cookies, + searcher: Searcher, +) -> Result { let account = User::find_by_fqn(&*conn, &name)?; if user.id == account.id { account.delete(&*conn, &searcher)?; @@ -278,32 +349,25 @@ pub fn delete(name: String, conn: DbConn, user: User, mut cookies: Cookies, sear } #[derive(Default, FromForm, Validate)] -#[validate( - schema( - function = "passwords_match", - skip_on_field_errors = "false", - message = "Passwords are not matching" - ) -)] +#[validate(schema( + function = "passwords_match", + skip_on_field_errors = "false", + message = "Passwords are not matching" +))] pub struct NewUserForm { - #[validate(length(min = "1", message = "Username can't be empty"), - custom( function = "validate_username", message = "User name is not allowed to contain any of < > & @ ' or \""))] + #[validate( + length(min = "1", message = "Username can't be empty"), + custom( + function = "validate_username", + message = "User name is not allowed to contain any of < > & @ ' or \"" + ) + )] pub username: String, #[validate(email(message = "Invalid email"))] pub email: String, - #[validate( - length( - min = "8", - message = "Password should be at least 8 characters long" - ) - )] + #[validate(length(min = "8", message = "Password should be at least 8 characters long"))] pub password: String, - #[validate( - length( - min = "8", - message = "Password should be at least 8 characters long" - ) - )] + #[validate(length(min 
= "8", message = "Password should be at least 8 characters long"))] pub password_confirmation: String, } @@ -325,17 +389,20 @@ pub fn validate_username(username: &str) -> Result<(), ValidationError> { fn to_validation(_: Error) -> ValidationErrors { let mut errors = ValidationErrors::new(); - errors.add("", ValidationError { - code: Cow::from("server_error"), - message: Some(Cow::from("An unknown error occured")), - params: HashMap::new() - }); + errors.add( + "", + ValidationError { + code: Cow::from("server_error"), + message: Some(Cow::from("An unknown error occured")), + params: HashMap::new(), + }, + ); errors } #[post("/users/new", data = "")] pub fn create(conn: DbConn, form: LenientForm, intl: I18n) -> Result { - if !Instance::get_local(&*conn) + if !Instance::get_local(&*conn) .map(|i| i.open_registrations) .unwrap_or(true) { @@ -355,13 +422,16 @@ pub fn create(conn: DbConn, form: LenientForm, intl: I18n) -> Resul "", form.email.to_string(), User::hash_pass(&form.password).map_err(to_validation)?, - ).map_err(to_validation)?; + ) + .map_err(to_validation)?; Ok(Redirect::to(uri!(super::session::new: m = _))) }) - .map_err(|err| { + .map_err(|err| { render!(users::new( &(&*conn, &intl.catalog, None), - Instance::get_local(&*conn).map(|i| i.open_registrations).unwrap_or(true), + Instance::get_local(&*conn) + .map(|i| i.open_registrations) + .unwrap_or(true), &form, err )) @@ -395,21 +465,27 @@ pub fn inbox( ))))?; let actor = User::from_url(&conn, actor_id).expect("user::inbox: user error"); - if !verify_http_headers(&actor, &headers.0, &sig).is_secure() - && !act.clone().verify(&actor) - { + if !verify_http_headers(&actor, &headers.0, &sig).is_secure() && !act.clone().verify(&actor) { // maybe we just know an old key? - actor.refetch(&conn).and_then(|_| User::get(&conn, actor.id)) - .and_then(|actor| if verify_http_headers(&actor, &headers.0, &sig).is_secure() - || act.clone().verify(&actor) - { - Ok(()) - } else { - Err(Error::Signature) - }) + actor + .refetch(&conn) + .and_then(|_| User::get(&conn, actor.id)) + .and_then(|actor| { + if verify_http_headers(&actor, &headers.0, &sig).is_secure() + || act.clone().verify(&actor) + { + Ok(()) + } else { + Err(Error::Signature) + } + }) .map_err(|_| { - println!("Rejected invalid activity supposedly from {}, with headers {:?}", actor.username, headers.0); - status::BadRequest(Some("Invalid signature"))})?; + println!( + "Rejected invalid activity supposedly from {}, with headers {:?}", + actor.username, headers.0 + ); + status::BadRequest(Some("Invalid signature")) + })?; } if Instance::is_blocked(&*conn, actor_id).map_err(|_| None)? { @@ -432,18 +508,20 @@ pub fn ap_followers( ) -> Option> { let user = User::find_by_fqn(&*conn, &name).ok()?; let followers = user - .get_followers(&*conn).ok()? + .get_followers(&*conn) + .ok()? .into_iter() .map(|f| Id::new(f.ap_url)) .collect::>(); let mut coll = OrderedCollection::default(); coll.object_props - .set_id_string(user.followers_endpoint).ok()?; + .set_id_string(user.followers_endpoint) + .ok()?; coll.collection_props - .set_total_items_u64(followers.len() as u64).ok()?; - coll.collection_props - .set_items_link_vec(followers).ok()?; + .set_total_items_u64(followers.len() as u64) + .ok()?; + coll.collection_props.set_items_link_vec(followers).ok()?; Some(ActivityStream::new(coll)) } @@ -456,7 +534,8 @@ pub fn atom_feed(name: String, conn: DbConn) -> Option> { .unwrap() .compute_box("~", &name, "atom.xml")) .entries( - Post::get_recents_for_author(&*conn, &author, 15).ok()? 
+ Post::get_recents_for_author(&*conn, &author, 15) + .ok()? .into_iter() .map(|p| super::post_to_atom(p, &*conn)) .collect::>(), diff --git a/src/routes/well_known.rs b/src/routes/well_known.rs index 89b0c895..eed1ee47 100644 --- a/src/routes/well_known.rs +++ b/src/routes/well_known.rs @@ -3,32 +3,42 @@ use rocket::response::Content; use serde_json; use webfinger::*; -use plume_models::{BASE_URL, ap_url, db_conn::DbConn, blogs::Blog, users::User}; +use plume_models::{ap_url, blogs::Blog, db_conn::DbConn, users::User, BASE_URL}; #[get("/.well-known/nodeinfo")] pub fn nodeinfo() -> Content { - Content(ContentType::new("application", "jrd+json"), json!({ - "links": [ - { - "rel": "http://nodeinfo.diaspora.software/ns/schema/2.0", - "href": ap_url(&format!("{domain}/nodeinfo/2.0", domain = BASE_URL.as_str())) - }, - { - "rel": "http://nodeinfo.diaspora.software/ns/schema/2.1", - "href": ap_url(&format!("{domain}/nodeinfo/2.1", domain = BASE_URL.as_str())) - } - ] - }).to_string()) + Content( + ContentType::new("application", "jrd+json"), + json!({ + "links": [ + { + "rel": "http://nodeinfo.diaspora.software/ns/schema/2.0", + "href": ap_url(&format!("{domain}/nodeinfo/2.0", domain = BASE_URL.as_str())) + }, + { + "rel": "http://nodeinfo.diaspora.software/ns/schema/2.1", + "href": ap_url(&format!("{domain}/nodeinfo/2.1", domain = BASE_URL.as_str())) + } + ] + }) + .to_string(), + ) } #[get("/.well-known/host-meta")] pub fn host_meta() -> String { - format!(r#" + format!( + r#" - "#, url = ap_url(&format!("{domain}/.well-known/webfinger?resource={{uri}}", domain = BASE_URL.as_str()))) + "#, + url = ap_url(&format!( + "{domain}/.well-known/webfinger?resource={{uri}}", + domain = BASE_URL.as_str() + )) + ) } struct WebfingerResolver; @@ -41,20 +51,31 @@ impl Resolver for WebfingerResolver { fn find(acct: String, conn: DbConn) -> Result { User::find_by_fqn(&*conn, &acct) .and_then(|usr| usr.webfinger(&*conn)) - .or_else(|_| Blog::find_by_fqn(&*conn, &acct) - .and_then(|blog| blog.webfinger(&*conn)) - .or(Err(ResolverError::NotFound))) + .or_else(|_| { + Blog::find_by_fqn(&*conn, &acct) + .and_then(|blog| blog.webfinger(&*conn)) + .or(Err(ResolverError::NotFound)) + }) } } #[get("/.well-known/webfinger?")] pub fn webfinger(resource: String, conn: DbConn) -> Content { - match WebfingerResolver::endpoint(resource, conn).and_then(|wf| serde_json::to_string(&wf).map_err(|_| ResolverError::NotFound)) { + match WebfingerResolver::endpoint(resource, conn) + .and_then(|wf| serde_json::to_string(&wf).map_err(|_| ResolverError::NotFound)) + { Ok(wf) => Content(ContentType::new("application", "jrd+json"), wf), - Err(err) => Content(ContentType::new("text", "plain"), String::from(match err { - ResolverError::InvalidResource => "Invalid resource. Make sure to request an acct: URI", - ResolverError::NotFound => "Requested resource was not found", - ResolverError::WrongInstance => "This is not the instance of the requested resource" - })) + Err(err) => Content( + ContentType::new("text", "plain"), + String::from(match err { + ResolverError::InvalidResource => { + "Invalid resource. 
Make sure to request an acct: URI" + } + ResolverError::NotFound => "Requested resource was not found", + ResolverError::WrongInstance => { + "This is not the instance of the requested resource" + } + }), + ), } } diff --git a/src/template_utils.rs b/src/template_utils.rs index 5b407607..825a460e 100644 --- a/src/template_utils.rs +++ b/src/template_utils.rs @@ -1,9 +1,9 @@ -use plume_models::{Connection, notifications::*, users::User}; +use plume_models::{notifications::*, users::User, Connection}; -use rocket::http::{Method, Status}; use rocket::http::hyper::header::{ETag, EntityTag}; +use rocket::http::{Method, Status}; use rocket::request::Request; -use rocket::response::{self, Response, Responder, content::Html as HtmlCt}; +use rocket::response::{self, content::Html as HtmlCt, Responder, Response}; use rocket_i18n::Catalog; use std::collections::hash_map::DefaultHasher; use std::hash::Hasher; @@ -13,7 +13,7 @@ pub use askama_escape::escape; pub static CACHE_NAME: &str = env!("CACHE_ID"); -pub type BaseContext<'a> = &'a(&'a Connection, &'a Catalog, Option); +pub type BaseContext<'a> = &'a (&'a Connection, &'a Catalog, Option); #[derive(Debug)] pub struct Ructe(pub Vec); @@ -27,7 +27,10 @@ impl<'r> Responder<'r> for Ructe { let mut hasher = DefaultHasher::new(); hasher.write(&self.0); let etag = format!("{:x}", hasher.finish()); - if r.headers().get("If-None-Match").any(|s| s[1..s.len()-1] == etag) { + if r.headers() + .get("If-None-Match") + .any(|s| s[1..s.len() - 1] == etag) + { Response::build() .status(Status::NotModified) .header(ETag(EntityTag::strong(etag))) @@ -85,7 +88,13 @@ impl Size { } } -pub fn avatar(conn: &Connection, user: &User, size: Size, pad: bool, catalog: &Catalog) -> Html { +pub fn avatar( + conn: &Connection, + user: &User, + size: Size, + pad: bool, + catalog: &Catalog, +) -> Html { let name = escape(&user.name()).to_string(); Html(format!( r#"
Html { pub fn paginate(catalog: &Catalog, page: i32, total: i32) -> Html { paginate_param(catalog, page, total, None) } -pub fn paginate_param(catalog: &Catalog, page: i32, total: i32, param: Option) -> Html { +pub fn paginate_param( + catalog: &Catalog, + page: i32, + total: i32, + param: Option, +) -> Html { let mut res = String::new(); - let param = param.map(|mut p| {p.push('&'); p}).unwrap_or_default(); + let param = param + .map(|mut p| { + p.push('&'); + p + }) + .unwrap_or_default(); res.push_str(r#""); Html(res) } pub fn encode_query_param(param: &str) -> String { - param.chars().map(|c| match c { - '+' => Ok("%2B"), - ' ' => Err('+'), - c => Err(c), - }).fold(String::new(), |mut s,r| { - match r { - Ok(r) => s.push_str(r), - Err(r) => s.push(r), - }; - s - }) + param + .chars() + .map(|c| match c { + '+' => Ok("%2B"), + ' ' => Err('+'), + c => Err(c), + }) + .fold(String::new(), |mut s, r| { + match r { + Ok(r) => s.push_str(r), + Err(r) => s.push(r), + }; + s + }) } #[macro_export] macro_rules! icon { ($name:expr) => { - Html(concat!(r#"")) - } + Html(concat!( + r#"" + )) + }; } macro_rules! input { - ($catalog:expr, $name:tt ($kind:tt), $label:expr, $optional:expr, $details:expr, $form:expr, $err:expr, $props:expr) => { - { - use validator::ValidationErrorsKind; - use std::borrow::Cow; - let cat = $catalog; + ($catalog:expr, $name:tt ($kind:tt), $label:expr, $optional:expr, $details:expr, $form:expr, $err:expr, $props:expr) => {{ + use std::borrow::Cow; + use validator::ValidationErrorsKind; + let cat = $catalog; - Html(format!(r#" + Html(format!( + r#"