Plume/plume-models/src/users.rs

1837 lines
61 KiB
Rust
Raw Normal View History

2020-01-21 07:02:03 +01:00
use crate::{
2021-01-11 21:27:52 +01:00
ap_url, blocklisted_emails::BlocklistedEmail, blogs::Blog, db_conn::DbConn, follows::Follow,
instance::*, medias::Media, notifications::Notification, post_authors::PostAuthor, posts::Post,
safe_string::SafeString, schema::users, timeline::Timeline, Connection, Error, Result,
UserEvent::*, CONFIG, ITEMS_PER_PAGE, USER_CHAN,
2020-01-21 07:02:03 +01:00
};
2018-06-10 13:13:07 +02:00
use activitypub::{
activity::Delete,
actor::Person,
collection::OrderedCollection,
object::{Image, Tombstone},
Activity, CustomObject, Endpoint,
2018-05-16 20:20:44 +02:00
};
2022-02-12 17:43:01 +01:00
use activitystreams::{
2022-02-13 09:41:52 +01:00
activity::Delete as Delete07,
2022-05-02 05:58:01 +02:00
actor::{ApActor, AsApActor},
2022-02-13 09:41:52 +01:00
actor::{ApActor as ApActor07, Endpoints as Endpoints07, Person as Person07},
base::{AnyBase, Base},
collection::{OrderedCollection as OrderedCollection07, OrderedCollectionPage},
2022-02-13 09:41:52 +01:00
iri_string::types::IriString,
2022-02-13 18:38:13 +01:00
markers::Activity as Activity07,
2022-05-02 05:58:01 +02:00
object::{kind::ImageType, AsObject as _, Image as Image07, Tombstone as Tombstone07},
2022-02-13 09:41:52 +01:00
prelude::*,
2022-02-12 17:43:01 +01:00
};
use chrono::{NaiveDateTime, Utc};
Add support for generic timeline (#525) * Begin adding support for timeline * fix some bugs with parser * fmt * add error reporting for parser * add tests for timeline query parser * add rejection tests for parse * begin adding support for lists also run migration before compiling, so schema.rs is up to date * add sqlite migration * end adding lists still miss tests and query integration * cargo fmt * try to add some tests * Add some constraint to db, and fix list test and refactor other tests to use begin_transaction * add more tests for lists * add support for lists in query executor * add keywords for including/excluding boosts and likes * cargo fmt * add function to list lists used by query will make it easier to warn users when creating timeline with unknown lists * add lang support * add timeline creation error message when using unexisting lists * Update .po files * WIP: interface for timelines * don't use diesel for migrations not sure how it passed the ci on the other branch * add some tests for timeline add an int representing the order of timelines (first one will be on top, second just under...) 
use first() instead of limit(1).get().into_iter().nth(0) remove migrations from build artifacts as they are now compiled in * cargo fmt * remove timeline order * fix tests * add tests for timeline creation failure * cargo fmt * add tests for timelines * add test for matching direct lists and keywords * add test for language filtering * Add a more complex test for Timeline::matches, and fix TQ::matches for TQ::Or * Make the main crate compile + FMT * Use the new timeline system - Replace the old "feed" system with timelines - Display all timelines someone can access on their home page (either their personal ones, or instance timelines) - Remove functions that were used to get user/local/federated feed - Add new posts to timelines - Create a default timeline called "My feed" for everyone, and "Local feed"/"Federated feed" with timelines @fdb-hiroshima I don't know if that's how you pictured it? If you imagined it differently I can of course make changes. I hope I didn't forgot anything… * Cargo fmt * Try to fix the migration * Fix tests * Fix the test (for real this time ?) * Fix the tests ? + fmt * Use Kind::Like and Kind::Reshare when needed * Forgot to run cargo fmt once again * revert translations * fix reviewed stuff * reduce code duplication by macros * cargo fmt
2019-10-07 19:08:20 +02:00
use diesel::{self, BelongingToDsl, ExpressionMethods, OptionalExtension, QueryDsl, RunQueryDsl};
2020-10-04 12:18:54 +02:00
use ldap3::{LdapConn, Scope, SearchEntry};
2018-05-19 09:39:59 +02:00
use openssl::{
hash::MessageDigest,
pkey::{PKey, Private},
rsa::Rsa,
sign,
2018-05-19 09:39:59 +02:00
};
2020-01-21 07:02:03 +01:00
use plume_common::{
activity_pub::{
2022-05-02 10:43:03 +02:00
inbox::{AsActor, AsObject, FromId},
request::get,
2021-11-24 13:41:44 +01:00
sign::{gen_keypair, Error as SignError, Result as SignResult, Signer},
2022-02-13 09:41:52 +01:00
ActivityStream, ApSignature, ApSignature07, CustomPerson as CustomPerson07, Id, IntoId,
PublicKey, PublicKey07, ToAsString, ToAsUri, PUBLIC_VISIBILITY,
2020-01-21 07:02:03 +01:00
},
utils,
};
use riker::actors::{Publish, Tell};
2018-05-19 09:39:59 +02:00
use rocket::{
outcome::IntoOutcome,
2018-05-19 09:39:59 +02:00
request::{self, FromRequest, Request},
};
2019-03-20 17:56:17 +01:00
use std::{
cmp::PartialEq,
hash::{Hash, Hasher},
2021-01-31 14:55:28 +01:00
sync::Arc,
2019-03-20 17:56:17 +01:00
};
2018-06-18 23:50:40 +02:00
use webfinger::*;
2018-04-24 11:21:39 +02:00
pub type CustomPerson = CustomObject<ApSignature, Person>;
/// Access level of an account; stored as its integer value in `users.role`.
pub enum Role {
    Admin = 0,
    Moderator = 1,
    Normal = 2,
    /// NOTE(review): presumably the actor representing the local instance
    /// itself — confirm against callers.
    Instance = 3,
}
/// A local or remote (federated) account, one row of the `users` table.
///
/// `treat_none_as_null` makes `AsChangeset` write NULL for `None` fields
/// (e.g. clearing `avatar_id`) instead of skipping them.
#[derive(Queryable, Identifiable, Clone, Debug, AsChangeset)]
#[changeset_options(treat_none_as_null = "true")]
pub struct User {
    pub id: i32,
    /// Handle without the `@instance` suffix.
    pub username: String,
    pub display_name: String,
    /// ActivityPub outbox endpoint URL.
    pub outbox_url: String,
    /// ActivityPub inbox endpoint URL.
    pub inbox_url: String,
    /// Raw biography; the sanitized rendering lives in `summary_html`.
    pub summary: String,
    /// Only meaningful for local accounts.
    pub email: Option<String>,
    /// bcrypt hash (see `hash_pass`); `None` for remote or LDAP-only accounts.
    pub hashed_password: Option<String>,
    pub instance_id: i32,
    pub creation_date: NaiveDateTime,
    /// Canonical ActivityPub ID of this actor.
    pub ap_url: String,
    /// PEM-encoded RSA private key; absent when we cannot sign for this
    /// user (see `get_keypair`, which returns `Error::Signature` then).
    pub private_key: Option<String>,
    /// PEM-encoded RSA public key, exposed in the actor document.
    pub public_key: String,
    pub shared_inbox_url: Option<String>,
    pub followers_endpoint: String,
    pub avatar_id: Option<i32>,
    /// Last profile refresh; also used by `rotate_keypair` as a rotation
    /// timestamp.
    pub last_fetched_date: NaiveDateTime,
    /// Fully qualified name (`user@instance`), used for webfinger lookups.
    pub fqn: String,
    pub summary_html: SafeString,
    /// 0 = admin
    /// 1 = moderator
    /// 3 = local instance
    /// anything else = normal user
    pub role: i32,
    pub preferred_theme: Option<String>,
    pub hide_custom_css: bool,
}
/// Insertable companion of [`User`] for creating rows in `users`;
/// database-generated columns (`id`, `creation_date`, …) are omitted.
#[derive(Default, Insertable)]
#[table_name = "users"]
pub struct NewUser {
    pub username: String,
    pub display_name: String,
    pub outbox_url: String,
    pub inbox_url: String,
    pub summary: String,
    pub email: Option<String>,
    pub hashed_password: Option<String>,
    pub instance_id: i32,
    pub ap_url: String,
    pub private_key: Option<String>,
    pub public_key: String,
    pub shared_inbox_url: Option<String>,
    pub followers_endpoint: String,
    pub avatar_id: Option<i32>,
    pub summary_html: SafeString,
    /// See the role table on [`User::role`].
    pub role: i32,
    pub fqn: String,
}
pub const AUTH_COOKIE: &str = "user_id";
const USER_PREFIX: &str = "@";
2018-04-22 20:13:12 +02:00
impl User {
// CRUD helpers generated by project-local model macros (presumably defined
// at the crate root — they follow the same pattern as the other models):
// `insert`/`get` plus typed finders over the `users` table.
insert!(users, NewUser);
get!(users);
find_by!(users, find_by_email, email as &str);
find_by!(users, find_by_name, username as &str, instance_id as i32);
find_by!(users, find_by_ap_url, ap_url as &str);
/// Whether this account may moderate (admins moderate too).
pub fn is_moderator(&self) -> bool {
    [Role::Admin as i32, Role::Moderator as i32].contains(&self.role)
}
/// Whether this account has the admin role.
pub fn is_admin(&self) -> bool {
    let admin = Role::Admin as i32;
    self.role == admin
}
/// NOTE(review): despite the name, this filter is effectively a no-op —
/// every user's `instance_id` trivially appears in the distinct set of all
/// instance ids, so this loads *all* users rather than one per instance.
/// Confirm the intended semantics against callers before changing it.
pub fn one_by_instance(conn: &Connection) -> Result<Vec<User>> {
    users::table
        .filter(users::instance_id.eq_any(users::table.select(users::instance_id).distinct()))
        .load::<User>(conn)
        .map_err(Error::from)
}
/// Deletes this user and everything they solely own: blogs where they are
/// the last author, posts they wrote alone, and notifications about them,
/// then the user row itself.
pub fn delete(&self, conn: &Connection) -> Result<()> {
    use crate::schema::post_authors;

    // Blogs where this user is the only (or last) author go away with them.
    for blog in Blog::find_for_author(conn, self)?
        .iter()
        .filter(|b| b.count_authors(conn).map(|c| c <= 1).unwrap_or(false))
    {
        blog.delete(conn)?;
    }
    // Delete each post on which this user is the only author.
    let all_their_posts_ids: Vec<i32> = post_authors::table
        .filter(post_authors::author_id.eq(self.id))
        .select(post_authors::post_id)
        .load(conn)?;
    for post_id in all_their_posts_ids {
        // disabling this lint, because otherwise we'd have to turn it on
        // the head, and make it even harder to follow!
        #[allow(clippy::op_ref)]
        let has_other_authors = post_authors::table
            .filter(post_authors::post_id.eq(post_id))
            .filter(post_authors::author_id.ne(self.id))
            .count()
            .load(conn)?
            .first()
            .unwrap_or(&0)
            > &0;
        if !has_other_authors {
            Post::get(conn, post_id)?.delete(conn)?;
        }
    }
    // Notifications that point at this user must not dangle.
    for notif in Notification::find_followed_by(conn, self)? {
        notif.delete(conn)?
    }
    diesel::delete(self)
        .execute(conn)
        .map(|_| ())
        .map_err(Error::from)
}
/// Loads the `Instance` row referenced by `self.instance_id`.
pub fn get_instance(&self, conn: &Connection) -> Result<Instance> {
    Instance::get(conn, self.instance_id)
}
/// Persists a new access level for this user.
pub fn set_role(&self, conn: &Connection, new_role: Role) -> Result<()> {
    let role_value = new_role as i32;
    diesel::update(self)
        .set(users::role.eq(role_value))
        .execute(conn)?;
    Ok(())
}
pub fn count_local(conn: &Connection) -> Result<i64> {
users::table
.filter(users::instance_id.eq(Instance::get_local()?.id))
.count()
.get_result(&*conn)
.map_err(Error::from)
2018-06-10 21:33:42 +02:00
}
pub fn find_by_fqn(conn: &DbConn, fqn: &str) -> Result<User> {
let from_db = users::table
.filter(users::fqn.eq(fqn))
.first(&**conn)
Add support for generic timeline (#525) * Begin adding support for timeline * fix some bugs with parser * fmt * add error reporting for parser * add tests for timeline query parser * add rejection tests for parse * begin adding support for lists also run migration before compiling, so schema.rs is up to date * add sqlite migration * end adding lists still miss tests and query integration * cargo fmt * try to add some tests * Add some constraint to db, and fix list test and refactor other tests to use begin_transaction * add more tests for lists * add support for lists in query executor * add keywords for including/excluding boosts and likes * cargo fmt * add function to list lists used by query will make it easier to warn users when creating timeline with unknown lists * add lang support * add timeline creation error message when using unexisting lists * Update .po files * WIP: interface for timelines * don't use diesel for migrations not sure how it passed the ci on the other branch * add some tests for timeline add an int representing the order of timelines (first one will be on top, second just under...) 
use first() instead of limit(1).get().into_iter().nth(0) remove migrations from build artifacts as they are now compiled in * cargo fmt * remove timeline order * fix tests * add tests for timeline creation failure * cargo fmt * add tests for timelines * add test for matching direct lists and keywords * add test for language filtering * Add a more complex test for Timeline::matches, and fix TQ::matches for TQ::Or * Make the main crate compile + FMT * Use the new timeline system - Replace the old "feed" system with timelines - Display all timelines someone can access on their home page (either their personal ones, or instance timelines) - Remove functions that were used to get user/local/federated feed - Add new posts to timelines - Create a default timeline called "My feed" for everyone, and "Local feed"/"Federated feed" with timelines @fdb-hiroshima I don't know if that's how you pictured it? If you imagined it differently I can of course make changes. I hope I didn't forgot anything… * Cargo fmt * Try to fix the migration * Fix tests * Fix the test (for real this time ?) * Fix the tests ? + fmt * Use Kind::Like and Kind::Reshare when needed * Forgot to run cargo fmt once again * revert translations * fix reviewed stuff * reduce code duplication by macros * cargo fmt
2019-10-07 19:08:20 +02:00
.optional()?;
if let Some(from_db) = from_db {
Ok(from_db)
} else {
User::fetch_from_webfinger(conn, fqn)
2018-05-01 13:48:19 +02:00
}
}
2022-01-04 19:15:51 +01:00
/**
* TODO: Should create user record with normalized(lowercased) email
*/
pub fn email_used(conn: &DbConn, email: &str) -> Result<bool> {
use diesel::dsl::{exists, select};
select(exists(
users::table
.filter(users::instance_id.eq(Instance::get_local()?.id))
.filter(users::email.eq(email))
.or_filter(users::email.eq(email.to_ascii_lowercase())),
))
.get_result(&**conn)
.map_err(Error::from)
}
fn fetch_from_webfinger(conn: &DbConn, acct: &str) -> Result<User> {
let link = resolve(acct.to_owned(), true)?
.links
.into_iter()
.find(|l| l.mime_type == Some(String::from("application/activity+json")))
.ok_or(Error::Webfinger)?;
2022-05-02 12:24:36 +02:00
User::from_id(
2021-11-27 23:53:13 +01:00
conn,
link.href.as_ref().ok_or(Error::Webfinger)?,
None,
CONFIG.proxy(),
)
.map_err(|(_, e)| e)
2018-05-01 13:48:19 +02:00
}
/// Returns the remote instance's "subscribe" URL template (ostatus), used
/// for follow interactions from other instances.
pub fn fetch_remote_interact_uri(acct: &str) -> Result<String> {
    let links = resolve(acct.to_owned(), true)?.links;
    let subscribe_link = links
        .into_iter()
        .find(|link| link.rel == "http://ostatus.org/schema/1.0/subscribe");
    subscribe_link
        .and_then(|link| link.template)
        .ok_or(Error::Webfinger)
}
/// Fetches a remote actor document and deserializes it into a
/// `CustomPerson07` (person + signature extension).
fn fetch(url: &str) -> Result<CustomPerson07> {
    let mut res = get(url, Self::get_sender07(), CONFIG.proxy().cloned())?;
    let text = &res.text()?;
    // without this workaround, publicKey is not correctly deserialized:
    // the body is parsed twice, once for the signature extension and once
    // for the Person itself, then recombined.
    let ap_sign = serde_json::from_str::<ApSignature07>(text)?;
    let person = serde_json::from_str::<Person07>(text)?;
    let json = CustomPerson07::new(
        ApActor::new(
            // A remote actor without an id is unusable.
            person
                .clone()
                .id_unchecked()
                .ok_or(Error::MissingApProperty)?
                .to_owned(),
            person,
        ),
        ap_sign,
    ); // FIXME: Don't clone()
    Ok(json)
}
pub fn fetch_from_url(conn: &DbConn, url: &str) -> Result<User> {
2022-05-02 05:58:01 +02:00
User::fetch(url).and_then(|json| User::from_activity07(conn, json))
2018-05-01 13:48:19 +02:00
}
/// Re-fetches this (remote) user's actor document and refreshes the stored
/// profile fields (names, endpoints, summary, avatar, public key) plus
/// `last_fetched_date`.
pub fn refetch(&self, conn: &Connection) -> Result<()> {
    User::fetch(&self.ap_url.clone()).and_then(|json| {
        // Best-effort avatar refresh: failures to save the media are
        // swallowed (`.ok()`), but a *missing* icon property aborts below.
        let avatar = Media::save_remote(
            conn,
            json.ap_actor_ref()
                .icon()
                .ok_or(Error::MissingApProperty)? // FIXME: Fails when icon is not set
                .iter()
                .next()
                .and_then(|i| {
                    i.clone()
                        .extend::<Image07, ImageType>() // FIXME: Don't clone()
                        .ok()?
                        .and_then(|url| Some(url.id_unchecked()?.to_string()))
                })
                .ok_or(Error::MissingApProperty)?,
            self,
        )
        .ok();
        let pub_key = &json.ext_one.public_key.public_key_pem;
        diesel::update(self)
            .set((
                users::username.eq(json
                    .ap_actor_ref()
                    .preferred_username()
                    .ok_or(Error::MissingApProperty)?),
                users::display_name.eq(json
                    .ap_actor_ref()
                    .name()
                    .ok_or(Error::MissingApProperty)?
                    .to_as_string()
                    .ok_or(Error::MissingApProperty)?),
                users::outbox_url.eq(json
                    .ap_actor_ref()
                    .outbox()?
                    .ok_or(Error::MissingApProperty)?
                    .as_str()),
                users::inbox_url.eq(json.ap_actor_ref().inbox()?.as_str()),
                // Summary is optional — fall back to empty rather than error.
                users::summary.eq(SafeString::new(
                    &json
                        .ap_actor_ref()
                        .summary()
                        .and_then(|summary| summary.to_as_string())
                        .unwrap_or_default(),
                )),
                users::followers_endpoint.eq(json
                    .ap_actor_ref()
                    .followers()?
                    .ok_or(Error::MissingApProperty)?
                    .as_str()),
                users::avatar_id.eq(avatar.map(|a| a.id)),
                users::last_fetched_date.eq(Utc::now().naive_utc()),
                users::public_key.eq(pub_key),
            ))
            .execute(conn)
            .map(|_| ())
            .map_err(Error::from)
    })
}
/// Hashes a plaintext password with bcrypt, cost factor 10.
pub fn hash_pass(pass: &str) -> Result<String> {
    bcrypt::hash(pass, 10).map_err(Error::from)
}
2020-10-04 12:18:22 +02:00
fn ldap_register(conn: &Connection, name: &str, password: &str) -> Result<User> {
2020-10-07 23:39:38 +02:00
if CONFIG.ldap.is_none() {
return Err(Error::NotFound);
}
let ldap = CONFIG.ldap.as_ref().unwrap();
let mut ldap_conn = LdapConn::new(&ldap.addr).map_err(|_| Error::NotFound)?;
let ldap_name = format!("{}={},{}", ldap.user_name_attr, name, ldap.base_dn);
let bind = ldap_conn
.simple_bind(&ldap_name, password)
.map_err(|_| Error::NotFound)?;
if bind.success().is_err() {
return Err(Error::NotFound);
}
let search = ldap_conn
.search(
&ldap_name,
Scope::Base,
"(|(objectClass=person)(objectClass=user))",
vec![&ldap.mail_attr],
)
.map_err(|_| Error::NotFound)?
.success()
.map_err(|_| Error::NotFound)?;
for entry in search.0 {
let entry = SearchEntry::construct(entry);
let email = entry.attrs.get("mail").and_then(|vec| vec.first());
2020-10-08 20:24:03 +02:00
if let Some(email) = email {
2020-10-04 12:18:22 +02:00
let _ = ldap_conn.unbind();
2020-10-07 23:39:38 +02:00
return NewUser::new_local(
conn,
name.to_owned(),
name.to_owned(),
Role::Normal,
"",
2020-10-08 20:24:03 +02:00
email.to_owned(),
2020-10-07 23:39:38 +02:00
None,
);
2020-10-04 12:18:22 +02:00
}
}
2020-10-07 23:39:38 +02:00
let _ = ldap_conn.unbind();
Err(Error::NotFound)
}
2020-10-04 12:18:22 +02:00
fn ldap_login(&self, password: &str) -> bool {
if let Some(ldap) = CONFIG.ldap.as_ref() {
let mut conn = if let Ok(conn) = LdapConn::new(&ldap.addr) {
conn
} else {
return false;
};
2020-10-04 12:18:54 +02:00
let name = format!(
"{}={},{}",
ldap.user_name_attr, &self.username, ldap.base_dn
);
2020-10-04 12:18:22 +02:00
if let Ok(bind) = conn.simple_bind(&name, password) {
bind.success().is_ok()
} else {
false
}
} else {
false
}
}
/// Authenticates a local user by email or username.
///
/// Order of attempts: bcrypt against the stored hash, then an LDAP bind for
/// password-less accounts, then LDAP auto-registration for unknown idents.
/// Remote users can never log in (`instance_id` check).
pub fn login(conn: &Connection, ident: &str, password: &str) -> Result<User> {
    let local_id = Instance::get_local()?.id;
    let user = match User::find_by_email(conn, ident) {
        Ok(user) => Ok(user),
        _ => User::find_by_name(conn, ident, local_id),
    }
    .and_then(|u| {
        if u.instance_id == local_id {
            Ok(u)
        } else {
            Err(Error::NotFound)
        }
    });

    match user {
        // Account with a stored password hash: plain bcrypt check.
        Ok(user) if user.hashed_password.is_some() => {
            if bcrypt::verify(password, user.hashed_password.as_ref().unwrap()).unwrap_or(false)
            {
                Ok(user)
            } else {
                Err(Error::NotFound)
            }
        }
        // No stored hash: this account authenticates via LDAP.
        Ok(user) => {
            if user.ldap_login(password) {
                Ok(user)
            } else {
                Err(Error::NotFound)
            }
        }
        e => {
            if let Ok(user) = User::ldap_register(conn, ident, password) {
                return Ok(user);
            }
            // if no user was found, and we were unable to auto-register from ldap
            // fake-verify a password, and return an error.
            // This equalizes timing with the success path so attackers can't
            // probe which idents exist.
            // NOTE(review): panics if no user with id 1 exists — confirm
            // that invariant holds on fresh databases.
            let other = User::get(&*conn, 1)
                .expect("No user is registered")
                .hashed_password;
            other.map(|pass| bcrypt::verify(password, &pass));
            e
        }
    }
}
pub fn reset_password(&self, conn: &Connection, pass: &str) -> Result<()> {
diesel::update(self)
.set(users::hashed_password.eq(User::hash_pass(pass)?))
.execute(conn)?;
Ok(())
}
pub fn get_local_page(conn: &Connection, (min, max): (i32, i32)) -> Result<Vec<User>> {
users::table
.filter(users::instance_id.eq(Instance::get_local()?.id))
2018-09-09 12:25:55 +02:00
.order(users::username.asc())
.offset(min.into())
.limit((max - min).into())
.load::<User>(conn)
.map_err(Error::from)
2018-09-09 12:25:55 +02:00
}
/// This user's outbox as a legacy activitypub (0.1) collection.
pub fn outbox(&self, conn: &Connection) -> Result<ActivityStream<OrderedCollection>> {
    Ok(ActivityStream::new(self.outbox_collection(conn)?))
}
/// This user's outbox as an activitystreams (0.7) collection.
pub fn outbox07(&self, conn: &Connection) -> Result<ActivityStream<OrderedCollection07>> {
    Ok(ActivityStream::new(self.outbox_collection07(conn)?))
}
/// Builds the legacy (activitypub 0.1) outbox collection: first/last page
/// links and the total number of published posts.
pub fn outbox_collection(&self, conn: &Connection) -> Result<OrderedCollection> {
    let mut coll = OrderedCollection::default();
    let first = &format!("{}?page=1", &self.outbox_url);
    let last = &format!(
        "{}?page={}",
        &self.outbox_url,
        self.get_activities_count(conn) / i64::from(ITEMS_PER_PAGE) + 1
    );
    coll.collection_props.set_first_link(Id::new(first))?;
    coll.collection_props.set_last_link(Id::new(last))?;
    coll.collection_props
        .set_total_items_u64(self.get_activities_count(conn) as u64)?;
    Ok(coll)
}
2022-02-12 18:09:28 +01:00
pub fn outbox_collection07(&self, conn: &Connection) -> Result<OrderedCollection07> {
let mut coll = OrderedCollection07::new();
let first = &format!("{}?page=1", &self.outbox_url);
let last = &format!(
"{}?page={}",
&self.outbox_url,
self.get_activities_count(conn) / i64::from(ITEMS_PER_PAGE) + 1
);
coll.set_first(first.parse::<IriString>()?);
coll.set_last(last.parse::<IriString>()?);
coll.set_total_items(self.get_activities_count(conn) as u64);
Ok(coll)
}
/// One page of the outbox (activitystreams 0.7), wrapped for serving over
/// HTTP.
pub fn outbox_page07(
    &self,
    conn: &Connection,
    (min, max): (i32, i32),
) -> Result<ActivityStream<OrderedCollectionPage>> {
    Ok(ActivityStream::new(
        self.outbox_collection_page07(conn, (min, max))?,
    ))
}
/// Builds one `OrderedCollectionPage` of the outbox for the `(min, max)`
/// offset window, including `next`/`prev` page links where applicable.
pub fn outbox_collection_page07(
    &self,
    conn: &Connection,
    (min, max): (i32, i32),
) -> Result<OrderedCollectionPage> {
    let acts = self.get_activities_page(conn, (min, max))?;
    let n_acts = self.get_activities_count(conn);
    let mut coll = OrderedCollectionPage::new();
    // There is a next page iff at least ITEMS_PER_PAGE activities remain
    // past this window; page numbers are 1-based (`min / per_page + 1` is
    // the current page, `+ 2` the next).
    if n_acts - i64::from(min) >= i64::from(ITEMS_PER_PAGE) {
        coll.set_next(
            format!("{}?page={}", &self.outbox_url, min / ITEMS_PER_PAGE + 2)
                .parse::<IriString>()?,
        );
    }
    if min > 0 {
        coll.set_prev(
            format!("{}?page={}", &self.outbox_url, min / ITEMS_PER_PAGE)
                .parse::<IriString>()?,
        );
    }
    // Activities that fail to convert to AnyBase are silently dropped.
    coll.set_many_items(
        acts.iter()
            .filter_map(|value| AnyBase::from_arbitrary_json(value).ok()),
    );
    coll.set_part_of(self.outbox_url.parse::<IriString>()?);
    Ok(coll)
}
fn fetch_outbox_page<T: Activity>(&self, url: &str) -> Result<(Vec<T>, Option<String>)> {
2022-05-02 05:58:01 +02:00
let mut res = get(url, Self::get_sender07(), CONFIG.proxy().cloned())?;
let text = &res.text()?;
2019-03-20 17:56:17 +01:00
let json: serde_json::Value = serde_json::from_str(text)?;
let items = json["items"]
.as_array()
.unwrap_or(&vec![])
.iter()
.filter_map(|j| serde_json::from_value(j.clone()).ok())
.collect::<Vec<T>>();
2021-03-27 19:46:37 +01:00
let next = json.get("next").map(|x| x.as_str().unwrap().to_owned());
Ok((items, next))
}
2022-02-13 18:38:13 +01:00
pub fn fetch_outbox_page07<T: Activity07 + serde::de::DeserializeOwned>(
&self,
url: &str,
) -> Result<(Vec<T>, Option<String>)> {
2022-05-02 05:58:01 +02:00
let mut res = get(url, Self::get_sender07(), CONFIG.proxy().cloned())?;
2022-02-13 18:38:13 +01:00
let text = &res.text()?;
let json: serde_json::Value = serde_json::from_str(text)?;
let items = json["items"]
.as_array()
.unwrap_or(&vec![])
.iter()
.filter_map(|j| serde_json::from_value(j.clone()).ok())
.collect::<Vec<T>>();
let next = json.get("next").map(|x| x.as_str().unwrap().to_owned());
Ok((items, next))
}
pub fn fetch_outbox<T: Activity>(&self) -> Result<Vec<T>> {
let mut res = get(
&self.outbox_url[..],
2022-05-02 05:58:01 +02:00
Self::get_sender07(),
CONFIG.proxy().cloned(),
)?;
let text = &res.text()?;
let json: serde_json::Value = serde_json::from_str(text)?;
if let Some(first) = json.get("first") {
let mut items: Vec<T> = Vec::new();
let mut next = first.as_str().unwrap().to_owned();
while let Ok((mut page, nxt)) = self.fetch_outbox_page(&next) {
if page.is_empty() {
break;
}
2021-11-27 23:53:13 +01:00
items.append(&mut page);
if let Some(n) = nxt {
if n == next {
break;
}
next = n;
} else {
break;
}
}
Ok(items)
} else {
Ok(json["items"]
.as_array()
.unwrap_or(&vec![])
.iter()
.filter_map(|j| serde_json::from_value(j.clone()).ok())
.collect::<Vec<T>>())
}
2018-07-26 22:23:53 +02:00
}
2022-02-13 18:38:22 +01:00
pub fn fetch_outbox07<T: Activity07 + serde::de::DeserializeOwned>(&self) -> Result<Vec<T>> {
let mut res = get(
&self.outbox_url[..],
2022-05-02 05:58:01 +02:00
Self::get_sender07(),
2022-02-13 18:38:22 +01:00
CONFIG.proxy().cloned(),
)?;
let text = &res.text()?;
let json: serde_json::Value = serde_json::from_str(text)?;
if let Some(first) = json.get("first") {
let mut items: Vec<T> = Vec::new();
let mut next = first.as_str().unwrap().to_owned();
while let Ok((mut page, nxt)) = self.fetch_outbox_page07(&next) {
if page.is_empty() {
break;
}
items.append(&mut page);
if let Some(n) = nxt {
if n == next {
break;
}
next = n;
} else {
break;
}
}
Ok(items)
} else {
Ok(json["items"]
.as_array()
.unwrap_or(&vec![])
.iter()
.filter_map(|j| serde_json::from_value(j.clone()).ok())
.collect::<Vec<T>>())
}
}
pub fn fetch_followers_ids(&self) -> Result<Vec<String>> {
let mut res = get(
&self.followers_endpoint[..],
2022-05-02 05:58:01 +02:00
Self::get_sender07(),
CONFIG.proxy().cloned(),
)?;
let text = &res.text()?;
let json: serde_json::Value = serde_json::from_str(text)?;
Ok(json["items"]
.as_array()
.unwrap_or(&vec![])
.iter()
.filter_map(|j| serde_json::from_value(j.clone()).ok())
.collect::<Vec<String>>())
2018-07-27 12:53:21 +02:00
}
/// Counts this user's published posts (used as the outbox `totalItems`
/// and for page-link computation).
///
/// NOTE(review): `.unwrap()` panics on a database error; the `i64` return
/// type gives callers no way to handle that.
fn get_activities_count(&self, conn: &Connection) -> i64 {
    use crate::schema::post_authors;
    use crate::schema::posts;
    let posts_by_self = PostAuthor::belonging_to(self).select(post_authors::post_id);
    posts::table
        .filter(posts::published.eq(true))
        .filter(posts::id.eq_any(posts_by_self))
        .count()
        .first(conn)
        .unwrap()
}
/// Loads one `(min, max)` offset window of this user's published posts,
/// newest first, each serialized as its `Create` activity JSON.
fn get_activities_page(
    &self,
    conn: &Connection,
    (min, max): (i32, i32),
) -> Result<Vec<serde_json::Value>> {
    use crate::schema::post_authors;
    use crate::schema::posts;
    let posts_by_self = PostAuthor::belonging_to(self).select(post_authors::post_id);
    let posts = posts::table
        .filter(posts::published.eq(true))
        .filter(posts::id.eq_any(posts_by_self))
        .order(posts::creation_date.desc())
        .offset(min.into())
        .limit((max - min).into())
        .load::<Post>(conn)?;
    // Posts whose activity fails to build or serialize are silently skipped.
    Ok(posts
        .into_iter()
        .filter_map(|p| {
            p.create_activity07(conn)
                .ok()
                .and_then(|a| serde_json::to_value(a).ok())
        })
        .collect::<Vec<serde_json::Value>>())
}
pub fn get_followers(&self, conn: &Connection) -> Result<Vec<User>> {
2020-01-21 07:02:03 +01:00
use crate::schema::follows;
let follows = Follow::belonging_to(self).select(follows::follower_id);
users::table
.filter(users::id.eq_any(follows))
.load::<User>(conn)
.map_err(Error::from)
}
pub fn count_followers(&self, conn: &Connection) -> Result<i64> {
2020-01-21 07:02:03 +01:00
use crate::schema::follows;
let follows = Follow::belonging_to(self).select(follows::follower_id);
users::table
.filter(users::id.eq_any(follows))
.count()
.get_result(conn)
.map_err(Error::from)
}
2019-03-20 17:56:17 +01:00
pub fn get_followers_page(
&self,
conn: &Connection,
(min, max): (i32, i32),
) -> Result<Vec<User>> {
2020-01-21 07:02:03 +01:00
use crate::schema::follows;
2018-07-25 15:50:29 +02:00
let follows = Follow::belonging_to(self).select(follows::follower_id);
users::table
.filter(users::id.eq_any(follows))
2018-07-25 15:50:29 +02:00
.offset(min.into())
.limit((max - min).into())
.load::<User>(conn)
.map_err(Error::from)
2018-07-25 15:50:29 +02:00
}
pub fn get_followed(&self, conn: &Connection) -> Result<Vec<User>> {
2020-01-21 07:02:03 +01:00
use crate::schema::follows::dsl::*;
2018-09-27 23:06:40 +02:00
let f = follows.filter(follower_id.eq(self.id)).select(following_id);
users::table
.filter(users::id.eq_any(f))
.load::<User>(conn)
.map_err(Error::from)
}
pub fn count_followed(&self, conn: &Connection) -> Result<i64> {
2020-01-21 07:02:03 +01:00
use crate::schema::follows;
follows::table
.filter(follows::follower_id.eq(self.id))
.count()
.get_result(conn)
.map_err(Error::from)
}
2019-03-20 17:56:17 +01:00
pub fn get_followed_page(
&self,
conn: &Connection,
(min, max): (i32, i32),
) -> Result<Vec<User>> {
2020-01-21 07:02:03 +01:00
use crate::schema::follows;
let follows = follows::table
.filter(follows::follower_id.eq(self.id))
.select(follows::following_id)
.limit((max - min).into());
users::table
.filter(users::id.eq_any(follows))
.offset(min.into())
.load::<User>(conn)
.map_err(Error::from)
}
pub fn is_followed_by(&self, conn: &Connection, other_id: i32) -> Result<bool> {
2020-01-21 07:02:03 +01:00
use crate::schema::follows;
follows::table
2018-06-13 20:06:14 +02:00
.filter(follows::follower_id.eq(other_id))
.filter(follows::following_id.eq(self.id))
.count()
.get_result::<i64>(conn)
.map_err(Error::from)
.map(|r| r > 0)
2018-06-13 20:06:14 +02:00
}
pub fn is_following(&self, conn: &Connection, other_id: i32) -> Result<bool> {
2020-01-21 07:02:03 +01:00
use crate::schema::follows;
follows::table
.filter(follows::follower_id.eq(self.id))
.filter(follows::following_id.eq(other_id))
.count()
.get_result::<i64>(conn)
.map_err(Error::from)
.map(|r| r > 0)
}
pub fn has_liked(&self, conn: &Connection, post: &Post) -> Result<bool> {
2020-01-21 07:02:03 +01:00
use crate::schema::likes;
likes::table
2018-05-12 22:56:57 +02:00
.filter(likes::post_id.eq(post.id))
.filter(likes::user_id.eq(self.id))
.count()
.get_result::<i64>(conn)
.map_err(Error::from)
.map(|r| r > 0)
2018-05-12 22:56:57 +02:00
}
pub fn has_reshared(&self, conn: &Connection, post: &Post) -> Result<bool> {
2020-01-21 07:02:03 +01:00
use crate::schema::reshares;
reshares::table
2018-05-19 11:51:10 +02:00
.filter(reshares::post_id.eq(post.id))
.filter(reshares::user_id.eq(self.id))
.count()
.get_result::<i64>(conn)
.map_err(Error::from)
.map(|r| r > 0)
2018-05-19 11:51:10 +02:00
}
pub fn is_author_in(&self, conn: &Connection, blog: &Blog) -> Result<bool> {
2020-01-21 07:02:03 +01:00
use crate::schema::blog_authors;
blog_authors::table
.filter(blog_authors::author_id.eq(self.id))
2018-06-10 20:16:25 +02:00
.filter(blog_authors::blog_id.eq(blog.id))
.count()
.get_result::<i64>(conn)
.map_err(Error::from)
.map(|r| r > 0)
2018-06-10 20:16:25 +02:00
}
/// Parses this user's PEM-encoded RSA private key into an openssl `PKey`.
///
/// Returns `Error::Signature` when no private key is stored (presumably
/// the case for remote actors — confirm).
pub fn get_keypair(&self) -> Result<PKey<Private>> {
    PKey::from_rsa(Rsa::private_key_from_pem(
        self.private_key.clone().ok_or(Error::Signature)?.as_ref(),
    )?)
    .map_err(Error::from)
}
2018-05-19 00:04:30 +02:00
pub fn rotate_keypair(&self, conn: &Connection) -> Result<PKey<Private>> {
if self.private_key.is_none() {
2019-03-20 17:56:17 +01:00
return Err(Error::InvalidValue);
}
if (Utc::now().naive_utc() - self.last_fetched_date).num_minutes() < 10 {
//rotated recently
self.get_keypair()
} else {
let (public_key, private_key) = gen_keypair();
2019-03-20 17:56:17 +01:00
let public_key =
String::from_utf8(public_key).expect("NewUser::new_local: public key error");
let private_key =
String::from_utf8(private_key).expect("NewUser::new_local: private key error");
let res = PKey::from_rsa(Rsa::private_key_from_pem(private_key.as_ref())?)?;
diesel::update(self)
2019-03-20 17:56:17 +01:00
.set((
users::public_key.eq(public_key),
users::private_key.eq(Some(private_key)),
2019-03-20 17:56:17 +01:00
users::last_fetched_date.eq(Utc::now().naive_utc()),
))
.execute(conn)
.map_err(Error::from)
.map(|_| res)
}
}
/// Serializes this user as a legacy activitypub (0.1) `Person` actor
/// document, including endpoints, public key, and avatar when set.
pub fn to_activity(&self, conn: &Connection) -> Result<CustomPerson> {
    let mut actor = Person::default();
    actor.object_props.set_id_string(self.ap_url.clone())?;
    actor
        .object_props
        .set_name_string(self.display_name.clone())?;
    actor
        .object_props
        .set_summary_string(self.summary_html.get().clone())?;
    actor.object_props.set_url_string(self.ap_url.clone())?;
    actor
        .ap_actor_props
        .set_inbox_string(self.inbox_url.clone())?;
    actor
        .ap_actor_props
        .set_outbox_string(self.outbox_url.clone())?;
    actor
        .ap_actor_props
        .set_preferred_username_string(self.username.clone())?;
    actor
        .ap_actor_props
        .set_followers_string(self.followers_endpoint.clone())?;
    // The shared inbox is optional; only advertise it when present.
    if let Some(shared_inbox_url) = self.shared_inbox_url.clone() {
        let mut endpoints = Endpoint::default();
        endpoints.set_shared_inbox_string(shared_inbox_url)?;
        actor.ap_actor_props.set_endpoints_endpoint(endpoints)?;
    }
    // The signing key is published under the conventional `#main-key` id.
    let mut public_key = PublicKey::default();
    public_key.set_id_string(format!("{}#main-key", self.ap_url))?;
    public_key.set_owner_string(self.ap_url.clone())?;
    public_key.set_public_key_pem_string(self.public_key.clone())?;
    let mut ap_signature = ApSignature::default();
    ap_signature.set_public_key_publickey(public_key)?;
    if let Some(avatar_id) = self.avatar_id {
        let mut avatar = Image::default();
        avatar
            .object_props
            .set_url_string(Media::get(conn, avatar_id)?.url()?)?;
        actor.object_props.set_icon_object(avatar)?;
    }
    Ok(CustomPerson::new(actor, ap_signature))
}
2022-02-13 09:41:52 +01:00
/// Builds the ActivityStreams 0.7 `Person` representation of this user,
/// including its signing key and (when set) avatar icon.
///
/// Returns an error if any stored URL fails to parse as an IRI, or if the
/// avatar media cannot be loaded from the database.
pub fn to_activity07(&self, conn: &Connection) -> Result<CustomPerson07> {
    let mut actor = ApActor07::new(self.inbox_url.parse()?, Person07::new());
    let ap_url = self.ap_url.parse::<IriString>()?;
    actor.set_id(ap_url.clone());
    actor.set_name(self.display_name.clone());
    actor.set_summary(self.summary_html.get().clone());
    actor.set_url(ap_url.clone());
    actor.set_inbox(self.inbox_url.parse()?);
    actor.set_outbox(self.outbox_url.parse()?);
    actor.set_preferred_username(self.username.clone());
    actor.set_followers(self.followers_endpoint.parse()?);

    // Advertise the shared inbox (delivery optimization) when we have one.
    if let Some(shared_inbox_url) = self.shared_inbox_url.clone() {
        let endpoints = Endpoints07 {
            shared_inbox: Some(shared_inbox_url.parse::<IriString>()?),
            ..Endpoints07::default()
        };
        actor.set_endpoints(endpoints);
    }

    // HTTP-signature public key, conventionally identified by `#main-key`.
    let pub_key = PublicKey07 {
        id: format!("{}#main-key", self.ap_url).parse()?,
        owner: ap_url,
        public_key_pem: self.public_key.clone(),
    };
    let ap_signature = ApSignature07 {
        public_key: pub_key,
    };

    if let Some(avatar_id) = self.avatar_id {
        let mut avatar = Image07::new();
        avatar.set_url(Media::get(conn, avatar_id)?.url()?.parse::<IriString>()?);
        actor.set_icon(avatar.into_any_base()?);
    }

    Ok(CustomPerson07::new(actor, ap_signature))
}
/// Builds a `Delete` activity (legacy activitypub 0.1 types) announcing the
/// removal of this account, addressed publicly and CC'd to all followers.
pub fn delete_activity(&self, conn: &Connection) -> Result<Delete> {
    let mut del = Delete::default();

    // The deleted object is represented by a Tombstone carrying our AP id.
    let mut tombstone = Tombstone::default();
    tombstone.object_props.set_id_string(self.ap_url.clone())?;

    del.delete_props
        .set_actor_link(Id::new(self.ap_url.clone()))?;
    del.delete_props.set_object_object(tombstone)?;
    del.object_props
        .set_id_string(format!("{}#delete", self.ap_url))?;
    del.object_props
        .set_to_link_vec(vec![Id::new(PUBLIC_VISIBILITY)])?;
    del.object_props.set_cc_link_vec(
        self.get_followers(conn)?
            .into_iter()
            .map(|f| Id::new(f.ap_url))
            .collect(),
    )?;
    Ok(del)
}
2022-02-13 10:43:14 +01:00
/// Same as `delete_activity`, but using the activitystreams 0.7 types.
/// Follower URLs that fail to parse as IRIs are silently skipped from CC.
pub fn delete_activity07(&self, conn: &Connection) -> Result<Delete07> {
    let mut tombstone = Tombstone07::new();
    tombstone.set_id(self.ap_url.parse()?);

    let mut del = Delete07::new(
        self.ap_url.parse::<IriString>()?,
        Base::retract(tombstone)?.into_generic()?,
    );

    del.set_id(format!("{}#delete", self.ap_url).parse()?);
    del.set_many_tos(vec![PUBLIC_VISIBILITY.parse::<IriString>()?]);
    del.set_many_ccs(
        self.get_followers(conn)?
            .into_iter()
            .filter_map(|f| f.ap_url.parse::<IriString>().ok()),
    );
    Ok(del)
}
pub fn avatar_url(&self, conn: &Connection) -> String {
2019-03-20 17:56:17 +01:00
self.avatar_id
.and_then(|id| Media::get(conn, id).and_then(|m| m.url()).ok())
.unwrap_or_else(|| "/static/images/default-avatar.png".to_string())
}
2018-06-18 23:50:40 +02:00
/// Builds the webfinger document for this user, advertising the HTML profile
/// page, the Atom feed, the ActivityPub actor document, and the
/// remote-interact URL template used by other instances.
pub fn webfinger(&self, conn: &Connection) -> Result<Webfinger> {
    Ok(Webfinger {
        subject: format!("acct:{}", self.acct_authority(conn)?),
        aliases: vec![self.ap_url.clone()],
        links: vec![
            Link {
                rel: String::from("http://webfinger.net/rel/profile-page"),
                mime_type: Some(String::from("text/html")),
                href: Some(self.ap_url.clone()),
                template: None,
            },
            Link {
                rel: String::from("http://schemas.google.com/g/2010#updates-from"),
                mime_type: Some(String::from("application/atom+xml")),
                href: Some(self.get_instance(conn)?.compute_box(
                    USER_PREFIX,
                    &self.username,
                    "feed.atom",
                )),
                template: None,
            },
            Link {
                rel: String::from("self"),
                mime_type: Some(String::from("application/activity+json")),
                href: Some(self.ap_url.clone()),
                template: None,
            },
            Link {
                rel: String::from("http://ostatus.org/schema/1.0/subscribe"),
                mime_type: None,
                href: None,
                template: Some(format!(
                    "https://{}/remote_interact?target={{uri}}",
                    self.get_instance(conn)?.public_domain
                )),
            },
        ],
    })
}
2021-02-14 14:14:44 +01:00
/// `username@domain` identifier, as used in `acct:` webfinger subjects.
pub fn acct_authority(&self, conn: &Connection) -> Result<String> {
    let domain = self.get_instance(conn)?.public_domain;
    Ok(format!("{}@{}", self.username, domain))
}
pub fn set_avatar(&self, conn: &Connection, id: i32) -> Result<()> {
2018-09-03 14:04:17 +02:00
diesel::update(self)
.set(users::avatar_id.eq(id))
.execute(conn)
.map(|_| ())
.map_err(Error::from)
2018-09-03 14:04:17 +02:00
}
/// A (remote) profile is considered stale after more than one day
/// since it was last fetched.
pub fn needs_update(&self) -> bool {
    let age = Utc::now().naive_utc() - self.last_fetched_date;
    age.num_days() > 1
}
/// Human-readable name: the display name when set, otherwise the FQN.
pub fn name(&self) -> String {
    if self.display_name.is_empty() {
        self.fqn.clone()
    } else {
        self.display_name.clone()
    }
}
/// Hook invoked when a remote user's profile has been fetched:
/// traces the user and broadcasts the event on the user channel.
pub fn remote_user_found(&self) {
    tracing::trace!("{:?}", self);
    self.publish_remote_user_found();
}
/// Publishes a `RemoteUserFound` event (with a clone of this user) on
/// `USER_CHAN` under the `user.remote_user_found` topic.
fn publish_remote_user_found(&self) {
    USER_CHAN.tell(
        Publish {
            msg: RemoteUserFound(Arc::new(self.clone())),
            topic: "user.remote_user_found".into(),
        },
        None,
    )
}
2018-05-19 00:04:30 +02:00
}
2018-04-23 11:52:44 +02:00
/// Rocket request guard: authenticates the current user from the private
/// session cookie, or forwards to the next matching route when not logged in.
impl<'a, 'r> FromRequest<'a, 'r> for User {
    type Error = ();

    fn from_request(request: &'a Request<'r>) -> request::Outcome<User, ()> {
        let conn = request.guard::<DbConn>()?;
        request
            .cookies()
            .get_private(AUTH_COOKIE)
            // The cookie stores the user id as text; a parse failure
            // means there is no valid session.
            .and_then(|cookie| cookie.value().parse().ok())
            .and_then(|id| User::get(&*conn, id).ok())
            .or_forward(())
    }
}
2018-05-18 10:04:40 +02:00
/// Converts the user into its ActivityPub id (its `ap_url`).
impl IntoId for User {
    fn into_id(self) -> Id {
        Id::new(self.ap_url)
    }
}
// Equality is by database id (a total equivalence relation), so `Eq` is sound.
impl Eq for User {}
2018-05-18 10:04:40 +02:00
2022-05-02 09:07:08 +02:00
impl FromId<DbConn> for User {
2022-02-11 06:49:52 +01:00
type Error = Error;
type Object = CustomPerson07;
fn from_db07(conn: &DbConn, id: &str) -> Result<Self> {
Self::find_by_ap_url(conn, id)
}
fn from_activity07(conn: &DbConn, acct: CustomPerson07) -> Result<Self> {
2022-02-12 14:32:19 +01:00
let actor = acct.ap_actor_ref();
let username = actor
.preferred_username()
.ok_or(Error::MissingApProperty)?
.to_string();
if username.contains(&['<', '>', '&', '@', '\'', '"', ' ', '\t'][..]) {
return Err(Error::InvalidValue);
}
let summary = acct
.object_ref()
.summary()
.and_then(|prop| prop.to_as_string())
2022-02-12 14:32:19 +01:00
.unwrap_or_default();
let mut new_user = NewUser {
display_name: acct
.object_ref()
.name()
.and_then(|prop| prop.to_as_string())
.unwrap_or_else(|| username.clone()),
username: username.clone(),
2022-02-12 14:32:19 +01:00
outbox_url: actor.outbox()?.ok_or(Error::MissingApProperty)?.to_string(),
inbox_url: actor.inbox()?.to_string(),
role: 2,
summary_html: SafeString::new(&summary),
summary,
2022-02-12 14:32:19 +01:00
public_key: acct.ext_one.public_key.public_key_pem.to_string(),
shared_inbox_url: actor
.endpoints()?
.and_then(|e| e.shared_inbox.map(|inbox| inbox.to_string())),
followers_endpoint: actor
.followers()?
.ok_or(Error::MissingApProperty)?
.to_string(),
..NewUser::default()
};
let avatar_id = acct.object_ref().icon().and_then(|icon| icon.to_as_uri());
2022-02-12 14:32:19 +01:00
let (ap_url, inst) = {
let any_base = acct.into_any_base()?;
let id = any_base.id().ok_or(Error::MissingApProperty)?;
(
id.to_string(),
id.authority_components()
.ok_or(Error::Url)?
.host()
.to_string(),
)
};
new_user.ap_url = ap_url;
let instance = Instance::find_by_domain(conn, &inst).or_else(|_| {
Instance::insert(
conn,
NewInstance {
name: inst.to_owned(),
public_domain: inst.to_owned(),
local: false,
// We don't really care about all the following for remote instances
long_description: SafeString::new(""),
short_description: SafeString::new(""),
default_license: String::new(),
open_registrations: true,
short_description_html: String::new(),
long_description_html: String::new(),
},
)
})?;
new_user.instance_id = instance.id;
new_user.fqn = if instance.local {
username
2022-02-12 14:32:19 +01:00
} else {
format!("{}@{}", username, instance.public_domain)
};
let user = User::insert(conn, new_user)?;
if let Some(avatar_id) = avatar_id {
let avatar = Media::save_remote(conn, avatar_id, &user);
if let Ok(avatar) = avatar {
if let Err(e) = user.set_avatar(conn, avatar.id) {
tracing::error!("{:?}", e);
}
}
}
Ok(user)
2022-02-11 06:49:52 +01:00
}
fn get_sender07() -> &'static dyn Signer {
Instance::get_local_instance_user().expect("Failed to local instance user")
}
}
impl AsActor<&DbConn> for User {
2018-05-18 10:04:40 +02:00
fn get_inbox_url(&self) -> String {
self.inbox_url.clone()
}
fn get_shared_inbox_url(&self) -> Option<String> {
self.shared_inbox_url.clone()
2018-05-18 10:04:40 +02:00
}
fn is_local(&self) -> bool {
Instance::get_local()
.map(|i| self.instance_id == i.id)
.unwrap_or(false)
}
2018-05-18 10:04:40 +02:00
}
2022-05-02 10:43:03 +02:00
impl AsObject<User, Delete07, &DbConn> for User {
2022-02-12 17:43:01 +01:00
type Error = Error;
type Output = ();
fn activity07(self, conn: &DbConn, actor: User, _id: &str) -> Result<()> {
if self.id == actor.id {
self.delete(conn).map(|_| ())
} else {
Err(Error::Unauthorized)
}
}
}
/// HTTP-signature support: signs outgoing requests with the user's private
/// RSA key and verifies incoming signatures against the stored public key.
impl Signer for User {
    fn get_key_id(&self) -> String {
        format!("{}#main-key", self.ap_url)
    }

    /// Signs `to_sign` with RSA + SHA-256. Fails with `SignError` when the
    /// user has no usable keypair (e.g. a remote user without a private key).
    fn sign(&self, to_sign: &str) -> SignResult<Vec<u8>> {
        let key = self.get_keypair().map_err(|_| SignError())?;
        let mut signer = sign::Signer::new(MessageDigest::sha256(), &key)?;
        signer.update(to_sign.as_bytes())?;
        signer.sign_to_vec().map_err(SignError::from)
    }

    /// Verifies `signature` over `data` using the stored PEM public key.
    fn verify(&self, data: &str, signature: &[u8]) -> SignResult<bool> {
        let key = PKey::from_rsa(Rsa::public_key_from_pem(self.public_key.as_ref())?)?;
        let mut verifier = sign::Verifier::new(MessageDigest::sha256(), &key)?;
        verifier.update(data.as_bytes())?;
        verifier.verify(signature).map_err(SignError::from)
    }
}
// Two `User` values represent the same user iff they share a database id.
impl PartialEq for User {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
// Hash by database id only, consistent with `PartialEq` above.
impl Hash for User {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.id.hash(state);
    }
}
impl NewUser {
    /// Creates a new local user
    ///
    /// Generates an RSA keypair, rejects blocklisted e-mail addresses,
    /// inserts the row, and creates the default "My feed" timeline for the
    /// new account.
    ///
    /// `password` goes into `hashed_password`, so callers are expected to
    /// hash it beforehand (or pass `None` for accounts without local
    /// credentials).
    pub fn new_local(
        conn: &Connection,
        username: String,
        display_name: String,
        role: Role,
        summary: &str,
        email: String,
        password: Option<String>,
    ) -> Result<User> {
        let (pub_key, priv_key) = gen_keypair();
        let instance = Instance::get_local()?;
        let blocklisted = BlocklistedEmail::matches_blocklist(conn, &email)?;
        // Refuse registration for blocklisted addresses.
        if let Some(x) = blocklisted {
            return Err(Error::Blocklisted(x.notify_user, x.notification_text));
        }

        let res = User::insert(
            conn,
            NewUser {
                username: username.clone(),
                display_name,
                role: role as i32,
                summary: summary.to_owned(),
                summary_html: SafeString::new(&utils::md_to_html(summary, None, false, None).0),
                email: Some(email),
                hashed_password: password,
                instance_id: instance.id,
                public_key: String::from_utf8(pub_key).or(Err(Error::Signature))?,
                private_key: Some(String::from_utf8(priv_key).or(Err(Error::Signature))?),
                outbox_url: instance.compute_box(USER_PREFIX, &username, "outbox"),
                inbox_url: instance.compute_box(USER_PREFIX, &username, "inbox"),
                ap_url: instance.compute_box(USER_PREFIX, &username, ""),
                shared_inbox_url: Some(ap_url(&format!("{}/inbox", &instance.public_domain))),
                followers_endpoint: instance.compute_box(USER_PREFIX, &username, "followers"),
                fqn: username,
                avatar_id: None,
            },
        )?;

        // create default timeline
        Timeline::new_for_user(conn, res.id, "My feed".into(), "followed".into())?;

        Ok(res)
    }
}
2021-01-31 14:55:28 +01:00
/// Events broadcast on the user channel (`USER_CHAN`).
#[derive(Clone, Debug)]
pub enum UserEvent {
    /// A remote user's profile was found/fetched.
    RemoteUserFound(Arc<User>),
}
impl From<UserEvent> for Arc<User> {
fn from(event: UserEvent) -> Self {
use UserEvent::*;
match event {
RemoteUserFound(user) => user,
}
}
}
#[cfg(test)]
pub(crate) mod tests {
use super::*;
2020-01-21 07:02:03 +01:00
use crate::{
instance::{tests as instance_tests, Instance},
2022-01-10 07:08:11 +01:00
medias::{Media, NewMedia},
2021-01-30 15:15:07 +01:00
tests::db,
2022-01-10 14:19:44 +01:00
Connection as Conn, ITEMS_PER_PAGE,
2020-01-21 07:02:03 +01:00
};
2022-01-10 14:19:44 +01:00
use assert_json_diff::assert_json_eq;
use diesel::{Connection, SaveChangesDsl};
use serde_json::to_value;
    /// Seeds the test database with one admin and two regular users; the
    /// third user ("other") additionally gets an avatar media attached.
    ///
    /// The exact fixture values (names, summaries, file path) are asserted
    /// byte-for-byte by the serialization tests below — don't change them.
    pub(crate) fn fill_database(conn: &Conn) -> Vec<User> {
        instance_tests::fill_database(conn);
        let admin = NewUser::new_local(
            conn,
            "admin".to_owned(),
            "The admin".to_owned(),
            Role::Admin,
            "Hello there, I'm the admin",
            "admin@example.com".to_owned(),
            Some("invalid_admin_password".to_owned()),
        )
        .unwrap();
        let user = NewUser::new_local(
            conn,
            "user".to_owned(),
            "Some user".to_owned(),
            Role::Normal,
            "Hello there, I'm no one",
            "user@example.com".to_owned(),
            Some("invalid_user_password".to_owned()),
        )
        .unwrap();
        let mut other = NewUser::new_local(
            conn,
            "other".to_owned(),
            "Another user".to_owned(),
            Role::Normal,
            "Hello there, I'm someone else",
            "other@example.com".to_owned(),
            Some("invalid_other_password".to_owned()),
        )
        .unwrap();
        let avatar = Media::insert(
            conn,
            NewMedia {
                file_path: "static/media/example.png".into(),
                alt_text: "Another user".into(),
                is_remote: false,
                remote_url: None,
                sensitive: false,
                content_warning: None,
                owner_id: other.id,
            },
        )
        .unwrap();
        other.avatar_id = Some(avatar.id);
        let other = other.save_changes::<User>(&*conn).unwrap();

        vec![admin, user, other]
    }
2022-01-10 14:19:44 +01:00
    /// Seeds the database via the inbox fixtures, then adds enough posts by
    /// the first user to span several pages (ITEMS_PER_PAGE * 4 + 2 extras).
    fn fill_pages(
        conn: &DbConn,
    ) -> (
        Vec<crate::posts::Post>,
        Vec<crate::users::User>,
        Vec<crate::blogs::Blog>,
    ) {
        use crate::post_authors::NewPostAuthor;
        use crate::posts::NewPost;

        let (mut posts, users, blogs) = crate::inbox::tests::fill_database(conn);
        let user = &users[0];
        let blog = &blogs[0];

        for i in 1..(ITEMS_PER_PAGE * 4 + 3) {
            let title = format!("Post {}", i);
            let content = format!("Content for post {}.", i);
            let post = Post::insert(
                conn,
                NewPost {
                    blog_id: blog.id,
                    slug: title.clone(),
                    title: title.clone(),
                    content: SafeString::new(&content),
                    published: true,
                    license: "CC-0".into(),
                    creation_date: None,
                    ap_url: format!("{}/{}", blog.ap_url, title),
                    subtitle: "".into(),
                    source: content,
                    cover_id: None,
                },
            )
            .unwrap();
            PostAuthor::insert(
                conn,
                NewPostAuthor {
                    post_id: post.id,
                    author_id: user.id,
                },
            )
            .unwrap();
            posts.push(post);
        }
        (posts, users, blogs)
    }
    /// A freshly created user must be findable by name+instance, FQN,
    /// e-mail address and AP URL alike.
    #[test]
    fn find_by() {
        let conn = db();
        conn.test_transaction::<_, (), _>(|| {
            fill_database(&conn);
            let test_user = NewUser::new_local(
                &conn,
                "test".to_owned(),
                "test user".to_owned(),
                Role::Normal,
                "Hello I'm a test",
                "test@example.com".to_owned(),
                Some(User::hash_pass("test_password").unwrap()),
            )
            .unwrap();

            assert_eq!(
                test_user.id,
                User::find_by_name(&conn, "test", Instance::get_local().unwrap().id)
                    .unwrap()
                    .id
            );
            assert_eq!(
                test_user.id,
                User::find_by_fqn(&conn, &test_user.fqn).unwrap().id
            );
            assert_eq!(
                test_user.id,
                User::find_by_email(&conn, "test@example.com").unwrap().id
            );
            assert_eq!(
                test_user.id,
                User::find_by_ap_url(
                    &conn,
                    &format!(
                        "https://{}/@/{}/",
                        Instance::get_local().unwrap().public_domain,
                        "test"
                    )
                )
                .unwrap()
                .id
            );

            Ok(())
        });
    }
#[test]
fn delete() {
let conn = &db();
conn.test_transaction::<_, (), _>(|| {
2021-01-30 15:15:07 +01:00
let inserted = fill_database(&conn);
2021-01-30 15:15:07 +01:00
assert!(User::get(&conn, inserted[0].id).is_ok());
inserted[0].delete(&conn).unwrap();
assert!(User::get(&conn, inserted[0].id).is_err());
Ok(())
});
}
    /// After demoting every admin, promoting a user makes them the
    /// instance's main admin.
    #[test]
    fn admin() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            let inserted = fill_database(&conn);
            let local_inst = Instance::get_local().unwrap();
            let mut i = 0;
            // Demote all existing admins so promotion starts from scratch.
            while local_inst.has_admin(&conn).unwrap() {
                assert!(i < 100); //prevent from looping indefinitelly
                local_inst
                    .main_admin(&conn)
                    .unwrap()
                    .set_role(&conn, Role::Normal)
                    .unwrap();
                i += 1;
            }
            inserted[0].set_role(&conn, Role::Admin).unwrap();
            assert_eq!(inserted[0].id, local_inst.main_admin(&conn).unwrap().id);
            Ok(())
        });
    }
    /// Login succeeds with the correct password and fails with a wrong one.
    #[test]
    fn auth() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            fill_database(&conn);
            let test_user = NewUser::new_local(
                &conn,
                "test".to_owned(),
                "test user".to_owned(),
                Role::Normal,
                "Hello I'm a test",
                "test@example.com".to_owned(),
                Some(User::hash_pass("test_password").unwrap()),
            )
            .unwrap();

            assert_eq!(
                User::login(&conn, "test", "test_password").unwrap().id,
                test_user.id
            );
            assert!(User::login(&conn, "test", "other_password").is_err());
            Ok(())
        });
    }
    /// Pagination of local users: pages respect the requested range, are
    /// sorted by username, and an oversized range returns everyone.
    #[test]
    fn get_local_page() {
        let conn = &db();
        conn.test_transaction::<_, (), _>(|| {
            fill_database(&conn);

            let page = User::get_local_page(&conn, (0, 2)).unwrap();
            assert_eq!(page.len(), 2);
            assert!(page[0].username <= page[1].username);

            // Walking single-item pages must yield non-decreasing usernames.
            let mut last_username = User::get_local_page(&conn, (0, 1)).unwrap()[0]
                .username
                .clone();
            for i in 1..User::count_local(&conn).unwrap() as i32 {
                let page = User::get_local_page(&conn, (i, i + 1)).unwrap();
                assert_eq!(page.len(), 1);
                assert!(last_username <= page[0].username);
                last_username = page[0].username.clone();
            }
            assert_eq!(
                User::get_local_page(&conn, (0, User::count_local(&conn).unwrap() as i32 + 10))
                    .unwrap()
                    .len() as i64,
                User::count_local(&conn).unwrap()
            );
            Ok(())
        });
    }
Big refactoring of the Inbox (#443) * Big refactoring of the Inbox We now have a type that routes an activity through the registered handlers until one of them matches. Each Actor/Activity/Object combination is represented by an implementation of AsObject These combinations are then registered on the Inbox type, which will try to deserialize the incoming activity in the requested types. Advantages: - nicer syntax: the final API is clearer and more idiomatic - more generic: only two traits (`AsActor` and `AsObject`) instead of one for each kind of activity - it is easier to see which activities we handle and which one we don't * Small fixes - Avoid panics - Don't search for AP ID infinitely - Code style issues * Fix tests * Introduce a new trait: FromId It should be implemented for any AP object. It allows to look for an object in database using its AP ID, or to dereference it if it was not present in database Also moves the inbox code to plume-models to test it (and write a basic test for each activity type we handle) * Use if let instead of match * Don't require PlumeRocket::intl for tests * Return early and remove a forgotten dbg! * Add more tests to try to understand where the issues come from * Also add a test for comment federation * Don't check creation_date is the same for blogs * Make user and blog federation more tolerant to errors/missing fields * Make clippy happy * Use the correct Accept header when dereferencing * Fix follow approval with Mastodon * Add spaces to characters that should not be in usernames And validate blog names too * Smarter dereferencing: only do it once for each actor/object * Forgot some files * Cargo fmt * Delete plume_test * Delete plume_tests * Update get_id docs + Remove useless : Sized * Appease cargo fmt * Remove dbg! 
+ Use as_ref instead of clone when possible + Use and_then instead of map when possible * Remove .po~ * send unfollow to local instance * read cover from update activity * Make sure "cc" and "to" are never empty and fix a typo in a constant name * Cargo fmt
2019-04-17 19:31:47 +02:00
2022-02-13 09:59:22 +01:00
    /// Round-trip: serialize a user to an AP activity, delete it, then
    /// re-create it from that activity — every federated field must survive.
    #[test]
    fn self_federation07() {
        let conn = db();
        conn.test_transaction::<_, (), _>(|| {
            let users = fill_database(&conn);
            let ap_repr = users[0].to_activity07(&conn).unwrap();
            users[0].delete(&conn).unwrap();
            let user = User::from_activity07(&conn, ap_repr).unwrap();

            assert_eq!(user.username, users[0].username);
            assert_eq!(user.display_name, users[0].display_name);
            assert_eq!(user.outbox_url, users[0].outbox_url);
            assert_eq!(user.inbox_url, users[0].inbox_url);
            assert_eq!(user.instance_id, users[0].instance_id);
            assert_eq!(user.ap_url, users[0].ap_url);
            assert_eq!(user.public_key, users[0].public_key);
            assert_eq!(user.shared_inbox_url, users[0].shared_inbox_url);
            assert_eq!(user.followers_endpoint, users[0].followers_endpoint);
            assert_eq!(user.avatar_url(&conn), users[0].avatar_url(&conn));
            assert_eq!(user.fqn, users[0].fqn);
            assert_eq!(user.summary_html, users[0].summary_html);
            Ok(())
        });
    }
#[test]
fn to_activity() {
    let conn = db();
    conn.test_transaction::<_, Error, _>(|| {
        let users = fill_database(&conn);

        // First fixture user (the admin) has no avatar, so the serialized
        // Person must not carry an "icon" field.
        let admin = &users[0];
        let admin_person = admin.to_activity(&conn)?;
        let admin_expected = json!({
            "endpoints": {
                "sharedInbox": "https://plu.me/inbox"
            },
            "followers": "https://plu.me/@/admin/followers",
            "following": null,
            "id": "https://plu.me/@/admin/",
            "inbox": "https://plu.me/@/admin/inbox",
            "liked": null,
            "name": "The admin",
            "outbox": "https://plu.me/@/admin/outbox",
            "preferredUsername": "admin",
            "publicKey": {
                "id": "https://plu.me/@/admin/#main-key",
                "owner": "https://plu.me/@/admin/",
                "publicKeyPem": admin.public_key,
            },
            "summary": "<p dir=\"auto\">Hello there, Im the admin</p>\n",
            "type": "Person",
            "url": "https://plu.me/@/admin/"
        });
        assert_json_eq!(to_value(admin_person)?, admin_expected);

        // Third fixture user has an avatar, which must show up as an
        // "icon" object in the serialized Person.
        let other = &users[2];
        let other_person = other.to_activity(&conn)?;
        let other_expected = json!({
            "endpoints": {
                "sharedInbox": "https://plu.me/inbox"
            },
            "followers": "https://plu.me/@/other/followers",
            "following": null,
            "icon": {
                "url": "https://plu.me/static/media/example.png",
                "type": "Image",
            },
            "id": "https://plu.me/@/other/",
            "inbox": "https://plu.me/@/other/inbox",
            "liked": null,
            "name": "Another user",
            "outbox": "https://plu.me/@/other/outbox",
            "preferredUsername": "other",
            "publicKey": {
                "id": "https://plu.me/@/other/#main-key",
                "owner": "https://plu.me/@/other/",
                "publicKeyPem": other.public_key,
            },
            "summary": "<p dir=\"auto\">Hello there, Im someone else</p>\n",
            "type": "Person",
            "url": "https://plu.me/@/other/"
        });
        assert_json_eq!(to_value(other_person)?, other_expected);

        Ok(())
    });
}
#[test]
fn to_activity07() {
    let conn = db();
    conn.test_transaction::<_, Error, _>(|| {
        let users = fill_database(&conn);

        // Admin fixture: no avatar, so the activitystreams 0.7 Person
        // carries no "icon". Note this representation also omits the
        // null "following"/"liked" fields of the legacy serialization.
        let admin = &users[0];
        let admin_person = admin.to_activity07(&conn)?;
        let admin_expected = json!({
            "endpoints": {
                "sharedInbox": "https://plu.me/inbox"
            },
            "followers": "https://plu.me/@/admin/followers",
            "id": "https://plu.me/@/admin/",
            "inbox": "https://plu.me/@/admin/inbox",
            "name": "The admin",
            "outbox": "https://plu.me/@/admin/outbox",
            "preferredUsername": "admin",
            "publicKey": {
                "id": "https://plu.me/@/admin/#main-key",
                "owner": "https://plu.me/@/admin/",
                "publicKeyPem": admin.public_key,
            },
            "summary": "<p dir=\"auto\">Hello there, Im the admin</p>\n",
            "type": "Person",
            "url": "https://plu.me/@/admin/"
        });
        assert_json_eq!(to_value(admin_person)?, admin_expected);

        // Third fixture user has an avatar, expected as an "icon" object.
        let other = &users[2];
        let other_person = other.to_activity07(&conn)?;
        let other_expected = json!({
            "endpoints": {
                "sharedInbox": "https://plu.me/inbox"
            },
            "followers": "https://plu.me/@/other/followers",
            "icon": {
                "url": "https://plu.me/static/media/example.png",
                "type": "Image",
            },
            "id": "https://plu.me/@/other/",
            "inbox": "https://plu.me/@/other/inbox",
            "name": "Another user",
            "outbox": "https://plu.me/@/other/outbox",
            "preferredUsername": "other",
            "publicKey": {
                "id": "https://plu.me/@/other/#main-key",
                "owner": "https://plu.me/@/other/",
                "publicKeyPem": other.public_key,
            },
            "summary": "<p dir=\"auto\">Hello there, Im someone else</p>\n",
            "type": "Person",
            "url": "https://plu.me/@/other/"
        });
        assert_json_eq!(to_value(other_person)?, other_expected);

        Ok(())
    });
}
#[test]
fn delete_activity() {
    let conn = db();
    conn.test_transaction::<_, Error, _>(|| {
        let users = fill_database(&conn);

        // A user deletion federates as a Delete activity whose object is
        // a Tombstone pointing at the actor's own AP id.
        let user = &users[1];
        let expected = json!({
            "actor": "https://plu.me/@/user/",
            "cc": [],
            "id": "https://plu.me/@/user/#delete",
            "object": {
                "id": "https://plu.me/@/user/",
                "type": "Tombstone",
            },
            "to": ["https://www.w3.org/ns/activitystreams#Public"],
            "type": "Delete",
        });
        let activity = user.delete_activity(&conn)?;
        assert_json_eq!(to_value(activity)?, expected);

        Ok(())
    });
}
#[test]
fn delete_activity07() {
    let conn = db();
    conn.test_transaction::<_, Error, _>(|| {
        let users = fill_database(&conn);

        // The activitystreams 0.7 Delete must serialize identically to the
        // legacy one: Tombstone object, public addressing, empty "cc".
        let user = &users[1];
        let expected = json!({
            "actor": "https://plu.me/@/user/",
            "cc": [],
            "id": "https://plu.me/@/user/#delete",
            "object": {
                "id": "https://plu.me/@/user/",
                "type": "Tombstone",
            },
            "to": ["https://www.w3.org/ns/activitystreams#Public"],
            "type": "Delete",
        });
        let activity = user.delete_activity07(&conn)?;
        assert_json_eq!(to_value(activity)?, expected);

        Ok(())
    });
}
#[test]
fn outbox_collection() {
    let conn = db();
    conn.test_transaction::<_, Error, _>(|| {
        // fill_pages creates 51 posts for the admin, which paginate into
        // 5 outbox pages; the collection only advertises first/last.
        let (_pages, users, _blogs) = fill_pages(&conn);
        let admin = &users[0];
        let expected = json!({
            "first": "https://plu.me/@/admin/outbox?page=1",
            "items": null,
            "last": "https://plu.me/@/admin/outbox?page=5",
            "totalItems": 51,
            "type": "OrderedCollection",
        });
        let collection = admin.outbox_collection(&conn)?;
        assert_json_eq!(to_value(collection)?, expected);

        Ok(())
    });
}
#[test]
fn outbox_collection07() {
    let conn = db();
    conn.test_transaction::<_, Error, _>(|| {
        // Same fixture as the legacy test (51 posts → 5 pages); the 0.7
        // serialization additionally omits the null "items" field.
        let (_pages, users, _blogs) = fill_pages(&conn);
        let admin = &users[0];
        let expected = json!({
            "first": "https://plu.me/@/admin/outbox?page=1",
            "last": "https://plu.me/@/admin/outbox?page=5",
            "totalItems": 51,
            "type": "OrderedCollection",
        });
        let collection = admin.outbox_collection07(&conn)?;
        assert_json_eq!(to_value(collection)?, expected);

        Ok(())
    });
}
#[test]
fn outbox_collection_page07() {
    let conn = db();
    conn.test_transaction::<_, Error, _>(|| {
        let users = fill_database(&conn);
        let admin = &users[0];

        // Request a page range past the fixture's posts: items come back
        // empty, but the page still links its parent collection and the
        // previous page.
        let expected = json!({
            "items": [],
            "partOf": "https://plu.me/@/admin/outbox",
            "prev": "https://plu.me/@/admin/outbox?page=2",
            "type": "OrderedCollectionPage",
        });
        let page = admin.outbox_collection_page07(&conn, (33, 36))?;
        assert_json_eq!(to_value(page)?, expected);

        Ok(())
    });
}
}