// plume-models/src/blogs.rs

use crate::{
db_conn::DbConn, instance::*, medias::Media, posts::Post, safe_string::SafeString,
schema::blogs, users::User, Connection, Error, PlumeRocket, Result, CONFIG, ITEMS_PER_PAGE,
};
use activitystreams::{
actor::{ApActor, ApActorExt, AsApActor, Group},
base::AnyBase,
collection::{OrderedCollection, OrderedCollectionPage},
iri_string::types::IriString,
object::{kind::ImageType, ApObject, Image, ObjectExt},
prelude::*,
};
use chrono::NaiveDateTime;
use diesel::{self, ExpressionMethods, OptionalExtension, QueryDsl, RunQueryDsl, SaveChangesDsl};
use openssl::{
hash::MessageDigest,
pkey::{PKey, Private},
rsa::Rsa,
sign::{Signer, Verifier},
};
use plume_common::activity_pub::{
inbox::{AsActor, FromId},
sign, ActivityStream, ApSignature, CustomGroup, Id, IntoId, PublicKey, Source, SourceProperty,
ToAsString, ToAsUri,
};
use webfinger::*;
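
/// A blog and its ActivityPub actor data, as stored in the `blogs` table.
///
/// `actor_id` is the blog's short name; `fqn` is the fully-qualified name,
/// which for blogs hosted on another instance also carries that instance's
/// domain (`name@domain`).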
#[derive(Queryable, Identifiable, Clone, AsChangeset, Debug)]
#[changeset_options(treat_none_as_null = "true")]
pub struct Blog {
pub id: i32,
pub actor_id: String,
pub title: String,
pub summary: String,
pub outbox_url: String,
pub inbox_url: String,
pub instance_id: i32,
pub creation_date: NaiveDateTime,
pub ap_url: String,
pub private_key: Option<String>,
pub public_key: String,
pub fqn: String,
pub summary_html: SafeString,
pub icon_id: Option<i32>,
pub banner_id: Option<i32>,
pub theme: Option<String>,
}
#[derive(Default, Insertable)]
#[table_name = "blogs"]
pub struct NewBlog {
pub actor_id: String,
pub title: String,
pub summary: String,
pub outbox_url: String,
pub inbox_url: String,
pub instance_id: i32,
pub ap_url: String,
pub private_key: Option<String>,
pub public_key: String,
pub summary_html: SafeString,
pub icon_id: Option<i32>,
pub banner_id: Option<i32>,
pub theme: Option<String>,
}
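
/// Prefix used when computing a blog's actor URLs (outbox, inbox, AP URL),
/// distinguishing blog actors from user actors.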
const BLOG_PREFIX: &str = "~";
impl Blog {
insert!(blogs, NewBlog, |inserted, conn| {
let instance = inserted.get_instance(conn)?;
if inserted.outbox_url.is_empty() {
inserted.outbox_url = instance.compute_box(BLOG_PREFIX, &inserted.actor_id, "outbox");
}
if inserted.inbox_url.is_empty() {
inserted.inbox_url = instance.compute_box(BLOG_PREFIX, &inserted.actor_id, "inbox");
}
if inserted.ap_url.is_empty() {
inserted.ap_url = instance.compute_box(BLOG_PREFIX, &inserted.actor_id, "");
}
if inserted.fqn.is_empty() {
if instance.local {
inserted.fqn = inserted.actor_id.clone();
} else {
inserted.fqn = format!("{}@{}", inserted.actor_id, instance.public_domain);
}
}
inserted.save_changes(conn).map_err(Error::from)
});
get!(blogs);
find_by!(blogs, find_by_ap_url, ap_url as &str);
find_by!(blogs, find_by_name, actor_id as &str, instance_id as i32);
pub fn get_instance(&self, conn: &Connection) -> Result<Instance> {
Instance::get(conn, self.instance_id)
}
pub fn list_authors(&self, conn: &Connection) -> Result<Vec<User>> {
use crate::schema::blog_authors;
use crate::schema::users;
let authors_ids = blog_authors::table
.filter(blog_authors::blog_id.eq(self.id))
.select(blog_authors::author_id);
users::table
.filter(users::id.eq_any(authors_ids))
.load::<User>(conn)
.map_err(Error::from)
}
pub fn count_authors(&self, conn: &Connection) -> Result<i64> {
use crate::schema::blog_authors;
blog_authors::table
.filter(blog_authors::blog_id.eq(self.id))
.count()
.get_result(conn)
.map_err(Error::from)
}
pub fn find_for_author(conn: &Connection, author: &User) -> Result<Vec<Blog>> {
use crate::schema::blog_authors;
let author_ids = blog_authors::table
.filter(blog_authors::author_id.eq(author.id))
.select(blog_authors::blog_id);
blogs::table
.filter(blogs::id.eq_any(author_ids))
.load::<Blog>(conn)
.map_err(Error::from)
}
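
    /// Finds a blog by its fully-qualified name, looking in the local
    /// database first and falling back to a WebFinger lookup for blogs
    /// that are not known yet.
    ///
    /// A minimal usage sketch (the FQNs are illustrative):
    ///
    /// ```ignore
    /// let local = Blog::find_by_fqn(&conn, "BlogName")?;
    /// let remote = Blog::find_by_fqn(&conn, "BlogName@example.org")?;
    /// ```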
pub fn find_by_fqn(conn: &DbConn, fqn: &str) -> Result<Blog> {
let from_db = blogs::table
.filter(blogs::fqn.eq(fqn))
.first(&**conn)
.optional()?;
if let Some(from_db) = from_db {
Ok(from_db)
} else {
Blog::fetch_from_webfinger(conn, fqn)
}
}
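
    /// Resolves `acct` with WebFinger (using the `Group` prefix) and
    /// dereferences the `application/activity+json` link it advertises.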
fn fetch_from_webfinger(conn: &DbConn, acct: &str) -> Result<Blog> {
resolve_with_prefix(Prefix::Group, acct.to_owned(), true)?
.links
.into_iter()
.find(|l| l.mime_type == Some(String::from("application/activity+json")))
.ok_or(Error::Webfinger)
.and_then(|l| {
Blog::from_id(
conn,
&l.href.ok_or(Error::MissingApProperty)?,
None,
CONFIG.proxy(),
)
.map_err(|(_, e)| e)
})
}
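
    /// Serializes this blog as an ActivityPub `Group` actor, with its
    /// public key, icon and banner images, and the Markdown source of the
    /// summary carried as extensions.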
pub fn to_activity(&self, conn: &Connection) -> Result<CustomGroup> {
let mut blog = ApActor::new(self.inbox_url.parse()?, Group::new());
blog.set_preferred_username(self.actor_id.clone());
blog.set_name(self.title.clone());
blog.set_outbox(self.outbox_url.parse()?);
blog.set_summary(self.summary_html.to_string());
let source = SourceProperty {
source: Source {
content: self.summary.clone(),
media_type: String::from("text/markdown"),
},
};
let mut icon = Image::new();
let _ = self.icon_id.map(|id| {
Media::get(conn, id).and_then(|m| {
let _ = m
.url()
.and_then(|url| url.parse::<IriString>().map_err(|_| Error::Url))
.map(|url| icon.set_url(url));
icon.set_attributed_to(
User::get(conn, m.owner_id)?
.into_id()
.parse::<IriString>()?,
);
Ok(())
})
});
blog.set_icon(icon.into_any_base()?);
let mut banner = Image::new();
let _ = self.banner_id.map(|id| {
Media::get(conn, id).and_then(|m| {
let _ = m
.url()
.and_then(|url| url.parse::<IriString>().map_err(|_| Error::Url))
.map(|url| banner.set_url(url));
banner.set_attributed_to(
User::get(conn, m.owner_id)?
.into_id()
.parse::<IriString>()?,
);
Ok(())
})
});
blog.set_image(banner.into_any_base()?);
blog.set_id(self.ap_url.parse()?);
let pub_key = PublicKey {
id: format!("{}#main-key", self.ap_url).parse()?,
owner: self.ap_url.parse()?,
public_key_pem: self.public_key.clone(),
};
let ap_signature = ApSignature {
public_key: pub_key,
};
Ok(CustomGroup::new(blog, ap_signature, source))
}
pub fn outbox(&self, conn: &Connection) -> Result<ActivityStream<OrderedCollection>> {
self.outbox_collection(conn).map(ActivityStream::new)
}
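
    /// Builds the outbox as an `OrderedCollection` with `first` and `last`
    /// page links; the last page index is the integer ceiling
    /// `(n_acts + ITEMS_PER_PAGE - 1) / ITEMS_PER_PAGE`.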
pub fn outbox_collection(&self, conn: &Connection) -> Result<OrderedCollection> {
let acts = self.get_activities(conn);
let acts = acts
.iter()
.filter_map(|value| AnyBase::from_arbitrary_json(value).ok())
.collect::<Vec<AnyBase>>();
let n_acts = acts.len();
let mut coll = OrderedCollection::new();
coll.set_many_items(acts);
coll.set_total_items(n_acts as u64);
coll.set_first(format!("{}?page=1", &self.outbox_url).parse::<IriString>()?);
coll.set_last(
format!(
"{}?page={}",
&self.outbox_url,
                (n_acts as u64 + ITEMS_PER_PAGE as u64 - 1) / ITEMS_PER_PAGE as u64
)
.parse::<IriString>()?,
);
Ok(coll)
}
pub fn outbox_page(
&self,
conn: &Connection,
(min, max): (i32, i32),
) -> Result<ActivityStream<OrderedCollectionPage>> {
self.outbox_collection_page(conn, (min, max))
.map(ActivityStream::new)
}
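
    /// Builds a single outbox page, with `next` and `prev` links derived
    /// from the `min` offset and `ITEMS_PER_PAGE`.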
pub fn outbox_collection_page(
&self,
conn: &Connection,
(min, max): (i32, i32),
) -> Result<OrderedCollectionPage> {
let mut coll = OrderedCollectionPage::new();
let acts = self.get_activity_page(conn, (min, max));
        // This still doesn't do anything, because the outbox
        // doesn't serve any activities yet (see `get_activity_page`).
coll.set_next(
format!("{}?page={}", &self.outbox_url, min / ITEMS_PER_PAGE + 1)
.parse::<IriString>()?,
);
coll.set_prev(
format!("{}?page={}", &self.outbox_url, min / ITEMS_PER_PAGE - 1)
.parse::<IriString>()?,
);
coll.set_many_items(
acts.iter()
.filter_map(|value| AnyBase::from_arbitrary_json(value).ok()),
);
Ok(coll)
}
fn get_activities(&self, _conn: &Connection) -> Vec<serde_json::Value> {
vec![]
}
fn get_activity_page(
&self,
_conn: &Connection,
(_min, _max): (i32, i32),
) -> Vec<serde_json::Value> {
vec![]
}
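
    /// Parses the blog's PEM-encoded RSA private key. Fails with
    /// `Error::MissingApProperty` for remote blogs, whose private key is
    /// not stored locally.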
pub fn get_keypair(&self) -> Result<PKey<Private>> {
PKey::from_rsa(Rsa::private_key_from_pem(
self.private_key
.clone()
.ok_or(Error::MissingApProperty)?
.as_ref(),
)?)
.map_err(Error::from)
}
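
    /// Builds the WebFinger document for this blog, advertising its
    /// profile page, Atom feed and ActivityPub representation.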
pub fn webfinger(&self, conn: &Connection) -> Result<Webfinger> {
Ok(Webfinger {
subject: format!(
"acct:{}@{}",
self.actor_id,
self.get_instance(conn)?.public_domain
),
aliases: vec![self.ap_url.clone()],
links: vec![
Link {
rel: String::from("http://webfinger.net/rel/profile-page"),
mime_type: None,
href: Some(self.ap_url.clone()),
template: None,
},
Link {
rel: String::from("http://schemas.google.com/g/2010#updates-from"),
mime_type: Some(String::from("application/atom+xml")),
href: Some(self.get_instance(conn)?.compute_box(
BLOG_PREFIX,
&self.actor_id,
"feed.atom",
)),
template: None,
},
Link {
rel: String::from("self"),
mime_type: Some(String::from("application/activity+json")),
href: Some(self.ap_url.clone()),
template: None,
},
],
})
}
pub fn icon_url(&self, conn: &Connection) -> String {
self.icon_id
.and_then(|id| Media::get(conn, id).and_then(|m| m.url()).ok())
.unwrap_or_else(|| "/static/images/default-avatar.png".to_string())
}
pub fn banner_url(&self, conn: &Connection) -> Option<String> {
self.banner_id
.and_then(|i| Media::get(conn, i).ok())
.and_then(|c| c.url().ok())
}
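
    /// Deletes the blog, after deleting all of its posts.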
pub fn delete(&self, conn: &Connection) -> Result<()> {
for post in Post::get_for_blog(conn, self)? {
post.delete(conn)?;
}
diesel::delete(self)
.execute(conn)
.map(|_| ())
.map_err(Error::from)
}
}
impl IntoId for Blog {
fn into_id(self) -> Id {
Id::new(self.ap_url)
}
}
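
/// Lets a `Blog` be looked up in the database by its ActivityPub ID, or
/// dereferenced over HTTP and inserted when it is not known yet.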
impl FromId<DbConn> for Blog {
type Error = Error;
type Object = CustomGroup;
fn from_db(conn: &DbConn, id: &str) -> Result<Self> {
Self::find_by_ap_url(conn, id)
}
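
    /// Builds a `Blog` from its ActivityPub representation: validates the
    /// preferred username, fetches the remote icon and banner as `Media`,
    /// and registers the host instance if it is not known yet.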
fn from_activity(conn: &DbConn, acct: CustomGroup) -> Result<Self> {
let (name, outbox_url, inbox_url) = {
let actor = acct.ap_actor_ref();
let name = actor
.preferred_username()
.ok_or(Error::MissingApProperty)?
.to_string();
if name.contains(&['<', '>', '&', '@', '\'', '"', ' ', '\t'][..]) {
return Err(Error::InvalidValue);
}
(
name,
actor.outbox()?.ok_or(Error::MissingApProperty)?.to_string(),
actor.inbox()?.to_string(),
)
};
let mut new_blog = NewBlog {
actor_id: name.to_string(),
outbox_url,
inbox_url,
public_key: acct.ext_one.public_key.public_key_pem.to_string(),
private_key: None,
theme: None,
..NewBlog::default()
};
let object = ApObject::new(acct.inner);
new_blog.title = object
.name()
.and_then(|name| name.to_as_string())
.unwrap_or(name);
new_blog.summary_html = SafeString::new(
&object
.summary()
.and_then(|summary| summary.to_as_string())
.unwrap_or_default(),
);
let icon_id = object
.icon()
.and_then(|icons| {
icons.iter().next().and_then(|icon| {
let icon = icon.to_owned().extend::<Image, ImageType>().ok()??;
let owner = icon.attributed_to()?.to_as_uri()?;
Media::save_remote(
conn,
icon.url()?.to_as_uri()?,
&User::from_id(conn, &owner, None, CONFIG.proxy()).ok()?,
)
.ok()
})
})
.map(|m| m.id);
new_blog.icon_id = icon_id;
let banner_id = object
.image()
.and_then(|banners| {
banners.iter().next().and_then(|banner| {
let banner = banner.to_owned().extend::<Image, ImageType>().ok()??;
let owner = banner.attributed_to()?.to_as_uri()?;
Media::save_remote(
conn,
banner.url()?.to_as_uri()?,
&User::from_id(conn, &owner, None, CONFIG.proxy()).ok()?,
)
.ok()
})
})
.map(|m| m.id);
new_blog.banner_id = banner_id;
new_blog.summary = acct.ext_two.source.content;
let any_base = AnyBase::from_extended(object)?;
let id = any_base.id().ok_or(Error::MissingApProperty)?;
new_blog.ap_url = id.to_string();
let inst = id
.authority_components()
.ok_or(Error::Url)?
.host()
.to_string();
let instance = Instance::find_by_domain(conn, &inst).or_else(|_| {
Instance::insert(
conn,
NewInstance {
public_domain: inst.to_owned(),
name: inst.to_owned(),
local: false,
// We don't really care about all the following for remote instances
long_description: SafeString::new(""),
short_description: SafeString::new(""),
default_license: String::new(),
open_registrations: true,
short_description_html: String::new(),
long_description_html: String::new(),
},
)
})?;
new_blog.instance_id = instance.id;
Blog::insert(conn, new_blog)
}
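
    /// The actor that signs dereferencing requests: the local instance's
    /// service user.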
fn get_sender() -> &'static dyn sign::Signer {
        Instance::get_local_instance_user().expect("Failed to get local instance user")
}
}
impl AsActor<&PlumeRocket> for Blog {
fn get_inbox_url(&self) -> String {
self.inbox_url.clone()
}
fn get_shared_inbox_url(&self) -> Option<String> {
None
}
fn is_local(&self) -> bool {
Instance::get_local()
.map(|i| self.instance_id == i.id)
.unwrap_or(false)
}
}
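
/// Blogs sign and verify data with their RSA keypair over SHA-256 digests,
/// so that the activities they emit can be authenticated.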
impl sign::Signer for Blog {
fn get_key_id(&self) -> String {
format!("{}#main-key", self.ap_url)
}
fn sign(&self, to_sign: &str) -> sign::Result<Vec<u8>> {
let key = self.get_keypair().map_err(|_| sign::Error())?;
let mut signer = Signer::new(MessageDigest::sha256(), &key)?;
signer.update(to_sign.as_bytes())?;
signer.sign_to_vec().map_err(sign::Error::from)
}
fn verify(&self, data: &str, signature: &[u8]) -> sign::Result<bool> {
let key = PKey::from_rsa(Rsa::public_key_from_pem(self.public_key.as_ref())?)?;
let mut verifier = Verifier::new(MessageDigest::sha256(), &key)?;
verifier.update(data.as_bytes())?;
verifier.verify(signature).map_err(sign::Error::from)
}
}
impl NewBlog {
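    /// Prepares a new local blog, generating a fresh RSA keypair. The
    /// URLs and FQN are left empty here and filled in by `Blog::insert`.
    ///
    /// A sketch of typical usage (field values are illustrative):
    ///
    /// ```ignore
    /// let blog = Blog::insert(
    ///     conn,
    ///     NewBlog::new_local(
    ///         "tech".to_owned(),                 // actor_id
    ///         "Tech".to_owned(),                 // title
    ///         "Articles about Plume".to_owned(), // summary
    ///         Instance::get_local()?.id,
    ///     )?,
    /// )?;
    /// ```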
pub fn new_local(
actor_id: String,
title: String,
summary: String,
instance_id: i32,
) -> Result<NewBlog> {
let (pub_key, priv_key) = sign::gen_keypair();
Ok(NewBlog {
actor_id,
title,
summary,
instance_id,
public_key: String::from_utf8(pub_key).or(Err(Error::Signature))?,
private_key: Some(String::from_utf8(priv_key).or(Err(Error::Signature))?),
..NewBlog::default()
})
}
}
#[cfg(test)]
pub(crate) mod tests {
use super::*;
use crate::{
blog_authors::*, instance::tests as instance_tests, medias::NewMedia, tests::db,
        users::tests as user_tests, Connection as Conn,
};
use assert_json_diff::assert_json_eq;
use diesel::Connection;
use serde_json::to_value;
pub(crate) fn fill_database(conn: &Conn) -> (Vec<User>, Vec<Blog>) {
instance_tests::fill_database(conn);
        let users = user_tests::fill_database(conn);
let mut blog1 = Blog::insert(
conn,
NewBlog::new_local(
"BlogName".to_owned(),
"Blog name".to_owned(),
"This is a small blog".to_owned(),
Instance::get_local().unwrap().id,
)
.unwrap(),
)
.unwrap();
let blog2 = Blog::insert(
conn,
NewBlog::new_local(
"MyBlog".to_owned(),
"My blog".to_owned(),
"Welcome to my blog".to_owned(),
Instance::get_local().unwrap().id,
)
.unwrap(),
)
.unwrap();
let blog3 = Blog::insert(
conn,
NewBlog::new_local(
"WhyILikePlume".to_owned(),
"Why I like Plume".to_owned(),
"In this blog I will explay you why I like Plume so much".to_owned(),
Instance::get_local().unwrap().id,
)
.unwrap(),
)
.unwrap();
BlogAuthor::insert(
conn,
NewBlogAuthor {
blog_id: blog1.id,
author_id: users[0].id,
is_owner: true,
},
)
.unwrap();
BlogAuthor::insert(
conn,
NewBlogAuthor {
blog_id: blog1.id,
author_id: users[1].id,
is_owner: false,
},
)
.unwrap();
BlogAuthor::insert(
conn,
NewBlogAuthor {
blog_id: blog2.id,
author_id: users[1].id,
is_owner: true,
},
)
.unwrap();
BlogAuthor::insert(
conn,
NewBlogAuthor {
blog_id: blog3.id,
author_id: users[2].id,
is_owner: true,
},
)
.unwrap();
blog1.icon_id = Some(
Media::insert(
conn,
NewMedia {
file_path: "aaa.png".into(),
alt_text: String::new(),
is_remote: false,
remote_url: None,
sensitive: false,
content_warning: None,
owner_id: users[0].id,
},
)
.unwrap()
.id,
);
blog1.banner_id = Some(
Media::insert(
conn,
NewMedia {
file_path: "bbb.png".into(),
alt_text: String::new(),
is_remote: false,
remote_url: None,
sensitive: false,
content_warning: None,
owner_id: users[0].id,
},
)
.unwrap()
.id,
);
let _: Blog = blog1.save_changes(&*conn).unwrap();
(users, vec![blog1, blog2, blog3])
}
#[test]
fn get_instance() {
let conn = &db();
conn.test_transaction::<_, (), _>(|| {
fill_database(&conn);
let blog = Blog::insert(
&conn,
NewBlog::new_local(
"SomeName".to_owned(),
"Some name".to_owned(),
"This is some blog".to_owned(),
Instance::get_local().unwrap().id,
)
.unwrap(),
)
.unwrap();
assert_eq!(
blog.get_instance(&conn).unwrap().id,
Instance::get_local().unwrap().id
);
// TODO add tests for remote instance
Ok(())
})
}
#[test]
fn authors() {
let conn = &db();
conn.test_transaction::<_, (), _>(|| {
let (user, _) = fill_database(&conn);
let b1 = Blog::insert(
&conn,
NewBlog::new_local(
"SomeName".to_owned(),
"Some name".to_owned(),
"This is some blog".to_owned(),
Instance::get_local().unwrap().id,
)
.unwrap(),
)
.unwrap();
let b2 = Blog::insert(
&conn,
NewBlog::new_local(
"Blog".to_owned(),
"Blog".to_owned(),
"I've named my blog Blog".to_owned(),
Instance::get_local().unwrap().id,
)
.unwrap(),
)
.unwrap();
let blog = vec![b1, b2];
BlogAuthor::insert(
&conn,
NewBlogAuthor {
blog_id: blog[0].id,
author_id: user[0].id,
is_owner: true,
},
)
.unwrap();
BlogAuthor::insert(
&conn,
NewBlogAuthor {
blog_id: blog[0].id,
author_id: user[1].id,
is_owner: false,
},
)
.unwrap();
BlogAuthor::insert(
&conn,
NewBlogAuthor {
blog_id: blog[1].id,
author_id: user[0].id,
is_owner: true,
},
)
.unwrap();
assert!(blog[0]
.list_authors(&conn)
.unwrap()
.iter()
.any(|a| a.id == user[0].id));
assert!(blog[0]
.list_authors(&conn)
.unwrap()
.iter()
.any(|a| a.id == user[1].id));
assert!(blog[1]
.list_authors(&conn)
.unwrap()
.iter()
.any(|a| a.id == user[0].id));
assert!(!blog[1]
.list_authors(&conn)
.unwrap()
.iter()
.any(|a| a.id == user[1].id));
assert!(Blog::find_for_author(&conn, &user[0])
.unwrap()
.iter()
.any(|b| b.id == blog[0].id));
assert!(Blog::find_for_author(&conn, &user[1])
.unwrap()
.iter()
.any(|b| b.id == blog[0].id));
assert!(Blog::find_for_author(&conn, &user[0])
.unwrap()
.iter()
.any(|b| b.id == blog[1].id));
assert!(!Blog::find_for_author(&conn, &user[1])
.unwrap()
.iter()
.any(|b| b.id == blog[1].id));
Ok(())
})
}
#[test]
fn find_local() {
let conn = &db();
conn.test_transaction::<_, (), _>(|| {
fill_database(&conn);
let blog = Blog::insert(
2021-01-30 15:15:07 +01:00
&conn,
NewBlog::new_local(
"SomeName".to_owned(),
"Some name".to_owned(),
"This is some blog".to_owned(),
Instance::get_local().unwrap().id,
)
.unwrap(),
)
.unwrap();
assert_eq!(Blog::find_by_fqn(&conn, "SomeName").unwrap().id, blog.id);
Ok(())
})
}
#[test]
fn get_fqn() {
let conn = &db();
conn.test_transaction::<_, (), _>(|| {
fill_database(&conn);
let blog = Blog::insert(
&conn,
NewBlog::new_local(
"SomeName".to_owned(),
"Some name".to_owned(),
"This is some blog".to_owned(),
Instance::get_local().unwrap().id,
)
.unwrap(),
)
.unwrap();
assert_eq!(blog.fqn, "SomeName");
Ok(())
})
}
#[test]
fn delete() {
let conn = &db();
conn.test_transaction::<_, (), _>(|| {
let (_, blogs) = fill_database(&conn);
blogs[0].delete(&conn).unwrap();
assert!(Blog::get(&conn, blogs[0].id).is_err());
Ok(())
})
}
#[test]
fn delete_via_user() {
let conn = &db();
conn.test_transaction::<_, (), _>(|| {
let (user, _) = fill_database(&conn);
let b1 = Blog::insert(
&conn,
NewBlog::new_local(
"SomeName".to_owned(),
"Some name".to_owned(),
"This is some blog".to_owned(),
Instance::get_local().unwrap().id,
)
.unwrap(),
)
.unwrap();
let b2 = Blog::insert(
&conn,
NewBlog::new_local(
"Blog".to_owned(),
"Blog".to_owned(),
"I've named my blog Blog".to_owned(),
Instance::get_local().unwrap().id,
)
.unwrap(),
)
.unwrap();
let blog = vec![b1, b2];
BlogAuthor::insert(
&conn,
NewBlogAuthor {
blog_id: blog[0].id,
author_id: user[0].id,
is_owner: true,
},
2019-03-20 17:56:17 +01:00
)
.unwrap();
BlogAuthor::insert(
&conn,
NewBlogAuthor {
blog_id: blog[0].id,
author_id: user[1].id,
is_owner: false,
},
)
.unwrap();
BlogAuthor::insert(
&conn,
NewBlogAuthor {
blog_id: blog[1].id,
author_id: user[0].id,
is_owner: true,
},
)
.unwrap();
user[0].delete(&conn).unwrap();
assert!(Blog::get(&conn, blog[0].id).is_ok());
assert!(Blog::get(&conn, blog[1].id).is_err());
user[1].delete(&conn).unwrap();
assert!(Blog::get(&conn, blog[0].id).is_err());
Ok(())
})
}
#[test]
fn self_federation() {
let conn = &db();
conn.test_transaction::<_, (), _>(|| {
let (users, mut blogs) = fill_database(&conn);
blogs[0].icon_id = Some(
Media::insert(
&conn,
NewMedia {
file_path: "aaa.png".into(),
alt_text: String::new(),
is_remote: false,
remote_url: None,
sensitive: false,
content_warning: None,
owner_id: users[0].id,
},
)
.unwrap()
.id,
);
blogs[0].banner_id = Some(
Media::insert(
&conn,
NewMedia {
file_path: "bbb.png".into(),
alt_text: String::new(),
is_remote: false,
remote_url: None,
sensitive: false,
content_warning: None,
owner_id: users[0].id,
},
)
.unwrap()
.id,
);
let _: Blog = blogs[0].save_changes(&**conn).unwrap();
let ap_repr = blogs[0].to_activity(&conn).unwrap();
blogs[0].delete(&conn).unwrap();
let blog = Blog::from_activity(&conn, ap_repr).unwrap();
assert_eq!(blog.actor_id, blogs[0].actor_id);
assert_eq!(blog.title, blogs[0].title);
assert_eq!(blog.summary, blogs[0].summary);
assert_eq!(blog.outbox_url, blogs[0].outbox_url);
assert_eq!(blog.inbox_url, blogs[0].inbox_url);
assert_eq!(blog.instance_id, blogs[0].instance_id);
assert_eq!(blog.ap_url, blogs[0].ap_url);
assert_eq!(blog.public_key, blogs[0].public_key);
assert_eq!(blog.fqn, blogs[0].fqn);
assert_eq!(blog.summary_html, blogs[0].summary_html);
assert_eq!(blog.icon_url(&conn), blogs[0].icon_url(&conn));
assert_eq!(blog.banner_url(&conn), blogs[0].banner_url(&conn));
Ok(())
})
}

#[test]
fn to_activity() {
let conn = &db();
conn.test_transaction::<_, Error, _>(|| {
let (_users, blogs) = fill_database(conn);
let blog = &blogs[0];
let act = blog.to_activity(conn)?;
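// The blog should serialize to an ActivityPub Group carrying its
// icon, banner, public key, and Markdown source.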
let expected = json!({
"icon": {
"attributedTo": "https://plu.me/@/admin/",
"type": "Image",
"url": "https://plu.me/aaa.png"
},
"id": "https://plu.me/~/BlogName/",
"image": {
"attributedTo": "https://plu.me/@/admin/",
"type": "Image",
"url": "https://plu.me/bbb.png"
},
"inbox": "https://plu.me/~/BlogName/inbox",
"name": "Blog name",
"outbox": "https://plu.me/~/BlogName/outbox",
"preferredUsername": "BlogName",
"publicKey": {
"id": "https://plu.me/~/BlogName/#main-key",
"owner": "https://plu.me/~/BlogName/",
"publicKeyPem": blog.public_key
},
"source": {
"content": "This is a small blog",
"mediaType": "text/markdown"
},
"summary": "",
"type": "Group"
});
assert_json_eq!(to_value(act)?, expected);
Ok(())
});
}

#[test]
fn outbox_collection() {
let conn = &db();
conn.test_transaction::<_, Error, _>(|| {
let (_users, blogs) = fill_database(conn);
let blog = &blogs[0];
let act = blog.outbox_collection(conn)?;
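// A blog with no posts yields an empty OrderedCollection that still
// links to its first and last pages.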
let expected = json!({
"items": [],
"totalItems": 0,
"first": "https://plu.me/~/BlogName/outbox?page=1",
"last": "https://plu.me/~/BlogName/outbox?page=0",
"type": "OrderedCollection"
});
assert_json_eq!(to_value(act)?, expected);
Ok(())
});
}

#[test]
fn outbox_collection_page() {
let conn = &db();
conn.test_transaction::<_, Error, _>(|| {
let (_users, blogs) = fill_database(conn);
let blog = &blogs[0];
let act = blog.outbox_collection_page(conn, (33, 36))?;
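// With no posts the page has no items, but prev/next links are still generated.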
let expected = json!({
"next": "https://plu.me/~/BlogName/outbox?page=3",
"prev": "https://plu.me/~/BlogName/outbox?page=1",
"items": [],
"type": "OrderedCollectionPage"
});
assert_json_eq!(to_value(act)?, expected);
Ok(())
});
}
}