Fix the SQLite build

Authored by Bat on 2018-09-27 22:06:40 +01:00, committed by Igor Galić
parent 535c68b423
commit 743620eb6a
GPG Key ID: ACFEFF7F6A123A86 (no known key found for this signature in the database)
20 changed files with 142 additions and 114 deletions

View File

@@ -37,6 +37,5 @@ git = "https://github.com/SergioBenitez/Rocket"
 rev = "55459db7732b9a240826a5c120c650f87e3372ce"
 
 [features]
-default = ["postgres"]
 postgres = ["diesel/postgres"]
 sqlite = ["diesel/sqlite"]
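With `default = ["postgres"]` removed, a backend now has to be picked explicitly (for example with `--features postgres` or `--features sqlite`), and the `cfg` conditions added later in this commit treat the two features as mutually exclusive. A hedged sketch, not part of this commit, of a guard that would turn a misconfigured feature selection into a readable compile-time error:

// Hypothetical addition, assuming the feature names above: the cfg-gated
// `Connection` alias introduced in lib.rs simply disappears when both or
// neither backend feature is enabled, so an explicit error is friendlier.
#[cfg(not(any(feature = "postgres", feature = "sqlite")))]
compile_error!("enable exactly one of the `postgres` or `sqlite` features");

#[cfg(all(feature = "postgres", feature = "sqlite"))]
compile_error!("the `postgres` and `sqlite` features are mutually exclusive");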

View File

@@ -2,7 +2,7 @@ use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods};
 use schema::blog_authors;
 
-#[derive(Queryable, Identifiable)]
+#[derive(Clone, Queryable, Identifiable)]
 pub struct BlogAuthor {
     pub id: i32,
     pub blog_id: i32,

View File

@@ -1,4 +1,5 @@
 use activitypub::{Actor, Object, CustomObject, actor::Group, collection::OrderedCollection};
+use chrono::NaiveDateTime;
 use reqwest::{
     Client,
     header::{Accept, qitem},
@@ -6,7 +7,7 @@ use reqwest::{
 };
 use serde_json;
 use url::Url;
-use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods, dsl::any};
+use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods};
 use openssl::{
     hash::MessageDigest,
     pkey::{PKey, Private},
@@ -15,7 +16,7 @@ use openssl::{
 };
 use webfinger::*;
-use {BASE_URL, USE_HTTPS, Connection, SqlDateTime};
+use {BASE_URL, USE_HTTPS, Connection};
 use plume_common::activity_pub::{
     ap_accept_header, ApSignature, ActivityStream, Id, IntoId, PublicKey,
     inbox::WithInbox,
@@ -37,7 +38,7 @@ pub struct Blog {
     pub outbox_url: String,
     pub inbox_url: String,
     pub instance_id: i32,
-    pub creation_date: SqlDateTime,
+    pub creation_date: NaiveDateTime,
     pub ap_url: String,
     pub private_key: Option<String>,
     pub public_key: String
@@ -73,7 +74,7 @@ impl Blog {
         use schema::blog_authors;
         use schema::users;
         let authors_ids = blog_authors::table.filter(blog_authors::blog_id.eq(self.id)).select(blog_authors::author_id);
-        users::table.filter(users::id.eq(any(authors_ids)))
+        users::table.filter(users::id.eq_any(authors_ids))
             .load::<User>(conn)
             .expect("Couldn't load authors of a blog")
     }
@@ -81,7 +82,7 @@ impl Blog {
     pub fn find_for_author(conn: &Connection, author_id: i32) -> Vec<Blog> {
         use schema::blog_authors;
         let author_ids = blog_authors::table.filter(blog_authors::author_id.eq(author_id)).select(blog_authors::blog_id);
-        blogs::table.filter(blogs::id.eq(any(author_ids)))
+        blogs::table.filter(blogs::id.eq_any(author_ids))
             .load::<Blog>(conn)
             .expect("Couldn't load blogs ")
     }
@@ -189,19 +190,19 @@ impl Blog {
         if self.outbox_url.len() == 0 {
             diesel::update(self)
                 .set(blogs::outbox_url.eq(instance.compute_box(BLOG_PREFIX, self.actor_id.clone(), "outbox")))
-                .get_result::<Blog>(conn).expect("Couldn't update outbox URL");
+                .execute(conn).expect("Couldn't update outbox URL");
         }
 
         if self.inbox_url.len() == 0 {
             diesel::update(self)
                 .set(blogs::inbox_url.eq(instance.compute_box(BLOG_PREFIX, self.actor_id.clone(), "inbox")))
-                .get_result::<Blog>(conn).expect("Couldn't update inbox URL");
+                .execute(conn).expect("Couldn't update inbox URL");
         }
 
         if self.ap_url.len() == 0 {
             diesel::update(self)
                 .set(blogs::ap_url.eq(instance.compute_box(BLOG_PREFIX, self.actor_id.clone(), "")))
-                .get_result::<Blog>(conn).expect("Couldn't update AP URL");
+                .execute(conn).expect("Couldn't update AP URL");
         }
     }
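The dropped `dsl::any` import above is the crux of these query changes: Diesel's `any()` builds a PostgreSQL-only `= ANY(...)` expression, while `eq_any` compiles to a plain `IN (...)` subquery on every backend. A minimal, self-contained sketch of the portable form, using a hypothetical schema rather than this crate's (diesel 1.x-era API, connections passed by `&`):

#[macro_use] extern crate diesel;
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

table! {
    posts (id) {
        id -> Integer,
        published -> Bool,
    }
}

table! {
    tags (id) {
        id -> Integer,
        post_id -> Integer,
    }
}

allow_tables_to_appear_in_same_query!(posts, tags);

// `eq_any` accepts a subquery (or an iterator of values) and builds a plain
// `IN (...)` on every backend; `posts::id.eq(any(ids))` only builds on Postgres.
fn published_tagged_post_ids(conn: &SqliteConnection) -> Vec<i32> {
    let ids = tags::table.select(tags::post_id);
    posts::table
        .filter(posts::id.eq_any(ids))
        .filter(posts::published.eq(true))
        .select(posts::id)
        .load::<i32>(conn)
        .expect("Error loading post ids")
}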

View File

@@ -3,8 +3,8 @@ use activitypub::{
     link,
     object::{Note}
 };
-use chrono;
-use diesel::{self, PgConnection, RunQueryDsl, QueryDsl, ExpressionMethods, dsl::any};
+use chrono::{self, NaiveDateTime};
+use diesel::{self, RunQueryDsl, QueryDsl, ExpressionMethods};
 use serde_json;
 
 use plume_common::activity_pub::{
@@ -12,7 +12,7 @@ use plume_common::activity_pub::{
     inbox::{FromActivity, Notify}
 };
 use plume_common::utils;
-use {Connection, SqlDateTime};
+use Connection;
 use instance::Instance;
 use mentions::Mention;
 use notifications::*;
@@ -28,7 +28,7 @@ pub struct Comment {
     pub in_response_to_id: Option<i32>,
     pub post_id: i32,
     pub author_id: i32,
-    pub creation_date: SqlDateTime,
+    pub creation_date: NaiveDateTime,
     pub ap_url: Option<String>,
     pub sensitive: bool,
     pub spoiler_text: String
@@ -63,7 +63,7 @@ impl Comment {
     pub fn count_local(conn: &Connection) -> usize {
         use schema::users;
         let local_authors = users::table.filter(users::instance_id.eq(Instance::local_id(conn))).select(users::id);
-        comments::table.filter(comments::author_id.eq(any(local_authors)))
+        comments::table.filter(comments::author_id.eq_any(local_authors))
             .load::<Comment>(conn)
             .expect("Couldn't load local comments")
             .len()
@@ -87,8 +87,9 @@ impl Comment {
         if self.ap_url.is_none() {
             diesel::update(self)
                 .set(comments::ap_url.eq(self.compute_id(conn)))
-                .get_result(conn)
-                .expect("Failed to update comment AP URL")
+                .execute(conn)
+                .expect("Failed to update comment AP URL");
+            Comment::get(conn, self.id).expect("Couldn't get the updated comment")
         } else {
             self.clone()
         }
@@ -134,7 +135,7 @@ impl Comment {
     }
 }
 
-impl FromActivity<Note, PgConnection> for Comment {
+impl FromActivity<Note, Connection> for Comment {
     fn from_activity(conn: &Connection, note: Note, actor: Id) -> Comment {
         let previous_url = note.object_props.in_reply_to.clone().unwrap().as_str().unwrap().to_string();
         let previous_comment = Comment::find_by_ap_url(conn, previous_url.clone());
@@ -168,7 +169,7 @@ impl FromActivity<Note, PgConnection> for Comment {
     }
 }
 
-impl Notify<PgConnection> for Comment {
+impl Notify<Connection> for Comment {
     fn notify(&self, conn: &Connection) {
         for author in self.get_post(conn).get_authors(conn) {
             Notification::insert(conn, NewNotification {

View File

@@ -1,16 +1,17 @@
 use diesel::{
-    pg::PgConnection,
     r2d2::{ConnectionManager, Pool, PooledConnection}
 };
 use rocket::{Request, State, Outcome, http::Status, request::{self, FromRequest}};
 use std::ops::Deref;
 
-pub type PgPool = Pool<ConnectionManager<PgConnection>>;
+use Connection;
+
+pub type DbPool = Pool<ConnectionManager<Connection>>;
 
 // From rocket documentation
 // Connection request guard type: a wrapper around an r2d2 pooled connection.
-pub struct DbConn(pub PooledConnection<ConnectionManager<PgConnection>>);
+pub struct DbConn(pub PooledConnection<ConnectionManager<Connection>>);
 
 /// Attempts to retrieve a single connection from the managed database pool. If
 /// no pool is currently managed, fails with an `InternalServerError` status. If
@@ -19,7 +20,7 @@ impl<'a, 'r> FromRequest<'a, 'r> for DbConn {
     type Error = ();
 
     fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
-        let pool = request.guard::<State<PgPool>>()?;
+        let pool = request.guard::<State<DbPool>>()?;
         match pool.get() {
             Ok(conn) => Outcome::Success(DbConn(conn)),
             Err(_) => Outcome::Failure((Status::ServiceUnavailable, ()))
@@ -29,7 +30,7 @@ impl<'a, 'r> FromRequest<'a, 'r> for DbConn {
 // For the convenience of using an &DbConn as an &Connection.
 impl Deref for DbConn {
-    type Target = PgConnection;
+    type Target = Connection;
 
     fn deref(&self) -> &Self::Target {
         &self.0
View File

@@ -1,5 +1,5 @@
 use activitypub::{Actor, activity::{Accept, Follow as FollowAct, Undo}, actor::Person};
-use diesel::{self, PgConnection, ExpressionMethods, QueryDsl, RunQueryDsl};
+use diesel::{self, ExpressionMethods, QueryDsl, RunQueryDsl};
 use plume_common::activity_pub::{broadcast, Id, IntoId, inbox::{FromActivity, Notify, WithInbox, Deletable}, sign::Signer};
 use Connection;
@@ -8,7 +8,7 @@ use notifications::*;
 use users::User;
 use schema::follows;
 
-#[derive(Queryable, Identifiable, Associations)]
+#[derive(Clone, Queryable, Identifiable, Associations)]
 #[belongs_to(User, foreign_key = "following_id")]
 pub struct Follow {
     pub id: i32,
@@ -80,7 +80,7 @@ impl Follow {
     }
 }
 
-impl FromActivity<FollowAct, PgConnection> for Follow {
+impl FromActivity<FollowAct, Connection> for Follow {
     fn from_activity(conn: &Connection, follow: FollowAct, _actor: Id) -> Follow {
         let from_id = follow.follow_props.actor_link::<Id>().map(|l| l.into())
             .unwrap_or_else(|_| follow.follow_props.actor_object::<Person>().expect("No actor object (nor ID) on Follow").object_props.id_string().expect("No ID on actor on Follow"));
@@ -95,7 +95,7 @@ impl FromActivity<FollowAct, PgConnection> for Follow {
     }
 }
 
-impl Notify<PgConnection> for Follow {
+impl Notify<Connection> for Follow {
     fn notify(&self, conn: &Connection) {
         Notification::insert(conn, NewNotification {
             kind: notification_kind::FOLLOW.to_string(),
@@ -105,7 +105,7 @@ impl Notify<PgConnection> for Follow {
     }
 }
 
-impl Deletable<PgConnection, Undo> for Follow {
+impl Deletable<Connection, Undo> for Follow {
     fn delete(&self, conn: &Connection) -> Undo {
         diesel::delete(self).execute(conn).expect("Coudn't delete follow");

View File

@@ -1,21 +1,22 @@
+use chrono::NaiveDateTime;
 use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods};
 use std::iter::Iterator;
 use plume_common::utils::md_to_html;
-use {Connection, SqlDateTime};
+use Connection;
 use safe_string::SafeString;
 use ap_url;
 use users::User;
 use schema::{instances, users};
 
-#[derive(Identifiable, Queryable, Serialize)]
+#[derive(Clone, Identifiable, Queryable, Serialize)]
 pub struct Instance {
     pub id: i32,
     pub public_domain: String,
     pub name: String,
     pub local: bool,
     pub blocked: bool,
-    pub creation_date: SqlDateTime,
+    pub creation_date: NaiveDateTime,
     pub open_registrations: bool,
     pub short_description: SafeString,
     pub long_description: SafeString,
@@ -72,7 +73,7 @@ impl Instance {
     pub fn toggle_block(&self, conn: &Connection) {
         diesel::update(self)
             .set(instances::blocked.eq(!self.blocked))
-            .get_result::<Instance>(conn)
+            .execute(conn)
             .expect("Couldn't block/unblock instance");
     }
@@ -115,7 +116,7 @@ impl Instance {
         ))
     }
 
-    pub fn update(&self, conn: &Connection, name: String, open_registrations: bool, short_description: SafeString, long_description: SafeString) -> Instance {
+    pub fn update(&self, conn: &Connection, name: String, open_registrations: bool, short_description: SafeString, long_description: SafeString) {
         let (sd, _) = md_to_html(short_description.as_ref());
         let (ld, _) = md_to_html(long_description.as_ref());
         diesel::update(self)
@@ -126,8 +127,8 @@ impl Instance {
             instances::long_description.eq(long_description),
             instances::short_description_html.eq(sd),
             instances::long_description_html.eq(ld)
-        )).get_result::<Instance>(conn)
-        .expect("Couldn't update instance")
+        )).execute(conn)
+        .expect("Couldn't update instance");
     }
 
     pub fn count(conn: &Connection) -> i64 {

View File

@@ -26,6 +26,12 @@ extern crate webfinger;
 use std::env;
 
+#[cfg(all(feature = "sqlite", not(feature = "postgres")))]
+pub type Connection = diesel::SqliteConnection;
+
+#[cfg(all(not(feature = "sqlite"), feature = "postgres"))]
+pub type Connection = diesel::PgConnection;
+
 macro_rules! find_by {
     ($table:ident, $fn:ident, $($col:ident as $type:ident),+) => {
         /// Try to find a $table with a given $col
@@ -66,11 +72,14 @@ macro_rules! get {
 macro_rules! insert {
     ($table:ident, $from:ident) => {
+        last!($table);
+
         pub fn insert(conn: &crate::Connection, new: $from) -> Self {
             diesel::insert_into($table::table)
                 .values(new)
-                .get_result(conn)
-                .expect("Error saving new $table")
+                .execute(conn)
+                .expect("Error saving new $table");
+            Self::last(conn)
         }
     };
 }
@@ -80,8 +89,24 @@ macro_rules! update {
         pub fn update(&self, conn: &crate::Connection) -> Self {
             diesel::update(self)
                 .set(self)
-                .get_result(conn)
-                .expect(concat!("Error updating ", stringify!($table)))
+                .execute(conn)
+                .expect(concat!("Error updating ", stringify!($table)));
+            Self::get(conn, self.id)
+                .expect(concat!(stringify!($table), " we just updated doesn't exist anymore???"))
+        }
+    };
+}
+
+macro_rules! last {
+    ($table:ident) => {
+        pub fn last(conn: &crate::Connection) -> Self {
+            $table::table.order_by($table::id.desc())
+                .limit(1)
+                .load::<Self>(conn)
+                .expect(concat!("Error getting last ", stringify!($table)))
+                .iter().next()
+                .expect(concat!("No last ", stringify!($table)))
+                .clone()
         }
     };
 }
@@ -105,18 +130,6 @@ pub fn ap_url(url: String) -> String {
     format!("{}://{}", scheme, url)
 }
 
-#[cfg(all(not(feature = "postgres"), feature = "sqlite"))]
-pub type SqlDateTime = chrono::NaiveDateTime;
-#[cfg(all(not(feature = "postgres"), feature = "sqlite"))]
-pub type Connection = diesel::SqliteConnection;
-
-#[cfg(all(not(feature = "sqlite"), feature = "postgres"))]
-pub type SqlDateTime = chrono::NaiveDateTime;
-#[cfg(all(not(feature = "sqlite"), feature = "postgres"))]
-pub type Connection = diesel::PgConnection;
-
 pub mod admin;
 pub mod blog_authors;
 pub mod blogs;
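These macros carry most of the port: `get_result`/`get_results` rely on `RETURNING`, which PostgreSQL has but SQLite (at least as driven by Diesel 1.x) does not, so inserts and updates now `execute` the statement and then read the row back (`Self::last` after an insert, `Self::get` after an update). A self-contained sketch of what the new `insert!`/`last!` pair roughly expands to, on a hypothetical table rather than this crate's schema (diesel 1.x-era API):

#[macro_use] extern crate diesel;
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

table! {
    items (id) {
        id -> Integer,
        name -> Text,
    }
}

#[derive(Queryable, Clone)]
pub struct Item { pub id: i32, pub name: String }

#[derive(Insertable)]
#[table_name = "items"]
pub struct NewItem { pub name: String }

impl Item {
    // Roughly what `insert!(items, NewItem)` expands to after this commit.
    pub fn insert(conn: &SqliteConnection, new: NewItem) -> Item {
        diesel::insert_into(items::table)
            .values(new)
            .execute(conn)   // no RETURNING clause involved
            .expect("Error saving new items");
        Item::last(conn)     // read back the row that was just written
    }

    // Roughly what `last!(items)` expands to.
    pub fn last(conn: &SqliteConnection) -> Item {
        items::table.order_by(items::id.desc())
            .limit(1)
            .load::<Item>(conn)
            .expect("Error getting last items")
            .into_iter().next()
            .expect("No last items")
    }
}

Note that fetching the highest id right after the insert matches what was just written on a single connection, but is not guaranteed under concurrent writers.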

View File

@@ -14,7 +14,7 @@ use posts::Post;
 use users::User;
 use schema::likes;
 
-#[derive(Queryable, Identifiable)]
+#[derive(Clone, Queryable, Identifiable)]
 pub struct Like {
     pub id: i32,
     pub user_id: i32,
@@ -45,7 +45,7 @@ impl Like {
                 User::get(conn, self.user_id).unwrap().ap_url,
                 Post::get(conn, self.post_id).unwrap().ap_url
             )))
-            .get_result::<Like>(conn).expect("Couldn't update AP URL");
+            .execute(conn).expect("Couldn't update AP URL");
     }
 }

View File

@@ -6,7 +6,7 @@ use {ap_url, Connection};
 use instance::Instance;
 use schema::medias;
 
-#[derive(Identifiable, Queryable, Serialize)]
+#[derive(Clone, Identifiable, Queryable, Serialize)]
 pub struct Media {
     pub id: i32,
     pub file_path: String,

View File

@@ -1,5 +1,5 @@
 use activitypub::link;
-use diesel::{self, PgConnection, QueryDsl, RunQueryDsl, ExpressionMethods};
+use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods};
 use plume_common::activity_pub::inbox::Notify;
 use Connection;
@@ -9,7 +9,7 @@ use posts::Post;
 use users::User;
 use schema::mentions;
 
-#[derive(Queryable, Identifiable, Serialize, Deserialize)]
+#[derive(Clone, Queryable, Identifiable, Serialize, Deserialize)]
 pub struct Mention {
     pub id: i32,
     pub mentioned_id: i32,
@@ -104,7 +104,7 @@ impl Mention {
     }
 }
 
-impl Notify<PgConnection> for Mention {
+impl Notify<Connection> for Mention {
     fn notify(&self, conn: &Connection) {
         self.get_mentioned(conn).map(|m| {
             Notification::insert(conn, NewNotification {

View File

@@ -1,7 +1,8 @@
+use chrono::NaiveDateTime;
 use diesel::{self, RunQueryDsl, QueryDsl, ExpressionMethods};
 use serde_json;
-use {Connection, SqlDateTime};
+use Connection;
 use comments::Comment;
 use follows::Follow;
 use likes::Like;
@@ -19,11 +20,11 @@ pub mod notification_kind {
     pub const RESHARE: &'static str = "RESHARE";
 }
 
-#[derive(Queryable, Identifiable, Serialize)]
+#[derive(Clone, Queryable, Identifiable, Serialize)]
 pub struct Notification {
     pub id: i32,
     pub user_id: i32,
-    pub creation_date: SqlDateTime,
+    pub creation_date: NaiveDateTime,
     pub kind: String,
     pub object_id: i32
 }

View File

@@ -4,7 +4,7 @@ use posts::Post;
 use users::User;
 use schema::post_authors;
 
-#[derive(Queryable, Identifiable, Associations)]
+#[derive(Clone, Queryable, Identifiable, Associations)]
 #[belongs_to(Post)]
 #[belongs_to(User, foreign_key = "author_id")]
 pub struct PostAuthor {

View File

@@ -5,7 +5,7 @@ use activitypub::{
 };
 use canapi::{Error, Provider};
 use chrono::{NaiveDateTime, TimeZone, Utc};
-use diesel::{self, RunQueryDsl, QueryDsl, ExpressionMethods, BelongingToDsl, dsl::any};
+use diesel::{self, RunQueryDsl, QueryDsl, ExpressionMethods, BelongingToDsl};
 use heck::KebabCase;
 use serde_json;
@@ -15,7 +15,7 @@ use plume_common::activity_pub::{
     PUBLIC_VISIBILTY, Id, IntoId,
     inbox::{Deletable, FromActivity}
 };
-use {BASE_URL, ap_url, Connection, SqlDateTime};
+use {BASE_URL, ap_url, Connection};
 use blogs::Blog;
 use instance::Instance;
 use likes::Like;
@@ -36,7 +36,7 @@ pub struct Post {
     pub content: SafeString,
     pub published: bool,
     pub license: String,
-    pub creation_date: SqlDateTime,
+    pub creation_date: NaiveDateTime,
     pub ap_url: String,
     pub subtitle: String,
     pub source: String,
@@ -116,31 +116,32 @@ impl Post {
         use schema::tags;
         let ids = tags::table.filter(tags::tag.eq(tag)).select(tags::post_id);
-        posts::table.filter(posts::id.eq(any(ids)))
+        posts::table.filter(posts::id.eq_any(ids))
             .filter(posts::published.eq(true))
             .order(posts::creation_date.desc())
             .offset(min.into())
             .limit((max - min).into())
-            .get_results::<Post>(conn)
+            .load(conn)
             .expect("Error loading posts by tag")
     }
 
     pub fn count_for_tag(conn: &Connection, tag: String) -> i64 {
         use schema::tags;
         let ids = tags::table.filter(tags::tag.eq(tag)).select(tags::post_id);
-        posts::table.filter(posts::id.eq(any(ids)))
+        *posts::table.filter(posts::id.eq_any(ids))
             .filter(posts::published.eq(true))
             .count()
-            .get_result(conn)
+            .load(conn)
             .expect("Error counting posts by tag")
+            .iter().next().unwrap()
     }
 
     pub fn count_local(conn: &Connection) -> usize {
         use schema::post_authors;
         use schema::users;
         let local_authors = users::table.filter(users::instance_id.eq(Instance::local_id(conn))).select(users::id);
-        let local_posts_id = post_authors::table.filter(post_authors::author_id.eq(any(local_authors))).select(post_authors::post_id);
-        posts::table.filter(posts::id.eq(any(local_posts_id)))
+        let local_posts_id = post_authors::table.filter(post_authors::author_id.eq_any(local_authors)).select(post_authors::post_id);
+        posts::table.filter(posts::id.eq_any(local_posts_id))
             .filter(posts::published.eq(true))
             .load::<Post>(conn)
             .expect("Couldn't load local posts")
@@ -163,7 +164,7 @@ impl Post {
         use schema::post_authors;
         let posts = PostAuthor::belonging_to(author).select(post_authors::post_id);
-        posts::table.filter(posts::id.eq(any(posts)))
+        posts::table.filter(posts::id.eq_any(posts))
             .filter(posts::published.eq(true))
             .order(posts::creation_date.desc())
             .limit(limit)
@@ -215,7 +216,7 @@ impl Post {
         posts::table.order(posts::creation_date.desc())
             .filter(posts::published.eq(true))
-            .filter(posts::blog_id.eq(any(blog_ids)))
+            .filter(posts::blog_id.eq_any(blog_ids))
             .offset(min.into())
             .limit((max - min).into())
             .load::<Post>(conn)
@@ -225,12 +226,13 @@ impl Post {
     /// Give a page of customized user feed, based on a list of followed users
     pub fn user_feed_page(conn: &Connection, followed: Vec<i32>, (min, max): (i32, i32)) -> Vec<Post> {
         use schema::post_authors;
-        let post_ids = post_authors::table.filter(post_authors::author_id.eq(any(followed)))
+        let post_ids = post_authors::table
+            .filter(post_authors::author_id.eq_any(followed))
             .select(post_authors::post_id);
 
         posts::table.order(posts::creation_date.desc())
             .filter(posts::published.eq(true))
-            .filter(posts::id.eq(any(post_ids)))
+            .filter(posts::id.eq_any(post_ids))
             .offset(min.into())
             .limit((max - min).into())
             .load::<Post>(conn)
@@ -243,7 +245,7 @@ impl Post {
         let posts = PostAuthor::belonging_to(author).select(post_authors::post_id);
         posts::table.order(posts::creation_date.desc())
             .filter(posts::published.eq(false))
-            .filter(posts::id.eq(any(posts)))
+            .filter(posts::id.eq_any(posts))
             .load::<Post>(conn)
             .expect("Error listing drafts")
     }
@@ -252,7 +254,7 @@ impl Post {
         use schema::users;
         use schema::post_authors;
         let author_list = PostAuthor::belonging_to(self).select(post_authors::author_id);
-        users::table.filter(users::id.eq(any(author_list))).load::<User>(conn).unwrap()
+        users::table.filter(users::id.eq_any(author_list)).load::<User>(conn).unwrap()
     }
 
     pub fn get_blog(&self, conn: &Connection) -> Blog {
@@ -282,7 +284,8 @@ impl Post {
         if self.ap_url.len() == 0 {
             diesel::update(self)
                 .set(posts::ap_url.eq(self.compute_id(conn)))
-                .get_result::<Post>(conn).expect("Couldn't update AP URL")
+                .execute(conn).expect("Couldn't update AP URL");
+            Post::get(conn, self.id).unwrap()
         } else {
             self.clone()
         }

View File

@@ -1,20 +1,21 @@
 use activitypub::activity::{Announce, Undo};
-use diesel::{self, PgConnection, QueryDsl, RunQueryDsl, ExpressionMethods};
+use chrono::NaiveDateTime;
+use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods};
 use plume_common::activity_pub::{Id, IntoId, inbox::{FromActivity, Notify, Deletable}, PUBLIC_VISIBILTY};
-use {Connection, SqlDateTime};
+use Connection;
 use notifications::*;
 use posts::Post;
 use users::User;
 use schema::reshares;
 
-#[derive(Serialize, Deserialize, Queryable, Identifiable)]
+#[derive(Clone, Serialize, Deserialize, Queryable, Identifiable)]
 pub struct Reshare {
     pub id: i32,
     pub user_id: i32,
     pub post_id: i32,
     pub ap_url: String,
-    pub creation_date: SqlDateTime
+    pub creation_date: NaiveDateTime
 }
 
 #[derive(Insertable)]
@@ -39,7 +40,7 @@ impl Reshare {
                 User::get(conn, self.user_id).unwrap().ap_url,
                 Post::get(conn, self.post_id).unwrap().ap_url
             )))
-            .get_result::<Reshare>(conn).expect("Couldn't update AP URL");
+            .execute(conn).expect("Couldn't update AP URL");
     }
 }
@@ -71,7 +72,7 @@ impl Reshare {
     }
 }
 
-impl FromActivity<Announce, PgConnection> for Reshare {
+impl FromActivity<Announce, Connection> for Reshare {
     fn from_activity(conn: &Connection, announce: Announce, _actor: Id) -> Reshare {
         let user = User::from_url(conn, announce.announce_props.actor_link::<Id>().expect("Reshare::from_activity: actor error").into());
         let post = Post::find_by_ap_url(conn, announce.announce_props.object_link::<Id>().expect("Reshare::from_activity: object error").into());
@@ -85,7 +86,7 @@ impl FromActivity<Announce, PgConnection> for Reshare {
     }
 }
 
-impl Notify<PgConnection> for Reshare {
+impl Notify<Connection> for Reshare {
     fn notify(&self, conn: &Connection) {
         let post = self.get_post(conn).unwrap();
         for author in post.get_authors(conn) {
@@ -98,7 +99,7 @@ impl Notify<PgConnection> for Reshare {
     }
 }
 
-impl Deletable<PgConnection, Undo> for Reshare {
+impl Deletable<Connection, Undo> for Reshare {
     fn delete(&self, conn: &Connection) -> Undo {
         diesel::delete(self).execute(conn).unwrap();

View File

@@ -92,6 +92,13 @@ impl Queryable<Text, diesel::pg::Pg> for SafeString {
     }
 }
 
+impl Queryable<Text, diesel::sqlite::Sqlite> for SafeString {
+    type Row = String;
+
+    fn build(value: Self::Row) -> Self {
+        SafeString::new(&value)
+    }
+}
+
 impl<DB> ToSql<diesel::sql_types::Text, DB> for SafeString
 where
     DB: diesel::backend::Backend,

View File

@@ -6,7 +6,7 @@ use activitypub::{
 };
 use bcrypt;
 use chrono::{Utc, NaiveDateTime};
-use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods, BelongingToDsl, dsl::any};
+use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods, BelongingToDsl};
 use openssl::{
     hash::MessageDigest,
     pkey::{PKey, Private},
@@ -101,8 +101,8 @@ impl User {
     find_by!(users, find_by_ap_url, ap_url as String);
 
     pub fn one_by_instance(conn: &Connection) -> Vec<User> {
-        users::table.distinct_on(users::instance_id)
-            .get_results::<User>(conn)
+        users::table.filter(users::instance_id.eq_any(users::table.select(users::instance_id).distinct()))
+            .load::<User>(conn)
             .expect("Error in User::on_by_instance")
     }
@@ -117,7 +117,7 @@ impl User {
     pub fn grant_admin_rights(&self, conn: &Connection) {
         diesel::update(self)
             .set(users::is_admin.eq(true))
-            .load::<User>(conn)
+            .execute(conn)
             .expect("Couldn't grant admin rights");
     }
@@ -127,9 +127,9 @@ impl User {
             users::display_name.eq(name),
             users::email.eq(email),
             users::summary.eq(summary),
-        )).load::<User>(conn)
-        .expect("Couldn't update user")
-        .into_iter().nth(0).unwrap()
+        )).execute(conn)
+        .expect("Couldn't update user");
+        User::get(conn, self.id).unwrap()
     }
 
     pub fn count_local(conn: &Connection) -> usize {
@@ -279,31 +279,31 @@ impl User {
         if self.outbox_url.len() == 0 {
             diesel::update(self)
                 .set(users::outbox_url.eq(instance.compute_box(USER_PREFIX, self.username.clone(), "outbox")))
-                .get_result::<User>(conn).expect("Couldn't update outbox URL");
+                .execute(conn).expect("Couldn't update outbox URL");
         }
 
         if self.inbox_url.len() == 0 {
             diesel::update(self)
                 .set(users::inbox_url.eq(instance.compute_box(USER_PREFIX, self.username.clone(), "inbox")))
-                .get_result::<User>(conn).expect("Couldn't update inbox URL");
+                .execute(conn).expect("Couldn't update inbox URL");
        }
 
         if self.ap_url.len() == 0 {
             diesel::update(self)
                 .set(users::ap_url.eq(instance.compute_box(USER_PREFIX, self.username.clone(), "")))
-                .get_result::<User>(conn).expect("Couldn't update AP URL");
+                .execute(conn).expect("Couldn't update AP URL");
         }
 
         if self.shared_inbox_url.is_none() {
             diesel::update(self)
                 .set(users::shared_inbox_url.eq(ap_url(format!("{}/inbox", Instance::get_local(conn).unwrap().public_domain))))
-                .get_result::<User>(conn).expect("Couldn't update shared inbox URL");
+                .execute(conn).expect("Couldn't update shared inbox URL");
         }
 
         if self.followers_endpoint.len() == 0 {
             diesel::update(self)
                 .set(users::followers_endpoint.eq(instance.compute_box(USER_PREFIX, self.username.clone(), "followers")))
-                .get_result::<User>(conn).expect("Couldn't update followers endpoint");
+                .execute(conn).expect("Couldn't update followers endpoint");
         }
     }
@@ -375,7 +375,7 @@ impl User {
         let posts_by_self = PostAuthor::belonging_to(self).select(post_authors::post_id);
         let posts = posts::table
             .filter(posts::published.eq(true))
-            .filter(posts::id.eq(any(posts_by_self)))
+            .filter(posts::id.eq_any(posts_by_self))
             .load::<Post>(conn).unwrap();
         posts.into_iter().map(|p| {
             serde_json::to_value(p.create_activity(conn)).unwrap()
@@ -393,22 +393,22 @@ impl User {
     pub fn get_followers(&self, conn: &Connection) -> Vec<User> {
         use schema::follows;
         let follows = Follow::belonging_to(self).select(follows::follower_id);
-        users::table.filter(users::id.eq(any(follows))).load::<User>(conn).unwrap()
+        users::table.filter(users::id.eq_any(follows)).load::<User>(conn).unwrap()
     }
 
     pub fn get_followers_page(&self, conn: &Connection, (min, max): (i32, i32)) -> Vec<User> {
         use schema::follows;
         let follows = Follow::belonging_to(self).select(follows::follower_id);
-        users::table.filter(users::id.eq(any(follows)))
+        users::table.filter(users::id.eq_any(follows))
             .offset(min.into())
             .limit((max - min).into())
             .load::<User>(conn).unwrap()
     }
 
     pub fn get_following(&self, conn: &Connection) -> Vec<User> {
-        use schema::follows;
-        let follows = follows::table.filter(follows::follower_id.eq(self.id)).select(follows::following_id);
-        users::table.filter(users::id.eq(any(follows))).load::<User>(conn).unwrap()
+        use schema::follows::dsl::*;
+        let f = follows.filter(follower_id.eq(self.id)).select(following_id);
+        users::table.filter(users::id.eq_any(f)).load::<User>(conn).unwrap()
     }
 
     pub fn is_followed_by(&self, conn: &Connection, other_id: i32) -> bool {

View File

@@ -1,10 +1,10 @@
 use activitypub::{activity::{Announce, Create, Delete, Like, Undo, Update}, object::Tombstone};
-use diesel::PgConnection;
 use failure::Error;
 use serde_json;
 
 use plume_common::activity_pub::{Id, inbox::{Deletable, FromActivity, InboxError}};
 use plume_models::{
+    Connection,
     comments::Comment,
     follows::Follow,
     instance::Instance,
@@ -15,7 +15,7 @@ use plume_models::{
 };
 
 pub trait Inbox {
-    fn received(&self, conn: &PgConnection, act: serde_json::Value) -> Result<(), Error> {
+    fn received(&self, conn: &Connection, act: serde_json::Value) -> Result<(), Error> {
         let actor_id = Id::new(act["actor"].as_str().unwrap_or_else(|| act["actor"]["id"].as_str().expect("No actor ID for incoming activity")));
         match act["type"].as_str() {
             Some(t) => {

View File

@@ -1,5 +1,4 @@
 use atom_syndication::{ContentBuilder, Entry, EntryBuilder, LinkBuilder, Person, PersonBuilder};
-use diesel::PgConnection;
 use rocket::{
     http::uri::{FromUriParam, UriDisplay},
     response::NamedFile
@@ -9,7 +8,7 @@ use std::{
     path::{Path, PathBuf}
 };
 
-use plume_models::posts::Post;
+use plume_models::{Connection, posts::Post};
 
 macro_rules! may_fail {
     ($account:expr, $expr:expr, $template:expr, $msg:expr, | $res:ident | $block:block) => {
@@ -79,7 +78,7 @@ impl Page {
     }
 }
 
-pub fn post_to_atom(post: Post, conn: &PgConnection) -> Entry {
+pub fn post_to_atom(post: Post, conn: &Connection) -> Entry {
     EntryBuilder::default()
         .title(post.title.clone())
         .content(ContentBuilder::default()

View File

@@ -1,5 +1,5 @@
 use colored::Colorize;
-use diesel::{pg::PgConnection, r2d2::{ConnectionManager, Pool}};
+use diesel::r2d2::{ConnectionManager, Pool};
 use dotenv::dotenv;
 use std::fs::{self, File};
 use std::io;
@@ -9,21 +9,22 @@ use rpassword;
 use plume_models::safe_string::SafeString;
 use plume_models::{
+    Connection,
     DB_URL,
-    db_conn::{DbConn, PgPool},
+    db_conn::{DbConn, DbPool},
     instance::*,
     users::*
 };
 
 /// Initializes a database pool.
-fn init_pool() -> Option<PgPool> {
+fn init_pool() -> Option<DbPool> {
     dotenv().ok();
 
-    let manager = ConnectionManager::<PgConnection>::new(DB_URL.as_str());
+    let manager = ConnectionManager::<Connection>::new(DB_URL.as_str());
     Pool::new(manager).ok()
 }
 
-pub fn check() -> PgPool {
+pub fn check() -> DbPool {
     if let Some(pool) = init_pool() {
         match pool.get() {
             Ok(conn) => {