diff --git a/.gitignore b/.gitignore
index 46d7c43e..b066582b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,3 +9,6 @@ po/*.po~
 Rocket.toml
 media
 docker-compose.yml
+*.db
+*.sqlite
+*.sqlite3
diff --git a/.travis.yml b/.travis.yml
index 6a110755..dc57a1e8 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,4 +1,10 @@
 language: rust
+env:
+  matrix:
+    - MIGRATION_DIR=migrations/postgres FEATURES=postgres DATABASE_URL=postgres://postgres@localhost/plume
+    - MIGRATION_DIR=migrations/sqlite FEATURES=sqlite DATABASE_URL=plume.sqlite3
 rust:
   - nightly
 cache: cargo
+script:
+  - cargo build --no-default-features --features="${FEATURES}"
diff --git a/Cargo.lock b/Cargo.lock
index 98b07809..87d89e02 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -475,6 +475,7 @@ dependencies = [
  "byteorder 1.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "diesel_derives 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libsqlite3-sys 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "pq-sys 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "r2d2 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -1001,6 +1002,15 @@ dependencies = [
  "crc 1.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "libsqlite3-sys"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "vcpkg 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "locale_config"
 version = "0.2.2"
@@ -2882,6 +2892,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum lazycell 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d33a48d0365c96081958cc663eef834975cb1e8d8bea3378513fc72bdbf11e50"
 "checksum libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)" = "76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d"
 "checksum libflate 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "7d4b4c7aff5bac19b956f693d0ea0eade8066deb092186ae954fa6ba14daab98"
+"checksum libsqlite3-sys 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d3711dfd91a1081d2458ad2d06ea30a8755256e74038be2ad927d94e1c955ca8"
 "checksum locale_config 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "14fbee0e39bc2dd6a2427c4fdea66e9826cc1fd09b0a0b7550359f5f6efe1dab"
 "checksum lock_api 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "949826a5ccf18c1b3a7c3d57692778d21768b79e46eb9dd07bfc4c2160036c54"
 "checksum log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b"
diff --git a/Cargo.toml b/Cargo.toml
index da661d84..58c6d61a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -29,7 +29,7 @@ features = ["serde"]
 version = "0.4"
 
 [dependencies.diesel]
-features = ["postgres", "r2d2", "chrono"]
+features = ["postgres", "sqlite", "r2d2", "chrono"]
 version = "*"
 
 [dependencies.plume-api]
@@ -62,5 +62,10 @@ rev = "2805ce5dbae4a6441208484426440885a5640a6a"
 git = "https://github.com/BaptisteGelez/rocket_i18n"
 rev = "75a3bfd7b847324c078a355a7f101f8241a9f59b"
 
+[features]
+default = ["postgres"]
+postgres = ["plume-models/postgres"]
+sqlite = ["plume-models/sqlite"]
+
 [workspace]
 members = ["plume-api", "plume-models", "plume-common"]
diff --git a/diesel.toml b/diesel.toml
new file mode 100644
index 00000000..57dcc64e
--- /dev/null
+++ b/diesel.toml
@@ -0,0 +1,5 @@
+# For documentation on how to configure this file,
+# see diesel.rs/guides/configuring-diesel-cli
+
+[print_schema]
+file = "plume-models/src/schema.rs"
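Taken together, the manifest changes above mean the backend is picked once, at compile time: the root crate's `postgres`/`sqlite` features only forward to `plume-models`, which forwards to the matching `diesel` backend feature (CI exercises both via `cargo build --no-default-features --features="${FEATURES}"`). A minimal sketch of the selection this enables — the alias itself appears verbatim in the `plume-models/src/lib.rs` hunk further down:

```rust
// Exactly one of the two features may be active, e.g.
// `cargo build --no-default-features --features sqlite`.
#[cfg(all(feature = "sqlite", not(feature = "postgres")))]
pub type Connection = diesel::SqliteConnection;

#[cfg(all(not(feature = "sqlite"), feature = "postgres"))]
pub type Connection = diesel::PgConnection;
```

With both or neither feature enabled, no `Connection` type is defined at all, so a misconfigured build fails early, at type-check time.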
diff --git a/migrations/00000000000000_diesel_initial_setup/down.sql b/migrations/postgres/00000000000000_diesel_initial_setup/down.sql
similarity index 100%
rename from migrations/00000000000000_diesel_initial_setup/down.sql
rename to migrations/postgres/00000000000000_diesel_initial_setup/down.sql
diff --git a/migrations/00000000000000_diesel_initial_setup/up.sql b/migrations/postgres/00000000000000_diesel_initial_setup/up.sql
similarity index 100%
rename from migrations/00000000000000_diesel_initial_setup/up.sql
rename to migrations/postgres/00000000000000_diesel_initial_setup/up.sql
diff --git a/migrations/2018-04-22-093322_create_instances/down.sql b/migrations/postgres/2018-04-22-093322_create_instances/down.sql
similarity index 100%
rename from migrations/2018-04-22-093322_create_instances/down.sql
rename to migrations/postgres/2018-04-22-093322_create_instances/down.sql
diff --git a/migrations/2018-04-22-093322_create_instances/up.sql b/migrations/postgres/2018-04-22-093322_create_instances/up.sql
similarity index 100%
rename from migrations/2018-04-22-093322_create_instances/up.sql
rename to migrations/postgres/2018-04-22-093322_create_instances/up.sql
diff --git a/migrations/2018-04-22-151330_create_user/down.sql b/migrations/postgres/2018-04-22-151330_create_user/down.sql
similarity index 100%
rename from migrations/2018-04-22-151330_create_user/down.sql
rename to migrations/postgres/2018-04-22-151330_create_user/down.sql
diff --git a/migrations/2018-04-22-151330_create_user/up.sql b/migrations/postgres/2018-04-22-151330_create_user/up.sql
similarity index 100%
rename from migrations/2018-04-22-151330_create_user/up.sql
rename to migrations/postgres/2018-04-22-151330_create_user/up.sql
diff --git a/migrations/2018-04-23-101717_create_blogs/down.sql b/migrations/postgres/2018-04-23-101717_create_blogs/down.sql
similarity index 100%
rename from migrations/2018-04-23-101717_create_blogs/down.sql
rename to migrations/postgres/2018-04-23-101717_create_blogs/down.sql
diff --git a/migrations/2018-04-23-101717_create_blogs/up.sql b/migrations/postgres/2018-04-23-101717_create_blogs/up.sql
similarity index 100%
rename from migrations/2018-04-23-101717_create_blogs/up.sql
rename to migrations/postgres/2018-04-23-101717_create_blogs/up.sql
diff --git a/migrations/2018-04-23-111655_create_blog_authors/down.sql b/migrations/postgres/2018-04-23-111655_create_blog_authors/down.sql
similarity index 100%
rename from migrations/2018-04-23-111655_create_blog_authors/down.sql
rename to migrations/postgres/2018-04-23-111655_create_blog_authors/down.sql
diff --git a/migrations/2018-04-23-111655_create_blog_authors/up.sql b/migrations/postgres/2018-04-23-111655_create_blog_authors/up.sql
similarity index 100%
rename from migrations/2018-04-23-111655_create_blog_authors/up.sql
rename to migrations/postgres/2018-04-23-111655_create_blog_authors/up.sql
diff --git a/migrations/2018-04-23-132822_create_posts/down.sql b/migrations/postgres/2018-04-23-132822_create_posts/down.sql
similarity index 100%
rename from migrations/2018-04-23-132822_create_posts/down.sql
rename to migrations/postgres/2018-04-23-132822_create_posts/down.sql
diff --git a/migrations/2018-04-23-132822_create_posts/up.sql b/migrations/postgres/2018-04-23-132822_create_posts/up.sql
similarity index 100%
rename from migrations/2018-04-23-132822_create_posts/up.sql
rename to migrations/postgres/2018-04-23-132822_create_posts/up.sql
diff --git a/migrations/2018-04-23-142746_create_post_authors/down.sql b/migrations/postgres/2018-04-23-142746_create_post_authors/down.sql
similarity index 100%
rename from migrations/2018-04-23-142746_create_post_authors/down.sql
rename to migrations/postgres/2018-04-23-142746_create_post_authors/down.sql
diff --git a/migrations/2018-04-23-142746_create_post_authors/up.sql b/migrations/postgres/2018-04-23-142746_create_post_authors/up.sql
similarity index 100%
rename from migrations/2018-04-23-142746_create_post_authors/up.sql
rename to migrations/postgres/2018-04-23-142746_create_post_authors/up.sql
diff --git a/migrations/2018-04-30-170445_timestamps/down.sql b/migrations/postgres/2018-04-30-170445_timestamps/down.sql
similarity index 100%
rename from migrations/2018-04-30-170445_timestamps/down.sql
rename to migrations/postgres/2018-04-30-170445_timestamps/down.sql
diff --git a/migrations/2018-04-30-170445_timestamps/up.sql b/migrations/postgres/2018-04-30-170445_timestamps/up.sql
similarity index 100%
rename from migrations/2018-04-30-170445_timestamps/up.sql
rename to migrations/postgres/2018-04-30-170445_timestamps/up.sql
diff --git a/migrations/2018-05-01-124607_create_follow/down.sql b/migrations/postgres/2018-05-01-124607_create_follow/down.sql
similarity index 100%
rename from migrations/2018-05-01-124607_create_follow/down.sql
rename to migrations/postgres/2018-05-01-124607_create_follow/down.sql
diff --git a/migrations/2018-05-01-124607_create_follow/up.sql b/migrations/postgres/2018-05-01-124607_create_follow/up.sql
similarity index 100%
rename from migrations/2018-05-01-124607_create_follow/up.sql
rename to migrations/postgres/2018-05-01-124607_create_follow/up.sql
diff --git a/migrations/2018-05-01-165325_add_ap_url/down.sql b/migrations/postgres/2018-05-01-165325_add_ap_url/down.sql
similarity index 100%
rename from migrations/2018-05-01-165325_add_ap_url/down.sql
rename to migrations/postgres/2018-05-01-165325_add_ap_url/down.sql
diff --git a/migrations/2018-05-01-165325_add_ap_url/up.sql b/migrations/postgres/2018-05-01-165325_add_ap_url/up.sql
similarity index 100%
rename from migrations/2018-05-01-165325_add_ap_url/up.sql
rename to migrations/postgres/2018-05-01-165325_add_ap_url/up.sql
diff --git a/migrations/2018-05-02-113930_drop_instance_local_domain/down.sql b/migrations/postgres/2018-05-02-113930_drop_instance_local_domain/down.sql
similarity index 100%
rename from migrations/2018-05-02-113930_drop_instance_local_domain/down.sql
rename to migrations/postgres/2018-05-02-113930_drop_instance_local_domain/down.sql
diff --git a/migrations/2018-05-02-113930_drop_instance_local_domain/up.sql b/migrations/postgres/2018-05-02-113930_drop_instance_local_domain/up.sql
similarity index 100%
rename from migrations/2018-05-02-113930_drop_instance_local_domain/up.sql
rename to migrations/postgres/2018-05-02-113930_drop_instance_local_domain/up.sql
diff --git a/migrations/2018-05-03-163427_user_add_keys/down.sql b/migrations/postgres/2018-05-03-163427_user_add_keys/down.sql
similarity index 100%
rename from migrations/2018-05-03-163427_user_add_keys/down.sql
rename to migrations/postgres/2018-05-03-163427_user_add_keys/down.sql
diff --git a/migrations/2018-05-03-163427_user_add_keys/up.sql b/migrations/postgres/2018-05-03-163427_user_add_keys/up.sql
similarity index 100%
rename from migrations/2018-05-03-163427_user_add_keys/up.sql
rename to migrations/postgres/2018-05-03-163427_user_add_keys/up.sql
diff --git a/migrations/2018-05-03-182555_blogs_add_keys/down.sql b/migrations/postgres/2018-05-03-182555_blogs_add_keys/down.sql
similarity index 100%
rename from migrations/2018-05-03-182555_blogs_add_keys/down.sql
rename to migrations/postgres/2018-05-03-182555_blogs_add_keys/down.sql
diff --git a/migrations/2018-05-03-182555_blogs_add_keys/up.sql b/migrations/postgres/2018-05-03-182555_blogs_add_keys/up.sql
similarity index 100%
rename from migrations/2018-05-03-182555_blogs_add_keys/up.sql
rename to migrations/postgres/2018-05-03-182555_blogs_add_keys/up.sql
diff --git a/migrations/2018-05-09-192013_create_comments/down.sql b/migrations/postgres/2018-05-09-192013_create_comments/down.sql
similarity index 100%
rename from migrations/2018-05-09-192013_create_comments/down.sql
rename to migrations/postgres/2018-05-09-192013_create_comments/down.sql
diff --git a/migrations/2018-05-09-192013_create_comments/up.sql b/migrations/postgres/2018-05-09-192013_create_comments/up.sql
similarity index 100%
rename from migrations/2018-05-09-192013_create_comments/up.sql
rename to migrations/postgres/2018-05-09-192013_create_comments/up.sql
diff --git a/migrations/2018-05-10-101553_posts_add_ap_url/down.sql b/migrations/postgres/2018-05-10-101553_posts_add_ap_url/down.sql
similarity index 100%
rename from migrations/2018-05-10-101553_posts_add_ap_url/down.sql
rename to migrations/postgres/2018-05-10-101553_posts_add_ap_url/down.sql
diff --git a/migrations/2018-05-10-101553_posts_add_ap_url/up.sql b/migrations/postgres/2018-05-10-101553_posts_add_ap_url/up.sql
similarity index 100%
rename from migrations/2018-05-10-101553_posts_add_ap_url/up.sql
rename to migrations/postgres/2018-05-10-101553_posts_add_ap_url/up.sql
diff --git a/migrations/2018-05-10-154336_create_likes/down.sql b/migrations/postgres/2018-05-10-154336_create_likes/down.sql
similarity index 100%
rename from migrations/2018-05-10-154336_create_likes/down.sql
rename to migrations/postgres/2018-05-10-154336_create_likes/down.sql
diff --git a/migrations/2018-05-10-154336_create_likes/up.sql b/migrations/postgres/2018-05-10-154336_create_likes/up.sql
similarity index 100%
rename from migrations/2018-05-10-154336_create_likes/up.sql
rename to migrations/postgres/2018-05-10-154336_create_likes/up.sql
diff --git a/migrations/2018-05-12-213456_likes_add_ap_url/down.sql b/migrations/postgres/2018-05-12-213456_likes_add_ap_url/down.sql
similarity index 100%
rename from migrations/2018-05-12-213456_likes_add_ap_url/down.sql
rename to migrations/postgres/2018-05-12-213456_likes_add_ap_url/down.sql
diff --git a/migrations/2018-05-12-213456_likes_add_ap_url/up.sql b/migrations/postgres/2018-05-12-213456_likes_add_ap_url/up.sql
similarity index 100%
rename from migrations/2018-05-12-213456_likes_add_ap_url/up.sql
rename to migrations/postgres/2018-05-12-213456_likes_add_ap_url/up.sql
diff --git a/migrations/2018-05-13-122311_create_notifications/down.sql b/migrations/postgres/2018-05-13-122311_create_notifications/down.sql
similarity index 100%
rename from migrations/2018-05-13-122311_create_notifications/down.sql
rename to migrations/postgres/2018-05-13-122311_create_notifications/down.sql
diff --git a/migrations/2018-05-13-122311_create_notifications/up.sql b/migrations/postgres/2018-05-13-122311_create_notifications/up.sql
similarity index 100%
rename from migrations/2018-05-13-122311_create_notifications/up.sql
rename to migrations/postgres/2018-05-13-122311_create_notifications/up.sql
diff --git a/migrations/2018-05-13-175144_users_add_shared_inbox/down.sql b/migrations/postgres/2018-05-13-175144_users_add_shared_inbox/down.sql
similarity index 100%
rename from migrations/2018-05-13-175144_users_add_shared_inbox/down.sql
rename to migrations/postgres/2018-05-13-175144_users_add_shared_inbox/down.sql
diff --git a/migrations/2018-05-13-175144_users_add_shared_inbox/up.sql b/migrations/postgres/2018-05-13-175144_users_add_shared_inbox/up.sql
similarity index 100%
rename from migrations/2018-05-13-175144_users_add_shared_inbox/up.sql
rename to migrations/postgres/2018-05-13-175144_users_add_shared_inbox/up.sql
diff --git a/migrations/2018-05-19-091428_create_reshares/down.sql b/migrations/postgres/2018-05-19-091428_create_reshares/down.sql
similarity index 100%
rename from migrations/2018-05-19-091428_create_reshares/down.sql
rename to migrations/postgres/2018-05-19-091428_create_reshares/down.sql
diff --git a/migrations/2018-05-19-091428_create_reshares/up.sql b/migrations/postgres/2018-05-19-091428_create_reshares/up.sql
similarity index 100%
rename from migrations/2018-05-19-091428_create_reshares/up.sql
rename to migrations/postgres/2018-05-19-091428_create_reshares/up.sql
diff --git a/migrations/2018-05-24-100613_add_notifications_creation_date/down.sql b/migrations/postgres/2018-05-24-100613_add_notifications_creation_date/down.sql
similarity index 100%
rename from migrations/2018-05-24-100613_add_notifications_creation_date/down.sql
rename to migrations/postgres/2018-05-24-100613_add_notifications_creation_date/down.sql
diff --git a/migrations/2018-05-24-100613_add_notifications_creation_date/up.sql b/migrations/postgres/2018-05-24-100613_add_notifications_creation_date/up.sql
similarity index 100%
rename from migrations/2018-05-24-100613_add_notifications_creation_date/up.sql
rename to migrations/postgres/2018-05-24-100613_add_notifications_creation_date/up.sql
diff --git a/migrations/2018-06-17-200302_notification_add_data/down.sql b/migrations/postgres/2018-06-17-200302_notification_add_data/down.sql
similarity index 100%
rename from migrations/2018-06-17-200302_notification_add_data/down.sql
rename to migrations/postgres/2018-06-17-200302_notification_add_data/down.sql
diff --git a/migrations/2018-06-17-200302_notification_add_data/up.sql b/migrations/postgres/2018-06-17-200302_notification_add_data/up.sql
similarity index 100%
rename from migrations/2018-06-17-200302_notification_add_data/up.sql
rename to migrations/postgres/2018-06-17-200302_notification_add_data/up.sql
diff --git a/migrations/2018-06-20-175532_create_mentions/down.sql b/migrations/postgres/2018-06-20-175532_create_mentions/down.sql
similarity index 100%
rename from migrations/2018-06-20-175532_create_mentions/down.sql
rename to migrations/postgres/2018-06-20-175532_create_mentions/down.sql
diff --git a/migrations/2018-06-20-175532_create_mentions/up.sql b/migrations/postgres/2018-06-20-175532_create_mentions/up.sql
similarity index 100%
rename from migrations/2018-06-20-175532_create_mentions/up.sql
rename to migrations/postgres/2018-06-20-175532_create_mentions/up.sql
diff --git a/migrations/2018-06-20-194538_add_mentions_ap_url/down.sql b/migrations/postgres/2018-06-20-194538_add_mentions_ap_url/down.sql
similarity index 100%
rename from migrations/2018-06-20-194538_add_mentions_ap_url/down.sql
rename to migrations/postgres/2018-06-20-194538_add_mentions_ap_url/down.sql
diff --git a/migrations/2018-06-20-194538_add_mentions_ap_url/up.sql b/migrations/postgres/2018-06-20-194538_add_mentions_ap_url/up.sql
similarity index 100%
rename from migrations/2018-06-20-194538_add_mentions_ap_url/up.sql
rename to migrations/postgres/2018-06-20-194538_add_mentions_ap_url/up.sql
diff --git a/migrations/2018-07-25-165754_refactor_notifications/down.sql b/migrations/postgres/2018-07-25-165754_refactor_notifications/down.sql
similarity index 100%
rename from migrations/2018-07-25-165754_refactor_notifications/down.sql
rename to migrations/postgres/2018-07-25-165754_refactor_notifications/down.sql
diff --git a/migrations/2018-07-25-165754_refactor_notifications/up.sql b/migrations/postgres/2018-07-25-165754_refactor_notifications/up.sql
similarity index 100%
rename from migrations/2018-07-25-165754_refactor_notifications/up.sql
rename to migrations/postgres/2018-07-25-165754_refactor_notifications/up.sql
diff --git a/migrations/2018-07-27-102221_user_add_followers_endpoint/down.sql b/migrations/postgres/2018-07-27-102221_user_add_followers_endpoint/down.sql
similarity index 100%
rename from migrations/2018-07-27-102221_user_add_followers_endpoint/down.sql
rename to migrations/postgres/2018-07-27-102221_user_add_followers_endpoint/down.sql
diff --git a/migrations/2018-07-27-102221_user_add_followers_endpoint/up.sql b/migrations/postgres/2018-07-27-102221_user_add_followers_endpoint/up.sql
similarity index 100%
rename from migrations/2018-07-27-102221_user_add_followers_endpoint/up.sql
rename to migrations/postgres/2018-07-27-102221_user_add_followers_endpoint/up.sql
diff --git a/migrations/2018-07-27-125558_instance_customization/down.sql b/migrations/postgres/2018-07-27-125558_instance_customization/down.sql
similarity index 100%
rename from migrations/2018-07-27-125558_instance_customization/down.sql
rename to migrations/postgres/2018-07-27-125558_instance_customization/down.sql
diff --git a/migrations/2018-07-27-125558_instance_customization/up.sql b/migrations/postgres/2018-07-27-125558_instance_customization/up.sql
similarity index 100%
rename from migrations/2018-07-27-125558_instance_customization/up.sql
rename to migrations/postgres/2018-07-27-125558_instance_customization/up.sql
diff --git a/migrations/2018-07-27-194816_instance_description_html/down.sql b/migrations/postgres/2018-07-27-194816_instance_description_html/down.sql
similarity index 100%
rename from migrations/2018-07-27-194816_instance_description_html/down.sql
rename to migrations/postgres/2018-07-27-194816_instance_description_html/down.sql
diff --git a/migrations/2018-07-27-194816_instance_description_html/up.sql b/migrations/postgres/2018-07-27-194816_instance_description_html/up.sql
similarity index 100%
rename from migrations/2018-07-27-194816_instance_description_html/up.sql
rename to migrations/postgres/2018-07-27-194816_instance_description_html/up.sql
diff --git a/migrations/2018-09-02-111458_create_medias/down.sql b/migrations/postgres/2018-09-02-111458_create_medias/down.sql
similarity index 100%
rename from migrations/2018-09-02-111458_create_medias/down.sql
rename to migrations/postgres/2018-09-02-111458_create_medias/down.sql
diff --git a/migrations/2018-09-02-111458_create_medias/up.sql b/migrations/postgres/2018-09-02-111458_create_medias/up.sql
similarity index 100%
rename from migrations/2018-09-02-111458_create_medias/up.sql
rename to migrations/postgres/2018-09-02-111458_create_medias/up.sql
diff --git a/migrations/2018-09-02-123623_medias_owner_id/down.sql b/migrations/postgres/2018-09-02-123623_medias_owner_id/down.sql
similarity index 100%
rename from migrations/2018-09-02-123623_medias_owner_id/down.sql
rename to migrations/postgres/2018-09-02-123623_medias_owner_id/down.sql
diff --git a/migrations/2018-09-02-123623_medias_owner_id/up.sql b/migrations/postgres/2018-09-02-123623_medias_owner_id/up.sql
similarity index 100%
rename from migrations/2018-09-02-123623_medias_owner_id/up.sql
rename to migrations/postgres/2018-09-02-123623_medias_owner_id/up.sql
diff --git a/migrations/2018-09-03-102510_users_add_avatar/down.sql b/migrations/postgres/2018-09-03-102510_users_add_avatar/down.sql
similarity index 100%
rename from migrations/2018-09-03-102510_users_add_avatar/down.sql
rename to migrations/postgres/2018-09-03-102510_users_add_avatar/down.sql
diff --git a/migrations/2018-09-03-102510_users_add_avatar/up.sql b/migrations/postgres/2018-09-03-102510_users_add_avatar/up.sql
similarity index 100%
rename from migrations/2018-09-03-102510_users_add_avatar/up.sql
rename to migrations/postgres/2018-09-03-102510_users_add_avatar/up.sql
diff --git a/migrations/2018-09-03-170848_user_add_last_fetched_date/down.sql b/migrations/postgres/2018-09-03-170848_user_add_last_fetched_date/down.sql
similarity index 100%
rename from migrations/2018-09-03-170848_user_add_last_fetched_date/down.sql
rename to migrations/postgres/2018-09-03-170848_user_add_last_fetched_date/down.sql
diff --git a/migrations/2018-09-03-170848_user_add_last_fetched_date/up.sql b/migrations/postgres/2018-09-03-170848_user_add_last_fetched_date/up.sql
similarity index 100%
rename from migrations/2018-09-03-170848_user_add_last_fetched_date/up.sql
rename to migrations/postgres/2018-09-03-170848_user_add_last_fetched_date/up.sql
diff --git a/migrations/2018-09-04-103017_follows_add_ap_url/down.sql b/migrations/postgres/2018-09-04-103017_follows_add_ap_url/down.sql
similarity index 100%
rename from migrations/2018-09-04-103017_follows_add_ap_url/down.sql
rename to migrations/postgres/2018-09-04-103017_follows_add_ap_url/down.sql
diff --git a/migrations/2018-09-04-103017_follows_add_ap_url/up.sql b/migrations/postgres/2018-09-04-103017_follows_add_ap_url/up.sql
similarity index 100%
rename from migrations/2018-09-04-103017_follows_add_ap_url/up.sql
rename to migrations/postgres/2018-09-04-103017_follows_add_ap_url/up.sql
diff --git a/migrations/2018-09-04-104828_posts_add_subtitle/down.sql b/migrations/postgres/2018-09-04-104828_posts_add_subtitle/down.sql
similarity index 100%
rename from migrations/2018-09-04-104828_posts_add_subtitle/down.sql
rename to migrations/postgres/2018-09-04-104828_posts_add_subtitle/down.sql
diff --git a/migrations/2018-09-04-104828_posts_add_subtitle/up.sql b/migrations/postgres/2018-09-04-104828_posts_add_subtitle/up.sql
similarity index 100%
rename from migrations/2018-09-04-104828_posts_add_subtitle/up.sql
rename to migrations/postgres/2018-09-04-104828_posts_add_subtitle/up.sql
diff --git a/migrations/2018-09-05-174106_create_tags/down.sql b/migrations/postgres/2018-09-05-174106_create_tags/down.sql
similarity index 100%
rename from migrations/2018-09-05-174106_create_tags/down.sql
rename to migrations/postgres/2018-09-05-174106_create_tags/down.sql
diff --git a/migrations/2018-09-05-174106_create_tags/up.sql b/migrations/postgres/2018-09-05-174106_create_tags/up.sql
similarity index 100%
rename from migrations/2018-09-05-174106_create_tags/up.sql
rename to migrations/postgres/2018-09-05-174106_create_tags/up.sql
diff --git a/migrations/2018-09-06-182637_posts_add_source/down.sql b/migrations/postgres/2018-09-06-182637_posts_add_source/down.sql
similarity index 100%
rename from migrations/2018-09-06-182637_posts_add_source/down.sql
rename to migrations/postgres/2018-09-06-182637_posts_add_source/down.sql
diff --git a/migrations/2018-09-06-182637_posts_add_source/up.sql b/migrations/postgres/2018-09-06-182637_posts_add_source/up.sql
similarity index 100%
rename from migrations/2018-09-06-182637_posts_add_source/up.sql
rename to migrations/postgres/2018-09-06-182637_posts_add_source/up.sql
diff --git a/migrations/2018-09-07-212003_fix_avatar_deletion/down.sql b/migrations/postgres/2018-09-07-212003_fix_avatar_deletion/down.sql
similarity index 100%
rename from migrations/2018-09-07-212003_fix_avatar_deletion/down.sql
rename to migrations/postgres/2018-09-07-212003_fix_avatar_deletion/down.sql
diff --git a/migrations/2018-09-07-212003_fix_avatar_deletion/up.sql b/migrations/postgres/2018-09-07-212003_fix_avatar_deletion/up.sql
similarity index 100%
rename from migrations/2018-09-07-212003_fix_avatar_deletion/up.sql
rename to migrations/postgres/2018-09-07-212003_fix_avatar_deletion/up.sql
diff --git a/migrations/sqlite/2018-04-22-093322_create_instances/down.sql b/migrations/sqlite/2018-04-22-093322_create_instances/down.sql
new file mode 100644
index 00000000..1ec93bf2
--- /dev/null
+++ b/migrations/sqlite/2018-04-22-093322_create_instances/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE instances;
diff --git a/migrations/sqlite/2018-04-22-093322_create_instances/up.sql b/migrations/sqlite/2018-04-22-093322_create_instances/up.sql
new file mode 100644
index 00000000..37f1ef67
--- /dev/null
+++ b/migrations/sqlite/2018-04-22-093322_create_instances/up.sql
@@ -0,0 +1,15 @@
+-- Your SQL goes here
+CREATE TABLE instances (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    public_domain VARCHAR NOT NULL,
+    name VARCHAR NOT NULL,
+    local BOOLEAN NOT NULL DEFAULT 'f',
+    blocked BOOLEAN NOT NULL DEFAULT 'f',
+    creation_date DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    open_registrations BOOLEAN NOT NULL DEFAULT 't',
+    short_description TEXT NOT NULL DEFAULT '',
+    long_description TEXT NOT NULL DEFAULT '',
+    default_license TEXT NOT NULL DEFAULT 'CC-0',
+    long_description_html VARCHAR NOT NULL DEFAULT '',
+    short_description_html VARCHAR NOT NULL DEFAULT ''
+)
diff --git a/migrations/sqlite/2018-04-22-151330_create_user/down.sql b/migrations/sqlite/2018-04-22-151330_create_user/down.sql
new file mode 100644
index 00000000..dc3714bd
--- /dev/null
+++ b/migrations/sqlite/2018-04-22-151330_create_user/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE users;
diff --git a/migrations/sqlite/2018-04-22-151330_create_user/up.sql b/migrations/sqlite/2018-04-22-151330_create_user/up.sql
new file mode 100644
index 00000000..3da556b5
--- /dev/null
+++ b/migrations/sqlite/2018-04-22-151330_create_user/up.sql
@@ -0,0 +1,23 @@
+-- Your SQL goes here
+PRAGMA foreign_keys = ON;
+CREATE TABLE users (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    username VARCHAR NOT NULL,
+    display_name VARCHAR NOT NULL DEFAULT '',
+    outbox_url VARCHAR NOT NULL,
+    inbox_url VARCHAR NOT NULL,
+    is_admin BOOLEAN NOT NULL DEFAULT 'f',
+    summary TEXT NOT NULL DEFAULT '',
+    email TEXT,
+    hashed_password TEXT,
+    instance_id INTEGER REFERENCES instances(id) ON DELETE CASCADE NOT NULL,
+    creation_date DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    ap_url TEXT NOT NULL default '',
+    private_key TEXT,
+    public_key TEXT NOT NULL DEFAULT '',
+    shared_inbox_url VARCHAR,
+    followers_endpoint VARCHAR NOT NULL DEFAULT '',
+    avatar_id INTEGER REFERENCES medias(id) ON DELETE CASCADE,
+    last_fetched_date TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    FOREIGN KEY (avatar_id) REFERENCES medias(id) ON DELETE SET NULL
+);
diff --git a/migrations/sqlite/2018-04-23-101717_create_blogs/down.sql b/migrations/sqlite/2018-04-23-101717_create_blogs/down.sql
new file mode 100644
index 00000000..4f8b0a68
--- /dev/null
+++ b/migrations/sqlite/2018-04-23-101717_create_blogs/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE blogs;
diff --git a/migrations/sqlite/2018-04-23-101717_create_blogs/up.sql b/migrations/sqlite/2018-04-23-101717_create_blogs/up.sql
new file mode 100644
index 00000000..30635a5c
--- /dev/null
+++ b/migrations/sqlite/2018-04-23-101717_create_blogs/up.sql
@@ -0,0 +1,14 @@
+-- Your SQL goes here
+CREATE TABLE blogs (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    actor_id VARCHAR NOT NULL,
+    title VARCHAR NOT NULL,
+    summary TEXT NOT NULL DEFAULT '',
+    outbox_url VARCHAR NOT NULL,
+    inbox_url VARCHAR NOT NULL,
+    instance_id INTEGER REFERENCES instances(id) ON DELETE CASCADE NOT NULL,
+    creation_date DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    ap_url text not null default '',
+    private_key TEXT,
+    public_key TEXT NOT NULL DEFAULT ''
+)
diff --git a/migrations/sqlite/2018-04-23-111655_create_blog_authors/down.sql b/migrations/sqlite/2018-04-23-111655_create_blog_authors/down.sql
new file mode 100644
index 00000000..cfb62abd
--- /dev/null
+++ b/migrations/sqlite/2018-04-23-111655_create_blog_authors/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE blog_authors;
diff --git a/migrations/sqlite/2018-04-23-111655_create_blog_authors/up.sql b/migrations/sqlite/2018-04-23-111655_create_blog_authors/up.sql
new file mode 100644
index 00000000..10144614
--- /dev/null
+++ b/migrations/sqlite/2018-04-23-111655_create_blog_authors/up.sql
@@ -0,0 +1,7 @@
+-- Your SQL goes here
+CREATE TABLE blog_authors (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    blog_id INTEGER REFERENCES blogs(id) ON DELETE CASCADE NOT NULL,
+    author_id INTEGER REFERENCES users(id) ON DELETE CASCADE NOT NULL,
+    is_owner BOOLEAN NOT NULL DEFAULT 'f'
+)
diff --git a/migrations/sqlite/2018-04-23-132822_create_posts/down.sql b/migrations/sqlite/2018-04-23-132822_create_posts/down.sql
new file mode 100644
index 00000000..56ed16e5
--- /dev/null
+++ b/migrations/sqlite/2018-04-23-132822_create_posts/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE posts;
diff --git a/migrations/sqlite/2018-04-23-132822_create_posts/up.sql b/migrations/sqlite/2018-04-23-132822_create_posts/up.sql
new file mode 100644
index 00000000..d88337f5
--- /dev/null
+++ b/migrations/sqlite/2018-04-23-132822_create_posts/up.sql
@@ -0,0 +1,14 @@
+-- Your SQL goes here
+CREATE TABLE posts (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    blog_id INTEGER REFERENCES blogs(id) ON DELETE CASCADE NOT NULL,
+    slug VARCHAR NOT NULL,
+    title VARCHAR NOT NULL,
+    content TEXT NOT NULL DEFAULT '',
+    published BOOLEAN NOT NULL DEFAULT 'f',
+    license VARCHAR NOT NULL DEFAULT 'CC-0',
+    creation_date DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    ap_url VARCHAR NOT NULL DEFAULT '',
+    subtitle TEXT NOT NULL DEFAULT '',
+    source TEXT NOT NULL DEFAULT ''
+)
diff --git a/migrations/sqlite/2018-04-23-142746_create_post_authors/down.sql b/migrations/sqlite/2018-04-23-142746_create_post_authors/down.sql
new file mode 100644
index 00000000..129bf59a
--- /dev/null
+++ b/migrations/sqlite/2018-04-23-142746_create_post_authors/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE post_authors;
diff --git a/migrations/sqlite/2018-04-23-142746_create_post_authors/up.sql b/migrations/sqlite/2018-04-23-142746_create_post_authors/up.sql
new file mode 100644
index 00000000..214a6f3f
--- /dev/null
+++ b/migrations/sqlite/2018-04-23-142746_create_post_authors/up.sql
@@ -0,0 +1,6 @@
+-- Your SQL goes here
+CREATE TABLE post_authors (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    post_id INTEGER REFERENCES posts(id) ON DELETE CASCADE NOT NULL,
+    author_id INTEGER REFERENCES users(id) ON DELETE CASCADE NOT NULL
+)
diff --git a/migrations/sqlite/2018-05-01-124607_create_follow/down.sql b/migrations/sqlite/2018-05-01-124607_create_follow/down.sql
new file mode 100644
index 00000000..eee3b972
--- /dev/null
+++ b/migrations/sqlite/2018-05-01-124607_create_follow/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE follows;
diff --git a/migrations/sqlite/2018-05-01-124607_create_follow/up.sql b/migrations/sqlite/2018-05-01-124607_create_follow/up.sql
new file mode 100644
index 00000000..7eeda5aa
--- /dev/null
+++ b/migrations/sqlite/2018-05-01-124607_create_follow/up.sql
@@ -0,0 +1,7 @@
+-- Your SQL goes here
+CREATE TABLE follows (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    follower_id INTEGER REFERENCES users(id) ON DELETE CASCADE NOT NULL,
+    following_id INTEGER REFERENCES users(id) ON DELETE CASCADE NOT NULL,
+    ap_url TEXT NOT NULL default ''
+)
diff --git a/migrations/sqlite/2018-05-09-192013_create_comments/down.sql b/migrations/sqlite/2018-05-09-192013_create_comments/down.sql
new file mode 100644
index 00000000..d0841ffb
--- /dev/null
+++ b/migrations/sqlite/2018-05-09-192013_create_comments/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE comments;
diff --git a/migrations/sqlite/2018-05-09-192013_create_comments/up.sql b/migrations/sqlite/2018-05-09-192013_create_comments/up.sql
new file mode 100644
index 00000000..901c0699
--- /dev/null
+++ b/migrations/sqlite/2018-05-09-192013_create_comments/up.sql
@@ -0,0 +1,12 @@
+-- Your SQL goes here
+CREATE TABLE comments (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    content TEXT NOT NULL DEFAULT '',
+    in_response_to_id INTEGER REFERENCES comments(id),
+    post_id INTEGER REFERENCES posts(id) ON DELETE CASCADE NOT NULL,
+    author_id INTEGER REFERENCES users(id) ON DELETE CASCADE NOT NULL,
+    creation_date DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    ap_url VARCHAR,
+    sensitive BOOLEAN NOT NULL DEFAULT 'f',
+    spoiler_text TEXT NOT NULL DEFAULT ''
+)
diff --git a/migrations/sqlite/2018-05-10-154336_create_likes/down.sql b/migrations/sqlite/2018-05-10-154336_create_likes/down.sql
new file mode 100644
index 00000000..2232ad5b
--- /dev/null
+++ b/migrations/sqlite/2018-05-10-154336_create_likes/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE likes;
diff --git a/migrations/sqlite/2018-05-10-154336_create_likes/up.sql b/migrations/sqlite/2018-05-10-154336_create_likes/up.sql
new file mode 100644
index 00000000..b406a7b6
--- /dev/null
+++ b/migrations/sqlite/2018-05-10-154336_create_likes/up.sql
@@ -0,0 +1,8 @@
+-- Your SQL goes here
+CREATE TABLE likes (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    user_id INTEGER REFERENCES users(id) ON DELETE CASCADE NOT NULL,
+    post_id INTEGER REFERENCES posts(id) ON DELETE CASCADE NOT NULL,
+    creation_date DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    ap_url VARCHAR NOT NULL default ''
+)
diff --git a/migrations/sqlite/2018-05-13-122311_create_notifications/down.sql b/migrations/sqlite/2018-05-13-122311_create_notifications/down.sql
new file mode 100644
index 00000000..bcebcc05
--- /dev/null
+++ b/migrations/sqlite/2018-05-13-122311_create_notifications/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE notifications;
diff --git a/migrations/sqlite/2018-05-13-122311_create_notifications/up.sql b/migrations/sqlite/2018-05-13-122311_create_notifications/up.sql
new file mode 100644
index 00000000..ceb45ee8
--- /dev/null
+++ b/migrations/sqlite/2018-05-13-122311_create_notifications/up.sql
@@ -0,0 +1,8 @@
+-- Your SQL goes here
+CREATE TABLE notifications (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    user_id INTEGER REFERENCES users(id) ON DELETE CASCADE NOT NULL,
+    creation_date DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    kind VARCHAR NOT NULL DEFAULT 'unknown',
+    object_id INTEGER NOT NULL DEFAULT 0
+)
diff --git a/migrations/sqlite/2018-05-19-091428_create_reshares/down.sql b/migrations/sqlite/2018-05-19-091428_create_reshares/down.sql
new file mode 100644
index 00000000..29a2d0fb
--- /dev/null
+++ b/migrations/sqlite/2018-05-19-091428_create_reshares/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE reshares;
diff --git a/migrations/sqlite/2018-05-19-091428_create_reshares/up.sql b/migrations/sqlite/2018-05-19-091428_create_reshares/up.sql
new file mode 100644
index 00000000..cee70f74
--- /dev/null
+++ b/migrations/sqlite/2018-05-19-091428_create_reshares/up.sql
@@ -0,0 +1,8 @@
+-- Your SQL goes here
+CREATE TABLE reshares (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    user_id INTEGER REFERENCES users(id) ON DELETE CASCADE NOT NULL,
+    post_id INTEGER REFERENCES posts(id) ON DELETE CASCADE NOT NULL,
+    ap_url VARCHAR NOT NULL DEFAULT '',
+    creation_date DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
+)
diff --git a/migrations/sqlite/2018-06-20-175532_create_mentions/down.sql b/migrations/sqlite/2018-06-20-175532_create_mentions/down.sql
new file mode 100644
index 00000000..e860c9ad
--- /dev/null
+++ b/migrations/sqlite/2018-06-20-175532_create_mentions/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE mentions;
diff --git a/migrations/sqlite/2018-06-20-175532_create_mentions/up.sql b/migrations/sqlite/2018-06-20-175532_create_mentions/up.sql
new file mode 100644
index 00000000..3f28aa9c
--- /dev/null
+++ b/migrations/sqlite/2018-06-20-175532_create_mentions/up.sql
@@ -0,0 +1,8 @@
+-- Your SQL goes here
+CREATE TABLE mentions (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    mentioned_id INTEGER REFERENCES users(id) ON DELETE CASCADE NOT NULL,
+    post_id INTEGER REFERENCES posts(id) ON DELETE CASCADE,
+    comment_id INTEGER REFERENCES comments(id) ON DELETE CASCADE,
+    ap_url VARCHAR NOT NULL DEFAULT ''
+)
diff --git a/migrations/sqlite/2018-09-02-111458_create_medias/down.sql b/migrations/sqlite/2018-09-02-111458_create_medias/down.sql
new file mode 100644
index 00000000..3ba01786
--- /dev/null
+++ b/migrations/sqlite/2018-09-02-111458_create_medias/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE medias;
diff --git a/migrations/sqlite/2018-09-02-111458_create_medias/up.sql b/migrations/sqlite/2018-09-02-111458_create_medias/up.sql
new file mode 100644
index 00000000..e2ac093c
--- /dev/null
+++ b/migrations/sqlite/2018-09-02-111458_create_medias/up.sql
@@ -0,0 +1,11 @@
+-- Your SQL goes here
+CREATE TABLE medias (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    file_path TEXT NOT NULL DEFAULT '',
+    alt_text TEXT NOT NULL DEFAULT '',
+    is_remote BOOLEAN NOT NULL DEFAULT 'f',
+    remote_url TEXT,
+    sensitive BOOLEAN NOT NULL DEFAULT 'f',
+    content_warning TEXT,
+    owner_id INTEGER REFERENCES users(id) ON DELETE CASCADE NOT NULL
+)
diff --git a/migrations/sqlite/2018-09-05-174106_create_tags/down.sql b/migrations/sqlite/2018-09-05-174106_create_tags/down.sql
new file mode 100644
index 00000000..43c79a4b
--- /dev/null
+++ b/migrations/sqlite/2018-09-05-174106_create_tags/down.sql
@@ -0,0 +1,2 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE tags;
diff --git a/migrations/sqlite/2018-09-05-174106_create_tags/up.sql b/migrations/sqlite/2018-09-05-174106_create_tags/up.sql
new file mode 100644
index 00000000..031b4ed2
--- /dev/null
+++ b/migrations/sqlite/2018-09-05-174106_create_tags/up.sql
@@ -0,0 +1,7 @@
+-- Your SQL goes here
+CREATE TABLE tags (
+    id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+    tag TEXT NOT NULL DEFAULT '',
+    is_hastag BOOLEAN NOT NULL DEFAULT 'f',
+    post_id INTEGER REFERENCES posts(id) ON DELETE CASCADE NOT NULL
+)
diff --git a/plume-models/Cargo.toml b/plume-models/Cargo.toml
index 8f634cbb..fbbe332d 100644
--- a/plume-models/Cargo.toml
+++ b/plume-models/Cargo.toml
@@ -23,7 +23,7 @@ features = ["serde"]
 version = "0.4"
 
 [dependencies.diesel]
-features = ["postgres", "r2d2", "chrono"]
+features = ["r2d2", "chrono"]
 version = "1.3.2"
 
 [dependencies.plume-api]
@@ -35,3 +35,7 @@ path = "../plume-common"
 [dependencies.rocket]
 git = "https://github.com/SergioBenitez/Rocket"
 rev = "55459db7732b9a240826a5c120c650f87e3372ce"
+
+[features]
+postgres = ["diesel/postgres"]
+sqlite = ["diesel/sqlite"]
diff --git a/plume-models/src/blog_authors.rs b/plume-models/src/blog_authors.rs
index 106832a8..65e04969 100644
--- a/plume-models/src/blog_authors.rs
+++ b/plume-models/src/blog_authors.rs
@@ -1,8 +1,8 @@
-use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods, PgConnection};
+use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods};
 
 use schema::blog_authors;
 
-#[derive(Queryable, Identifiable)]
+#[derive(Clone, Queryable, Identifiable)]
 pub struct BlogAuthor {
     pub id: i32,
     pub blog_id: i32,
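The `blogs.rs` diff below contains the first of two query rewrites that recur throughout the model layer: `diesel::dsl::any` renders the Postgres-only `users.id = ANY (SELECT ...)` operator, whereas the backend-agnostic `eq_any` renders a plain `users.id IN (SELECT ...)` that SQLite accepts too. A minimal sketch of the pattern, assuming Plume's schema modules and `User` model are in scope:

```rust
use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl};
use plume_models::{schema::{blog_authors, users}, users::User, Connection};

// Load every author of a blog; `author_ids` stays a subquery, it is never
// materialised on the Rust side.
fn authors_of(conn: &Connection, blog_id: i32) -> Vec<User> {
    let author_ids = blog_authors::table
        .filter(blog_authors::blog_id.eq(blog_id))
        .select(blog_authors::author_id);

    // Before: users::id.eq(any(author_ids))  -> `= ANY (...)`, Postgres only.
    // After:  users::id.eq_any(author_ids)   -> `IN (...)`, both backends.
    users::table
        .filter(users::id.eq_any(author_ids))
        .load::<User>(conn)
        .expect("Couldn't load authors of a blog")
}
```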
diff --git a/plume-models/src/blogs.rs b/plume-models/src/blogs.rs
index c825f6aa..741cc7ef 100644
--- a/plume-models/src/blogs.rs
+++ b/plume-models/src/blogs.rs
@@ -1,4 +1,5 @@
 use activitypub::{Actor, Object, CustomObject, actor::Group, collection::OrderedCollection};
+use chrono::NaiveDateTime;
 use reqwest::{
     Client,
     header::{Accept, qitem},
@@ -6,8 +7,7 @@ use reqwest::{
 };
 use serde_json;
 use url::Url;
-use chrono::NaiveDateTime;
-use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods, PgConnection, dsl::any};
+use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods};
 use openssl::{
     hash::MessageDigest,
     pkey::{PKey, Private},
@@ -16,7 +16,7 @@ use openssl::{
 };
 use webfinger::*;
 
-use {BASE_URL, USE_HTTPS};
+use {BASE_URL, USE_HTTPS, Connection};
 use plume_common::activity_pub::{
     ap_accept_header, ApSignature, ActivityStream, Id, IntoId, PublicKey,
     inbox::WithInbox,
@@ -66,32 +66,32 @@ impl Blog {
     find_by!(blogs, find_by_ap_url, ap_url as String);
     find_by!(blogs, find_by_name, actor_id as String, instance_id as i32);
 
-    pub fn get_instance(&self, conn: &PgConnection) -> Instance {
+    pub fn get_instance(&self, conn: &Connection) -> Instance {
         Instance::get(conn, self.instance_id).expect("Couldn't find instance")
     }
 
-    pub fn list_authors(&self, conn: &PgConnection) -> Vec<User> {
+    pub fn list_authors(&self, conn: &Connection) -> Vec<User> {
         use schema::blog_authors;
         use schema::users;
         let authors_ids = blog_authors::table.filter(blog_authors::blog_id.eq(self.id)).select(blog_authors::author_id);
-        users::table.filter(users::id.eq(any(authors_ids)))
+        users::table.filter(users::id.eq_any(authors_ids))
             .load::<User>(conn)
             .expect("Couldn't load authors of a blog")
     }
 
-    pub fn find_for_author(conn: &PgConnection, author_id: i32) -> Vec<Blog> {
+    pub fn find_for_author(conn: &Connection, author_id: i32) -> Vec<Blog> {
         use schema::blog_authors;
         let author_ids = blog_authors::table.filter(blog_authors::author_id.eq(author_id)).select(blog_authors::blog_id);
-        blogs::table.filter(blogs::id.eq(any(author_ids)))
+        blogs::table.filter(blogs::id.eq_any(author_ids))
            .load::<Blog>(conn)
            .expect("Couldn't load blogs ")
     }
 
-    pub fn find_local(conn: &PgConnection, name: String) -> Option<Blog> {
+    pub fn find_local(conn: &Connection, name: String) -> Option<Blog> {
         Blog::find_by_name(conn, name, Instance::local_id(conn))
     }
 
-    pub fn find_by_fqn(conn: &PgConnection, fqn: String) -> Option<Blog> {
+    pub fn find_by_fqn(conn: &Connection, fqn: String) -> Option<Blog> {
         if fqn.contains("@") { // remote blog
             match Instance::find_by_domain(conn, String::from(fqn.split("@").last().unwrap())) {
                 Some(instance) => {
@@ -107,7 +107,7 @@
         }
     }
 
-    fn fetch_from_webfinger(conn: &PgConnection, acct: String) -> Option<Blog> {
+    fn fetch_from_webfinger(conn: &Connection, acct: String) -> Option<Blog> {
         match resolve(acct.clone(), *USE_HTTPS) {
             Ok(wf) => wf.links.into_iter().find(|l| l.mime_type == Some(String::from("application/activity+json"))).and_then(|l| Blog::fetch_from_url(conn, l.href.expect("No href for AP WF link"))),
             Err(details) => {
@@ -117,7 +117,7 @@
         }
     }
 
-    fn fetch_from_url(conn: &PgConnection, url: String) -> Option<Blog> {
+    fn fetch_from_url(conn: &Connection, url: String) -> Option<Blog> {
         let req = Client::new()
             .get(&url[..])
             .header(Accept(ap_accept_header().into_iter().map(|h| qitem(h.parse::<Mime>().expect("Invalid Content-Type"))).collect()))
@@ -134,7 +134,7 @@
         }
     }
 
-    fn from_activity(conn: &PgConnection, acct: CustomGroup, inst: String) -> Blog {
+    fn from_activity(conn: &Connection, acct: CustomGroup, inst: String) -> Blog {
         let instance = match Instance::find_by_domain(conn, inst.clone()) {
             Some(instance) => instance,
             None => {
@@ -166,7 +166,7 @@
         })
     }
 
-    pub fn into_activity(&self, _conn: &PgConnection) -> CustomGroup {
+    pub fn into_activity(&self, _conn: &Connection) -> CustomGroup {
         let mut blog = Group::default();
         blog.ap_actor_props.set_preferred_username_string(self.actor_id.clone()).expect("Blog::into_activity: preferredUsername error");
         blog.object_props.set_name_string(self.title.clone()).expect("Blog::into_activity: name error");
@@ -185,35 +185,35 @@
         CustomGroup::new(blog, ap_signature)
     }
 
-    pub fn update_boxes(&self, conn: &PgConnection) {
+    pub fn update_boxes(&self, conn: &Connection) {
         let instance = self.get_instance(conn);
         if self.outbox_url.len() == 0 {
             diesel::update(self)
                 .set(blogs::outbox_url.eq(instance.compute_box(BLOG_PREFIX, self.actor_id.clone(), "outbox")))
-                .get_result::<Blog>(conn).expect("Couldn't update outbox URL");
+                .execute(conn).expect("Couldn't update outbox URL");
         }
 
         if self.inbox_url.len() == 0 {
             diesel::update(self)
                 .set(blogs::inbox_url.eq(instance.compute_box(BLOG_PREFIX, self.actor_id.clone(), "inbox")))
-                .get_result::<Blog>(conn).expect("Couldn't update inbox URL");
+                .execute(conn).expect("Couldn't update inbox URL");
         }
 
         if self.ap_url.len() == 0 {
             diesel::update(self)
                 .set(blogs::ap_url.eq(instance.compute_box(BLOG_PREFIX, self.actor_id.clone(), "")))
-                .get_result::<Blog>(conn).expect("Couldn't update AP URL");
+                .execute(conn).expect("Couldn't update AP URL");
         }
     }
 
-    pub fn outbox(&self, conn: &PgConnection) -> ActivityStream<OrderedCollection> {
+    pub fn outbox(&self, conn: &Connection) -> ActivityStream<OrderedCollection> {
         let mut coll = OrderedCollection::default();
         coll.collection_props.items = serde_json::to_value(self.get_activities(conn)).unwrap();
         coll.collection_props.set_total_items_u64(self.get_activities(conn).len() as u64).unwrap();
         ActivityStream::new(coll)
     }
 
-    fn get_activities(&self, _conn: &PgConnection) -> Vec<serde_json::Value> {
+    fn get_activities(&self, _conn: &Connection) -> Vec<serde_json::Value> {
         vec![]
     }
 
@@ -221,7 +221,7 @@
         PKey::from_rsa(Rsa::private_key_from_pem(self.private_key.clone().unwrap().as_ref()).unwrap()).unwrap()
     }
 
-    pub fn webfinger(&self, conn: &PgConnection) -> Webfinger {
+    pub fn webfinger(&self, conn: &Connection) -> Webfinger {
         Webfinger {
             subject: format!("acct:{}@{}", self.actor_id, self.get_instance(conn).public_domain),
             aliases: vec![self.ap_url.clone()],
@@ -248,7 +248,7 @@
         }
     }
 
-    pub fn from_url(conn: &PgConnection, url: String) -> Option<Blog> {
+    pub fn from_url(conn: &Connection, url: String) -> Option<Blog> {
         Blog::find_by_ap_url(conn, url.clone()).or_else(|| {
             // The requested user was not in the DB
             // We try to fetch it if it is remote
@@ -260,7 +260,7 @@
        })
     }
 
-    pub fn get_fqn(&self, conn: &PgConnection) -> String {
+    pub fn get_fqn(&self, conn: &Connection) -> String {
         if self.instance_id == Instance::local_id(conn) {
             self.actor_id.clone()
         } else {
@@ -268,7 +268,7 @@
         }
     }
 
-    pub fn to_json(&self, conn: &PgConnection) -> serde_json::Value {
+    pub fn to_json(&self, conn: &Connection) -> serde_json::Value {
         let mut json = serde_json::to_value(self).unwrap();
         json["fqn"] = json!(self.get_fqn(conn));
         json
diff --git a/plume-models/src/comments.rs b/plume-models/src/comments.rs
index 040193e0..b7e2b756 100644
--- a/plume-models/src/comments.rs
+++ b/plume-models/src/comments.rs
@@ -3,8 +3,8 @@ use activitypub::{
     link,
     object::{Note}
 };
-use chrono;
-use diesel::{self, PgConnection, RunQueryDsl, QueryDsl, ExpressionMethods, dsl::any};
+use chrono::{self, NaiveDateTime};
+use diesel::{self, RunQueryDsl, QueryDsl, ExpressionMethods};
 use serde_json;
 
 use plume_common::activity_pub::{
@@ -12,6 +12,7 @@ use plume_common::activity_pub::{
     inbox::{FromActivity, Notify}
 };
 use plume_common::utils;
+use Connection;
 use instance::Instance;
 use mentions::Mention;
 use notifications::*;
@@ -27,7 +28,7 @@ pub struct Comment {
     pub in_response_to_id: Option<i32>,
     pub post_id: i32,
     pub author_id: i32,
-    pub creation_date: chrono::NaiveDateTime,
+    pub creation_date: NaiveDateTime,
     pub ap_url: Option<String>,
     pub sensitive: bool,
     pub spoiler_text: String
@@ -51,24 +52,24 @@ impl Comment {
     list_by!(comments, list_by_post, post_id as i32);
     find_by!(comments, find_by_ap_url, ap_url as String);
 
-    pub fn get_author(&self, conn: &PgConnection) -> User {
+    pub fn get_author(&self, conn: &Connection) -> User {
         User::get(conn, self.author_id).unwrap()
     }
 
-    pub fn get_post(&self, conn: &PgConnection) -> Post {
+    pub fn get_post(&self, conn: &Connection) -> Post {
         Post::get(conn, self.post_id).unwrap()
     }
 
-    pub fn count_local(conn: &PgConnection) -> usize {
+    pub fn count_local(conn: &Connection) -> usize {
         use schema::users;
         let local_authors = users::table.filter(users::instance_id.eq(Instance::local_id(conn))).select(users::id);
-        comments::table.filter(comments::author_id.eq(any(local_authors)))
+        comments::table.filter(comments::author_id.eq_any(local_authors))
            .load::<Comment>(conn)
            .expect("Couldn't load local comments")
            .len()
     }
 
-    pub fn to_json(&self, conn: &PgConnection, others: &Vec<Comment>) -> serde_json::Value {
+    pub fn to_json(&self, conn: &Connection, others: &Vec<Comment>) -> serde_json::Value {
         let mut json = serde_json::to_value(self).unwrap();
         json["author"] = self.get_author(conn).to_json(conn);
         let mentions = Mention::list_for_comment(conn, self.id).into_iter()
@@ -82,22 +83,23 @@ impl Comment {
         json
     }
 
-    pub fn update_ap_url(&self, conn: &PgConnection) -> Comment {
+    pub fn update_ap_url(&self, conn: &Connection) -> Comment {
         if self.ap_url.is_none() {
             diesel::update(self)
                 .set(comments::ap_url.eq(self.compute_id(conn)))
-                .get_result(conn)
-                .expect("Failed to update comment AP URL")
+                .execute(conn)
+                .expect("Failed to update comment AP URL");
+            Comment::get(conn, self.id).expect("Couldn't get the updated comment")
         } else {
             self.clone()
         }
     }
 
-    pub fn compute_id(&self, conn: &PgConnection) -> String {
+    pub fn compute_id(&self, conn: &Connection) -> String {
         format!("{}comment/{}", self.get_post(conn).ap_url, self.id)
     }
 
-    pub fn into_activity(&self, conn: &PgConnection) -> Note {
+    pub fn into_activity(&self, conn: &Connection) -> Note {
         let (html, mentions) = utils::md_to_html(self.content.get().as_ref());
 
         let author = User::get(conn, self.author_id).unwrap();
@@ -119,7 +121,7 @@ impl Comment {
         note
     }
 
-    pub fn create_activity(&self, conn: &PgConnection) -> Create {
+    pub fn create_activity(&self, conn: &Connection) -> Create {
         let author = User::get(conn, self.author_id).unwrap();
 
         let note = self.into_activity(conn);
@@ -133,8 +135,8 @@ impl Comment {
     }
 }
 
-impl FromActivity<Note, PgConnection> for Comment {
-    fn from_activity(conn: &PgConnection, note: Note, actor: Id) -> Comment {
+impl FromActivity<Note, Connection> for Comment {
+    fn from_activity(conn: &Connection, note: Note, actor: Id) -> Comment {
         let previous_url = note.object_props.in_reply_to.clone().unwrap().as_str().unwrap().to_string();
 
         let previous_comment = Comment::find_by_ap_url(conn, previous_url.clone());
@@ -167,8 +169,8 @@ impl FromActivity<Note, PgConnection> for Comment {
     }
 }
 
-impl Notify<PgConnection> for Comment {
-    fn notify(&self, conn: &PgConnection) {
+impl Notify<Connection> for Comment {
+    fn notify(&self, conn: &Connection) {
         for author in self.get_post(conn).get_authors(conn) {
             Notification::insert(conn, NewNotification {
                 kind: notification_kind::COMMENT.to_string(),
diff --git a/plume-models/src/db_conn.rs b/plume-models/src/db_conn.rs
index 797496b2..eef62825 100644
--- a/plume-models/src/db_conn.rs
+++ b/plume-models/src/db_conn.rs
@@ -1,16 +1,17 @@
 use diesel::{
-    pg::PgConnection,
     r2d2::{ConnectionManager, Pool, PooledConnection}
 };
 use rocket::{Request, State, Outcome, http::Status, request::{self, FromRequest}};
 use std::ops::Deref;
 
-pub type PgPool = Pool<ConnectionManager<PgConnection>>;
+use Connection;
+
+pub type DbPool = Pool<ConnectionManager<Connection>>;
 
 // From rocket documentation
 // Connection request guard type: a wrapper around an r2d2 pooled connection.
-pub struct DbConn(pub PooledConnection<ConnectionManager<PgConnection>>);
+pub struct DbConn(pub PooledConnection<ConnectionManager<Connection>>);
 
 /// Attempts to retrieve a single connection from the managed database pool. If
 /// no pool is currently managed, fails with an `InternalServerError` status. If
@@ -19,7 +20,7 @@ impl<'a, 'r> FromRequest<'a, 'r> for DbConn {
     type Error = ();
 
     fn from_request(request: &'a Request<'r>) -> request::Outcome<DbConn, ()> {
-        let pool = request.guard::<State<PgPool>>()?;
+        let pool = request.guard::<State<DbPool>>()?;
         match pool.get() {
             Ok(conn) => Outcome::Success(DbConn(conn)),
             Err(_) => Outcome::Failure((Status::ServiceUnavailable, ()))
@@ -27,9 +28,9 @@ impl<'a, 'r> FromRequest<'a, 'r> for DbConn {
     }
 }
 
-// For the convenience of using an &DbConn as an &PgConnection.
+// For the convenience of using an &DbConn as an &Connection.
 impl Deref for DbConn {
-    type Target = PgConnection;
+    type Target = Connection;
 
     fn deref(&self) -> &Self::Target {
         &self.0
diff --git a/plume-models/src/follows.rs b/plume-models/src/follows.rs
index 9c29aacc..81606c02 100644
--- a/plume-models/src/follows.rs
+++ b/plume-models/src/follows.rs
@@ -1,13 +1,14 @@
 use activitypub::{Actor, activity::{Accept, Follow as FollowAct, Undo}, actor::Person};
-use diesel::{self, PgConnection, ExpressionMethods, QueryDsl, RunQueryDsl};
+use diesel::{self, ExpressionMethods, QueryDsl, RunQueryDsl};
 use plume_common::activity_pub::{broadcast, Id, IntoId, inbox::{FromActivity, Notify, WithInbox, Deletable}, sign::Signer};
 
+use Connection;
 use blogs::Blog;
 use notifications::*;
 use users::User;
 use schema::follows;
 
-#[derive(Queryable, Identifiable, Associations)]
+#[derive(Clone, Queryable, Identifiable, Associations)]
 #[belongs_to(User, foreign_key = "following_id")]
 pub struct Follow {
     pub id: i32,
@@ -29,14 +30,14 @@ impl Follow {
     get!(follows);
     find_by!(follows, find_by_ap_url, ap_url as String);
 
-    pub fn find(conn: &PgConnection, from: i32, to: i32) -> Option<Follow> {
+    pub fn find(conn: &Connection, from: i32, to: i32) -> Option<Follow> {
         follows::table.filter(follows::follower_id.eq(from))
             .filter(follows::following_id.eq(to))
             .get_result(conn)
             .ok()
     }
 
-    pub fn into_activity(&self, conn: &PgConnection) -> FollowAct {
+    pub fn into_activity(&self, conn: &Connection) -> FollowAct {
         let user = User::get(conn, self.follower_id).unwrap();
         let target = User::get(conn, self.following_id).unwrap();
 
@@ -52,7 +53,7 @@ impl Follow {
     /// from -> The one sending the follow request
     /// target -> The target of the request, responding with Accept
     pub fn accept_follow<A: Signer + IntoId + Clone, B: Clone + WithInbox + Actor>(
-        conn: &PgConnection,
+        conn: &Connection,
         from: &B,
         target: &A,
         follow: FollowAct,
@@ -79,8 +80,8 @@ impl Follow {
     }
 }
 
-impl FromActivity<FollowAct, PgConnection> for Follow {
-    fn from_activity(conn: &PgConnection, follow: FollowAct, _actor: Id) -> Follow {
+impl FromActivity<FollowAct, Connection> for Follow {
+    fn from_activity(conn: &Connection, follow: FollowAct, _actor: Id) -> Follow {
         let from_id = follow.follow_props.actor_link::<Id>().map(|l| l.into())
             .unwrap_or_else(|_| follow.follow_props.actor_object::<Person>().expect("No actor object (nor ID) on Follow").object_props.id_string().expect("No ID on actor on Follow"));
         let from = User::from_url(conn, from_id).unwrap();
@@ -94,8 +95,8 @@ impl FromActivity<FollowAct, PgConnection> for Follow {
     }
 }
 
-impl Notify<PgConnection> for Follow {
-    fn notify(&self, conn: &PgConnection) {
+impl Notify<Connection> for Follow {
+    fn notify(&self, conn: &Connection) {
         Notification::insert(conn, NewNotification {
             kind: notification_kind::FOLLOW.to_string(),
             object_id: self.id,
@@ -104,8 +105,8 @@ impl Notify<PgConnection> for Follow {
     }
 }
 
-impl Deletable<PgConnection, Undo> for Follow {
-    fn delete(&self, conn: &PgConnection) -> Undo {
+impl Deletable<Connection, Undo> for Follow {
+    fn delete(&self, conn: &Connection) -> Undo {
         diesel::delete(self).execute(conn).expect("Coudn't delete follow");
 
         // delete associated notification if any
@@ -120,7 +121,7 @@ impl Deletable<PgConnection, Undo> for Follow {
         undo
     }
 
-    fn delete_id(id: String, conn: &PgConnection) {
+    fn delete_id(id: String, conn: &Connection) {
         if let Some(follow) = Follow::find_by_ap_url(conn, id) {
             follow.delete(conn);
         }
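The `instance.rs` hunks that follow show the second recurring rewrite: `get_result` on an insert or update relies on `RETURNING`, which Diesel only offers on Postgres, so every write that wanted the row back is split into an `execute` followed by a fresh `SELECT`. A sketch of the portable shape, modelled on `toggle_block` from the hunk below (the re-fetch at the end is an illustration; the patch itself only re-reads where a caller needs the row):

```rust
use diesel::{self, ExpressionMethods, RunQueryDsl};
use plume_models::{instance::Instance, schema::instances, Connection};

fn toggle_block(inst: &Instance, conn: &Connection) -> Option<Instance> {
    // Old, Postgres-only code did both steps in one round trip:
    //   diesel::update(inst).set(...).get_result::<Instance>(conn)
    // i.e. `UPDATE ... RETURNING *`. SQLite has no such statement, so:
    diesel::update(inst)
        .set(instances::blocked.eq(!inst.blocked))
        .execute(conn)
        .expect("Couldn't block/unblock instance");
    Instance::get(conn, inst.id) // fetch the fresh copy only when wanted
}
```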
instances::short_description_html.eq(sd), instances::long_description_html.eq(ld) - )).get_result::(conn) - .expect("Couldn't update instance") + )).execute(conn) + .expect("Couldn't update instance"); } - pub fn count(conn: &PgConnection) -> i64 { + pub fn count(conn: &Connection) -> i64 { instances::table.count().get_result(conn).expect("Couldn't count instances") } } diff --git a/plume-models/src/lib.rs b/plume-models/src/lib.rs index 71c4f4e5..775c8edc 100644 --- a/plume-models/src/lib.rs +++ b/plume-models/src/lib.rs @@ -1,4 +1,5 @@ #![allow(proc_macro_derive_resolution_fallback)] // This can be removed after diesel-1.4 +#![feature(crate_in_paths)] extern crate activitypub; extern crate ammonia; @@ -25,10 +26,29 @@ extern crate webfinger; use std::env; +#[cfg(all(feature = "sqlite", not(feature = "postgres")))] +pub type Connection = diesel::SqliteConnection; + +#[cfg(all(not(feature = "sqlite"), feature = "postgres"))] +pub type Connection = diesel::PgConnection; + +/// Adds a function to a model, that returns the first +/// matching row for a given list of fields. +/// +/// Usage: +/// +/// ```rust +/// impl Model { +/// find_by!(model_table, name_of_the_function, field1 as String, field2 as i32); +/// } +/// +/// // Get the Model with field1 == "", and field2 == 0 +/// Model::name_of_the_function(connection, String::new(), 0); +/// ``` macro_rules! find_by { ($table:ident, $fn:ident, $($col:ident as $type:ident),+) => { /// Try to find a $table with a given $col - pub fn $fn(conn: &PgConnection, $($col: $type),+) -> Option { + pub fn $fn(conn: &crate::Connection, $($col: $type),+) -> Option { $table::table $(.filter($table::$col.eq($col)))+ .limit(1) @@ -39,10 +59,22 @@ macro_rules! find_by { }; } +/// List all rows of a model, with field-based filtering. +/// +/// Usage: +/// +/// ```rust +/// impl Model { +/// list_by!(model_table, name_of_the_function, field1 as String); +/// } +/// +/// // To get all Models with field1 == "" +/// Model::name_of_the_function(connection, String::new()); +/// ``` macro_rules! list_by { ($table:ident, $fn:ident, $($col:ident as $type:ident),+) => { /// Try to find a $table with a given $col - pub fn $fn(conn: &PgConnection, $($col: $type),+) -> Vec { + pub fn $fn(conn: &crate::Connection, $($col: $type),+) -> Vec { $table::table $(.filter($table::$col.eq($col)))+ .load::(conn) @@ -51,9 +83,21 @@ macro_rules! list_by { }; } +/// Adds a function to a model to retrieve a row by ID +/// +/// # Usage +/// +/// ```rust +/// impl Model { +/// get!(model_table); +/// } +/// +/// // Get the Model with ID 1 +/// Model::get(connection, 1); +/// ``` macro_rules! get { ($table:ident) => { - pub fn get(conn: &PgConnection, id: i32) -> Option { + pub fn get(conn: &crate::Connection, id: i32) -> Option { $table::table.filter($table::id.eq(id)) .limit(1) .load::(conn) @@ -63,24 +107,82 @@ macro_rules! get { }; } +/// Adds a function to a model to insert a new row +/// +/// # Usage +/// +/// ```rust +/// impl Model { +/// insert!(model_table, NewModelType); +/// } +/// +/// // Insert a new row +/// Model::insert(connection, NewModelType::new()); +/// ``` macro_rules! 
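Since the macros now reference `crate::Connection`, the crate needs the (nightly) `crate_in_paths` feature enabled above. For readers who have not seen these macros expanded: `find_by!(users, find_by_email, email as String)` would generate roughly the following (`find_by_email` and `email` are hypothetical stand-ins, and the expansion is approximated here):

```rust
impl User {
    pub fn find_by_email(conn: &crate::Connection, email: String) -> Option<User> {
        users::table
            .filter(users::email.eq(email))
            .limit(1)
            .load::<User>(conn)
            .expect("Error loading users by email")
            .into_iter()
            .nth(0)
    }
}
```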
insert { ($table:ident, $from:ident) => { - pub fn insert(conn: &PgConnection, new: $from) -> Self { + last!($table); + + pub fn insert(conn: &crate::Connection, new: $from) -> Self { diesel::insert_into($table::table) .values(new) - .get_result(conn) - .expect("Error saving new $table") + .execute(conn) + .expect("Error saving new $table"); + Self::last(conn) } }; } +/// Adds a function to a model to save changes to a model. +/// The model should derive diesel::AsChangeset. +/// +/// # Usage +/// +/// ```rust +/// impl Model { +/// update!(model_table); +/// } +/// +/// // Update and save changes +/// let m = Model::get(connection, 1); +/// m.foo = 42; +/// m.update(connection); +/// ``` macro_rules! update { ($table:ident) => { - pub fn update(&self, conn: &PgConnection) -> Self { + pub fn update(&self, conn: &crate::Connection) -> Self { diesel::update(self) .set(self) - .get_result(conn) - .expect(concat!("Error updating ", stringify!($table))) + .execute(conn) + .expect(concat!("Error updating ", stringify!($table))); + Self::get(conn, self.id) + .expect(concat!(stringify!($table), " we just updated doesn't exist anymore???")) + } + }; +} + +/// Returns the last row of a table. +/// +/// # Usage +/// +/// ```rust +/// impl Model { +/// last!(model_table); +/// } +/// +/// // Get the last Model +/// Model::last(connection) +/// ``` +macro_rules! last { + ($table:ident) => { + pub fn last(conn: &crate::Connection) -> Self { + $table::table.order_by($table::id.desc()) + .limit(1) + .load::(conn) + .expect(concat!("Error getting last ", stringify!($table))) + .iter().next() + .expect(concat!("No last ", stringify!($table))) + .clone() } }; } diff --git a/plume-models/src/likes.rs b/plume-models/src/likes.rs index 9bf1bec3..a7617b9e 100644 --- a/plume-models/src/likes.rs +++ b/plume-models/src/likes.rs @@ -1,6 +1,6 @@ use activitypub::activity; -use chrono; -use diesel::{self, PgConnection, QueryDsl, RunQueryDsl, ExpressionMethods}; +use chrono::NaiveDateTime; +use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods}; use plume_common::activity_pub::{ PUBLIC_VISIBILTY, @@ -8,17 +8,18 @@ use plume_common::activity_pub::{ IntoId, inbox::{FromActivity, Deletable, Notify} }; +use Connection; use notifications::*; use posts::Post; use users::User; use schema::likes; -#[derive(Queryable, Identifiable)] +#[derive(Clone, Queryable, Identifiable)] pub struct Like { pub id: i32, pub user_id: i32, pub post_id: i32, - pub creation_date: chrono::NaiveDateTime, + pub creation_date: NaiveDateTime, pub ap_url: String } @@ -36,7 +37,7 @@ impl Like { find_by!(likes, find_by_ap_url, ap_url as String); find_by!(likes, find_by_user_on_post, user_id as i32, post_id as i32); - pub fn update_ap_url(&self, conn: &PgConnection) { + pub fn update_ap_url(&self, conn: &Connection) { if self.ap_url.len() == 0 { diesel::update(self) .set(likes::ap_url.eq(format!( @@ -44,11 +45,11 @@ impl Like { User::get(conn, self.user_id).unwrap().ap_url, Post::get(conn, self.post_id).unwrap().ap_url ))) - .get_result::(conn).expect("Couldn't update AP URL"); + .execute(conn).expect("Couldn't update AP URL"); } } - pub fn into_activity(&self, conn: &PgConnection) -> activity::Like { + pub fn into_activity(&self, conn: &Connection) -> activity::Like { let mut act = activity::Like::default(); act.like_props.set_actor_link(User::get(conn, self.user_id).unwrap().into_id()).expect("Like::into_activity: actor error"); act.like_props.set_object_link(Post::get(conn, self.post_id).unwrap().into_id()).expect("Like::into_activity: object 
error"); @@ -60,8 +61,8 @@ impl Like { } } -impl FromActivity for Like { - fn from_activity(conn: &PgConnection, like: activity::Like, _actor: Id) -> Like { +impl FromActivity for Like { + fn from_activity(conn: &Connection, like: activity::Like, _actor: Id) -> Like { let liker = User::from_url(conn, like.like_props.actor.as_str().unwrap().to_string()); let post = Post::find_by_ap_url(conn, like.like_props.object.as_str().unwrap().to_string()); let res = Like::insert(conn, NewLike { @@ -74,8 +75,8 @@ impl FromActivity for Like { } } -impl Notify for Like { - fn notify(&self, conn: &PgConnection) { +impl Notify for Like { + fn notify(&self, conn: &Connection) { let post = Post::get(conn, self.post_id).unwrap(); for author in post.get_authors(conn) { Notification::insert(conn, NewNotification { @@ -87,8 +88,8 @@ impl Notify for Like { } } -impl Deletable for Like { - fn delete(&self, conn: &PgConnection) -> activity::Undo { +impl Deletable for Like { + fn delete(&self, conn: &Connection) -> activity::Undo { diesel::delete(self).execute(conn).unwrap(); // delete associated notification if any @@ -106,7 +107,7 @@ impl Deletable for Like { act } - fn delete_id(id: String, conn: &PgConnection) { + fn delete_id(id: String, conn: &Connection) { if let Some(like) = Like::find_by_ap_url(conn, id.into()) { like.delete(conn); } diff --git a/plume-models/src/medias.rs b/plume-models/src/medias.rs index c78edbd9..01639b18 100644 --- a/plume-models/src/medias.rs +++ b/plume-models/src/medias.rs @@ -1,12 +1,12 @@ -use diesel::{self, PgConnection, QueryDsl, ExpressionMethods, RunQueryDsl}; +use diesel::{self, QueryDsl, ExpressionMethods, RunQueryDsl}; use serde_json; use std::fs; -use ap_url; +use {ap_url, Connection}; use instance::Instance; use schema::medias; -#[derive(Identifiable, Queryable, Serialize)] +#[derive(Clone, Identifiable, Queryable, Serialize)] pub struct Media { pub id: i32, pub file_path: String, @@ -35,7 +35,7 @@ impl Media { get!(medias); list_by!(medias, for_user, owner_id as i32); - pub fn to_json(&self, conn: &PgConnection) -> serde_json::Value { + pub fn to_json(&self, conn: &Connection) -> serde_json::Value { let mut json = serde_json::to_value(self).unwrap(); let url = self.url(conn); let (preview, html, md) = match self.file_path.rsplitn(2, '.').next().unwrap() { @@ -63,7 +63,7 @@ impl Media { json } - pub fn url(&self, conn: &PgConnection) -> String { + pub fn url(&self, conn: &Connection) -> String { if self.is_remote { self.remote_url.clone().unwrap_or(String::new()) } else { @@ -71,12 +71,12 @@ impl Media { } } - pub fn delete(&self, conn: &PgConnection) { + pub fn delete(&self, conn: &Connection) { fs::remove_file(self.file_path.as_str()).expect("Couldn't delete media from disk"); diesel::delete(self).execute(conn).expect("Couldn't remove media from DB"); } - pub fn save_remote(conn: &PgConnection, url: String) -> Media { + pub fn save_remote(conn: &Connection, url: String) -> Media { Media::insert(conn, NewMedia { file_path: String::new(), alt_text: String::new(), @@ -88,7 +88,7 @@ impl Media { }) } - pub fn set_owner(&self, conn: &PgConnection, id: i32) { + pub fn set_owner(&self, conn: &Connection, id: i32) { diesel::update(self) .set(medias::owner_id.eq(id)) .execute(conn) diff --git a/plume-models/src/mentions.rs b/plume-models/src/mentions.rs index 237a49a7..b7328b9c 100644 --- a/plume-models/src/mentions.rs +++ b/plume-models/src/mentions.rs @@ -1,14 +1,15 @@ use activitypub::link; -use diesel::{self, PgConnection, QueryDsl, RunQueryDsl, ExpressionMethods}; +use 
diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods}; use plume_common::activity_pub::inbox::Notify; +use Connection; use comments::Comment; use notifications::*; use posts::Post; use users::User; use schema::mentions; -#[derive(Queryable, Identifiable, Serialize, Deserialize)] +#[derive(Clone, Queryable, Identifiable, Serialize, Deserialize)] pub struct Mention { pub id: i32, pub mentioned_id: i32, @@ -34,26 +35,26 @@ impl Mention { list_by!(mentions, list_for_post, post_id as i32); list_by!(mentions, list_for_comment, comment_id as i32); - pub fn get_mentioned(&self, conn: &PgConnection) -> Option { + pub fn get_mentioned(&self, conn: &Connection) -> Option { User::get(conn, self.mentioned_id) } - pub fn get_post(&self, conn: &PgConnection) -> Option { + pub fn get_post(&self, conn: &Connection) -> Option { self.post_id.and_then(|id| Post::get(conn, id)) } - pub fn get_comment(&self, conn: &PgConnection) -> Option { + pub fn get_comment(&self, conn: &Connection) -> Option { self.comment_id.and_then(|id| Comment::get(conn, id)) } - pub fn get_user(&self, conn: &PgConnection) -> Option { + pub fn get_user(&self, conn: &Connection) -> Option { match self.get_post(conn) { Some(p) => p.get_authors(conn).into_iter().next(), None => self.get_comment(conn).map(|c| c.get_author(conn)) } } - pub fn build_activity(conn: &PgConnection, ment: String) -> link::Mention { + pub fn build_activity(conn: &Connection, ment: String) -> link::Mention { let user = User::find_by_fqn(conn, ment.clone()); let mut mention = link::Mention::default(); mention.link_props.set_href_string(user.clone().map(|u| u.ap_url).unwrap_or(String::new())).expect("Error setting mention's href"); @@ -61,7 +62,7 @@ impl Mention { mention } - pub fn to_activity(&self, conn: &PgConnection) -> link::Mention { + pub fn to_activity(&self, conn: &Connection) -> link::Mention { let user = self.get_mentioned(conn); let mut mention = link::Mention::default(); mention.link_props.set_href_string(user.clone().map(|u| u.ap_url).unwrap_or(String::new())).expect("Error setting mention's href"); @@ -69,7 +70,7 @@ impl Mention { mention } - pub fn from_activity(conn: &PgConnection, ment: link::Mention, inside: i32, in_post: bool, notify: bool) -> Option { + pub fn from_activity(conn: &Connection, ment: link::Mention, inside: i32, in_post: bool, notify: bool) -> Option { let ap_url = ment.link_props.href_string().ok()?; let mentioned = User::find_by_ap_url(conn, ap_url)?; @@ -103,8 +104,8 @@ impl Mention { } } -impl Notify for Mention { - fn notify(&self, conn: &PgConnection) { +impl Notify for Mention { + fn notify(&self, conn: &Connection) { self.get_mentioned(conn).map(|m| { Notification::insert(conn, NewNotification { kind: notification_kind::MENTION.to_string(), diff --git a/plume-models/src/notifications.rs b/plume-models/src/notifications.rs index c30d16d2..268096ff 100644 --- a/plume-models/src/notifications.rs +++ b/plume-models/src/notifications.rs @@ -1,7 +1,8 @@ use chrono::NaiveDateTime; -use diesel::{self, PgConnection, RunQueryDsl, QueryDsl, ExpressionMethods}; +use diesel::{self, RunQueryDsl, QueryDsl, ExpressionMethods}; use serde_json; +use Connection; use comments::Comment; use follows::Follow; use likes::Like; @@ -19,7 +20,7 @@ pub mod notification_kind { pub const RESHARE: &'static str = "RESHARE"; } -#[derive(Queryable, Identifiable, Serialize)] +#[derive(Clone, Queryable, Identifiable, Serialize)] pub struct Notification { pub id: i32, pub user_id: i32, @@ -40,14 +41,14 @@ impl Notification { insert!(notifications, 
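`insert!` here is the macro reworked in lib.rs above: since it can no longer get the inserted row back from a RETURNING clause, it executes the INSERT and then fetches the newest row via the companion `last!`. This is also why every model in this patch gains `Clone` in its derive list: `last!` clones the row out of the loaded vector. Roughly, `insert!(notifications, NewNotification)` expands to:

```rust
impl Notification {
    pub fn insert(conn: &crate::Connection, new: NewNotification) -> Self {
        diesel::insert_into(notifications::table)
            .values(new)
            .execute(conn)
            .expect("Error saving new notifications");
        Self::last(conn)
    }

    pub fn last(conn: &crate::Connection) -> Self {
        notifications::table
            .order_by(notifications::id.desc())
            .limit(1)
            .load::<Self>(conn)
            .expect("Error getting last notifications")
            .iter().next()
            .expect("No last notifications")
            .clone()
    }
}
```

Note that insert-then-select-last is not atomic: under concurrent writes, `last` can hand back someone else's row. That is an inherent trade-off of the approach, not something this sketch introduces.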
NewNotification); get!(notifications); - pub fn find_for_user(conn: &PgConnection, user: &User) -> Vec { + pub fn find_for_user(conn: &Connection, user: &User) -> Vec { notifications::table.filter(notifications::user_id.eq(user.id)) .order_by(notifications::creation_date.desc()) .load::(conn) .expect("Couldn't load user notifications") } - pub fn page_for_user(conn: &PgConnection, user: &User, (min, max): (i32, i32)) -> Vec { + pub fn page_for_user(conn: &Connection, user: &User, (min, max): (i32, i32)) -> Vec { notifications::table.filter(notifications::user_id.eq(user.id)) .order_by(notifications::creation_date.desc()) .offset(min.into()) @@ -56,14 +57,14 @@ impl Notification { .expect("Couldn't load user notifications page") } - pub fn find>(conn: &PgConnection, kind: S, obj: i32) -> Option { + pub fn find>(conn: &Connection, kind: S, obj: i32) -> Option { notifications::table.filter(notifications::kind.eq(kind.into())) .filter(notifications::object_id.eq(obj)) .get_result::(conn) .ok() } - pub fn to_json(&self, conn: &PgConnection) -> serde_json::Value { + pub fn to_json(&self, conn: &Connection) -> serde_json::Value { let mut json = json!(self); json["object"] = json!(match self.kind.as_ref() { notification_kind::COMMENT => Comment::get(conn, self.object_id).map(|comment| diff --git a/plume-models/src/post_authors.rs b/plume-models/src/post_authors.rs index 25b90e70..56b11c65 100644 --- a/plume-models/src/post_authors.rs +++ b/plume-models/src/post_authors.rs @@ -1,10 +1,10 @@ -use diesel::{self, PgConnection, QueryDsl, RunQueryDsl, ExpressionMethods}; +use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods}; use posts::Post; use users::User; use schema::post_authors; -#[derive(Queryable, Identifiable, Associations)] +#[derive(Clone, Queryable, Identifiable, Associations)] #[belongs_to(Post)] #[belongs_to(User, foreign_key = "author_id")] pub struct PostAuthor { diff --git a/plume-models/src/posts.rs b/plume-models/src/posts.rs index 0e1b2c58..727db413 100644 --- a/plume-models/src/posts.rs +++ b/plume-models/src/posts.rs @@ -5,7 +5,7 @@ use activitypub::{ }; use canapi::{Error, Provider}; use chrono::{NaiveDateTime, TimeZone, Utc}; -use diesel::{self, PgConnection, RunQueryDsl, QueryDsl, ExpressionMethods, BelongingToDsl, dsl::any}; +use diesel::{self, RunQueryDsl, QueryDsl, ExpressionMethods, BelongingToDsl}; use heck::KebabCase; use serde_json; @@ -15,7 +15,7 @@ use plume_common::activity_pub::{ PUBLIC_VISIBILTY, Id, IntoId, inbox::{Deletable, FromActivity} }; -use {BASE_URL, ap_url}; +use {BASE_URL, ap_url, Connection}; use blogs::Blog; use instance::Instance; use likes::Like; @@ -57,10 +57,10 @@ pub struct NewPost { pub source: String, } -impl Provider for Post { +impl Provider for Post { type Data = PostEndpoint; - fn get(conn: &PgConnection, id: i32) -> Result { + fn get(conn: &Connection, id: i32) -> Result { Post::get(conn, id).map(|p| Ok(PostEndpoint { id: Some(p.id), title: Some(p.title.clone()), @@ -69,7 +69,7 @@ impl Provider for Post { })).unwrap_or(Err(Error::NotFound("Get Post".to_string()))) } - fn list(conn: &PgConnection, filter: PostEndpoint) -> Vec { + fn list(conn: &Connection, filter: PostEndpoint) -> Vec { let mut query = posts::table.into_boxed(); if let Some(title) = filter.title { query = query.filter(posts::title.eq(title)); @@ -92,15 +92,15 @@ impl Provider for Post { ).unwrap_or(vec![]) } - fn create(_conn: &PgConnection, _query: PostEndpoint) -> Result { + fn create(_conn: &Connection, _query: PostEndpoint) -> Result { unimplemented!() } - fn 
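The API `Provider` implementation is parameterized on the connection as well. With the type arguments written out (an assumption based on the call sites in src/api/posts.rs below; the `Result` payloads follow canapi's usual `Data`/`Error` shape), the signatures are:

```rust
impl Provider<Connection> for Post {
    type Data = PostEndpoint;

    fn get(conn: &Connection, id: i32) -> Result<PostEndpoint, Error> { unimplemented!() }
    fn list(conn: &Connection, filter: PostEndpoint) -> Vec<PostEndpoint> { unimplemented!() }
    fn create(_conn: &Connection, _query: PostEndpoint) -> Result<PostEndpoint, Error> { unimplemented!() }
    fn update(_conn: &Connection, _id: i32, _new_data: PostEndpoint) -> Result<PostEndpoint, Error> { unimplemented!() }
    fn delete(conn: &Connection, id: i32) { Post::get(conn, id).map(|p| p.delete(conn)); }
}
```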
update(_conn: &PgConnection, _id: i32, _new_data: PostEndpoint) -> Result { + fn update(_conn: &Connection, _id: i32, _new_data: PostEndpoint) -> Result { unimplemented!() } - fn delete(conn: &PgConnection, id: i32) { + fn delete(conn: &Connection, id: i32) { Post::get(conn, id).map(|p| p.delete(conn)); } } @@ -112,46 +112,47 @@ impl Post { find_by!(posts, find_by_slug, slug as String, blog_id as i32); find_by!(posts, find_by_ap_url, ap_url as String); - pub fn list_by_tag(conn: &PgConnection, tag: String, (min, max): (i32, i32)) -> Vec { + pub fn list_by_tag(conn: &Connection, tag: String, (min, max): (i32, i32)) -> Vec { use schema::tags; let ids = tags::table.filter(tags::tag.eq(tag)).select(tags::post_id); - posts::table.filter(posts::id.eq(any(ids))) + posts::table.filter(posts::id.eq_any(ids)) .filter(posts::published.eq(true)) .order(posts::creation_date.desc()) .offset(min.into()) .limit((max - min).into()) - .get_results::(conn) + .load(conn) .expect("Error loading posts by tag") } - pub fn count_for_tag(conn: &PgConnection, tag: String) -> i64 { + pub fn count_for_tag(conn: &Connection, tag: String) -> i64 { use schema::tags; let ids = tags::table.filter(tags::tag.eq(tag)).select(tags::post_id); - posts::table.filter(posts::id.eq(any(ids))) + *posts::table.filter(posts::id.eq_any(ids)) .filter(posts::published.eq(true)) .count() - .get_result(conn) + .load(conn) .expect("Error counting posts by tag") + .iter().next().unwrap() } - pub fn count_local(conn: &PgConnection) -> usize { + pub fn count_local(conn: &Connection) -> usize { use schema::post_authors; use schema::users; let local_authors = users::table.filter(users::instance_id.eq(Instance::local_id(conn))).select(users::id); - let local_posts_id = post_authors::table.filter(post_authors::author_id.eq(any(local_authors))).select(post_authors::post_id); - posts::table.filter(posts::id.eq(any(local_posts_id))) + let local_posts_id = post_authors::table.filter(post_authors::author_id.eq_any(local_authors)).select(post_authors::post_id); + posts::table.filter(posts::id.eq_any(local_posts_id)) .filter(posts::published.eq(true)) .load::(conn) .expect("Couldn't load local posts") .len() } - pub fn count(conn: &PgConnection) -> i64 { + pub fn count(conn: &Connection) -> i64 { posts::table.filter(posts::published.eq(true)).count().get_result(conn).expect("Couldn't count posts") } - pub fn get_recents(conn: &PgConnection, limit: i64) -> Vec { + pub fn get_recents(conn: &Connection, limit: i64) -> Vec { posts::table.order(posts::creation_date.desc()) .filter(posts::published.eq(true)) .limit(limit) @@ -159,11 +160,11 @@ impl Post { .expect("Error loading recent posts") } - pub fn get_recents_for_author(conn: &PgConnection, author: &User, limit: i64) -> Vec { + pub fn get_recents_for_author(conn: &Connection, author: &User, limit: i64) -> Vec { use schema::post_authors; let posts = PostAuthor::belonging_to(author).select(post_authors::post_id); - posts::table.filter(posts::id.eq(any(posts))) + posts::table.filter(posts::id.eq_any(posts)) .filter(posts::published.eq(true)) .order(posts::creation_date.desc()) .limit(limit) @@ -171,7 +172,7 @@ impl Post { .expect("Error loading recent posts for author") } - pub fn get_recents_for_blog(conn: &PgConnection, blog: &Blog, limit: i64) -> Vec { + pub fn get_recents_for_blog(conn: &Connection, blog: &Blog, limit: i64) -> Vec { posts::table.filter(posts::blog_id.eq(blog.id)) .filter(posts::published.eq(true)) .order(posts::creation_date.desc()) @@ -180,14 +181,14 @@ impl Post { .expect("Error 
loading recent posts for blog") } - pub fn get_for_blog(conn: &PgConnection, blog:&Blog) -> Vec { + pub fn get_for_blog(conn: &Connection, blog:&Blog) -> Vec { posts::table.filter(posts::blog_id.eq(blog.id)) .filter(posts::published.eq(true)) .load::(conn) .expect("Error loading posts for blog") } - pub fn blog_page(conn: &PgConnection, blog: &Blog, (min, max): (i32, i32)) -> Vec { + pub fn blog_page(conn: &Connection, blog: &Blog, (min, max): (i32, i32)) -> Vec { posts::table.filter(posts::blog_id.eq(blog.id)) .filter(posts::published.eq(true)) .order(posts::creation_date.desc()) @@ -198,7 +199,7 @@ impl Post { } /// Give a page of all the recent posts known to this instance (= federated timeline) - pub fn get_recents_page(conn: &PgConnection, (min, max): (i32, i32)) -> Vec { + pub fn get_recents_page(conn: &Connection, (min, max): (i32, i32)) -> Vec { posts::table.order(posts::creation_date.desc()) .filter(posts::published.eq(true)) .offset(min.into()) @@ -208,14 +209,14 @@ impl Post { } /// Give a page of posts from a specific instance - pub fn get_instance_page(conn: &PgConnection, instance_id: i32, (min, max): (i32, i32)) -> Vec { + pub fn get_instance_page(conn: &Connection, instance_id: i32, (min, max): (i32, i32)) -> Vec { use schema::blogs; let blog_ids = blogs::table.filter(blogs::instance_id.eq(instance_id)).select(blogs::id); posts::table.order(posts::creation_date.desc()) .filter(posts::published.eq(true)) - .filter(posts::blog_id.eq(any(blog_ids))) + .filter(posts::blog_id.eq_any(blog_ids)) .offset(min.into()) .limit((max - min).into()) .load::(conn) @@ -223,39 +224,40 @@ impl Post { } /// Give a page of customized user feed, based on a list of followed users - pub fn user_feed_page(conn: &PgConnection, followed: Vec, (min, max): (i32, i32)) -> Vec { + pub fn user_feed_page(conn: &Connection, followed: Vec, (min, max): (i32, i32)) -> Vec { use schema::post_authors; - let post_ids = post_authors::table.filter(post_authors::author_id.eq(any(followed))) + let post_ids = post_authors::table + .filter(post_authors::author_id.eq_any(followed)) .select(post_authors::post_id); posts::table.order(posts::creation_date.desc()) .filter(posts::published.eq(true)) - .filter(posts::id.eq(any(post_ids))) + .filter(posts::id.eq_any(post_ids)) .offset(min.into()) .limit((max - min).into()) .load::(conn) .expect("Error loading user feed page") } - pub fn drafts_by_author(conn: &PgConnection, author: &User) -> Vec { + pub fn drafts_by_author(conn: &Connection, author: &User) -> Vec { use schema::post_authors; let posts = PostAuthor::belonging_to(author).select(post_authors::post_id); posts::table.order(posts::creation_date.desc()) .filter(posts::published.eq(false)) - .filter(posts::id.eq(any(posts))) + .filter(posts::id.eq_any(posts)) .load::(conn) .expect("Error listing drafts") } - pub fn get_authors(&self, conn: &PgConnection) -> Vec { + pub fn get_authors(&self, conn: &Connection) -> Vec { use schema::users; use schema::post_authors; let author_list = PostAuthor::belonging_to(self).select(post_authors::author_id); - users::table.filter(users::id.eq(any(author_list))).load::(conn).unwrap() + users::table.filter(users::id.eq_any(author_list)).load::(conn).unwrap() } - pub fn get_blog(&self, conn: &PgConnection) -> Blog { + pub fn get_blog(&self, conn: &Connection) -> Blog { use schema::blogs; blogs::table.filter(blogs::id.eq(self.blog_id)) .limit(1) @@ -264,31 +266,32 @@ impl Post { .into_iter().nth(0).unwrap() } - pub fn get_likes(&self, conn: &PgConnection) -> Vec { + pub fn 
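`diesel::dsl::any` (note it disappearing from the import lists) compiles to PostgreSQL's `= ANY(...)` and has no SQLite equivalent, so every subquery filter moves to the portable `eq_any`, which renders as `IN (...)` on both backends. The `list_by_tag` query above, restored with its turbofish:

```rust
use diesel::prelude::*;

fn posts_with_tag(conn: &Connection, tag: String) -> Vec<Post> {
    use schema::{posts, tags};
    // Subquery: ids of the posts carrying this tag.
    let ids = tags::table.filter(tags::tag.eq(tag)).select(tags::post_id);
    posts::table
        .filter(posts::id.eq_any(ids)) // was: posts::id.eq(any(ids))
        .filter(posts::published.eq(true))
        .load::<Post>(conn)
        .expect("Error loading posts by tag")
}
```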
get_likes(&self, conn: &Connection) -> Vec { use schema::likes; likes::table.filter(likes::post_id.eq(self.id)) .load::(conn) .expect("Couldn't load likes associted to post") } - pub fn get_reshares(&self, conn: &PgConnection) -> Vec { + pub fn get_reshares(&self, conn: &Connection) -> Vec { use schema::reshares; reshares::table.filter(reshares::post_id.eq(self.id)) .load::(conn) .expect("Couldn't load reshares associted to post") } - pub fn update_ap_url(&self, conn: &PgConnection) -> Post { + pub fn update_ap_url(&self, conn: &Connection) -> Post { if self.ap_url.len() == 0 { diesel::update(self) .set(posts::ap_url.eq(self.compute_id(conn))) - .get_result::(conn).expect("Couldn't update AP URL") + .execute(conn).expect("Couldn't update AP URL"); + Post::get(conn, self.id).unwrap() } else { self.clone() } } - pub fn get_receivers_urls(&self, conn: &PgConnection) -> Vec { + pub fn get_receivers_urls(&self, conn: &Connection) -> Vec { let followers = self.get_authors(conn).into_iter().map(|a| a.get_followers(conn)).collect::>>(); let to = followers.into_iter().fold(vec![], |mut acc, f| { for x in f { @@ -299,7 +302,7 @@ impl Post { to } - pub fn into_activity(&self, conn: &PgConnection) -> Article { + pub fn into_activity(&self, conn: &Connection) -> Article { let mut to = self.get_receivers_urls(conn); to.push(PUBLIC_VISIBILTY.to_string()); @@ -328,7 +331,7 @@ impl Post { article } - pub fn create_activity(&self, conn: &PgConnection) -> Create { + pub fn create_activity(&self, conn: &Connection) -> Create { let article = self.into_activity(conn); let mut act = Create::default(); act.object_props.set_id_string(format!("{}activity", self.ap_url)).expect("Post::create_activity: id error"); @@ -341,7 +344,7 @@ impl Post { act } - pub fn update_activity(&self, conn: &PgConnection) -> Update { + pub fn update_activity(&self, conn: &Connection) -> Update { let article = self.into_activity(conn); let mut act = Update::default(); act.object_props.set_id_string(format!("{}/update-{}", self.ap_url, Utc::now().timestamp())).expect("Post::update_activity: id error"); @@ -354,7 +357,7 @@ impl Post { act } - pub fn handle_update(conn: &PgConnection, updated: Article) { + pub fn handle_update(conn: &Connection, updated: Article) { let id = updated.object_props.id_string().expect("Post::handle_update: id error"); let mut post = Post::find_by_ap_url(conn, id).unwrap(); @@ -382,7 +385,7 @@ impl Post { post.update(conn); } - pub fn to_json(&self, conn: &PgConnection) -> serde_json::Value { + pub fn to_json(&self, conn: &Connection) -> serde_json::Value { let blog = self.get_blog(conn); json!({ "post": self, @@ -394,13 +397,13 @@ impl Post { }) } - pub fn compute_id(&self, conn: &PgConnection) -> String { + pub fn compute_id(&self, conn: &Connection) -> String { ap_url(format!("{}/~/{}/{}/", BASE_URL.as_str(), self.get_blog(conn).get_fqn(conn), self.slug)) } } -impl FromActivity for Post { - fn from_activity(conn: &PgConnection, article: Article, _actor: Id) -> Post { +impl FromActivity for Post { + fn from_activity(conn: &Connection, article: Article, _actor: Id) -> Post { if let Some(post) = Post::find_by_ap_url(conn, article.object_props.id_string().unwrap_or(String::new())) { post } else { @@ -457,8 +460,8 @@ impl FromActivity for Post { } } -impl Deletable for Post { - fn delete(&self, conn: &PgConnection) -> Delete { +impl Deletable for Post { + fn delete(&self, conn: &Connection) -> Delete { let mut act = Delete::default(); 
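A recurring idiom above: `Like`, `Reshare`, and `Post` rows are first inserted with an empty `ap_url`, and `update_ap_url` fills it in afterwards, because the URL is computed from data that only exists once the row does. The `Post` variant has to return the updated row, so in the RETURNING-free style it reloads after executing (as in the hunk above, with the elided type annotations restored):

```rust
pub fn update_ap_url(&self, conn: &Connection) -> Post {
    if self.ap_url.len() == 0 {
        diesel::update(self)
            .set(posts::ap_url.eq(self.compute_id(conn)))
            .execute(conn)
            .expect("Couldn't update AP URL");
        Post::get(conn, self.id).unwrap() // reload instead of RETURNING
    } else {
        self.clone()
    }
}
```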
act.delete_props.set_actor_link(self.get_authors(conn)[0].clone().into_id()).expect("Post::delete: actor error"); @@ -473,7 +476,7 @@ impl Deletable for Post { act } - fn delete_id(id: String, conn: &PgConnection) { + fn delete_id(id: String, conn: &Connection) { Post::find_by_ap_url(conn, id).map(|p| p.delete(conn)); } } diff --git a/plume-models/src/reshares.rs b/plume-models/src/reshares.rs index 17012637..db04bcc9 100644 --- a/plume-models/src/reshares.rs +++ b/plume-models/src/reshares.rs @@ -1,14 +1,15 @@ use activitypub::activity::{Announce, Undo}; use chrono::NaiveDateTime; -use diesel::{self, PgConnection, QueryDsl, RunQueryDsl, ExpressionMethods}; +use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods}; use plume_common::activity_pub::{Id, IntoId, inbox::{FromActivity, Notify, Deletable}, PUBLIC_VISIBILTY}; +use Connection; use notifications::*; use posts::Post; use users::User; use schema::reshares; -#[derive(Serialize, Deserialize, Queryable, Identifiable)] +#[derive(Clone, Serialize, Deserialize, Queryable, Identifiable)] pub struct Reshare { pub id: i32, pub user_id: i32, @@ -31,7 +32,7 @@ impl Reshare { find_by!(reshares, find_by_ap_url, ap_url as String); find_by!(reshares, find_by_user_on_post, user_id as i32, post_id as i32); - pub fn update_ap_url(&self, conn: &PgConnection) { + pub fn update_ap_url(&self, conn: &Connection) { if self.ap_url.len() == 0 { diesel::update(self) .set(reshares::ap_url.eq(format!( @@ -39,11 +40,11 @@ impl Reshare { User::get(conn, self.user_id).unwrap().ap_url, Post::get(conn, self.post_id).unwrap().ap_url ))) - .get_result::(conn).expect("Couldn't update AP URL"); + .execute(conn).expect("Couldn't update AP URL"); } } - pub fn get_recents_for_author(conn: &PgConnection, user: &User, limit: i64) -> Vec { + pub fn get_recents_for_author(conn: &Connection, user: &User, limit: i64) -> Vec { reshares::table.filter(reshares::user_id.eq(user.id)) .order(reshares::creation_date.desc()) .limit(limit) @@ -51,15 +52,15 @@ impl Reshare { .expect("Error loading recent reshares for user") } - pub fn get_post(&self, conn: &PgConnection) -> Option { + pub fn get_post(&self, conn: &Connection) -> Option { Post::get(conn, self.post_id) } - pub fn get_user(&self, conn: &PgConnection) -> Option { + pub fn get_user(&self, conn: &Connection) -> Option { User::get(conn, self.user_id) } - pub fn into_activity(&self, conn: &PgConnection) -> Announce { + pub fn into_activity(&self, conn: &Connection) -> Announce { let mut act = Announce::default(); act.announce_props.set_actor_link(User::get(conn, self.user_id).unwrap().into_id()).unwrap(); act.announce_props.set_object_link(Post::get(conn, self.post_id).unwrap().into_id()).unwrap(); @@ -71,8 +72,8 @@ impl Reshare { } } -impl FromActivity for Reshare { - fn from_activity(conn: &PgConnection, announce: Announce, _actor: Id) -> Reshare { +impl FromActivity for Reshare { + fn from_activity(conn: &Connection, announce: Announce, _actor: Id) -> Reshare { let user = User::from_url(conn, announce.announce_props.actor_link::().expect("Reshare::from_activity: actor error").into()); let post = Post::find_by_ap_url(conn, announce.announce_props.object_link::().expect("Reshare::from_activity: object error").into()); let reshare = Reshare::insert(conn, NewReshare { @@ -85,8 +86,8 @@ impl FromActivity for Reshare { } } -impl Notify for Reshare { - fn notify(&self, conn: &PgConnection) { +impl Notify for Reshare { + fn notify(&self, conn: &Connection) { let post = self.get_post(conn).unwrap(); for author in 
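The `Deletable` impls (for `Post` above and `Reshare` just below) share one shape: `delete` removes the row plus any associated notification and returns the ActivityPub activity to broadcast, while `delete_id` first resolves an incoming ActivityPub id to a local row so that remote `Undo`s can be applied. A sketch for `Reshare`, with the trait's type parameters assumed:

```rust
impl Deletable<Connection, Undo> for Reshare {
    fn delete(&self, conn: &Connection) -> Undo {
        diesel::delete(self).execute(conn).unwrap();
        // also remove the RESHARE notification, then build the Undo
        unimplemented!()
    }

    fn delete_id(id: String, conn: &Connection) {
        if let Some(reshare) = Reshare::find_by_ap_url(conn, id) {
            reshare.delete(conn);
        }
    }
}
```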
post.get_authors(conn) { Notification::insert(conn, NewNotification { @@ -98,8 +99,8 @@ impl Notify for Reshare { } } -impl Deletable for Reshare { - fn delete(&self, conn: &PgConnection) -> Undo { +impl Deletable for Reshare { + fn delete(&self, conn: &Connection) -> Undo { diesel::delete(self).execute(conn).unwrap(); // delete associated notification if any @@ -117,7 +118,7 @@ impl Deletable for Reshare { act } - fn delete_id(id: String, conn: &PgConnection) { + fn delete_id(id: String, conn: &Connection) { if let Some(reshare) = Reshare::find_by_ap_url(conn, id) { reshare.delete(conn); } diff --git a/plume-models/src/safe_string.rs b/plume-models/src/safe_string.rs index 767e4bc6..ce813d45 100644 --- a/plume-models/src/safe_string.rs +++ b/plume-models/src/safe_string.rs @@ -92,6 +92,13 @@ impl Queryable for SafeString { } } +impl Queryable for SafeString { + type Row = String; + fn build(value: Self::Row) -> Self { + SafeString::new(&value) + } +} + impl ToSql for SafeString where DB: diesel::backend::Backend, diff --git a/plume-models/src/tags.rs b/plume-models/src/tags.rs index b8ac6147..d906601f 100644 --- a/plume-models/src/tags.rs +++ b/plume-models/src/tags.rs @@ -1,7 +1,7 @@ -use diesel::{self, PgConnection, ExpressionMethods, RunQueryDsl, QueryDsl}; +use diesel::{self, ExpressionMethods, RunQueryDsl, QueryDsl}; use plume_common::activity_pub::Hashtag; -use ap_url; +use {ap_url, Connection}; use instance::Instance; use schema::tags; @@ -27,14 +27,14 @@ impl Tag { find_by!(tags, find_by_name, tag as String); list_by!(tags, for_post, post_id as i32); - pub fn into_activity(&self, conn: &PgConnection) -> Hashtag { + pub fn into_activity(&self, conn: &Connection) -> Hashtag { let mut ht = Hashtag::default(); ht.set_href_string(ap_url(format!("{}/tag/{}", Instance::get_local(conn).unwrap().public_domain, self.tag))).expect("Tag::into_activity: href error"); ht.set_name_string(self.tag.clone()).expect("Tag::into_activity: name error"); ht } - pub fn from_activity(conn: &PgConnection, tag: Hashtag, post: i32) -> Tag { + pub fn from_activity(conn: &Connection, tag: Hashtag, post: i32) -> Tag { Tag::insert(conn, NewTag { tag: tag.name_string().expect("Tag::from_activity: name error"), is_hastag: false, diff --git a/plume-models/src/users.rs b/plume-models/src/users.rs index d8addb19..e8fa6729 100644 --- a/plume-models/src/users.rs +++ b/plume-models/src/users.rs @@ -5,8 +5,8 @@ use activitypub::{ object::Image, }; use bcrypt; -use chrono::{NaiveDateTime, Utc}; -use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods, BelongingToDsl, PgConnection, dsl::any}; +use chrono::{Utc, NaiveDateTime}; +use diesel::{self, QueryDsl, RunQueryDsl, ExpressionMethods, BelongingToDsl}; use openssl::{ hash::MessageDigest, pkey::{PKey, Private}, @@ -31,7 +31,7 @@ use serde_json; use url::Url; use webfinger::*; -use {BASE_URL, USE_HTTPS, ap_url}; +use {BASE_URL, USE_HTTPS, ap_url, Connection}; use db_conn::DbConn; use blogs::Blog; use blog_authors::BlogAuthor; @@ -100,50 +100,50 @@ impl User { find_by!(users, find_by_name, username as String, instance_id as i32); find_by!(users, find_by_ap_url, ap_url as String); - pub fn one_by_instance(conn: &PgConnection) -> Vec { - users::table.distinct_on(users::instance_id) - .get_results::(conn) + pub fn one_by_instance(conn: &Connection) -> Vec { + users::table.filter(users::instance_id.eq_any(users::table.select(users::instance_id).distinct())) + .load::(conn) .expect("Error in User::on_by_instance") } - pub fn delete(&self, conn: &PgConnection) { + pub fn 
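`distinct_on` is another PostgreSQL-only construct, so `one_by_instance` is rewritten around a portable `DISTINCT` subquery. One caveat worth flagging: unlike `DISTINCT ON (instance_id)`, which keeps a single row per instance, filtering users by `instance_id IN (SELECT DISTINCT instance_id ...)` matches every user, since each user's `instance_id` is necessarily in that set. If one-user-per-instance matters to callers, an in-memory dedup along these lines (hypothetical helper, not part of the patch) would restore it:

```rust
use std::collections::HashSet;

fn dedup_by_instance(users: Vec<User>) -> Vec<User> {
    let mut seen = HashSet::new();
    // Keep only the first user seen for each instance_id.
    users.into_iter()
        .filter(|u| seen.insert(u.instance_id))
        .collect()
}
```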
delete(&self, conn: &Connection) { diesel::delete(self).execute(conn).expect("Couldn't remove user from DB"); } - pub fn get_instance(&self, conn: &PgConnection) -> Instance { + pub fn get_instance(&self, conn: &Connection) -> Instance { Instance::get(conn, self.instance_id).expect("Couldn't find instance") } - pub fn grant_admin_rights(&self, conn: &PgConnection) { + pub fn grant_admin_rights(&self, conn: &Connection) { diesel::update(self) .set(users::is_admin.eq(true)) - .load::(conn) + .execute(conn) .expect("Couldn't grant admin rights"); } - pub fn update(&self, conn: &PgConnection, name: String, email: String, summary: String) -> User { + pub fn update(&self, conn: &Connection, name: String, email: String, summary: String) -> User { diesel::update(self) .set(( users::display_name.eq(name), users::email.eq(email), users::summary.eq(summary), - )).load::(conn) - .expect("Couldn't update user") - .into_iter().nth(0).unwrap() + )).execute(conn) + .expect("Couldn't update user"); + User::get(conn, self.id).unwrap() } - pub fn count_local(conn: &PgConnection) -> usize { + pub fn count_local(conn: &Connection) -> usize { users::table.filter(users::instance_id.eq(Instance::local_id(conn))) .load::(conn) .expect("Couldn't load local users") .len() } - pub fn find_local(conn: &PgConnection, username: String) -> Option { + pub fn find_local(conn: &Connection, username: String) -> Option { User::find_by_name(conn, username, Instance::local_id(conn)) } - pub fn find_by_fqn(conn: &PgConnection, fqn: String) -> Option { + pub fn find_by_fqn(conn: &Connection, fqn: String) -> Option { if fqn.contains("@") { // remote user match Instance::find_by_domain(conn, String::from(fqn.split("@").last().unwrap())) { Some(instance) => { @@ -159,7 +159,7 @@ impl User { } } - fn fetch_from_webfinger(conn: &PgConnection, acct: String) -> Option { + fn fetch_from_webfinger(conn: &Connection, acct: String) -> Option { match resolve(acct.clone(), *USE_HTTPS) { Ok(wf) => wf.links.into_iter().find(|l| l.mime_type == Some(String::from("application/activity+json"))).and_then(|l| User::fetch_from_url(conn, l.href.expect("No href for AP WF link"))), Err(details) => { @@ -192,11 +192,11 @@ impl User { } } - pub fn fetch_from_url(conn: &PgConnection, url: String) -> Option { + pub fn fetch_from_url(conn: &Connection, url: String) -> Option { User::fetch(url.clone()).map(|json| (User::from_activity(conn, json, Url::parse(url.as_ref()).unwrap().host_str().unwrap().to_string()))) } - fn from_activity(conn: &PgConnection, acct: CustomPerson, inst: String) -> User { + fn from_activity(conn: &Connection, acct: CustomPerson, inst: String) -> User { let instance = match Instance::find_by_domain(conn, inst.clone()) { Some(instance) => instance, None => { @@ -242,7 +242,7 @@ impl User { user } - pub fn refetch(&self, conn: &PgConnection) { + pub fn refetch(&self, conn: &Connection) { User::fetch(self.ap_url.clone()).map(|json| { let avatar = Media::save_remote(conn, json.object.object_props.icon_image().expect("User::refetch: icon error") .object_props.url_string().expect("User::refetch: icon.url error")); @@ -274,40 +274,40 @@ impl User { } } - pub fn update_boxes(&self, conn: &PgConnection) { + pub fn update_boxes(&self, conn: &Connection) { let instance = self.get_instance(conn); if self.outbox_url.len() == 0 { diesel::update(self) .set(users::outbox_url.eq(instance.compute_box(USER_PREFIX, self.username.clone(), "outbox"))) - .get_result::(conn).expect("Couldn't update outbox URL"); + .execute(conn).expect("Couldn't update outbox 
URL"); } if self.inbox_url.len() == 0 { diesel::update(self) .set(users::inbox_url.eq(instance.compute_box(USER_PREFIX, self.username.clone(), "inbox"))) - .get_result::(conn).expect("Couldn't update inbox URL"); + .execute(conn).expect("Couldn't update inbox URL"); } if self.ap_url.len() == 0 { diesel::update(self) .set(users::ap_url.eq(instance.compute_box(USER_PREFIX, self.username.clone(), ""))) - .get_result::(conn).expect("Couldn't update AP URL"); + .execute(conn).expect("Couldn't update AP URL"); } if self.shared_inbox_url.is_none() { diesel::update(self) .set(users::shared_inbox_url.eq(ap_url(format!("{}/inbox", Instance::get_local(conn).unwrap().public_domain)))) - .get_result::(conn).expect("Couldn't update shared inbox URL"); + .execute(conn).expect("Couldn't update shared inbox URL"); } if self.followers_endpoint.len() == 0 { diesel::update(self) .set(users::followers_endpoint.eq(instance.compute_box(USER_PREFIX, self.username.clone(), "followers"))) - .get_result::(conn).expect("Couldn't update followers endpoint"); + .execute(conn).expect("Couldn't update followers endpoint"); } } - pub fn get_local_page(conn: &PgConnection, (min, max): (i32, i32)) -> Vec { + pub fn get_local_page(conn: &Connection, (min, max): (i32, i32)) -> Vec { users::table.filter(users::instance_id.eq(1)) .order(users::username.asc()) .offset(min.into()) @@ -316,7 +316,7 @@ impl User { .expect("Error getting local users page") } - pub fn outbox(&self, conn: &PgConnection) -> ActivityStream { + pub fn outbox(&self, conn: &Connection) -> ActivityStream { let acts = self.get_activities(conn); let n_acts = acts.len(); let mut coll = OrderedCollection::default(); @@ -369,20 +369,20 @@ impl User { } } - fn get_activities(&self, conn: &PgConnection) -> Vec { + fn get_activities(&self, conn: &Connection) -> Vec { use schema::posts; use schema::post_authors; let posts_by_self = PostAuthor::belonging_to(self).select(post_authors::post_id); let posts = posts::table .filter(posts::published.eq(true)) - .filter(posts::id.eq(any(posts_by_self))) + .filter(posts::id.eq_any(posts_by_self)) .load::(conn).unwrap(); posts.into_iter().map(|p| { serde_json::to_value(p.create_activity(conn)).unwrap() }).collect::>() } - pub fn get_fqn(&self, conn: &PgConnection) -> String { + pub fn get_fqn(&self, conn: &Connection) -> String { if self.instance_id == Instance::local_id(conn) { self.username.clone() } else { @@ -390,28 +390,28 @@ impl User { } } - pub fn get_followers(&self, conn: &PgConnection) -> Vec { + pub fn get_followers(&self, conn: &Connection) -> Vec { use schema::follows; let follows = Follow::belonging_to(self).select(follows::follower_id); - users::table.filter(users::id.eq(any(follows))).load::(conn).unwrap() + users::table.filter(users::id.eq_any(follows)).load::(conn).unwrap() } - pub fn get_followers_page(&self, conn: &PgConnection, (min, max): (i32, i32)) -> Vec { + pub fn get_followers_page(&self, conn: &Connection, (min, max): (i32, i32)) -> Vec { use schema::follows; let follows = Follow::belonging_to(self).select(follows::follower_id); - users::table.filter(users::id.eq(any(follows))) + users::table.filter(users::id.eq_any(follows)) .offset(min.into()) .limit((max - min).into()) .load::(conn).unwrap() } - pub fn get_following(&self, conn: &PgConnection) -> Vec { - use schema::follows; - let follows = follows::table.filter(follows::follower_id.eq(self.id)).select(follows::following_id); - users::table.filter(users::id.eq(any(follows))).load::(conn).unwrap() + pub fn get_following(&self, conn: &Connection) 
-> Vec { + use schema::follows::dsl::*; + let f = follows.filter(follower_id.eq(self.id)).select(following_id); + users::table.filter(users::id.eq_any(f)).load::(conn).unwrap() } - pub fn is_followed_by(&self, conn: &PgConnection, other_id: i32) -> bool { + pub fn is_followed_by(&self, conn: &Connection, other_id: i32) -> bool { use schema::follows; follows::table .filter(follows::follower_id.eq(other_id)) @@ -421,7 +421,7 @@ impl User { .len() > 0 } - pub fn is_following(&self, conn: &PgConnection, other_id: i32) -> bool { + pub fn is_following(&self, conn: &Connection, other_id: i32) -> bool { use schema::follows; follows::table .filter(follows::follower_id.eq(self.id)) @@ -431,7 +431,7 @@ impl User { .len() > 0 } - pub fn has_liked(&self, conn: &PgConnection, post: &Post) -> bool { + pub fn has_liked(&self, conn: &Connection, post: &Post) -> bool { use schema::likes; likes::table .filter(likes::post_id.eq(post.id)) @@ -441,7 +441,7 @@ impl User { .len() > 0 } - pub fn has_reshared(&self, conn: &PgConnection, post: &Post) -> bool { + pub fn has_reshared(&self, conn: &Connection, post: &Post) -> bool { use schema::reshares; reshares::table .filter(reshares::post_id.eq(post.id)) @@ -451,7 +451,7 @@ impl User { .len() > 0 } - pub fn is_author_in(&self, conn: &PgConnection, blog: Blog) -> bool { + pub fn is_author_in(&self, conn: &Connection, blog: Blog) -> bool { use schema::blog_authors; blog_authors::table.filter(blog_authors::author_id.eq(self.id)) .filter(blog_authors::blog_id.eq(blog.id)) @@ -464,7 +464,7 @@ impl User { PKey::from_rsa(Rsa::private_key_from_pem(self.private_key.clone().unwrap().as_ref()).unwrap()).unwrap() } - pub fn into_activity(&self, conn: &PgConnection) -> CustomPerson { + pub fn into_activity(&self, conn: &Connection) -> CustomPerson { let mut actor = Person::default(); actor.object_props.set_id_string(self.ap_url.clone()).expect("User::into_activity: id error"); actor.object_props.set_name_string(self.display_name.clone()).expect("User::into_activity: name error"); @@ -494,7 +494,7 @@ impl User { CustomPerson::new(actor, ap_signature) } - pub fn to_json(&self, conn: &PgConnection) -> serde_json::Value { + pub fn to_json(&self, conn: &Connection) -> serde_json::Value { let mut json = serde_json::to_value(self).unwrap(); json["fqn"] = serde_json::Value::String(self.get_fqn(conn)); json["name"] = if self.display_name.len() > 0 { @@ -506,7 +506,7 @@ impl User { json } - pub fn webfinger(&self, conn: &PgConnection) -> Webfinger { + pub fn webfinger(&self, conn: &Connection) -> Webfinger { Webfinger { subject: format!("acct:{}@{}", self.username, self.get_instance(conn).public_domain), aliases: vec![self.ap_url.clone()], @@ -533,7 +533,7 @@ impl User { } } - pub fn from_url(conn: &PgConnection, url: String) -> Option { + pub fn from_url(conn: &Connection, url: String) -> Option { User::find_by_ap_url(conn, url.clone()).or_else(|| { // The requested user was not in the DB // We try to fetch it if it is remote @@ -545,7 +545,7 @@ impl User { }) } - pub fn set_avatar(&self, conn: &PgConnection, id: i32) { + pub fn set_avatar(&self, conn: &Connection, id: i32) { diesel::update(self) .set(users::avatar_id.eq(id)) .execute(conn) @@ -609,7 +609,7 @@ impl Signer for User { impl NewUser { /// Creates a new local user pub fn new_local( - conn: &PgConnection, + conn: &Connection, username: String, display_name: String, is_admin: bool, diff --git a/src/api/posts.rs b/src/api/posts.rs index bfd96dfd..f29c402a 100644 --- a/src/api/posts.rs +++ b/src/api/posts.rs @@ -1,23 +1,25 @@ 
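On the web side, handlers keep taking the `DbConn` guard and rely on the `Deref` impl from db_conn.rs above: `&*conn` yields a `&Connection`, which is what the generic `Provider` now expects. The GET route just below, with the route parameter and turbofish (omitted in the condensed form) restored as an assumption:

```rust
#[get("/posts/<id>")]
fn get(id: i32, conn: DbConn) -> Json<serde_json::Value> {
    // &*conn: DbConn derefs to Connection
    let post = <Post as Provider<Connection>>::get(&*conn, id).ok();
    Json(json!(post))
}
```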
use canapi::Provider; -use diesel::PgConnection; use rocket::http::uri::Origin; use rocket_contrib::Json; use serde_json; use serde_qs; use plume_api::posts::PostEndpoint; -use plume_models::db_conn::DbConn; -use plume_models::posts::Post; +use plume_models::{ + Connection, + db_conn::DbConn, + posts::Post, +}; #[get("/posts/")] fn get(id: i32, conn: DbConn) -> Json { - let post = >::get(&*conn, id).ok(); + let post = >::get(&*conn, id).ok(); Json(json!(post)) } #[get("/posts")] fn list(conn: DbConn, uri: &Origin) -> Json { let query: PostEndpoint = serde_qs::from_str(uri.query().unwrap_or("")).expect("Invalid query string"); - let post = >::list(&*conn, query); + let post = >::list(&*conn, query); Json(json!(post)) } diff --git a/src/inbox.rs b/src/inbox.rs index 19e4efe3..d70fed5d 100644 --- a/src/inbox.rs +++ b/src/inbox.rs @@ -1,10 +1,10 @@ use activitypub::{activity::{Announce, Create, Delete, Like, Undo, Update}, object::Tombstone}; -use diesel::PgConnection; use failure::Error; use serde_json; use plume_common::activity_pub::{Id, inbox::{Deletable, FromActivity, InboxError}}; use plume_models::{ + Connection, comments::Comment, follows::Follow, instance::Instance, @@ -15,7 +15,7 @@ use plume_models::{ }; pub trait Inbox { - fn received(&self, conn: &PgConnection, act: serde_json::Value) -> Result<(), Error> { + fn received(&self, conn: &Connection, act: serde_json::Value) -> Result<(), Error> { let actor_id = Id::new(act["actor"].as_str().unwrap_or_else(|| act["actor"]["id"].as_str().expect("No actor ID for incoming activity"))); match act["type"].as_str() { Some(t) => { diff --git a/src/routes/mod.rs b/src/routes/mod.rs index 25abb2a6..dfd51e22 100644 --- a/src/routes/mod.rs +++ b/src/routes/mod.rs @@ -1,5 +1,4 @@ use atom_syndication::{ContentBuilder, Entry, EntryBuilder, LinkBuilder, Person, PersonBuilder}; -use diesel::PgConnection; use rocket::{ http::uri::{FromUriParam, UriDisplay}, response::NamedFile @@ -9,7 +8,7 @@ use std::{ path::{Path, PathBuf} }; -use plume_models::posts::Post; +use plume_models::{Connection, posts::Post}; macro_rules! may_fail { ($account:expr, $expr:expr, $template:expr, $msg:expr, | $res:ident | $block:block) => { @@ -79,7 +78,7 @@ impl Page { } } -pub fn post_to_atom(post: Post, conn: &PgConnection) -> Entry { +pub fn post_to_atom(post: Post, conn: &Connection) -> Entry { EntryBuilder::default() .title(post.title.clone()) .content(ContentBuilder::default() diff --git a/src/setup.rs b/src/setup.rs index 83c86c5c..16736d1e 100644 --- a/src/setup.rs +++ b/src/setup.rs @@ -1,5 +1,5 @@ use colored::Colorize; -use diesel::{pg::PgConnection, r2d2::{ConnectionManager, Pool}}; +use diesel::r2d2::{ConnectionManager, Pool}; use dotenv::dotenv; use std::fs::{self, File}; use std::io; @@ -9,21 +9,22 @@ use rpassword; use plume_models::safe_string::SafeString; use plume_models::{ + Connection, DB_URL, - db_conn::{DbConn, PgPool}, + db_conn::{DbConn, DbPool}, instance::*, users::* }; /// Initializes a database pool. -fn init_pool() -> Option { +fn init_pool() -> Option { dotenv().ok(); - let manager = ConnectionManager::::new(DB_URL.as_str()); + let manager = ConnectionManager::::new(DB_URL.as_str()); Pool::new(manager).ok() } -pub fn check() -> PgPool { +pub fn check() -> DbPool { if let Some(pool) = init_pool() { match pool.get() { Ok(conn) => {
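Finally, setup.rs generalizes the r2d2 pool the same way: `ConnectionManager<Connection>` works for whichever backend was compiled in, and `PgPool` becomes `DbPool`. Assuming `DbPool` is defined in db_conn.rs as the matching alias (its definition falls outside this excerpt), the pool initialization reads:

```rust
use diesel::r2d2::{ConnectionManager, Pool};

// Assumed alias, mirroring the old PgPool:
pub type DbPool = Pool<ConnectionManager<Connection>>;

/// Initializes a database pool against the compiled-in backend.
fn init_pool() -> Option<DbPool> {
    dotenv().ok();
    let manager = ConnectionManager::<Connection>::new(DB_URL.as_str());
    Pool::new(manager).ok()
}
```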