Merge branch 'main' into timeline-cli

trinity-1686a 2023-02-26 16:48:40 +01:00
commit 7e4d081027
58 changed files with 1444 additions and 1089 deletions

View File

@@ -10,7 +10,7 @@ executors:
type: boolean
default: false
docker:
- - image: plumeorg/plume-buildenv:v0.7.0
+ - image: plumeorg/plume-buildenv:v0.8.0
- image: <<#parameters.postgres>>cimg/postgres:14.2<</parameters.postgres>><<^parameters.postgres>>alpine:latest<</parameters.postgres>>
environment:
POSTGRES_USER: postgres
@@ -63,7 +63,7 @@ commands:
type: boolean
default: false
steps:
- - run: rustup component add clippy --toolchain nightly-2022-01-27-x86_64-unknown-linux-gnu
+ - run: rustup component add clippy --toolchain nightly-2022-07-19-x86_64-unknown-linux-gnu
- run: cargo clippy <<^parameters.no_feature>>--no-default-features --features="${FEATURES}"<</parameters.no_feature>> --release -p <<parameters.package>> -- -D warnings
run_with_coverage:
@@ -112,7 +112,7 @@ jobs:
name: default
steps:
- restore_env
- - run: rustup component add rustfmt --toolchain nightly-2022-01-27-x86_64-unknown-linux-gnu
+ - run: rustup component add rustfmt --toolchain nightly-2022-07-19-x86_64-unknown-linux-gnu
- run: cargo fmt --all -- --check
clippy:

View File

@@ -1,4 +1,4 @@
- FROM rust:1-buster
+ FROM rust:1
ENV PATH="/root/.cargo/bin:${PATH}"
#install native/circleci/build dependancies
@@ -14,6 +14,7 @@ RUN apt update &&\
#stick rust environment
COPY rust-toolchain ./
+ RUN rustup component add rustfmt clippy
#compile some deps
RUN cargo install wasm-pack &&\

View File

@@ -1 +1 @@
- nightly-2022-01-27
+ nightly-2022-07-19

View File

@@ -3,3 +3,5 @@ data
Dockerfile
docker-compose.yml
.env
+ target
+ data

View File

@@ -11,20 +11,20 @@ jobs:
steps:
-
name: Set up QEMU
- uses: docker/setup-qemu-action@v1
+ uses: docker/setup-qemu-action@v2
-
name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v1
+ uses: docker/setup-buildx-action@v2
-
name: Login to DockerHub
- uses: docker/login-action@v1
+ uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
-
name: Build and push
id: docker_build
- uses: docker/build-push-action@v2
+ uses: docker/build-push-action@v3
with:
push: true
tags: plumeorg/plume:latest

View File

@@ -11,10 +11,10 @@ jobs:
steps:
-
name: Set up QEMU
- uses: docker/setup-qemu-action@v1
+ uses: docker/setup-qemu-action@v2
-
name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v1
+ uses: docker/setup-buildx-action@v2
-
name: Docker meta
id: meta
@@ -23,14 +23,14 @@ jobs:
images: plumeorg/plume
-
name: Login to DockerHub
- uses: docker/login-action@v1
+ uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
-
name: Build and push
id: docker_build
- uses: docker/build-push-action@v2
+ uses: docker/build-push-action@v3
with:
push: true
tags: ${{ steps.meta.outputs.tags }}

View File

@@ -4,6 +4,25 @@
## [Unreleased] - ReleaseDate
+ ### Added
+ - Add 'My feed' to i18n timeline name (#1084)
+ - Bidirectional support for user page header (#1092)
+ ### Changed
+ - Use blog title as slug (#1094, #1126, #1127)
+ - Bump Rust to nightly 2022-07-19 (#1119)
+ ### Fixed
+ - Malfunction while creating a blog post in Persian (#1116)
+ - Email block list being ignored at email sign-up (#1122)
+ - Bug where some Activity Streams properties were not parsed properly (#1129)
+ - Allow empty avatar for remote users (#1129)
+ - Percent encode blog FQN for federation interoperability (#1129)
+ - The same for `preferredUsername` (#1129)
## [[0.7.2]] - 2022-05-11
### Added
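For context on the "Percent encode blog FQN" and `preferredUsername` entries above: the hunks further down route those values through an `iri_percent_encode_seg` helper before they are embedded in ActivityPub IRIs. A minimal sketch of what such a helper does, assuming the `percent-encoding` crate rather than Plume's actual implementation:

use percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};

// Assumed escape set for an IRI path segment; the real helper may use a different set.
const SEGMENT: &AsciiSet = &CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'#').add(b'?').add(b'%');

fn iri_percent_encode_seg(seg: &str) -> String {
    utf8_percent_encode(seg, SEGMENT).to_string()
}

fn main() {
    // A blog named "Plume01 Blog 2" yields the "Plume01%20Blog%202" segments
    // seen in the de_custom_group test further down.
    assert_eq!(iri_percent_encode_seg("Plume01 Blog 2"), "Plume01%20Blog%202");
}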

Cargo.lock (generated): 1512 changed lines; file diff suppressed because it is too large.

View File

@@ -6,7 +6,7 @@ repository = "https://github.com/Plume-org/Plume"
edition = "2018"
[dependencies]
- atom_syndication = "0.11.0"
+ atom_syndication = "0.12.0"
clap = "2.33"
dotenv = "0.15.0"
gettext = "0.4.0"
@@ -19,7 +19,7 @@ rocket = "0.4.11"
rocket_contrib = { version = "0.4.11", features = ["json"] }
rocket_i18n = "0.4.1"
scheduled-thread-pool = "0.2.6"
- serde = "1.0"
+ serde = "1.0.137"
serde_json = "1.0.81"
shrinkwraprs = "0.3.0"
validator = { version = "0.15", features = ["derive"] }
@@ -27,7 +27,7 @@ webfinger = "0.4.1"
tracing = "0.1.35"
tracing-subscriber = "0.3.10"
riker = "0.4.2"
- activitystreams = "0.7.0-alpha.18"
+ activitystreams = "=0.7.0-alpha.20"
[[bin]]
name = "plume"
@@ -60,12 +60,12 @@ path = "plume-common"
path = "plume-models"
[dependencies.rocket_csrf]
- git = "https://github.com/fdb-hiroshima/rocket_csrf"
+ git = "https://git.joinplu.me/plume/rocket_csrf"
- rev = "29910f2829e7e590a540da3804336577b48c7b31"
+ rev = "0.1.2"
[build-dependencies]
- ructe = "0.14.0"
+ ructe = "0.15.0"
- rsass = "0.25"
+ rsass = "0.26"
[features]
default = ["postgres"]

View File

@@ -1,4 +1,4 @@
- FROM rust:1-buster as builder
+ FROM rust:1 as builder
RUN apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \
@@ -18,17 +18,15 @@ COPY script/wasm-deps.sh .
RUN chmod a+x ./wasm-deps.sh && sleep 1 && ./wasm-deps.sh
WORKDIR /app
- COPY Cargo.toml Cargo.lock rust-toolchain ./
- RUN cargo install wasm-pack
COPY . .
+ RUN cargo install wasm-pack
RUN chmod a+x ./script/plume-front.sh && sleep 1 && ./script/plume-front.sh
RUN cargo install --path ./ --force --no-default-features --features postgres
RUN cargo install --path plume-cli --force --no-default-features --features postgres
RUN cargo clean
- FROM debian:buster-slim
+ FROM debian:stable-slim
RUN apt-get update && apt-get install -y --no-install-recommends \
ca-certificates \

View File

@@ -5,5 +5,5 @@ authors = ["Plume contributors"]
edition = "2018"
[dependencies]
- serde = "1.0"
+ serde = "1.0.137"
serde_derive = "1.0"

View File

@@ -11,7 +11,7 @@ hex = "0.4"
openssl = "0.10.40"
rocket = "0.4.11"
reqwest = { version = "0.11.11", features = ["blocking", "json", "socks"] }
- serde = "1.0"
+ serde = "1.0.137"
serde_derive = "1.0"
serde_json = "1.0.81"
shrinkwraprs = "0.3.0"
@@ -19,12 +19,12 @@ syntect = "4.5.0"
regex-syntax = { version = "0.6.26", default-features = false, features = ["unicode-perl"] }
tracing = "0.1.35"
askama_escape = "0.10.3"
- activitystreams = "0.7.0-alpha.18"
+ activitystreams = "=0.7.0-alpha.20"
activitystreams-ext = "0.1.0-alpha.2"
url = "2.2.2"
flume = "0.10.13"
tokio = { version = "1.19.2", features = ["full"] }
- futures = "0.3.21"
+ futures = "0.3.25"
[dependencies.chrono]
features = ["serde"]

View File

@@ -561,7 +561,7 @@ mod tests {
use once_cell::sync::Lazy;
use openssl::{hash::MessageDigest, pkey::PKey, rsa::Rsa};
- static MY_SIGNER: Lazy<MySigner> = Lazy::new(|| MySigner::new());
+ static MY_SIGNER: Lazy<MySigner> = Lazy::new(MySigner::new);
struct MySigner {
public_key: String,
@@ -596,7 +596,7 @@ mod tests {
.unwrap();
let mut verifier = openssl::sign::Verifier::new(MessageDigest::sha256(), &key).unwrap();
verifier.update(data.as_bytes()).unwrap();
- verifier.verify(&signature).map_err(|_| SignError())
+ verifier.verify(signature).map_err(|_| SignError())
}
}
@@ -782,7 +782,7 @@ mod tests {
.done();
assert!(res.is_err());
- let res: Result<(), ()> = Inbox::handle(&(), act.clone())
+ let res: Result<(), ()> = Inbox::handle(&(), act)
.with::<FailingActor, Create, MyObject>(None)
.with::<MyActor, Create, MyObject>(None)
.done();

View File

@@ -518,7 +518,8 @@ mod tests {
use super::*;
use activitystreams::{
activity::{ActorAndObjectRef, Create},
- object::kind::ArticleType,
+ object::{kind::ArticleType, Image},
+ prelude::{ApActorExt, BaseExt, ExtendsExt, ObjectExt},
};
use assert_json_diff::assert_json_eq;
use serde_json::{from_str, json, to_value};
@@ -592,7 +593,7 @@ mod tests {
}
#[test]
- fn de_custom_group() {
+ fn se_custom_group() {
let group = CustomGroup::new(
ApActor::new("https://example.com/inbox".parse().unwrap(), Group::new()),
ApSignature {
@@ -625,6 +626,71 @@ mod tests {
assert_eq!(to_value(group).unwrap(), expected);
}
#[test]
fn de_custom_group() {
let value: CustomGroup = from_str(
r#"
{
"icon": {
"type": "Image"
},
"id": "https://plume01.localhost/~/Plume01%20Blog%202/",
"image": {
"type": "Image"
},
"inbox": "https://plume01.localhost/~/Plume01%20Blog%202/inbox",
"name": "Plume01 Blog 2",
"outbox": "https://plume01.localhost/~/Plume01%20Blog%202/outbox",
"preferredUsername": "Plume01 Blog 2",
"publicKey": {
"id": "https://plume01.localhost/~/Plume01%20Blog%202/#main-key",
"owner": "https://plume01.localhost/~/Plume01%20Blog%202/",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwPGtKkl/iMsNAyeVaJGz\noEz5PoNkjRnKK7G97MFvb4zw9zs5SpzWW7b/pKHa4dODcGDJXmkCJ1H5JWyguzN8\n2GNoFjtEOJHxEGwBHSYDsTmhuLNB0DKxMU2iu55g8iIiXhZiIW1FBNGs/Geaymvr\nh/TEtzdReN8wzloRR55kOVcU49xBkqx8cfDSk/lrrDLlpveHdqgaFnIvuw2vycK0\nxFzS3xlEUpzJk9kHxoR1uEAfZ+gCv26Sgo/HqOAhqSD5IU3QZC3kdkr/hwVqtr8U\nXGkGG6Mo1rgzhkYiCFkWrV2WoKkcEHD4nEzbgoZZ5MyuSoloxnyF3NiScqmqW+Yx\nkQIDAQAB\n-----END PUBLIC KEY-----\n"
},
"source": {
"content": "",
"mediaType": "text/markdown"
},
"summary": "",
"type": "Group"
}
"#
).unwrap();
let mut expected = CustomGroup::new(
ApActor::new("https://plume01.localhost/~/Plume01%20Blog%202/inbox".parse().unwrap(), Group::new()),
ApSignature {
public_key: PublicKey {
id: "https://plume01.localhost/~/Plume01%20Blog%202/#main-key".parse().unwrap(),
owner: "https://plume01.localhost/~/Plume01%20Blog%202/".parse().unwrap(),
public_key_pem: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwPGtKkl/iMsNAyeVaJGz\noEz5PoNkjRnKK7G97MFvb4zw9zs5SpzWW7b/pKHa4dODcGDJXmkCJ1H5JWyguzN8\n2GNoFjtEOJHxEGwBHSYDsTmhuLNB0DKxMU2iu55g8iIiXhZiIW1FBNGs/Geaymvr\nh/TEtzdReN8wzloRR55kOVcU49xBkqx8cfDSk/lrrDLlpveHdqgaFnIvuw2vycK0\nxFzS3xlEUpzJk9kHxoR1uEAfZ+gCv26Sgo/HqOAhqSD5IU3QZC3kdkr/hwVqtr8U\nXGkGG6Mo1rgzhkYiCFkWrV2WoKkcEHD4nEzbgoZZ5MyuSoloxnyF3NiScqmqW+Yx\nkQIDAQAB\n-----END PUBLIC KEY-----\n".into(),
}
},
SourceProperty {
source: Source {
content: String::from(""),
media_type: String::from("text/markdown")
}
}
);
expected.set_icon(Image::new().into_any_base().unwrap());
expected.set_id(
"https://plume01.localhost/~/Plume01%20Blog%202/"
.parse()
.unwrap(),
);
expected.set_image(Image::new().into_any_base().unwrap());
expected.set_name("Plume01 Blog 2");
expected.set_outbox(
"https://plume01.localhost/~/Plume01%20Blog%202/outbox"
.parse()
.unwrap(),
);
expected.set_preferred_username("Plume01 Blog 2");
expected.set_summary("");
assert_json_eq!(value, expected);
}
#[test]
fn se_licensed_article() {
let object = ApObject::new(Article::new());

View File

@@ -253,7 +253,7 @@ mod tests {
.unwrap();
let mut verifier = openssl::sign::Verifier::new(MessageDigest::sha256(), &key).unwrap();
verifier.update(data.as_bytes()).unwrap();
- verifier.verify(&signature).map_err(|_| Error())
+ verifier.verify(signature).map_err(|_| Error())
}
}
@@ -262,7 +262,7 @@
let signer = MySigner::new();
let headers = HeaderMap::new();
let result = signature(&signer, &headers, ("post", "/inbox", None)).unwrap();
- let fields: Vec<&str> = result.to_str().unwrap().split(",").collect();
+ let fields: Vec<&str> = result.to_str().unwrap().split(',').collect();
assert_eq!(r#"headers="(request-target)""#, fields[2]);
let sign = &fields[3][11..(fields[3].len() - 1)];
assert!(signer.verify("post /inbox", sign.as_bytes()).is_ok());

View File

@@ -119,7 +119,7 @@ impl Signable for serde_json::Value {
}
}
- #[derive(Debug, Copy, Clone, PartialEq)]
+ #[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum SignatureValidity {
Invalid,
ValidNoDigest,

View File

@@ -4,6 +4,9 @@ version = "0.7.2"
authors = ["Plume contributors"]
edition = "2018"
+ [package.metadata.wasm-pack.profile.release]
+ wasm-opt = false
[lib]
crate-type = ["cdylib"]
@@ -12,7 +15,7 @@ gettext = "0.4.0"
gettext-macros = "0.6.1"
gettext-utils = "0.1.0"
lazy_static = "1.3"
- serde = "1.0"
+ serde = "1.0.137"
serde_json = "1.0"
wasm-bindgen = "0.2.81"
js-sys = "0.3.58"

View File

@@ -10,31 +10,31 @@ bcrypt = "0.12.1"
guid-create = "0.2"
itertools = "0.10.3"
lazy_static = "1.0"
- ldap3 = "0.10.5"
+ ldap3 = "0.11.1"
migrations_internals= "1.4.0"
openssl = "0.10.40"
rocket = "0.4.11"
rocket_i18n = "0.4.1"
reqwest = "0.11.11"
scheduled-thread-pool = "0.2.6"
- serde = "1.0"
+ serde = "1.0.137"
serde_derive = "1.0"
serde_json = "1.0.81"
tantivy = "0.13.3"
url = "2.1"
walkdir = "2.2"
webfinger = "0.4.1"
- whatlang = "0.16.0"
+ whatlang = "0.16.2"
shrinkwraprs = "0.3.0"
diesel-derive-newtype = "1.0.0"
- glob = "0.3.0"
+ glob = "0.3.1"
lindera-tantivy = { version = "0.7.1", optional = true }
tracing = "0.1.35"
riker = "0.4.2"
once_cell = "1.12.0"
lettre = "0.9.6"
native-tls = "0.2.10"
- activitystreams = "0.7.0-alpha.18"
+ activitystreams = "=0.7.0-alpha.20"
[dependencies.chrono]
features = ["serde"]

View File

@@ -103,7 +103,7 @@ impl<'a, 'r> FromRequest<'a, 'r> for ApiToken {
let conn = request
.guard::<DbConn>()
.map_failure(|_| (Status::InternalServerError, TokenError::DbError))?;
- if let Ok(token) = ApiToken::find_by_value(&*conn, val) {
+ if let Ok(token) = ApiToken::find_by_value(&conn, val) {
return Outcome::Success(token);
}
}

View File

@@ -126,12 +126,9 @@ pub(crate) mod tests {
.id,
various[1].id
);
- assert_eq!(
- BlocklistedEmail::matches_blocklist(&conn, no_match)
+ assert!(BlocklistedEmail::matches_blocklist(&conn, no_match)
.unwrap()
- .is_none(),
- true
- );
+ .is_none());
Ok(())
});
}

View File

@ -18,10 +18,13 @@ use openssl::{
rsa::Rsa, rsa::Rsa,
sign::{Signer, Verifier}, sign::{Signer, Verifier},
}; };
use plume_common::activity_pub::{ use plume_common::{
activity_pub::{
inbox::{AsActor, FromId}, inbox::{AsActor, FromId},
sign, ActivityStream, ApSignature, CustomGroup, Id, IntoId, PublicKey, Source, SourceProperty, sign, ActivityStream, ApSignature, CustomGroup, Id, IntoId, PublicKey, Source,
ToAsString, ToAsUri, SourceProperty, ToAsString, ToAsUri,
},
utils::iri_percent_encode_seg,
}; };
use webfinger::*; use webfinger::*;
@ -83,9 +86,13 @@ impl Blog {
if inserted.fqn.is_empty() { if inserted.fqn.is_empty() {
if instance.local { if instance.local {
inserted.fqn = inserted.actor_id.clone(); inserted.fqn = iri_percent_encode_seg(&inserted.actor_id);
} else { } else {
inserted.fqn = format!("{}@{}", inserted.actor_id, instance.public_domain); inserted.fqn = format!(
"{}@{}",
iri_percent_encode_seg(&inserted.actor_id),
instance.public_domain
);
} }
} }
@ -166,7 +173,7 @@ impl Blog {
pub fn to_activity(&self, conn: &Connection) -> Result<CustomGroup> { pub fn to_activity(&self, conn: &Connection) -> Result<CustomGroup> {
let mut blog = ApActor::new(self.inbox_url.parse()?, Group::new()); let mut blog = ApActor::new(self.inbox_url.parse()?, Group::new());
blog.set_preferred_username(self.actor_id.clone()); blog.set_preferred_username(iri_percent_encode_seg(&self.actor_id));
blog.set_name(self.title.clone()); blog.set_name(self.title.clone());
blog.set_outbox(self.outbox_url.parse()?); blog.set_outbox(self.outbox_url.parse()?);
blog.set_summary(self.summary_html.to_string()); blog.set_summary(self.summary_html.to_string());
@ -381,6 +388,7 @@ impl FromId<Connection> for Blog {
.ok_or(Error::MissingApProperty)? .ok_or(Error::MissingApProperty)?
.to_string(); .to_string();
if name.contains(&['<', '>', '&', '@', '\'', '"', ' ', '\t'][..]) { if name.contains(&['<', '>', '&', '@', '\'', '"', ' ', '\t'][..]) {
tracing::error!("preferredUsername includes invalid character(s): {}", &name);
return Err(Error::InvalidValue); return Err(Error::InvalidValue);
} }
( (
@ -660,7 +668,7 @@ pub(crate) mod tests {
.unwrap() .unwrap()
.id, .id,
); );
let _: Blog = blog1.save_changes(&*conn).unwrap(); let _: Blog = blog1.save_changes(conn).unwrap();
(users, vec![blog1, blog2, blog3]) (users, vec![blog1, blog2, blog3])
} }
@ -669,10 +677,10 @@ pub(crate) mod tests {
fn get_instance() { fn get_instance() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
fill_database(&conn); fill_database(conn);
let blog = Blog::insert( let blog = Blog::insert(
&conn, conn,
NewBlog::new_local( NewBlog::new_local(
"SomeName".to_owned(), "SomeName".to_owned(),
"Some name".to_owned(), "Some name".to_owned(),
@ -684,7 +692,7 @@ pub(crate) mod tests {
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
blog.get_instance(&conn).unwrap().id, blog.get_instance(conn).unwrap().id,
Instance::get_local().unwrap().id Instance::get_local().unwrap().id
); );
// TODO add tests for remote instance // TODO add tests for remote instance
@ -696,10 +704,10 @@ pub(crate) mod tests {
fn authors() { fn authors() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let (user, _) = fill_database(&conn); let (user, _) = fill_database(conn);
let b1 = Blog::insert( let b1 = Blog::insert(
&conn, conn,
NewBlog::new_local( NewBlog::new_local(
"SomeName".to_owned(), "SomeName".to_owned(),
"Some name".to_owned(), "Some name".to_owned(),
@ -710,7 +718,7 @@ pub(crate) mod tests {
) )
.unwrap(); .unwrap();
let b2 = Blog::insert( let b2 = Blog::insert(
&conn, conn,
NewBlog::new_local( NewBlog::new_local(
"Blog".to_owned(), "Blog".to_owned(),
"Blog".to_owned(), "Blog".to_owned(),
@ -723,7 +731,7 @@ pub(crate) mod tests {
let blog = vec![b1, b2]; let blog = vec![b1, b2];
BlogAuthor::insert( BlogAuthor::insert(
&conn, conn,
NewBlogAuthor { NewBlogAuthor {
blog_id: blog[0].id, blog_id: blog[0].id,
author_id: user[0].id, author_id: user[0].id,
@ -733,7 +741,7 @@ pub(crate) mod tests {
.unwrap(); .unwrap();
BlogAuthor::insert( BlogAuthor::insert(
&conn, conn,
NewBlogAuthor { NewBlogAuthor {
blog_id: blog[0].id, blog_id: blog[0].id,
author_id: user[1].id, author_id: user[1].id,
@ -743,7 +751,7 @@ pub(crate) mod tests {
.unwrap(); .unwrap();
BlogAuthor::insert( BlogAuthor::insert(
&conn, conn,
NewBlogAuthor { NewBlogAuthor {
blog_id: blog[1].id, blog_id: blog[1].id,
author_id: user[0].id, author_id: user[0].id,
@ -753,39 +761,39 @@ pub(crate) mod tests {
.unwrap(); .unwrap();
assert!(blog[0] assert!(blog[0]
.list_authors(&conn) .list_authors(conn)
.unwrap() .unwrap()
.iter() .iter()
.any(|a| a.id == user[0].id)); .any(|a| a.id == user[0].id));
assert!(blog[0] assert!(blog[0]
.list_authors(&conn) .list_authors(conn)
.unwrap() .unwrap()
.iter() .iter()
.any(|a| a.id == user[1].id)); .any(|a| a.id == user[1].id));
assert!(blog[1] assert!(blog[1]
.list_authors(&conn) .list_authors(conn)
.unwrap() .unwrap()
.iter() .iter()
.any(|a| a.id == user[0].id)); .any(|a| a.id == user[0].id));
assert!(!blog[1] assert!(!blog[1]
.list_authors(&conn) .list_authors(conn)
.unwrap() .unwrap()
.iter() .iter()
.any(|a| a.id == user[1].id)); .any(|a| a.id == user[1].id));
assert!(Blog::find_for_author(&conn, &user[0]) assert!(Blog::find_for_author(conn, &user[0])
.unwrap() .unwrap()
.iter() .iter()
.any(|b| b.id == blog[0].id)); .any(|b| b.id == blog[0].id));
assert!(Blog::find_for_author(&conn, &user[1]) assert!(Blog::find_for_author(conn, &user[1])
.unwrap() .unwrap()
.iter() .iter()
.any(|b| b.id == blog[0].id)); .any(|b| b.id == blog[0].id));
assert!(Blog::find_for_author(&conn, &user[0]) assert!(Blog::find_for_author(conn, &user[0])
.unwrap() .unwrap()
.iter() .iter()
.any(|b| b.id == blog[1].id)); .any(|b| b.id == blog[1].id));
assert!(!Blog::find_for_author(&conn, &user[1]) assert!(!Blog::find_for_author(conn, &user[1])
.unwrap() .unwrap()
.iter() .iter()
.any(|b| b.id == blog[1].id)); .any(|b| b.id == blog[1].id));
@ -797,10 +805,10 @@ pub(crate) mod tests {
fn find_local() { fn find_local() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
fill_database(&conn); fill_database(conn);
let blog = Blog::insert( let blog = Blog::insert(
&conn, conn,
NewBlog::new_local( NewBlog::new_local(
"SomeName".to_owned(), "SomeName".to_owned(),
"Some name".to_owned(), "Some name".to_owned(),
@ -811,7 +819,7 @@ pub(crate) mod tests {
) )
.unwrap(); .unwrap();
assert_eq!(Blog::find_by_fqn(&conn, "SomeName").unwrap().id, blog.id); assert_eq!(Blog::find_by_fqn(conn, "SomeName").unwrap().id, blog.id);
Ok(()) Ok(())
}) })
} }
@ -820,10 +828,10 @@ pub(crate) mod tests {
fn get_fqn() { fn get_fqn() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
fill_database(&conn); fill_database(conn);
let blog = Blog::insert( let blog = Blog::insert(
&conn, conn,
NewBlog::new_local( NewBlog::new_local(
"SomeName".to_owned(), "SomeName".to_owned(),
"Some name".to_owned(), "Some name".to_owned(),
@ -843,10 +851,10 @@ pub(crate) mod tests {
fn delete() { fn delete() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let (_, blogs) = fill_database(&conn); let (_, blogs) = fill_database(conn);
blogs[0].delete(&conn).unwrap(); blogs[0].delete(conn).unwrap();
assert!(Blog::get(&conn, blogs[0].id).is_err()); assert!(Blog::get(conn, blogs[0].id).is_err());
Ok(()) Ok(())
}) })
} }
@ -855,10 +863,10 @@ pub(crate) mod tests {
fn delete_via_user() { fn delete_via_user() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let (user, _) = fill_database(&conn); let (user, _) = fill_database(conn);
let b1 = Blog::insert( let b1 = Blog::insert(
&conn, conn,
NewBlog::new_local( NewBlog::new_local(
"SomeName".to_owned(), "SomeName".to_owned(),
"Some name".to_owned(), "Some name".to_owned(),
@ -869,7 +877,7 @@ pub(crate) mod tests {
) )
.unwrap(); .unwrap();
let b2 = Blog::insert( let b2 = Blog::insert(
&conn, conn,
NewBlog::new_local( NewBlog::new_local(
"Blog".to_owned(), "Blog".to_owned(),
"Blog".to_owned(), "Blog".to_owned(),
@ -882,7 +890,7 @@ pub(crate) mod tests {
let blog = vec![b1, b2]; let blog = vec![b1, b2];
BlogAuthor::insert( BlogAuthor::insert(
&conn, conn,
NewBlogAuthor { NewBlogAuthor {
blog_id: blog[0].id, blog_id: blog[0].id,
author_id: user[0].id, author_id: user[0].id,
@ -892,7 +900,7 @@ pub(crate) mod tests {
.unwrap(); .unwrap();
BlogAuthor::insert( BlogAuthor::insert(
&conn, conn,
NewBlogAuthor { NewBlogAuthor {
blog_id: blog[0].id, blog_id: blog[0].id,
author_id: user[1].id, author_id: user[1].id,
@ -902,7 +910,7 @@ pub(crate) mod tests {
.unwrap(); .unwrap();
BlogAuthor::insert( BlogAuthor::insert(
&conn, conn,
NewBlogAuthor { NewBlogAuthor {
blog_id: blog[1].id, blog_id: blog[1].id,
author_id: user[0].id, author_id: user[0].id,
@ -911,11 +919,11 @@ pub(crate) mod tests {
) )
.unwrap(); .unwrap();
user[0].delete(&conn).unwrap(); user[0].delete(conn).unwrap();
assert!(Blog::get(&conn, blog[0].id).is_ok()); assert!(Blog::get(conn, blog[0].id).is_ok());
assert!(Blog::get(&conn, blog[1].id).is_err()); assert!(Blog::get(conn, blog[1].id).is_err());
user[1].delete(&conn).unwrap(); user[1].delete(conn).unwrap();
assert!(Blog::get(&conn, blog[0].id).is_err()); assert!(Blog::get(conn, blog[0].id).is_err());
Ok(()) Ok(())
}) })
} }
@ -924,10 +932,10 @@ pub(crate) mod tests {
fn self_federation() { fn self_federation() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let (users, mut blogs) = fill_database(&conn); let (users, mut blogs) = fill_database(conn);
blogs[0].icon_id = Some( blogs[0].icon_id = Some(
Media::insert( Media::insert(
&conn, conn,
NewMedia { NewMedia {
file_path: "aaa.png".into(), file_path: "aaa.png".into(),
alt_text: String::new(), alt_text: String::new(),
@ -943,7 +951,7 @@ pub(crate) mod tests {
); );
blogs[0].banner_id = Some( blogs[0].banner_id = Some(
Media::insert( Media::insert(
&conn, conn,
NewMedia { NewMedia {
file_path: "bbb.png".into(), file_path: "bbb.png".into(),
alt_text: String::new(), alt_text: String::new(),
@ -958,9 +966,9 @@ pub(crate) mod tests {
.id, .id,
); );
let _: Blog = blogs[0].save_changes(&**conn).unwrap(); let _: Blog = blogs[0].save_changes(&**conn).unwrap();
let ap_repr = blogs[0].to_activity(&conn).unwrap(); let ap_repr = blogs[0].to_activity(conn).unwrap();
blogs[0].delete(&conn).unwrap(); blogs[0].delete(conn).unwrap();
let blog = Blog::from_activity(&conn, ap_repr).unwrap(); let blog = Blog::from_activity(conn, ap_repr).unwrap();
assert_eq!(blog.actor_id, blogs[0].actor_id); assert_eq!(blog.actor_id, blogs[0].actor_id);
assert_eq!(blog.title, blogs[0].title); assert_eq!(blog.title, blogs[0].title);
@ -972,8 +980,8 @@ pub(crate) mod tests {
assert_eq!(blog.public_key, blogs[0].public_key); assert_eq!(blog.public_key, blogs[0].public_key);
assert_eq!(blog.fqn, blogs[0].fqn); assert_eq!(blog.fqn, blogs[0].fqn);
assert_eq!(blog.summary_html, blogs[0].summary_html); assert_eq!(blog.summary_html, blogs[0].summary_html);
assert_eq!(blog.icon_url(&conn), blogs[0].icon_url(&conn)); assert_eq!(blog.icon_url(conn), blogs[0].icon_url(conn));
assert_eq!(blog.banner_url(&conn), blogs[0].banner_url(&conn)); assert_eq!(blog.banner_url(conn), blogs[0].banner_url(conn));
Ok(()) Ok(())
}) })
@ -983,7 +991,7 @@ pub(crate) mod tests {
fn to_activity() { fn to_activity() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, Error, _>(|| { conn.test_transaction::<_, Error, _>(|| {
let (_users, blogs) = fill_database(&conn); let (_users, blogs) = fill_database(conn);
let blog = &blogs[0]; let blog = &blogs[0];
let act = blog.to_activity(conn)?; let act = blog.to_activity(conn)?;

View File

@ -430,7 +430,7 @@ mod tests {
use serde_json::{json, to_value}; use serde_json::{json, to_value};
fn prepare_activity(conn: &DbConn) -> (Comment, Vec<Post>, Vec<User>, Vec<Blog>) { fn prepare_activity(conn: &DbConn) -> (Comment, Vec<Post>, Vec<User>, Vec<Blog>) {
let (posts, users, blogs) = fill_database(&conn); let (posts, users, blogs) = fill_database(conn);
let comment = Comment::insert( let comment = Comment::insert(
conn, conn,
@ -456,8 +456,8 @@ mod tests {
fn self_federation() { fn self_federation() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let (original_comm, posts, users, _blogs) = prepare_activity(&conn); let (original_comm, posts, users, _blogs) = prepare_activity(conn);
let act = original_comm.create_activity(&conn).unwrap(); let act = original_comm.create_activity(conn).unwrap();
assert_json_eq!(to_value(&act).unwrap(), json!({ assert_json_eq!(to_value(&act).unwrap(), json!({
"actor": "https://plu.me/@/admin/", "actor": "https://plu.me/@/admin/",
@ -499,7 +499,7 @@ mod tests {
}, },
) )
.unwrap(); .unwrap();
let reply_act = reply.create_activity(&conn).unwrap(); let reply_act = reply.create_activity(conn).unwrap();
assert_json_eq!(to_value(&reply_act).unwrap(), json!({ assert_json_eq!(to_value(&reply_act).unwrap(), json!({
"actor": "https://plu.me/@/user/", "actor": "https://plu.me/@/user/",
@ -521,12 +521,12 @@ mod tests {
})); }));
inbox( inbox(
&conn, conn,
serde_json::to_value(original_comm.build_delete(&conn).unwrap()).unwrap(), serde_json::to_value(original_comm.build_delete(conn).unwrap()).unwrap(),
) )
.unwrap(); .unwrap();
match inbox(&conn, to_value(act).unwrap()).unwrap() { match inbox(conn, to_value(act).unwrap()).unwrap() {
InboxResult::Commented(c) => { InboxResult::Commented(c) => {
// TODO: one is HTML, the other markdown: assert_eq!(c.content, original_comm.content); // TODO: one is HTML, the other markdown: assert_eq!(c.content, original_comm.content);
assert_eq!(c.in_response_to_id, original_comm.in_response_to_id); assert_eq!(c.in_response_to_id, original_comm.in_response_to_id);

View File

@@ -69,7 +69,8 @@ pub(crate) mod tests {
impl CustomizeConnection<Connection, ConnError> for TestConnectionCustomizer {
fn on_acquire(&self, conn: &mut Connection) -> Result<(), ConnError> {
PragmaForeignKey.on_acquire(conn)?;
- Ok(conn.begin_test_transaction().unwrap())
+ conn.begin_test_transaction().unwrap();
+ Ok(())
}
}
}

View File

@ -1,4 +1,5 @@
use crate::{ use crate::{
blocklisted_emails::BlocklistedEmail,
db_conn::DbConn, db_conn::DbConn,
schema::email_signups, schema::email_signups,
users::{NewUser, Role, User}, users::{NewUser, Role, User},
@ -60,6 +61,8 @@ pub struct NewEmailSignup<'a> {
impl EmailSignup { impl EmailSignup {
pub fn start(conn: &DbConn, email: &str) -> Result<Token> { pub fn start(conn: &DbConn, email: &str) -> Result<Token> {
Self::ensure_email_not_blocked(conn, email)?;
conn.transaction(|| { conn.transaction(|| {
Self::ensure_user_not_exist_by_email(conn, email)?; Self::ensure_user_not_exist_by_email(conn, email)?;
let _rows = Self::delete_existings_by_email(conn, email)?; let _rows = Self::delete_existings_by_email(conn, email)?;
@ -90,6 +93,8 @@ impl EmailSignup {
} }
pub fn confirm(&self, conn: &DbConn) -> Result<()> { pub fn confirm(&self, conn: &DbConn) -> Result<()> {
Self::ensure_email_not_blocked(conn, &self.email)?;
conn.transaction(|| { conn.transaction(|| {
Self::ensure_user_not_exist_by_email(conn, &self.email)?; Self::ensure_user_not_exist_by_email(conn, &self.email)?;
if self.expired() { if self.expired() {
@ -101,6 +106,8 @@ impl EmailSignup {
} }
pub fn complete(&self, conn: &DbConn, username: String, password: String) -> Result<User> { pub fn complete(&self, conn: &DbConn, username: String, password: String) -> Result<User> {
Self::ensure_email_not_blocked(conn, &self.email)?;
conn.transaction(|| { conn.transaction(|| {
Self::ensure_user_not_exist_by_email(conn, &self.email)?; Self::ensure_user_not_exist_by_email(conn, &self.email)?;
let user = NewUser::new_local( let user = NewUser::new_local(
@ -122,6 +129,14 @@ impl EmailSignup {
Ok(()) Ok(())
} }
fn ensure_email_not_blocked(conn: &DbConn, email: &str) -> Result<()> {
if let Some(x) = BlocklistedEmail::matches_blocklist(conn, email)? {
Err(Error::Blocklisted(x.notify_user, x.notification_text))
} else {
Ok(())
}
}
fn ensure_user_not_exist_by_email(conn: &DbConn, email: &str) -> Result<()> { fn ensure_user_not_exist_by_email(conn: &DbConn, email: &str) -> Result<()> {
if User::email_used(conn, email)? { if User::email_used(conn, email)? {
let _rows = Self::delete_existings_by_email(conn, email)?; let _rows = Self::delete_existings_by_email(conn, email)?;

View File

@@ -107,12 +107,7 @@ impl Follow {
res.notify(conn)?;
let accept = res.build_accept(from, target, follow)?;
- broadcast(
- &*target,
- accept,
- vec![from.clone()],
- CONFIG.proxy().cloned(),
- );
+ broadcast(target, accept, vec![from.clone()], CONFIG.proxy().cloned());
Ok(res)
}

View File

@ -82,9 +82,9 @@ pub(crate) mod tests {
use crate::post_authors::*; use crate::post_authors::*;
use crate::posts::*; use crate::posts::*;
let (users, blogs) = blog_fill_db(&conn); let (users, blogs) = blog_fill_db(conn);
let post = Post::insert( let post = Post::insert(
&conn, conn,
NewPost { NewPost {
blog_id: blogs[0].id, blog_id: blogs[0].id,
slug: "testing".to_owned(), slug: "testing".to_owned(),
@ -102,7 +102,7 @@ pub(crate) mod tests {
.unwrap(); .unwrap();
PostAuthor::insert( PostAuthor::insert(
&conn, conn,
NewPostAuthor { NewPostAuthor {
post_id: post.id, post_id: post.id,
author_id: users[0].id, author_id: users[0].id,
@ -190,7 +190,7 @@ pub(crate) mod tests {
}); });
assert!(matches!( assert!(matches!(
super::inbox(&conn, act.clone()), super::inbox(&conn, act),
Err(super::Error::Inbox( Err(super::Error::Inbox(
box plume_common::activity_pub::inbox::InboxError::InvalidObject(_), box plume_common::activity_pub::inbox::InboxError::InvalidObject(_),
)) ))
@ -221,7 +221,7 @@ pub(crate) mod tests {
}); });
assert!(matches!( assert!(matches!(
super::inbox(&conn, act.clone()), super::inbox(&conn, act),
Err(super::Error::Inbox( Err(super::Error::Inbox(
box plume_common::activity_pub::inbox::InboxError::InvalidObject(_), box plume_common::activity_pub::inbox::InboxError::InvalidObject(_),
)) ))
@ -249,7 +249,7 @@ pub(crate) mod tests {
}); });
assert!(matches!( assert!(matches!(
super::inbox(&conn, act.clone()), super::inbox(&conn, act),
Err(super::Error::Inbox( Err(super::Error::Inbox(
box plume_common::activity_pub::inbox::InboxError::InvalidObject(_), box plume_common::activity_pub::inbox::InboxError::InvalidObject(_),
)) ))
@ -324,7 +324,7 @@ pub(crate) mod tests {
}); });
assert!(matches!( assert!(matches!(
super::inbox(&conn, act.clone()), super::inbox(&conn, act),
Err(super::Error::Inbox( Err(super::Error::Inbox(
box plume_common::activity_pub::inbox::InboxError::InvalidObject(_), box plume_common::activity_pub::inbox::InboxError::InvalidObject(_),
)) ))
@ -362,7 +362,7 @@ pub(crate) mod tests {
}); });
assert!(matches!( assert!(matches!(
super::inbox(&conn, act.clone()), super::inbox(&conn, act),
Err(super::Error::Inbox( Err(super::Error::Inbox(
box plume_common::activity_pub::inbox::InboxError::InvalidObject(_), box plume_common::activity_pub::inbox::InboxError::InvalidObject(_),
)) ))
@ -397,7 +397,7 @@ pub(crate) mod tests {
}); });
assert!(matches!( assert!(matches!(
super::inbox(&conn, act.clone()), super::inbox(&conn, act),
Err(super::Error::Inbox( Err(super::Error::Inbox(
box plume_common::activity_pub::inbox::InboxError::InvalidObject(_), box plume_common::activity_pub::inbox::InboxError::InvalidObject(_),
)) ))

View File

@@ -9,7 +9,7 @@ use crate::{
use chrono::NaiveDateTime;
use diesel::{self, result::Error::NotFound, ExpressionMethods, QueryDsl, RunQueryDsl};
use once_cell::sync::OnceCell;
- use plume_common::utils::md_to_html;
+ use plume_common::utils::{iri_percent_encode_seg, md_to_html};
use std::sync::RwLock;
#[derive(Clone, Identifiable, Queryable)]
@@ -173,8 +173,8 @@ impl Instance {
"{instance}/{prefix}/{name}/{box_name}",
instance = self.public_domain,
prefix = prefix,
- name = name,
+ name = iri_percent_encode_seg(name),
- box_name = box_name
+ box_name = iri_percent_encode_seg(box_name)
))
}
@@ -523,7 +523,7 @@ pub(crate) mod tests {
.unwrap();
let inst = Instance::get(conn, inst.id).unwrap();
assert_eq!(inst.name, "NewName".to_owned());
- assert_eq!(inst.open_registrations, false);
+ assert!(!inst.open_registrations);
assert_eq!(
inst.long_description.get(),
"[long_description](/with_link)"

View File

@@ -177,7 +177,7 @@ pub type Result<T> = std::result::Result<T, Error>;
///
/// Usage:
///
- /// ```rust
+ /// ```ignore
/// impl Model {
/// find_by!(model_table, name_of_the_function, field1 as String, field2 as i32);
/// }
@@ -201,7 +201,7 @@ macro_rules! find_by {
///
/// Usage:
///
- /// ```rust
+ /// ```ignore
/// impl Model {
/// list_by!(model_table, name_of_the_function, field1 as String);
/// }
@@ -225,7 +225,7 @@ macro_rules! list_by {
///
/// # Usage
///
- /// ```rust
+ /// ```ignore
/// impl Model {
/// get!(model_table);
/// }
@@ -248,7 +248,7 @@ macro_rules! get {
///
/// # Usage
///
- /// ```rust
+ /// ```ignore
/// impl Model {
/// insert!(model_table, NewModelType);
/// }
@@ -280,7 +280,7 @@ macro_rules! insert {
///
/// # Usage
///
- /// ```rust
+ /// ```ignore
/// impl Model {
/// last!(model_table);
/// }
@@ -354,7 +354,7 @@ mod tests {
};
}
- pub fn db<'a>() -> db_conn::DbConn {
+ pub fn db() -> db_conn::DbConn {
db_conn::DbConn((*DB_POOL).get().unwrap())
}

View File

@@ -199,7 +199,7 @@ mod tests {
let (posts, _users, _blogs) = fill_database(&conn);
let post = &posts[0];
let user = &post.get_authors(&conn)?[0];
- let like = Like::insert(&*conn, NewLike::new(post, user))?;
+ let like = Like::insert(&conn, NewLike::new(post, user))?;
let act = like.to_activity(&conn).unwrap();
let expected = json!({
@@ -223,8 +223,8 @@
let (posts, _users, _blogs) = fill_database(&conn);
let post = &posts[0];
let user = &post.get_authors(&conn)?[0];
- let like = Like::insert(&*conn, NewLike::new(post, user))?;
+ let like = Like::insert(&conn, NewLike::new(post, user))?;
- let act = like.build_undo(&*conn)?;
+ let act = like.build_undo(&conn)?;
let expected = json!({
"actor": "https://plu.me/@/admin/",

View File

@@ -435,7 +435,7 @@ mod tests {
&List::find_for_user_by_name(conn, l1.user_id, &l1.name).unwrap(),
);
l_eq(
- &&l1u,
+ &l1u,
&List::find_for_user_by_name(conn, l1u.user_id, &l1u.name).unwrap(),
);
Ok(())

View File

@@ -42,7 +42,7 @@ pub struct NewMedia {
pub owner_id: i32,
}
- #[derive(PartialEq)]
+ #[derive(PartialEq, Eq)]
pub enum MediaCategory {
Image,
Audio,
@@ -343,7 +343,7 @@ pub(crate) mod tests {
use std::path::Path;
pub(crate) fn fill_database(conn: &Conn) -> (Vec<User>, Vec<Media>) {
- let mut wd = current_dir().unwrap().to_path_buf();
+ let mut wd = current_dir().unwrap();
while wd.pop() {
if wd.join(".git").exists() {
set_current_dir(wd).unwrap();
@@ -456,7 +456,7 @@ pub(crate) mod tests {
let media = Media::insert(
conn,
NewMedia {
- file_path: path.clone(),
+ file_path: path,
alt_text: "alt message".to_owned(),
is_remote: false,
remote_url: None,

View File

@@ -89,7 +89,7 @@ mod tests {
let request = PasswordResetRequest::find_by_token(&conn, &token)
.expect("couldn't retrieve request");
- assert!(&token.len() > &32);
+ assert!(token.len() > 32);
assert_eq!(&request.email, &admin_email);
Ok(())
@@ -103,8 +103,8 @@
user_tests::fill_database(&conn);
let admin_email = "admin@example.com";
- PasswordResetRequest::insert(&conn, &admin_email).expect("couldn't insert new request");
+ PasswordResetRequest::insert(&conn, admin_email).expect("couldn't insert new request");
- PasswordResetRequest::insert(&conn, &admin_email)
+ PasswordResetRequest::insert(&conn, admin_email)
.expect("couldn't insert second request");
let count = password_reset_requests::table.count().get_result(&*conn);
@@ -132,7 +132,7 @@
.execute(&*conn)
.expect("could not insert request");
- match PasswordResetRequest::find_by_token(&conn, &token) {
+ match PasswordResetRequest::find_by_token(&conn, token) {
Err(Error::Expired) => (),
_ => panic!("Received unexpected result finding expired token"),
}
@@ -148,7 +148,7 @@
user_tests::fill_database(&conn);
let admin_email = "admin@example.com";
- let token = PasswordResetRequest::insert(&conn, &admin_email)
+ let token = PasswordResetRequest::insert(&conn, admin_email)
.expect("couldn't insert new request");
PasswordResetRequest::find_and_delete_by_token(&conn, &token)
.expect("couldn't find and delete request");

View File

@ -134,7 +134,7 @@ impl Post {
.filter(posts::published.eq(true)) .filter(posts::published.eq(true))
.count() .count()
.load(conn)? .load(conn)?
.get(0) .first()
.cloned() .cloned()
.ok_or(Error::NotFound) .ok_or(Error::NotFound)
} }
@ -255,7 +255,7 @@ impl Post {
ap_url(&format!( ap_url(&format!(
"{}/~/{}/{}/", "{}/~/{}/{}/",
CONFIG.base_url, CONFIG.base_url,
blog.fqn, iri_percent_encode_seg(&blog.fqn),
iri_percent_encode_seg(slug) iri_percent_encode_seg(slug)
)) ))
} }
@ -465,7 +465,7 @@ impl Post {
.collect::<HashSet<_>>(); .collect::<HashSet<_>>();
for (m, id) in &mentions { for (m, id) in &mentions {
if !old_user_mentioned.contains(id) { if !old_user_mentioned.contains(id) {
Mention::from_activity(&*conn, m, self.id, true, true)?; Mention::from_activity(conn, m, self.id, true, true)?;
} }
} }
@ -488,7 +488,7 @@ impl Post {
.filter_map(|t| t.name.as_ref().map(|name| name.as_str().to_string())) .filter_map(|t| t.name.as_ref().map(|name| name.as_str().to_string()))
.collect::<HashSet<_>>(); .collect::<HashSet<_>>();
let old_tags = Tag::for_post(&*conn, self.id)?; let old_tags = Tag::for_post(conn, self.id)?;
let old_tags_name = old_tags let old_tags_name = old_tags
.iter() .iter()
.filter_map(|tag| { .filter_map(|tag| {
@ -525,7 +525,7 @@ impl Post {
.filter_map(|t| t.name.as_ref().map(|name| name.as_str().to_string())) .filter_map(|t| t.name.as_ref().map(|name| name.as_str().to_string()))
.collect::<HashSet<_>>(); .collect::<HashSet<_>>();
let old_tags = Tag::for_post(&*conn, self.id)?; let old_tags = Tag::for_post(conn, self.id)?;
let old_tags_name = old_tags let old_tags_name = old_tags
.iter() .iter()
.filter_map(|tag| { .filter_map(|tag| {
@ -756,7 +756,11 @@ impl FromId<Connection> for Post {
let timestamp_secs = published.unix_timestamp(); let timestamp_secs = published.unix_timestamp();
let timestamp_nanos = published.unix_timestamp_nanos() let timestamp_nanos = published.unix_timestamp_nanos()
- (timestamp_secs as i128) * 1000i128 * 1000i128 * 1000i128; - (timestamp_secs as i128) * 1000i128 * 1000i128 * 1000i128;
NaiveDateTime::from_timestamp(timestamp_secs, timestamp_nanos as u32) NaiveDateTime::from_timestamp_opt(
timestamp_secs,
timestamp_nanos as u32,
)
.unwrap()
}), }),
subtitle: article subtitle: article
.summary() .summary()
@ -1036,7 +1040,7 @@ mod tests {
let post = &posts[0]; let post = &posts[0];
let mentioned = &users[1]; let mentioned = &users[1];
let mention = Mention::insert( let mention = Mention::insert(
&conn, conn,
NewMention { NewMention {
mentioned_id: mentioned.id, mentioned_id: mentioned.id,
post_id: Some(post.id), post_id: Some(post.id),
@ -1044,7 +1048,7 @@ mod tests {
}, },
) )
.unwrap(); .unwrap();
(post.to_owned(), mention.to_owned(), posts, users, blogs) (post.to_owned(), mention, posts, users, blogs)
} }
// creates a post, get it's Create activity, delete the post, // creates a post, get it's Create activity, delete the post,
@ -1053,9 +1057,9 @@ mod tests {
fn self_federation() { fn self_federation() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let (_, users, blogs) = fill_database(&conn); let (_, users, blogs) = fill_database(conn);
let post = Post::insert( let post = Post::insert(
&conn, conn,
NewPost { NewPost {
blog_id: blogs[0].id, blog_id: blogs[0].id,
slug: "yo".into(), slug: "yo".into(),
@ -1072,19 +1076,19 @@ mod tests {
) )
.unwrap(); .unwrap();
PostAuthor::insert( PostAuthor::insert(
&conn, conn,
NewPostAuthor { NewPostAuthor {
post_id: post.id, post_id: post.id,
author_id: users[0].id, author_id: users[0].id,
}, },
) )
.unwrap(); .unwrap();
let create = post.create_activity(&conn).unwrap(); let create = post.create_activity(conn).unwrap();
post.delete(&conn).unwrap(); post.delete(conn).unwrap();
match inbox(&conn, serde_json::to_value(create).unwrap()).unwrap() { match inbox(conn, serde_json::to_value(create).unwrap()).unwrap() {
InboxResult::Post(p) => { InboxResult::Post(p) => {
assert!(p.is_author(&conn, users[0].id).unwrap()); assert!(p.is_author(conn, users[0].id).unwrap());
assert_eq!(p.source, "Hello".to_owned()); assert_eq!(p.source, "Hello".to_owned());
assert_eq!(p.blog_id, blogs[0].id); assert_eq!(p.blog_id, blogs[0].id);
assert_eq!(p.content, SafeString::new("Hello")); assert_eq!(p.content, SafeString::new("Hello"));
@ -1221,7 +1225,7 @@ mod tests {
let actual = to_value(act)?; let actual = to_value(act)?;
let id = actual["id"].to_string(); let id = actual["id"].to_string();
let (id_pre, id_post) = id.rsplit_once("-").unwrap(); let (id_pre, id_post) = id.rsplit_once('-').unwrap();
assert_eq!(post.ap_url, "https://plu.me/~/BlogName/testing"); assert_eq!(post.ap_url, "https://plu.me/~/BlogName/testing");
assert_eq!( assert_eq!(
id_pre, id_pre,

View File

@@ -45,6 +45,12 @@ impl Actor for RemoteFetchActor {
RemoteUserFound(user) => match self.conn.get() {
Ok(conn) => {
let conn = DbConn(conn);
+ if user
+ .get_instance(&conn)
+ .map_or(false, |instance| instance.blocked)
+ {
+ return;
+ }
// Don't call these functions in parallel
// for the case database connections limit is too small
fetch_and_cache_articles(&user, &conn);

View File

@@ -229,7 +229,7 @@ mod test {
let (posts, _users, _blogs) = fill_database(&conn);
let post = &posts[0];
let user = &post.get_authors(&conn)?[0];
- let reshare = Reshare::insert(&*conn, NewReshare::new(post, user))?;
+ let reshare = Reshare::insert(&conn, NewReshare::new(post, user))?;
let act = reshare.to_activity(&conn).unwrap();
let expected = json!({
@@ -253,8 +253,8 @@
let (posts, _users, _blogs) = fill_database(&conn);
let post = &posts[0];
let user = &post.get_authors(&conn)?[0];
- let reshare = Reshare::insert(&*conn, NewReshare::new(post, user))?;
+ let reshare = Reshare::insert(&conn, NewReshare::new(post, user))?;
- let act = reshare.build_undo(&*conn)?;
+ let act = reshare.build_undo(&conn)?;
let expected = json!({
"actor": "https://plu.me/@/admin/",

View File

@@ -93,7 +93,7 @@ fn url_add_prefix(url: &str) -> Option<Cow<'_, str>> {
}
}
- #[derive(Debug, Clone, PartialEq, AsExpression, FromSqlRow, Default)]
+ #[derive(Debug, Clone, PartialEq, Eq, AsExpression, FromSqlRow, Default)]
#[sql_type = "Text"]
pub struct SafeString {
value: String,

View File

@ -108,7 +108,7 @@ mod tests {
let searcher = Arc::new(get_searcher(&CONFIG.search_tokenizers)); let searcher = Arc::new(get_searcher(&CONFIG.search_tokenizers));
SearchActor::init(searcher.clone(), db_pool.clone()); SearchActor::init(searcher.clone(), db_pool.clone());
let conn = db_pool.clone().get().unwrap(); let conn = db_pool.get().unwrap();
let title = random_hex()[..8].to_owned(); let title = random_hex()[..8].to_owned();
let (_instance, _user, blog) = fill_database(&conn); let (_instance, _user, blog) = fill_database(&conn);
@ -161,41 +161,43 @@ mod tests {
long_description_html: "<p>Good morning</p>".to_string(), long_description_html: "<p>Good morning</p>".to_string(),
short_description: SafeString::new("Hello"), short_description: SafeString::new("Hello"),
short_description_html: "<p>Hello</p>".to_string(), short_description_html: "<p>Hello</p>".to_string(),
name: random_hex().to_string(), name: random_hex(),
open_registrations: true, open_registrations: true,
public_domain: random_hex().to_string(), public_domain: random_hex(),
}, },
) )
.unwrap(); .unwrap();
let user = User::insert( let user = User::insert(
conn, conn,
NewUser { NewUser {
username: random_hex().to_string(), username: random_hex(),
display_name: random_hex().to_string(), display_name: random_hex(),
outbox_url: random_hex().to_string(), outbox_url: random_hex(),
inbox_url: random_hex().to_string(), inbox_url: random_hex(),
summary: "".to_string(), summary: "".to_string(),
email: None, email: None,
hashed_password: None, hashed_password: None,
instance_id: instance.id, instance_id: instance.id,
ap_url: random_hex().to_string(), ap_url: random_hex(),
private_key: None, private_key: None,
public_key: "".to_string(), public_key: "".to_string(),
shared_inbox_url: None, shared_inbox_url: None,
followers_endpoint: random_hex().to_string(), followers_endpoint: random_hex(),
avatar_id: None, avatar_id: None,
summary_html: SafeString::new(""), summary_html: SafeString::new(""),
role: 0, role: 0,
fqn: random_hex().to_string(), fqn: random_hex(),
}, },
) )
.unwrap(); .unwrap();
let mut blog = NewBlog::default(); let blog = NewBlog {
blog.instance_id = instance.id; instance_id: instance.id,
blog.actor_id = random_hex().to_string(); actor_id: random_hex(),
blog.ap_url = random_hex().to_string(); ap_url: random_hex(),
blog.inbox_url = random_hex().to_string(); inbox_url: random_hex(),
blog.outbox_url = random_hex().to_string(); outbox_url: random_hex(),
..Default::default()
};
let blog = Blog::insert(conn, blog).unwrap(); let blog = Blog::insert(conn, blog).unwrap();
BlogAuthor::insert( BlogAuthor::insert(
conn, conn,

View File

@@ -154,7 +154,7 @@ pub(crate) mod tests {
},
)
.unwrap();
- searcher.add_document(&conn, &post).unwrap();
+ searcher.add_document(conn, &post).unwrap();
searcher.commit();
assert_eq!(
searcher.search_document(conn, Query::from_str(&title).unwrap(), (0, 1))[0].id,

View File

@ -94,7 +94,7 @@ macro_rules! gen_to_string {
)* )*
$( $(
for val in &$self.$date { for val in &$self.$date {
$result.push_str(&format!("{}:{} ", stringify!($date), NaiveDate::from_num_days_from_ce(*val as i32).format("%Y-%m-%d"))); $result.push_str(&format!("{}:{} ", stringify!($date), NaiveDate::from_num_days_from_ce_opt(*val as i32).unwrap().format("%Y-%m-%d")));
} }
)* )*
} }
@ -180,12 +180,16 @@ impl PlumeQuery {
if self.before.is_some() || self.after.is_some() { if self.before.is_some() || self.after.is_some() {
// if at least one range bound is provided // if at least one range bound is provided
let after = self let after = self.after.unwrap_or_else(|| {
.after i64::from(
.unwrap_or_else(|| i64::from(NaiveDate::from_ymd(2000, 1, 1).num_days_from_ce())); NaiveDate::from_ymd_opt(2000, 1, 1)
.unwrap()
.num_days_from_ce(),
)
});
let before = self let before = self
.before .before
.unwrap_or_else(|| i64::from(Utc::today().num_days_from_ce())); .unwrap_or_else(|| i64::from(Utc::now().date_naive().num_days_from_ce()));
let field = Searcher::schema().get_field("creation_date").unwrap(); let field = Searcher::schema().get_field("creation_date").unwrap();
let range = let range =
RangeQuery::new_i64_bounds(field, Bound::Included(after), Bound::Included(before)); RangeQuery::new_i64_bounds(field, Bound::Included(after), Bound::Included(before));
@ -202,16 +206,20 @@ impl PlumeQuery {
pub fn before<D: Datelike>(&mut self, date: &D) -> &mut Self { pub fn before<D: Datelike>(&mut self, date: &D) -> &mut Self {
let before = self let before = self
.before .before
.unwrap_or_else(|| i64::from(Utc::today().num_days_from_ce())); .unwrap_or_else(|| i64::from(Utc::now().date_naive().num_days_from_ce()));
self.before = Some(cmp::min(before, i64::from(date.num_days_from_ce()))); self.before = Some(cmp::min(before, i64::from(date.num_days_from_ce())));
self self
} }
// documents older than the provided date will be ignored // documents older than the provided date will be ignored
pub fn after<D: Datelike>(&mut self, date: &D) -> &mut Self { pub fn after<D: Datelike>(&mut self, date: &D) -> &mut Self {
let after = self let after = self.after.unwrap_or_else(|| {
.after i64::from(
.unwrap_or_else(|| i64::from(NaiveDate::from_ymd(2000, 1, 1).num_days_from_ce())); NaiveDate::from_ymd_opt(2000, 1, 1)
.unwrap()
.num_days_from_ce(),
)
});
self.after = Some(cmp::max(after, i64::from(date.num_days_from_ce()))); self.after = Some(cmp::max(after, i64::from(date.num_days_from_ce())));
self self
} }
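
The hunks above appear to be part of a chrono upgrade: the panicking constructors (NaiveDate::from_ymd, from_num_days_from_ce) are deprecated in favour of *_opt variants returning Option, and Utc::today() gives way to Utc::now().date_naive(). A minimal sketch of the newer calls, assuming chrono 0.4:

    use chrono::{Datelike, NaiveDate, Utc};

    fn main() {
        // Old: NaiveDate::from_ymd(2000, 1, 1), which panics on invalid dates and is deprecated.
        let epoch = NaiveDate::from_ymd_opt(2000, 1, 1).unwrap();
        // Old: Utc::today(), now deprecated; the naive date is derived from Utc::now() instead.
        let today = Utc::now().date_naive();
        println!("{} .. {}", epoch.num_days_from_ce(), today.num_days_from_ce());
    }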

View File

@ -92,7 +92,7 @@ mod tests {
let (posts, _users, _blogs) = fill_database(conn); let (posts, _users, _blogs) = fill_database(conn);
let post_id = posts[0].id; let post_id = posts[0].id;
let mut ht = Hashtag::new(); let mut ht = Hashtag::new();
ht.set_href(ap_url(&format!("https://plu.me/tag/a_tag")).parse::<IriString>()?); ht.set_href(ap_url("https://plu.me/tag/a_tag").parse::<IriString>()?);
ht.set_name("a_tag".to_string()); ht.set_name("a_tag".to_string());
let tag = Tag::from_activity(conn, &ht, post_id, true)?; let tag = Tag::from_activity(conn, &ht, post_id, true)?;

View File

@ -13,7 +13,7 @@ pub(crate) mod query;
pub use self::query::Kind; pub use self::query::Kind;
pub use self::query::{QueryError, TimelineQuery}; pub use self::query::{QueryError, TimelineQuery};
#[derive(Clone, Debug, PartialEq, Queryable, Identifiable, AsChangeset)] #[derive(Clone, Debug, PartialEq, Eq, Queryable, Identifiable, AsChangeset)]
#[table_name = "timeline_definition"] #[table_name = "timeline_definition"]
pub struct Timeline { pub struct Timeline {
pub id: i32, pub id: i32,
@ -300,73 +300,63 @@ mod tests {
fn test_timeline() { fn test_timeline() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let users = userTests::fill_database(&conn); let users = userTests::fill_database(conn);
let mut tl1_u1 = Timeline::new_for_user( let mut tl1_u1 = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"my timeline".to_owned(), "my timeline".to_owned(),
"all".to_owned(), "all".to_owned(),
) )
.unwrap(); .unwrap();
List::new( List::new(conn, "languages I speak", Some(&users[1]), ListType::Prefix).unwrap();
&conn,
"languages I speak",
Some(&users[1]),
ListType::Prefix,
)
.unwrap();
let tl2_u1 = Timeline::new_for_user( let tl2_u1 = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"another timeline".to_owned(), "another timeline".to_owned(),
"followed".to_owned(), "followed".to_owned(),
) )
.unwrap(); .unwrap();
let tl1_u2 = Timeline::new_for_user( let tl1_u2 = Timeline::new_for_user(
&conn, conn,
users[1].id, users[1].id,
"english posts".to_owned(), "english posts".to_owned(),
"lang in \"languages I speak\"".to_owned(), "lang in \"languages I speak\"".to_owned(),
) )
.unwrap(); .unwrap();
let tl1_instance = Timeline::new_for_instance( let tl1_instance = Timeline::new_for_instance(
&conn, conn,
"english posts".to_owned(), "english posts".to_owned(),
"license in [cc]".to_owned(), "license in [cc]".to_owned(),
) )
.unwrap(); .unwrap();
assert_eq!(tl1_u1, Timeline::get(&conn, tl1_u1.id).unwrap()); assert_eq!(tl1_u1, Timeline::get(conn, tl1_u1.id).unwrap());
assert_eq!( assert_eq!(
tl2_u1, tl2_u1,
Timeline::find_for_user_by_name(&conn, Some(users[0].id), "another timeline") Timeline::find_for_user_by_name(conn, Some(users[0].id), "another timeline")
.unwrap() .unwrap()
); );
assert_eq!( assert_eq!(
tl1_instance, tl1_instance,
Timeline::find_for_user_by_name(&conn, None, "english posts").unwrap() Timeline::find_for_user_by_name(conn, None, "english posts").unwrap()
); );
let tl_u1 = Timeline::list_for_user(&conn, Some(users[0].id)).unwrap(); let tl_u1 = Timeline::list_for_user(conn, Some(users[0].id)).unwrap();
assert_eq!(3, tl_u1.len()); // it is not 2 because there is a "Your feed" tl created for each user automatically assert_eq!(3, tl_u1.len()); // it is not 2 because there is a "Your feed" tl created for each user automatically
assert!(tl_u1.iter().fold(false, |res, tl| { res || *tl == tl1_u1 })); assert!(tl_u1.iter().any(|tl| *tl == tl1_u1));
assert!(tl_u1.iter().fold(false, |res, tl| { res || *tl == tl2_u1 })); assert!(tl_u1.iter().any(|tl| *tl == tl2_u1));
let tl_instance = Timeline::list_for_user(&conn, None).unwrap(); let tl_instance = Timeline::list_for_user(conn, None).unwrap();
assert_eq!(3, tl_instance.len()); // there are also the local and federated feed by default assert_eq!(3, tl_instance.len()); // there are also the local and federated feed by default
assert!(tl_instance assert!(tl_instance.iter().any(|tl| *tl == tl1_instance));
.iter()
.fold(false, |res, tl| { res || *tl == tl1_instance }));
tl1_u1.name = "My Super TL".to_owned(); tl1_u1.name = "My Super TL".to_owned();
let new_tl1_u2 = tl1_u2.update(&conn).unwrap(); let new_tl1_u2 = tl1_u2.update(conn).unwrap();
let tl_u2 = Timeline::list_for_user(&conn, Some(users[1].id)).unwrap(); let tl_u2 = Timeline::list_for_user(conn, Some(users[1].id)).unwrap();
assert_eq!(2, tl_u2.len()); // same here assert_eq!(2, tl_u2.len()); // same here
assert!(tl_u2 assert!(tl_u2.iter().any(|tl| *tl == new_tl1_u2));
.iter()
.fold(false, |res, tl| { res || *tl == new_tl1_u2 }));
Ok(()) Ok(())
}); });
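
Two recurring cleanups in this hunk: references like &conn are now passed as conn, since conn is already a reference, and the fold(false, |res, tl| res || ...) membership checks become Iterator::any, which states the intent directly and short-circuits. A small sketch of the fold-to-any change:

    fn main() {
        let ids = vec![1, 3, 5, 7];
        // Before: accumulate a bool with fold, always visiting every element.
        let found_fold = ids.iter().fold(false, |res, id| res || *id == 5);
        // After: any() stops at the first match.
        let found_any = ids.iter().any(|id| *id == 5);
        assert_eq!(found_fold, found_any);
    }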
@ -376,48 +366,48 @@ mod tests {
fn test_timeline_creation_error() { fn test_timeline_creation_error() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let users = userTests::fill_database(&conn); let users = userTests::fill_database(conn);
assert!(Timeline::new_for_user( assert!(Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"my timeline".to_owned(), "my timeline".to_owned(),
"invalid keyword".to_owned(), "invalid keyword".to_owned(),
) )
.is_err()); .is_err());
assert!(Timeline::new_for_instance( assert!(Timeline::new_for_instance(
&conn, conn,
"my timeline".to_owned(), "my timeline".to_owned(),
"invalid keyword".to_owned(), "invalid keyword".to_owned(),
) )
.is_err()); .is_err());
assert!(Timeline::new_for_user( assert!(Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"my timeline".to_owned(), "my timeline".to_owned(),
"author in non_existant_list".to_owned(), "author in non_existant_list".to_owned(),
) )
.is_err()); .is_err());
assert!(Timeline::new_for_instance( assert!(Timeline::new_for_instance(
&conn, conn,
"my timeline".to_owned(), "my timeline".to_owned(),
"lang in dont-exist".to_owned(), "lang in dont-exist".to_owned(),
) )
.is_err()); .is_err());
List::new(&conn, "friends", Some(&users[0]), ListType::User).unwrap(); List::new(conn, "friends", Some(&users[0]), ListType::User).unwrap();
List::new(&conn, "idk", None, ListType::Blog).unwrap(); List::new(conn, "idk", None, ListType::Blog).unwrap();
assert!(Timeline::new_for_user( assert!(Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"my timeline".to_owned(), "my timeline".to_owned(),
"blog in friends".to_owned(), "blog in friends".to_owned(),
) )
.is_err()); .is_err());
assert!(Timeline::new_for_instance( assert!(Timeline::new_for_instance(
&conn, conn,
"my timeline".to_owned(), "my timeline".to_owned(),
"not author in idk".to_owned(), "not author in idk".to_owned(),
) )
@ -431,10 +421,10 @@ mod tests {
fn test_simple_match() { fn test_simple_match() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let (users, blogs) = blogTests::fill_database(&conn); let (users, blogs) = blogTests::fill_database(conn);
let gnu_tl = Timeline::new_for_user( let gnu_tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"GNU timeline".to_owned(), "GNU timeline".to_owned(),
"license in [AGPL, LGPL, GPL]".to_owned(), "license in [AGPL, LGPL, GPL]".to_owned(),
@ -442,7 +432,7 @@ mod tests {
.unwrap(); .unwrap();
let gnu_post = Post::insert( let gnu_post = Post::insert(
&conn, conn,
NewPost { NewPost {
blog_id: blogs[0].id, blog_id: blogs[0].id,
slug: "slug".to_string(), slug: "slug".to_string(),
@ -458,10 +448,10 @@ mod tests {
}, },
) )
.unwrap(); .unwrap();
assert!(gnu_tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(gnu_tl.matches(conn, &gnu_post, Kind::Original).unwrap());
let non_free_post = Post::insert( let non_free_post = Post::insert(
&conn, conn,
NewPost { NewPost {
blog_id: blogs[0].id, blog_id: blogs[0].id,
slug: "slug2".to_string(), slug: "slug2".to_string(),
@ -478,7 +468,7 @@ mod tests {
) )
.unwrap(); .unwrap();
assert!(!gnu_tl assert!(!gnu_tl
.matches(&conn, &non_free_post, Kind::Original) .matches(conn, &non_free_post, Kind::Original)
.unwrap()); .unwrap());
Ok(()) Ok(())
@ -489,9 +479,9 @@ mod tests {
fn test_complex_match() { fn test_complex_match() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let (users, blogs) = blogTests::fill_database(&conn); let (users, blogs) = blogTests::fill_database(conn);
Follow::insert( Follow::insert(
&conn, conn,
NewFollow { NewFollow {
follower_id: users[0].id, follower_id: users[0].id,
following_id: users[1].id, following_id: users[1].id,
@ -501,11 +491,11 @@ mod tests {
.unwrap(); .unwrap();
let fav_blogs_list = let fav_blogs_list =
List::new(&conn, "fav_blogs", Some(&users[0]), ListType::Blog).unwrap(); List::new(conn, "fav_blogs", Some(&users[0]), ListType::Blog).unwrap();
fav_blogs_list.add_blogs(&conn, &[blogs[0].id]).unwrap(); fav_blogs_list.add_blogs(conn, &[blogs[0].id]).unwrap();
let my_tl = Timeline::new_for_user( let my_tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"My timeline".to_owned(), "My timeline".to_owned(),
"blog in fav_blogs and not has_cover or local and followed exclude likes" "blog in fav_blogs and not has_cover or local and followed exclude likes"
@ -514,7 +504,7 @@ mod tests {
.unwrap(); .unwrap();
let post = Post::insert( let post = Post::insert(
&conn, conn,
NewPost { NewPost {
blog_id: blogs[0].id, blog_id: blogs[0].id,
slug: "about-linux".to_string(), slug: "about-linux".to_string(),
@ -530,10 +520,10 @@ mod tests {
}, },
) )
.unwrap(); .unwrap();
assert!(my_tl.matches(&conn, &post, Kind::Original).unwrap()); // matches because of "blog in fav_blogs" (and there is no cover) assert!(my_tl.matches(conn, &post, Kind::Original).unwrap()); // matches because of "blog in fav_blogs" (and there is no cover)
let post = Post::insert( let post = Post::insert(
&conn, conn,
NewPost { NewPost {
blog_id: blogs[1].id, blog_id: blogs[1].id,
slug: "about-linux-2".to_string(), slug: "about-linux-2".to_string(),
@ -551,7 +541,7 @@ mod tests {
}, },
) )
.unwrap(); .unwrap();
assert!(!my_tl.matches(&conn, &post, Kind::Like(&users[1])).unwrap()); assert!(!my_tl.matches(conn, &post, Kind::Like(&users[1])).unwrap());
Ok(()) Ok(())
}); });
@ -561,17 +551,17 @@ mod tests {
fn test_add_to_all_timelines() { fn test_add_to_all_timelines() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let (users, blogs) = blogTests::fill_database(&conn); let (users, blogs) = blogTests::fill_database(conn);
let gnu_tl = Timeline::new_for_user( let gnu_tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"GNU timeline".to_owned(), "GNU timeline".to_owned(),
"license in [AGPL, LGPL, GPL]".to_owned(), "license in [AGPL, LGPL, GPL]".to_owned(),
) )
.unwrap(); .unwrap();
let non_gnu_tl = Timeline::new_for_user( let non_gnu_tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"Stallman disapproved timeline".to_owned(), "Stallman disapproved timeline".to_owned(),
"not license in [AGPL, LGPL, GPL]".to_owned(), "not license in [AGPL, LGPL, GPL]".to_owned(),
@ -579,7 +569,7 @@ mod tests {
.unwrap(); .unwrap();
let gnu_post = Post::insert( let gnu_post = Post::insert(
&conn, conn,
NewPost { NewPost {
blog_id: blogs[0].id, blog_id: blogs[0].id,
slug: "slug".to_string(), slug: "slug".to_string(),
@ -597,7 +587,7 @@ mod tests {
.unwrap(); .unwrap();
let non_free_post = Post::insert( let non_free_post = Post::insert(
&conn, conn,
NewPost { NewPost {
blog_id: blogs[0].id, blog_id: blogs[0].id,
slug: "slug2".to_string(), slug: "slug2".to_string(),
@ -614,13 +604,13 @@ mod tests {
) )
.unwrap(); .unwrap();
Timeline::add_to_all_timelines(&conn, &gnu_post, Kind::Original).unwrap(); Timeline::add_to_all_timelines(conn, &gnu_post, Kind::Original).unwrap();
Timeline::add_to_all_timelines(&conn, &non_free_post, Kind::Original).unwrap(); Timeline::add_to_all_timelines(conn, &non_free_post, Kind::Original).unwrap();
let res = gnu_tl.get_latest(&conn, 2).unwrap(); let res = gnu_tl.get_latest(conn, 2).unwrap();
assert_eq!(res.len(), 1); assert_eq!(res.len(), 1);
assert_eq!(res[0].id, gnu_post.id); assert_eq!(res[0].id, gnu_post.id);
let res = non_gnu_tl.get_latest(&conn, 2).unwrap(); let res = non_gnu_tl.get_latest(conn, 2).unwrap();
assert_eq!(res.len(), 1); assert_eq!(res.len(), 1);
assert_eq!(res[0].id, non_free_post.id); assert_eq!(res[0].id, non_free_post.id);
@ -632,10 +622,10 @@ mod tests {
fn test_matches_lists_direct() { fn test_matches_lists_direct() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let (users, blogs) = blogTests::fill_database(&conn); let (users, blogs) = blogTests::fill_database(conn);
let gnu_post = Post::insert( let gnu_post = Post::insert(
&conn, conn,
NewPost { NewPost {
blog_id: blogs[0].id, blog_id: blogs[0].id,
slug: "slug".to_string(), slug: "slug".to_string(),
@ -652,63 +642,63 @@ mod tests {
) )
.unwrap(); .unwrap();
gnu_post gnu_post
.update_tags(&conn, vec![Tag::build_activity("free".to_owned()).unwrap()]) .update_tags(conn, vec![Tag::build_activity("free".to_owned()).unwrap()])
.unwrap(); .unwrap();
PostAuthor::insert( PostAuthor::insert(
&conn, conn,
NewPostAuthor { NewPostAuthor {
post_id: gnu_post.id, post_id: gnu_post.id,
author_id: blogs[0].list_authors(&conn).unwrap()[0].id, author_id: blogs[0].list_authors(conn).unwrap()[0].id,
}, },
) )
.unwrap(); .unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"blog timeline".to_owned(), "blog timeline".to_owned(),
format!("blog in [{}]", blogs[0].fqn), format!("blog in [{}]", blogs[0].fqn),
) )
.unwrap(); .unwrap();
assert!(tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"blog timeline".to_owned(), "blog timeline".to_owned(),
"blog in [no_one@nowhere]".to_owned(), "blog in [no_one@nowhere]".to_owned(),
) )
.unwrap(); .unwrap();
assert!(!tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(!tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"author timeline".to_owned(), "author timeline".to_owned(),
format!( format!(
"author in [{}]", "author in [{}]",
blogs[0].list_authors(&conn).unwrap()[0].fqn blogs[0].list_authors(conn).unwrap()[0].fqn
), ),
) )
.unwrap(); .unwrap();
assert!(tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"author timeline".to_owned(), "author timeline".to_owned(),
format!("author in [{}]", users[2].fqn), format!("author in [{}]", users[2].fqn),
) )
.unwrap(); .unwrap();
assert!(!tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(!tl.matches(conn, &gnu_post, Kind::Original).unwrap());
assert!(tl assert!(tl
.matches(&conn, &gnu_post, Kind::Reshare(&users[2])) .matches(conn, &gnu_post, Kind::Reshare(&users[2]))
.unwrap()); .unwrap());
assert!(!tl.matches(&conn, &gnu_post, Kind::Like(&users[2])).unwrap()); assert!(!tl.matches(conn, &gnu_post, Kind::Like(&users[2])).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"author timeline".to_owned(), "author timeline".to_owned(),
format!( format!(
@ -717,50 +707,50 @@ mod tests {
), ),
) )
.unwrap(); .unwrap();
assert!(!tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(!tl.matches(conn, &gnu_post, Kind::Original).unwrap());
assert!(!tl assert!(!tl
.matches(&conn, &gnu_post, Kind::Reshare(&users[2])) .matches(conn, &gnu_post, Kind::Reshare(&users[2]))
.unwrap()); .unwrap());
assert!(tl.matches(&conn, &gnu_post, Kind::Like(&users[2])).unwrap()); assert!(tl.matches(conn, &gnu_post, Kind::Like(&users[2])).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"tag timeline".to_owned(), "tag timeline".to_owned(),
"tags in [free]".to_owned(), "tags in [free]".to_owned(),
) )
.unwrap(); .unwrap();
assert!(tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"tag timeline".to_owned(), "tag timeline".to_owned(),
"tags in [private]".to_owned(), "tags in [private]".to_owned(),
) )
.unwrap(); .unwrap();
assert!(!tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(!tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"english timeline".to_owned(), "english timeline".to_owned(),
"lang in [en]".to_owned(), "lang in [en]".to_owned(),
) )
.unwrap(); .unwrap();
assert!(tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"franco-italian timeline".to_owned(), "franco-italian timeline".to_owned(),
"lang in [fr, it]".to_owned(), "lang in [fr, it]".to_owned(),
) )
.unwrap(); .unwrap();
assert!(!tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(!tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
Ok(()) Ok(())
}); });
@ -804,10 +794,10 @@ mod tests {
fn test_matches_keyword() { fn test_matches_keyword() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let (users, blogs) = blogTests::fill_database(&conn); let (users, blogs) = blogTests::fill_database(conn);
let gnu_post = Post::insert( let gnu_post = Post::insert(
&conn, conn,
NewPost { NewPost {
blog_id: blogs[0].id, blog_id: blogs[0].id,
slug: "slug".to_string(), slug: "slug".to_string(),
@ -825,61 +815,61 @@ mod tests {
.unwrap(); .unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"Linux title".to_owned(), "Linux title".to_owned(),
"title contains Linux".to_owned(), "title contains Linux".to_owned(),
) )
.unwrap(); .unwrap();
assert!(tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"Microsoft title".to_owned(), "Microsoft title".to_owned(),
"title contains Microsoft".to_owned(), "title contains Microsoft".to_owned(),
) )
.unwrap(); .unwrap();
assert!(!tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(!tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"Linux subtitle".to_owned(), "Linux subtitle".to_owned(),
"subtitle contains Stallman".to_owned(), "subtitle contains Stallman".to_owned(),
) )
.unwrap(); .unwrap();
assert!(tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"Microsoft subtitle".to_owned(), "Microsoft subtitle".to_owned(),
"subtitle contains Nadella".to_owned(), "subtitle contains Nadella".to_owned(),
) )
.unwrap(); .unwrap();
assert!(!tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(!tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"Linux content".to_owned(), "Linux content".to_owned(),
"content contains Linux".to_owned(), "content contains Linux".to_owned(),
) )
.unwrap(); .unwrap();
assert!(tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
let tl = Timeline::new_for_user( let tl = Timeline::new_for_user(
&conn, conn,
users[0].id, users[0].id,
"Microsoft content".to_owned(), "Microsoft content".to_owned(),
"subtitle contains Windows".to_owned(), "subtitle contains Windows".to_owned(),
) )
.unwrap(); .unwrap();
assert!(!tl.matches(&conn, &gnu_post, Kind::Original).unwrap()); assert!(!tl.matches(conn, &gnu_post, Kind::Original).unwrap());
tl.delete(&conn).unwrap(); tl.delete(conn).unwrap();
Ok(()) Ok(())
}); });

View File

@ -10,7 +10,7 @@ use crate::{
use plume_common::activity_pub::inbox::AsActor; use plume_common::activity_pub::inbox::AsActor;
use whatlang::{self, Lang}; use whatlang::{self, Lang};
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum QueryError { pub enum QueryError {
SyntaxError(usize, usize, String), SyntaxError(usize, usize, String),
UnexpectedEndOfQuery, UnexpectedEndOfQuery,
@ -19,7 +19,7 @@ pub enum QueryError {
pub type QueryResult<T> = std::result::Result<T, QueryError>; pub type QueryResult<T> = std::result::Result<T, QueryError>;
#[derive(Debug, Clone, Copy, PartialEq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Kind<'a> { pub enum Kind<'a> {
Original, Original,
Reshare(&'a User), Reshare(&'a User),
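
Both derive changes in this file add Eq next to PartialEq, which clippy's derive_partial_eq_without_eq lint suggests whenever every field has total equality; deriving Eq lets the type be used where full equivalence is required. A small sketch with a simplified stand-in for Kind (the real enum carries a &User and has no Hash derive):

    use std::collections::HashMap;

    // Eq (plus Hash, added only for this example) makes the enum usable as a map key.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    enum Kind {
        Original,
        Reshare,
        Like,
    }

    fn main() {
        let mut counts: HashMap<Kind, u32> = HashMap::new();
        for kind in [Kind::Original, Kind::Reshare, Kind::Like] {
            *counts.entry(kind).or_insert(0) += 1;
        }
        println!("{:?}", counts);
    }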

View File

@ -187,7 +187,7 @@ impl User {
users::table users::table
.filter(users::instance_id.eq(Instance::get_local()?.id)) .filter(users::instance_id.eq(Instance::get_local()?.id))
.count() .count()
.get_result(&*conn) .get_result(conn)
.map_err(Error::from) .map_err(Error::from)
} }
@ -246,20 +246,7 @@ impl User {
fn fetch(url: &str) -> Result<CustomPerson> { fn fetch(url: &str) -> Result<CustomPerson> {
let res = get(url, Self::get_sender(), CONFIG.proxy().cloned())?; let res = get(url, Self::get_sender(), CONFIG.proxy().cloned())?;
let text = &res.text()?; let text = &res.text()?;
// without this workaround, publicKey is not correctly deserialized let json = serde_json::from_str::<CustomPerson>(text)?;
let ap_sign = serde_json::from_str::<ApSignature>(text)?;
let person = serde_json::from_str::<Person>(text)?;
let json = CustomPerson::new(
ApActor::new(
person
.clone()
.id_unchecked()
.ok_or(Error::MissingApProperty)?
.to_owned(),
person,
),
ap_sign,
); // FIXME: Don't clone()
Ok(json) Ok(json)
} }
@ -269,23 +256,13 @@ impl User {
pub fn refetch(&self, conn: &Connection) -> Result<()> { pub fn refetch(&self, conn: &Connection) -> Result<()> {
User::fetch(&self.ap_url.clone()).and_then(|json| { User::fetch(&self.ap_url.clone()).and_then(|json| {
let avatar = Media::save_remote( let avatar = json
conn,
json.ap_actor_ref()
.icon() .icon()
.ok_or(Error::MissingApProperty)? // FIXME: Fails when icon is not set .and_then(|icon| icon.iter().next())
.iter() .and_then(|i| i.clone().extend::<Image, ImageType>().ok())
.next() .and_then(|image| image)
.and_then(|i| { .and_then(|image| image.id_unchecked().map(|url| url.to_string()))
i.clone() .and_then(|url| Media::save_remote(conn, url, self).ok());
.extend::<Image, ImageType>() // FIXME: Don't clone()
.ok()?
.and_then(|url| Some(url.id_unchecked()?.to_string()))
})
.ok_or(Error::MissingApProperty)?,
self,
)
.ok();
let pub_key = &json.ext_one.public_key.public_key_pem; let pub_key = &json.ext_one.public_key.public_key_pem;
diesel::update(self) diesel::update(self)
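
The refetch hunk above flattens the nested avatar extraction into a chain of Option combinators: each fallible step returns Option, and and_then/map short-circuit the whole chain on the first None instead of bailing out with ok_or. A minimal sketch of the same style with hypothetical data (not the ActivityPub types used here):

    fn first_icon_url(icons: Option<&[&str]>) -> Option<String> {
        icons
            .and_then(|list| list.first())      // no icon list, or an empty one: None
            .map(|url| url.trim().to_string())  // normalise the value we keep
            .filter(|url| !url.is_empty())      // treat empty strings as missing
    }

    fn main() {
        assert_eq!(
            first_icon_url(Some(&["https://example.org/a.png"])),
            Some("https://example.org/a.png".to_string())
        );
        assert_eq!(first_icon_url(None), None);
    }

One behavioural consequence visible in the diff: a missing icon no longer produces a MissingApProperty error; the avatar step is simply skipped.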
@ -435,7 +412,7 @@ impl User {
} }
// if no user was found, and we were unable to auto-register from ldap // if no user was found, and we were unable to auto-register from ldap
// fake-verify a password, and return an error. // fake-verify a password, and return an error.
let other = User::get(&*conn, 1) let other = User::get(conn, 1)
.expect("No user is registered") .expect("No user is registered")
.hashed_password; .hashed_password;
other.map(|pass| bcrypt::verify(password, &pass)); other.map(|pass| bcrypt::verify(password, &pass));
@ -931,7 +908,7 @@ impl<'a, 'r> FromRequest<'a, 'r> for User {
.cookies() .cookies()
.get_private(AUTH_COOKIE) .get_private(AUTH_COOKIE)
.and_then(|cookie| cookie.value().parse().ok()) .and_then(|cookie| cookie.value().parse().ok())
.and_then(|id| User::get(&*conn, id).ok()) .and_then(|id| User::get(&conn, id).ok())
.or_forward(()) .or_forward(())
} }
} }
@ -960,6 +937,10 @@ impl FromId<Connection> for User {
.to_string(); .to_string();
if username.contains(&['<', '>', '&', '@', '\'', '"', ' ', '\t'][..]) { if username.contains(&['<', '>', '&', '@', '\'', '"', ' ', '\t'][..]) {
tracing::error!(
"preferredUsername includes invalid character(s): {}",
&username
);
return Err(Error::InvalidValue); return Err(Error::InvalidValue);
} }
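
The added lines log the rejected preferredUsername with tracing::error! before returning Error::InvalidValue, so bad remote actors show up in the logs instead of failing silently. A minimal, self-contained sketch of the same check (the Error enum here is a stand-in for Plume's error type):

    use tracing::error;

    #[derive(Debug)]
    enum Error {
        InvalidValue,
    }

    fn check_username(username: &str) -> Result<(), Error> {
        if username.contains(&['<', '>', '&', '@', '\'', '"', ' ', '\t'][..]) {
            error!("preferredUsername includes invalid character(s): {}", username);
            return Err(Error::InvalidValue);
        }
        Ok(())
    }

    fn main() {
        assert!(check_username("alice").is_ok());
        assert!(check_username("al ice").is_err());
    }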
@ -1232,7 +1213,7 @@ pub(crate) mod tests {
) )
.unwrap(); .unwrap();
other.avatar_id = Some(avatar.id); other.avatar_id = Some(avatar.id);
let other = other.save_changes::<User>(&*conn).unwrap(); let other = other.save_changes::<User>(conn).unwrap();
vec![admin, user, other] vec![admin, user, other]
} }
@ -1335,11 +1316,11 @@ pub(crate) mod tests {
fn delete() { fn delete() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let inserted = fill_database(&conn); let inserted = fill_database(conn);
assert!(User::get(&conn, inserted[0].id).is_ok()); assert!(User::get(conn, inserted[0].id).is_ok());
inserted[0].delete(&conn).unwrap(); inserted[0].delete(conn).unwrap();
assert!(User::get(&conn, inserted[0].id).is_err()); assert!(User::get(conn, inserted[0].id).is_err());
Ok(()) Ok(())
}); });
} }
@ -1348,20 +1329,20 @@ pub(crate) mod tests {
fn admin() { fn admin() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
let inserted = fill_database(&conn); let inserted = fill_database(conn);
let local_inst = Instance::get_local().unwrap(); let local_inst = Instance::get_local().unwrap();
let mut i = 0; let mut i = 0;
while local_inst.has_admin(&conn).unwrap() { while local_inst.has_admin(conn).unwrap() {
assert!(i < 100); // prevent looping indefinitely assert!(i < 100); // prevent looping indefinitely
local_inst local_inst
.main_admin(&conn) .main_admin(conn)
.unwrap() .unwrap()
.set_role(&conn, Role::Normal) .set_role(conn, Role::Normal)
.unwrap(); .unwrap();
i += 1; i += 1;
} }
inserted[0].set_role(&conn, Role::Admin).unwrap(); inserted[0].set_role(conn, Role::Admin).unwrap();
assert_eq!(inserted[0].id, local_inst.main_admin(&conn).unwrap().id); assert_eq!(inserted[0].id, local_inst.main_admin(conn).unwrap().id);
Ok(()) Ok(())
}); });
} }
@ -1370,9 +1351,9 @@ pub(crate) mod tests {
fn auth() { fn auth() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
fill_database(&conn); fill_database(conn);
let test_user = NewUser::new_local( let test_user = NewUser::new_local(
&conn, conn,
"test".to_owned(), "test".to_owned(),
"test user".to_owned(), "test user".to_owned(),
Role::Normal, Role::Normal,
@ -1383,10 +1364,10 @@ pub(crate) mod tests {
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
User::login(&conn, "test", "test_password").unwrap().id, User::login(conn, "test", "test_password").unwrap().id,
test_user.id test_user.id
); );
assert!(User::login(&conn, "test", "other_password").is_err()); assert!(User::login(conn, "test", "other_password").is_err());
Ok(()) Ok(())
}); });
} }
@ -1395,26 +1376,26 @@ pub(crate) mod tests {
fn get_local_page() { fn get_local_page() {
let conn = &db(); let conn = &db();
conn.test_transaction::<_, (), _>(|| { conn.test_transaction::<_, (), _>(|| {
fill_database(&conn); fill_database(conn);
let page = User::get_local_page(&conn, (0, 2)).unwrap(); let page = User::get_local_page(conn, (0, 2)).unwrap();
assert_eq!(page.len(), 2); assert_eq!(page.len(), 2);
assert!(page[0].username <= page[1].username); assert!(page[0].username <= page[1].username);
let mut last_username = User::get_local_page(&conn, (0, 1)).unwrap()[0] let mut last_username = User::get_local_page(conn, (0, 1)).unwrap()[0]
.username .username
.clone(); .clone();
for i in 1..User::count_local(&conn).unwrap() as i32 { for i in 1..User::count_local(conn).unwrap() as i32 {
let page = User::get_local_page(&conn, (i, i + 1)).unwrap(); let page = User::get_local_page(conn, (i, i + 1)).unwrap();
assert_eq!(page.len(), 1); assert_eq!(page.len(), 1);
assert!(last_username <= page[0].username); assert!(last_username <= page[0].username);
last_username = page[0].username.clone(); last_username = page[0].username.clone();
} }
assert_eq!( assert_eq!(
User::get_local_page(&conn, (0, User::count_local(&conn).unwrap() as i32 + 10)) User::get_local_page(conn, (0, User::count_local(conn).unwrap() as i32 + 10))
.unwrap() .unwrap()
.len() as i64, .len() as i64,
User::count_local(&conn).unwrap() User::count_local(conn).unwrap()
); );
Ok(()) Ok(())
}); });

View File

@ -1 +1 @@
nightly-2022-01-27 nightly-2022-07-19

View File

@ -3,4 +3,4 @@ set -euo pipefail
version="$1" version="$1"
docker run --rm -v $PWD:/repo -v $PWD/pkg:/pkg -v $PWD/script/prebuild.sh:/prebuild.sh plumeorg/plume-buildenv:v0.4.0 /prebuild.sh "$version" /repo /prebuild /pkg docker run --rm -v $PWD:/repo -v $PWD/pkg:/pkg -v $PWD/script/prebuild.sh:/prebuild.sh plumeorg/plume-buildenv:v0.7.0 /prebuild.sh "$version" /repo /prebuild /pkg

View File

@ -10,7 +10,7 @@ plm instance new -d plume-test.local -n plume-test
plm users new -n admin -N 'Admin' -e 'email@exemple.com' -p 'password' plm users new -n admin -N 'Admin' -e 'email@exemple.com' -p 'password'
plume & plume &
caddy run -config /Caddyfile & caddy run --config /Caddyfile &
until curl http://localhost:7878/test/health -f; do sleep 1; done 2>/dev/null >/dev/null until curl http://localhost:7878/test/health -f; do sleep 1; done 2>/dev/null >/dev/null

View File

@ -10,7 +10,7 @@ pub fn create(conn: DbConn, data: Json<NewAppData>) -> Api<App> {
let client_id = random_hex(); let client_id = random_hex();
let client_secret = random_hex(); let client_secret = random_hex();
let app = App::insert( let app = App::insert(
&*conn, &conn,
NewApp { NewApp {
name: data.name.clone(), name: data.name.clone(),
client_id, client_id,

View File

@ -160,7 +160,7 @@ pub fn delete(name: String, conn: DbConn, rockets: PlumeRocket) -> RespondOrRedi
.and_then(|u| u.is_author_in(&conn, &blog).ok()) .and_then(|u| u.is_author_in(&conn, &blog).ok())
.unwrap_or(false) .unwrap_or(false)
{ {
blog.delete(&*conn).expect("blog::expect: deletion error"); blog.delete(&conn).expect("blog::expect: deletion error");
Flash::success( Flash::success(
Redirect::to(uri!(super::instance::index)), Redirect::to(uri!(super::instance::index)),
i18n!(rockets.intl.catalog, "Your blog was deleted."), i18n!(rockets.intl.catalog, "Your blog was deleted."),
@ -364,7 +364,7 @@ pub fn outbox_page(
#[get("/~/<name>/atom.xml")] #[get("/~/<name>/atom.xml")]
pub fn atom_feed(name: String, conn: DbConn) -> Option<Content<String>> { pub fn atom_feed(name: String, conn: DbConn) -> Option<Content<String>> {
let blog = Blog::find_by_fqn(&conn, &name).ok()?; let blog = Blog::find_by_fqn(&conn, &name).ok()?;
let entries = Post::get_recents_for_blog(&*conn, &blog, 15).ok()?; let entries = Post::get_recents_for_blog(&conn, &blog, 15).ok()?;
let uri = Instance::get_local() let uri = Instance::get_local()
.ok()? .ok()?
.compute_box("~", &name, "atom.xml"); .compute_box("~", &name, "atom.xml");
@ -454,29 +454,33 @@ mod tests {
long_description_html: "<p>Good morning</p>".to_string(), long_description_html: "<p>Good morning</p>".to_string(),
short_description: SafeString::new("Hello"), short_description: SafeString::new("Hello"),
short_description_html: "<p>Hello</p>".to_string(), short_description_html: "<p>Hello</p>".to_string(),
name: random_hex().to_string(), name: random_hex(),
open_registrations: true, open_registrations: true,
public_domain: random_hex().to_string(), public_domain: random_hex(),
}, },
) )
.unwrap(); .unwrap();
Instance::cache_local(conn); Instance::cache_local(conn);
instance instance
}); });
let mut user = NewUser::default(); let user = NewUser {
user.instance_id = instance.id; instance_id: instance.id,
user.username = random_hex().to_string(); username: random_hex(),
user.ap_url = random_hex().to_string(); ap_url: random_hex(),
user.inbox_url = random_hex().to_string(); inbox_url: random_hex(),
user.outbox_url = random_hex().to_string(); outbox_url: random_hex(),
user.followers_endpoint = random_hex().to_string(); followers_endpoint: random_hex(),
..Default::default()
};
let user = User::insert(conn, user).unwrap(); let user = User::insert(conn, user).unwrap();
let mut blog = NewBlog::default(); let blog = NewBlog {
blog.instance_id = instance.id; instance_id: instance.id,
blog.actor_id = random_hex().to_string(); actor_id: random_hex(),
blog.ap_url = random_hex().to_string(); ap_url: random_hex(),
blog.inbox_url = random_hex().to_string(); inbox_url: random_hex(),
blog.outbox_url = random_hex().to_string(); outbox_url: random_hex(),
..Default::default()
};
let blog = Blog::insert(conn, blog).unwrap(); let blog = Blog::insert(conn, blog).unwrap();
BlogAuthor::insert( BlogAuthor::insert(
conn, conn,

View File

@ -125,7 +125,7 @@ pub fn create(
user.has_reshared(&conn, &post) user.has_reshared(&conn, &post)
.expect("comments::create: reshared error"), .expect("comments::create: reshared error"),
user.is_following( user.is_following(
&*conn, &conn,
post.get_authors(&conn) post.get_authors(&conn)
.expect("comments::create: authors error")[0] .expect("comments::create: authors error")[0]
.id .id

View File

@ -3,6 +3,7 @@ use crate::{
routes::{errors::ErrorPage, RespondOrRedirect}, routes::{errors::ErrorPage, RespondOrRedirect},
template_utils::{IntoContext, Ructe}, template_utils::{IntoContext, Ructe},
}; };
use plume_models::{ use plume_models::{
db_conn::DbConn, email_signups::EmailSignup, instance::Instance, lettre::Transport, signups, db_conn::DbConn, email_signups::EmailSignup, instance::Instance, lettre::Transport, signups,
Error, PlumeRocket, CONFIG, Error, PlumeRocket, CONFIG,
@ -13,7 +14,11 @@ use rocket::{
response::{Flash, Redirect}, response::{Flash, Redirect},
State, State,
}; };
use std::sync::{Arc, Mutex}; use std::{
borrow::Cow,
collections::HashMap,
sync::{Arc, Mutex},
};
use tracing::warn; use tracing::warn;
use validator::{Validate, ValidationError, ValidationErrors}; use validator::{Validate, ValidationError, ValidationErrors};
@ -105,6 +110,26 @@ pub fn create(
render!(email_signups::create(&(&conn, &rockets).to_context())).into() render!(email_signups::create(&(&conn, &rockets).to_context())).into()
} }
Error::NotFound => render!(errors::not_found(&(&conn, &rockets).to_context())).into(), Error::NotFound => render!(errors::not_found(&(&conn, &rockets).to_context())).into(),
Error::Blocklisted(show, msg) => {
let mut errors = ValidationErrors::new();
if *show {
errors.add(
"email",
ValidationError {
code: Cow::from("blocklisted"),
message: Some(Cow::from(msg.clone())),
params: HashMap::new(),
},
);
}
render!(email_signups::new(
&(&conn, &rockets).to_context(),
registration_open,
&form,
errors
))
.into()
}
_ => render!(errors::not_found(&(&conn, &rockets).to_context())).into(), // FIXME _ => render!(errors::not_found(&(&conn, &rockets).to_context())).into(), // FIXME
}); });
} }
@ -153,6 +178,28 @@ pub fn show(
))) )))
} // TODO: Flash and redirect } // TODO: Flash and redirect
Error::NotFound => return Err(Error::NotFound.into()), Error::NotFound => return Err(Error::NotFound.into()),
Error::Blocklisted(show, msg) => {
let mut errors = ValidationErrors::new();
if show {
errors.add(
"email",
ValidationError {
code: Cow::from("blocklisted"),
message: Some(Cow::from(msg)),
params: HashMap::new(),
},
);
}
return Ok(render!(email_signups::new(
&(&conn, &rockets).to_context(),
Instance::get_local()?.open_registrations,
&EmailSignupForm {
email: signup.email.clone(),
email_confirmation: signup.email
},
errors
)));
}
_ => return Err(Error::NotFound.into()), // FIXME _ => return Err(Error::NotFound.into()), // FIXME
} }
} }
@ -207,12 +254,38 @@ pub fn signup(
err err
)))); ))));
} }
let _user = signup let user = signup.complete(&conn, form.username.clone(), form.password.clone());
.complete(&conn, form.username.clone(), form.password.clone()) match user {
.map_err(|e| { Err(Error::Blocklisted(show, msg)) => {
let instance = Instance::get_local().map_err(|_| Status::UnprocessableEntity)?;
let mut errors = ValidationErrors::new();
if show {
errors.add(
"email",
ValidationError {
code: Cow::from("blocklisted"),
message: Some(Cow::from(msg)),
params: HashMap::new(),
},
);
}
return Ok(render!(email_signups::new(
&(&conn, &rockets).to_context(),
instance.open_registrations,
&EmailSignupForm {
email: signup.email.clone(),
email_confirmation: signup.email
},
errors
))
.into());
}
Err(e) => {
warn!("{:?}", e); warn!("{:?}", e);
Status::UnprocessableEntity return Err(Status::UnprocessableEntity);
})?; }
_ => {}
}
Ok(FlashRedirect(Flash::success( Ok(FlashRedirect(Flash::success(
Redirect::to(uri!(super::session::new: m = _)), Redirect::to(uri!(super::session::new: m = _)),
i18n!( i18n!(
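
All three Blocklisted branches added above build their form errors the same way: a ValidationErrors container holding one ValidationError whose code is "blocklisted" and whose message carries the admin-provided text, added only when the block is configured to be shown. A minimal sketch of that helper shape, assuming the validator crate as used in the diff (the message text here is a placeholder):

    use std::borrow::Cow;
    use std::collections::HashMap;
    use validator::{ValidationError, ValidationErrors};

    fn blocklist_errors(show: bool, msg: &str) -> ValidationErrors {
        let mut errors = ValidationErrors::new();
        if show {
            errors.add(
                "email",
                ValidationError {
                    code: Cow::from("blocklisted"),
                    message: Some(Cow::from(msg.to_string())),
                    params: HashMap::new(),
                },
            );
        }
        errors
    }

    fn main() {
        println!("{:?}", blocklist_errors(true, "this email domain is not allowed"));
    }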

View File

@ -105,7 +105,7 @@ pub fn update_settings(
Instance::get_local().expect("instance::update_settings: local instance error"); Instance::get_local().expect("instance::update_settings: local instance error");
instance instance
.update( .update(
&*conn, &conn,
form.name.clone(), form.name.clone(),
form.open_registrations, form.open_registrations,
form.short_description.clone(), form.short_description.clone(),
@ -366,8 +366,8 @@ pub fn edit_users(
} }
fn ban(id: i32, conn: &Connection, worker: &ScheduledThreadPool) -> Result<(), ErrorPage> { fn ban(id: i32, conn: &Connection, worker: &ScheduledThreadPool) -> Result<(), ErrorPage> {
let u = User::get(&*conn, id)?; let u = User::get(conn, id)?;
u.delete(&*conn)?; u.delete(conn)?;
if Instance::get_local() if Instance::get_local()
.map(|i| u.instance_id == i.id) .map(|i| u.instance_id == i.id)
.unwrap_or(false) .unwrap_or(false)
@ -382,8 +382,8 @@ fn ban(id: i32, conn: &Connection, worker: &ScheduledThreadPool) -> Result<(), E
}, },
) )
.unwrap(); .unwrap();
let target = User::one_by_instance(&*conn)?; let target = User::one_by_instance(conn)?;
let delete_act = u.delete_activity(&*conn)?; let delete_act = u.delete_activity(conn)?;
worker.execute(move || broadcast(&u, delete_act, target, CONFIG.proxy().cloned())); worker.execute(move || broadcast(&u, delete_act, target, CONFIG.proxy().cloned()));
} }

View File

@ -20,14 +20,14 @@ pub fn create(
let b = Blog::find_by_fqn(&conn, &blog)?; let b = Blog::find_by_fqn(&conn, &blog)?;
let post = Post::find_by_slug(&conn, &slug, b.id)?; let post = Post::find_by_slug(&conn, &slug, b.id)?;
if !user.has_liked(&*conn, &post)? { if !user.has_liked(&conn, &post)? {
let like = likes::Like::insert(&*conn, likes::NewLike::new(&post, &user))?; let like = likes::Like::insert(&conn, likes::NewLike::new(&post, &user))?;
like.notify(&*conn)?; like.notify(&conn)?;
Timeline::add_to_all_timelines(&conn, &post, Kind::Like(&user))?; Timeline::add_to_all_timelines(&conn, &post, Kind::Like(&user))?;
let dest = User::one_by_instance(&*conn)?; let dest = User::one_by_instance(&conn)?;
let act = like.to_activity(&*conn)?; let act = like.to_activity(&conn)?;
rockets rockets
.worker .worker
.execute(move || broadcast(&user, act, dest, CONFIG.proxy().cloned())); .execute(move || broadcast(&user, act, dest, CONFIG.proxy().cloned()));

View File

@ -145,9 +145,9 @@ pub fn details(
#[post("/medias/<id>/delete")] #[post("/medias/<id>/delete")]
pub fn delete(id: i32, user: User, conn: DbConn, intl: I18n) -> Result<Flash<Redirect>, ErrorPage> { pub fn delete(id: i32, user: User, conn: DbConn, intl: I18n) -> Result<Flash<Redirect>, ErrorPage> {
let media = Media::get(&*conn, id)?; let media = Media::get(&conn, id)?;
if media.owner_id == user.id { if media.owner_id == user.id {
media.delete(&*conn)?; media.delete(&conn)?;
Ok(Flash::success( Ok(Flash::success(
Redirect::to(uri!(list: page = _)), Redirect::to(uri!(list: page = _)),
i18n!(intl.catalog, "Your media have been deleted."), i18n!(intl.catalog, "Your media have been deleted."),
@ -167,9 +167,9 @@ pub fn set_avatar(
conn: DbConn, conn: DbConn,
intl: I18n, intl: I18n,
) -> Result<Flash<Redirect>, ErrorPage> { ) -> Result<Flash<Redirect>, ErrorPage> {
let media = Media::get(&*conn, id)?; let media = Media::get(&conn, id)?;
if media.owner_id == user.id { if media.owner_id == user.id {
user.set_avatar(&*conn, media.id)?; user.set_avatar(&conn, media.id)?;
Ok(Flash::success( Ok(Flash::success(
Redirect::to(uri!(details: id = id)), Redirect::to(uri!(details: id = id)),
i18n!(intl.catalog, "Your avatar has been updated."), i18n!(intl.catalog, "Your avatar has been updated."),

View File

@ -165,7 +165,7 @@ fn post_to_atom(post: Post, conn: &Connection) -> Entry {
.build(), .build(),
) )
.authors( .authors(
post.get_authors(&*conn) post.get_authors(conn)
.expect("Atom feed: author error") .expect("Atom feed: author error")
.into_iter() .into_iter()
.map(|a| { .map(|a| {

View File

@ -65,7 +65,7 @@ pub fn search(query: Option<Form<SearchQuery>>, conn: DbConn, rockets: PlumeRock
if str_query.is_empty() { if str_query.is_empty() {
render!(search::index( render!(search::index(
&(&conn, &rockets).to_context(), &(&conn, &rockets).to_context(),
&format!("{}", Utc::today().format("%Y-%m-d")) &format!("{}", Utc::now().date_naive().format("%Y-%m-d"))
)) ))
} else { } else {
let res = rockets let res = rockets

View File

@ -48,10 +48,10 @@ pub fn me(user: Option<User>) -> RespondOrRedirect {
#[get("/@/<name>", rank = 2)] #[get("/@/<name>", rank = 2)]
pub fn details(name: String, rockets: PlumeRocket, conn: DbConn) -> Result<Ructe, ErrorPage> { pub fn details(name: String, rockets: PlumeRocket, conn: DbConn) -> Result<Ructe, ErrorPage> {
let user = User::find_by_fqn(&conn, &name)?; let user = User::find_by_fqn(&conn, &name)?;
let recents = Post::get_recents_for_author(&*conn, &user, 6)?; let recents = Post::get_recents_for_author(&conn, &user, 6)?;
let reshares = Reshare::get_recents_for_author(&*conn, &user, 6)?; let reshares = Reshare::get_recents_for_author(&conn, &user, 6)?;
if !user.get_instance(&*conn)?.local { if !user.get_instance(&conn)?.local {
tracing::trace!("remote user found"); tracing::trace!("remote user found");
user.remote_user_found(); // Doesn't block user.remote_user_found(); // Doesn't block
} }
@ -62,14 +62,14 @@ pub fn details(name: String, rockets: PlumeRocket, conn: DbConn) -> Result<Ructe
rockets rockets
.user .user
.clone() .clone()
.and_then(|x| x.is_following(&*conn, user.id).ok()) .and_then(|x| x.is_following(&conn, user.id).ok())
.unwrap_or(false), .unwrap_or(false),
user.instance_id != Instance::get_local()?.id, user.instance_id != Instance::get_local()?.id,
user.get_instance(&*conn)?.public_domain, user.get_instance(&conn)?.public_domain,
recents, recents,
reshares reshares
.into_iter() .into_iter()
.filter_map(|r| r.get_post(&*conn).ok()) .filter_map(|r| r.get_post(&conn).ok())
.collect() .collect()
))) )))
} }

View File

@ -50,10 +50,10 @@ impl Resolver<DbConn> for WebfingerResolver {
fn find(prefix: Prefix, acct: String, conn: DbConn) -> Result<Webfinger, ResolverError> { fn find(prefix: Prefix, acct: String, conn: DbConn) -> Result<Webfinger, ResolverError> {
match prefix { match prefix {
Prefix::Acct => User::find_by_fqn(&conn, &acct) Prefix::Acct => User::find_by_fqn(&conn, &acct)
.and_then(|usr| usr.webfinger(&*conn)) .and_then(|usr| usr.webfinger(&conn))
.or(Err(ResolverError::NotFound)), .or(Err(ResolverError::NotFound)),
Prefix::Group => Blog::find_by_fqn(&conn, &acct) Prefix::Group => Blog::find_by_fqn(&conn, &acct)
.and_then(|blog| blog.webfinger(&*conn)) .and_then(|blog| blog.webfinger(&conn))
.or(Err(ResolverError::NotFound)), .or(Err(ResolverError::NotFound)),
Prefix::Custom(_) => Err(ResolverError::NotFound), Prefix::Custom(_) => Err(ResolverError::NotFound),
} }

View File

@ -85,7 +85,7 @@ impl<'r> Responder<'r> for Ructe {
macro_rules! render { macro_rules! render {
($group:tt :: $page:tt ( $( $param:expr ),* ) ) => { ($group:tt :: $page:tt ( $( $param:expr ),* ) ) => {
{ {
use crate::templates; use $crate::templates;
let mut res = vec![]; let mut res = vec![];
templates::$group::$page( templates::$group::$page(