Mirror of https://github.com/LemmyNet/lemmy.git, synced 2024-11-22 06:36:14 +00:00

Commit edf0fd4381: Merge branch 'move_views_to_diesel' into remove_travis_and_federation_docker

.drone.yml (85 lines changed)
@@ -1,5 +1,10 @@
+---
 kind: pipeline
-name: default
+name: amd64
 
+platform:
+  os: linux
+  arch: amd64
+
 steps:
   - name: fetch git submodules
@@ -54,21 +59,12 @@ steps:
       LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432
       DO_WRITE_HOSTS_FILE: 1
     commands:
-      - ls -la target/lemmy_server
       - apk add bash curl postgresql-client
      - bash api_tests/prepare-drone-federation-test.sh
       - cd api_tests/
       - yarn
       - yarn api-test
 
-  - name: create docker tags
-    image: ekidd/rust-musl-builder:1.47.0
-    commands:
-      - echo "$(git describe),latest" > .tags
-    when:
-      ref:
-        - refs/tags/*
-
   - name: make release build and push to docker hub
     image: plugins/docker
     settings:
@@ -90,6 +86,69 @@ services:
       POSTGRES_USER: lemmy
       POSTGRES_PASSWORD: password
 
-volumes:
-  - name: dieselcli
-    temp: {}
+---
+kind: pipeline
+name: arm64
+
+platform:
+  os: linux
+  arch: arm64
+
+steps:
+
+  - name: cargo test
+    image: rust:1.47-slim-buster
+    environment:
+      LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432/lemmy
+      RUST_BACKTRACE: 1
+      RUST_TEST_THREADS: 1
+    commands:
+      - apt-get update
+      - apt-get -y install --no-install-recommends espeak postgresql-client libssl-dev pkg-config libpq-dev
+      - cargo test --workspace --no-fail-fast
+      - cargo build
+
+  # Using Debian here because there seems to be no official Alpine-based Rust docker image for ARM.
+  - name: cargo build
+    image: rust:1.47-slim-buster
+    commands:
+      - apt-get update
+      - apt-get -y install --no-install-recommends libssl-dev pkg-config libpq-dev
+      - cargo build
+      - mv target/debug/lemmy_server target/lemmy_server
+
+  - name: run federation tests
+    image: node:15-buster-slim
+    environment:
+      LEMMY_DATABASE_URL: postgres://lemmy:password@database:5432
+      DO_WRITE_HOSTS_FILE: 1
+    commands:
+      - mkdir -p /usr/share/man/man1 /usr/share/man/man7
+      - apt-get update
+      - apt-get -y install --no-install-recommends bash curl libssl-dev pkg-config libpq-dev postgresql-client libc6-dev
+      - bash api_tests/prepare-drone-federation-test.sh
+      - cd api_tests/
+      - yarn
+      - yarn api-test
+
+  - name: make release build and push to docker hub
+    image: plugins/docker
+    settings:
+      dockerfile: docker/prod/Dockerfile.arm
+      username:
+        from_secret: docker_username
+      password:
+        from_secret: docker_password
+      repo: dessalines/lemmy
+      auto_tag: true
+      auto_tag_suffix: arm64
+    when:
+      ref:
+        - refs/tags/*
+
+services:
+  - name: database
+    image: postgres:12-alpine
+    environment:
+      POSTGRES_USER: lemmy
+      POSTGRES_PASSWORD: password
CODE_OF_CONDUCT.md (deleted, 35 lines)
@@ -1,35 +0,0 @@
-# Code of Conduct
-
-- We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, nationality, or other similar characteristic.
-- Please avoid using overtly sexual aliases or other nicknames that might detract from a friendly, safe and welcoming environment for all.
-- Please be kind and courteous. There’s no need to be mean or rude.
-- Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and numerous costs. There is seldom a right answer.
-- Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and see how it works.
-- We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We interpret the term “harassment” as including the definition in the Citizen Code of Conduct; if you have any lack of clarity about what might be included in that concept, please read their definition. In particular, we don’t tolerate behavior that excludes people in socially marginalized groups.
-- Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or made uncomfortable by a community member, please contact one of the channel ops or any of the Lemmy moderation team immediately. Whether you’re a regular contributor or a newcomer, we care about making this community a safe place for you and we’ve got your back.
-- Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.
-
-[**Message the Moderation Team on Mastodon**](https://mastodon.social/@LemmyDev)
-
-[**Email The Moderation Team**](mailto:contact@lemmy.ml)
-
-## Moderation
-
-These are the policies for upholding our community’s standards of conduct. If you feel that a thread needs moderation, please contact the Lemmy moderation team.
-
-1. Remarks that violate the Lemmy standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks, are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.)
-2. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed.
-3. Moderators will first respond to such remarks with a warning, at the same time the offending content will likely be removed whenever possible.
-4. If the warning is unheeded, the user will be “kicked,” i.e., kicked out of the communication channel to cool off.
-5. If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded.
-6. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended party a genuine apology.
-7. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a different moderator, in private. Complaints about bans in-channel are not allowed.
-8. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate situation, they should expect less leeway than others.
-
-In the Lemmy community we strive to go the extra step to look out for each other. Don’t just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they’re off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely.
-
-And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could’ve communicated better — remember that it’s your responsibility to make others comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust.
-
-The enforcement policies listed above apply to all official Lemmy venues; including git repositories under [github.com/LemmyNet/lemmy](https://github.com/LemmyNet/lemmy) and [yerbamate.ml/LemmyNet/lemmy](https://yerbamate.ml/LemmyNet/lemmy), the [Matrix channel](https://matrix.to/#/!BZVTUuEiNmRcbFeLeI:matrix.org?via=matrix.org&via=privacytools.io&via=permaweb.io); and all instances under lemmy.ml. For other projects adopting the Rust Code of Conduct, please contact the maintainers of those projects for enforcement. If you wish to use this code of conduct for your own project, consider explicitly mentioning your moderation policy or making a copy with your own moderation policy so as to avoid confusion.
-
-Adapted from the [Rust Code of Conduct](https://www.rust-lang.org/policies/code-of-conduct), which is based on the [Node.js Policy on Trolling](http://blog.izs.me/post/30036893703/policy-on-trolling) as well as the [Contributor Covenant v1.3.0](https://www.contributor-covenant.org/version/1/3/0/).
README.md
@@ -27,6 +27,8 @@
     <a href="https://github.com/LemmyNet/lemmy/issues">Request Feature</a>
     ·
     <a href="https://github.com/LemmyNet/lemmy/blob/main/RELEASES.md">Releases</a>
+    ·
+    <a href="https://lemmy.ml/docs/en/code_of_conduct.html">Code of Conduct</a>
   </p>
 </p>
 
clean.sh (deleted, 7 lines)
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-cargo update
-cargo fmt
-cargo check
-cargo clippy
-cargo outdated -R
docker/prod/Dockerfile.arm (new file, 48 lines)
@@ -0,0 +1,48 @@
+ARG RUST_BUILDER_IMAGE=rust:1.47-slim-buster
+
+# Build Lemmy
+FROM $RUST_BUILDER_IMAGE as builder
+
+# Install compilation dependencies
+RUN apt-get update \
+ && apt-get -y install --no-install-recommends libssl-dev pkg-config libpq-dev \
+ && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+COPY ./ ./
+
+RUN cargo build --release
+
+# reduce binary size
+RUN strip ./target/release/lemmy_server
+
+RUN cp ./target/release/lemmy_server /app/lemmy_server
+
+# Build the docs
+FROM $RUST_BUILDER_IMAGE as docs
+WORKDIR /app
+RUN cargo install --debug mdbook
+COPY docs ./docs
+RUN mdbook build docs/
+
+# The Debian runner
+FROM debian:buster-slim as lemmy
+
+# Install libpq for postgres and espeak for captchas
+RUN apt-get update \
+ && apt-get -y install --no-install-recommends espeak postgresql-client libc6 libssl1.1 \
+ && rm -rf /var/lib/apt/lists/*
+
+RUN addgroup --gid 1000 lemmy
+RUN adduser --no-create-home --shell /bin/sh --uid 1000 --gid 1000 lemmy
+
+# Copy resources
+COPY --chown=lemmy:lemmy config/defaults.hjson /config/defaults.hjson
+COPY --chown=lemmy:lemmy --from=builder /app/lemmy_server /app/lemmy
+COPY --chown=lemmy:lemmy --from=docs /app/docs/book/ /app/documentation/
+
+RUN chown lemmy:lemmy /app/lemmy
+USER lemmy
+EXPOSE 8536
+CMD ["/app/lemmy"]
@@ -57,17 +57,14 @@ impl ApubObjectType for Comment {
     })
     .await??;
 
-    let mut maa = collect_non_local_mentions_and_addresses(&self.content, context).await?;
-    let mut ccs = vec![community.actor_id()?];
-    ccs.append(&mut maa.addressed_ccs);
-    ccs.push(get_comment_parent_creator_id(context.pool(), &self).await?);
+    let maa = collect_non_local_mentions(&self, &community, context).await?;
 
     let mut create = Create::new(creator.actor_id.to_owned(), note.into_any_base()?);
     create
       .set_many_contexts(lemmy_context()?)
       .set_id(generate_activity_id(CreateType::Create)?)
       .set_to(public())
-      .set_many_ccs(ccs)
+      .set_many_ccs(maa.ccs.to_owned())
       // Set the mention tags
       .set_many_tags(maa.get_tags()?);
@@ -90,17 +87,14 @@ impl ApubObjectType for Comment {
     })
     .await??;
 
-    let mut maa = collect_non_local_mentions_and_addresses(&self.content, context).await?;
-    let mut ccs = vec![community.actor_id()?];
-    ccs.append(&mut maa.addressed_ccs);
-    ccs.push(get_comment_parent_creator_id(context.pool(), &self).await?);
+    let maa = collect_non_local_mentions(&self, &community, context).await?;
 
     let mut update = Update::new(creator.actor_id.to_owned(), note.into_any_base()?);
     update
       .set_many_contexts(lemmy_context()?)
       .set_id(generate_activity_id(UpdateType::Update)?)
       .set_to(public())
-      .set_many_ccs(ccs)
+      .set_many_ccs(maa.ccs.to_owned())
       // Set the mention tags
       .set_many_tags(maa.get_tags()?);
@@ -295,7 +289,7 @@ impl ApubLikeableType for Comment {
 }
 
 struct MentionsAndAddresses {
-  addressed_ccs: Vec<Url>,
+  ccs: Vec<Url>,
   inboxes: Vec<Url>,
   tags: Vec<Mention>,
 }
@@ -313,23 +307,26 @@ impl MentionsAndAddresses {
 /// This takes a comment, and builds a list of to_addresses, inboxes,
 /// and mention tags, so they know where to be sent to.
 /// Addresses are the users / addresses that go in the cc field.
-async fn collect_non_local_mentions_and_addresses(
-  content: &str,
+async fn collect_non_local_mentions(
+  comment: &Comment,
+  community: &Community,
   context: &LemmyContext,
 ) -> Result<MentionsAndAddresses, LemmyError> {
-  let mut addressed_ccs = vec![];
+  let parent_creator = get_comment_parent_creator(context.pool(), comment).await?;
+  let mut addressed_ccs = vec![community.actor_id()?, parent_creator.actor_id()?];
+  // Note: dont include community inbox here, as we send to it separately with `send_to_community()`
+  let mut inboxes = vec![parent_creator.get_shared_inbox_url()?];
 
   // Add the mention tag
   let mut tags = Vec::new();
 
-  // Get the inboxes for any mentions
-  let mentions = scrape_text_for_mentions(&content)
+  // Get the user IDs for any mentions
+  let mentions = scrape_text_for_mentions(&comment.content)
     .into_iter()
     // Filter only the non-local ones
     .filter(|m| !m.is_local())
     .collect::<Vec<MentionData>>();
 
-  let mut mention_inboxes: Vec<Url> = Vec::new();
   for mention in &mentions {
     // TODO should it be fetching it every time?
     if let Ok(actor_id) = fetch_webfinger_url(mention, context.client()).await {
@@ -337,19 +334,18 @@ async fn collect_non_local_mentions_and_addresses(
       addressed_ccs.push(actor_id.to_owned().to_string().parse()?);
 
       let mention_user = get_or_fetch_and_upsert_user(&actor_id, context, &mut 0).await?;
-      let shared_inbox = mention_user.get_shared_inbox_url()?;
+      inboxes.push(mention_user.get_shared_inbox_url()?);
 
-      mention_inboxes.push(shared_inbox);
       let mut mention_tag = Mention::new();
       mention_tag.set_href(actor_id).set_name(mention.full_name());
       tags.push(mention_tag);
     }
   }
 
-  let inboxes = mention_inboxes.into_iter().unique().collect();
+  let inboxes = inboxes.into_iter().unique().collect();
 
   Ok(MentionsAndAddresses {
-    addressed_ccs,
+    ccs: addressed_ccs,
     inboxes,
     tags,
   })
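The de-duplication at the end of this hunk relies on the `unique()` adapter from the itertools crate. A minimal standalone sketch of that step (the crate imports and example URLs are assumptions for illustration, not part of the diff):

```rust
// Sketch of the inbox de-duplication above: `unique()` keeps the first
// occurrence of each shared-inbox URL, so two mentioned users on the
// same instance result in a single delivery.
use itertools::Itertools;
use url::Url;

fn main() {
    let inboxes = vec![
        Url::parse("https://a.example/inbox").unwrap(),
        Url::parse("https://b.example/inbox").unwrap(),
        Url::parse("https://a.example/inbox").unwrap(),
    ];
    let unique: Vec<Url> = inboxes.into_iter().unique().collect();
    assert_eq!(unique.len(), 2);
}
```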
@@ -357,10 +353,7 @@ async fn collect_non_local_mentions_and_addresses(
 
 /// Returns the apub ID of the user this comment is responding to. Meaning, in case this is a
 /// top-level comment, the creator of the post, otherwise the creator of the parent comment.
-async fn get_comment_parent_creator_id(
-  pool: &DbPool,
-  comment: &Comment,
-) -> Result<Url, LemmyError> {
+async fn get_comment_parent_creator(pool: &DbPool, comment: &Comment) -> Result<User_, LemmyError> {
   let parent_creator_id = if let Some(parent_comment_id) = comment.parent_id {
     let parent_comment =
       blocking(pool, move |conn| Comment::read(conn, parent_comment_id)).await??;

@@ -370,8 +363,7 @@ async fn get_comment_parent_creator_id(
     let parent_post = blocking(pool, move |conn| Post::read(conn, parent_post_id)).await??;
     parent_post.creator_id
   };
-  let parent_creator = blocking(pool, move |conn| User_::read(conn, parent_creator_id)).await??;
-  Ok(parent_creator.actor_id()?)
+  Ok(blocking(pool, move |conn| User_::read(conn, parent_creator_id)).await??)
 }
 
 /// Turns a user id like `@name@example.com` into an apub ID, like `https://example.com/user/name`,
@@ -219,6 +219,13 @@ where
     return Ok(());
   }
 
+  // Don't send anything to ourselves
+  let hostname = Settings::get().get_hostname_without_port()?;
+  let inboxes: Vec<&Url> = inboxes
+    .iter()
+    .filter(|i| i.domain().unwrap() != hostname)
+    .collect();
+
   let activity = activity.into_any_base()?;
   let serialised_activity = serde_json::to_string(&activity)?;
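A standalone sketch of the same self-delivery filter, assuming the `url` crate; `local_hostname` stands in for `Settings::get().get_hostname_without_port()`, and the `.unwrap()` on `domain()` from the hunk is replaced with a safe default here:

```rust
use url::Url;

// Keep only inboxes whose domain differs from our own hostname;
// URLs without a domain are skipped rather than panicking.
fn remote_inboxes<'a>(inboxes: &'a [Url], local_hostname: &str) -> Vec<&'a Url> {
    inboxes
        .iter()
        .filter(|i| i.domain().map_or(false, |d| d != local_hostname))
        .collect()
}

fn main() {
    let inboxes = vec![
        Url::parse("https://lemmy-alpha/inbox").unwrap(),
        Url::parse("https://lemmy-beta/inbox").unwrap(),
    ];
    // Only the lemmy-beta inbox survives when we are lemmy-alpha.
    assert_eq!(remote_inboxes(&inboxes, "lemmy-alpha").len(), 1);
}
```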
@@ -232,7 +239,7 @@ where
   for i in inboxes {
     let message = SendActivityTask {
       activity: serialised_activity.to_owned(),
-      inbox: i,
+      inbox: i.to_owned(),
       actor_id: actor.actor_id()?,
       private_key: actor.private_key().context(location_info!())?,
     };
@@ -100,7 +100,9 @@ pub async fn search_by_apub_id(
     delete_object_locally(&query_url, context).await?;
   }
 
-  build_response(fetch_response?, query_url, recursion_counter, context).await
+  // Necessary because we get a stack overflow using FetchError
+  let fet_res = fetch_response.map_err(|e| LemmyError::from(e.inner))?;
+  build_response(fet_res, query_url, recursion_counter, context).await
 }
 
 async fn build_response(
@@ -61,13 +61,7 @@ pub static APUB_JSON_CONTENT_TYPE: &str = "application/activity+json";
 fn check_is_apub_id_valid(apub_id: &Url) -> Result<(), LemmyError> {
   let settings = Settings::get();
   let domain = apub_id.domain().context(location_info!())?.to_string();
-  let local_instance = settings
-    .hostname
-    .split(':')
-    .collect::<Vec<&str>>()
-    .first()
-    .context(location_info!())?
-    .to_string();
+  let local_instance = settings.get_hostname_without_port()?;
 
   if !settings.federation.enabled {
     return if domain == local_instance {
@@ -1,3 +1,5 @@
+use crate::location_info;
+use anyhow::Context;
 use config::{Config, ConfigError, Environment, File};
 use serde::Deserialize;
 use std::{env, fs, io::Error, net::IpAddr, path::PathBuf, sync::RwLock};
@@ -178,6 +180,21 @@ impl Settings {
     format!("{}://{}", self.get_protocol_string(), self.hostname)
   }
 
+  /// When running the federation test setup in `api_tests/` or `docker/federation`, the `hostname`
+  /// variable will be like `lemmy-alpha:8541`. This method removes the port and returns
+  /// `lemmy-alpha` instead. It has no effect in production.
+  pub fn get_hostname_without_port(&self) -> Result<String, anyhow::Error> {
+    Ok(
+      self
+        .hostname
+        .split(':')
+        .collect::<Vec<&str>>()
+        .first()
+        .context(location_info!())?
+        .to_string(),
+    )
+  }
+
   pub fn save_config_file(data: &str) -> Result<String, Error> {
     fs::write(CONFIG_FILE, data)?;
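A minimal standalone sketch of the port-stripping behavior this helper adds (a plain function for illustration, not the actual `Settings` method):

```rust
// "lemmy-alpha:8541" -> "lemmy-alpha"; a plain hostname like
// "lemmy.ml" passes through unchanged, so production is unaffected.
fn hostname_without_port(hostname: &str) -> &str {
    hostname.split(':').next().unwrap_or(hostname)
}

fn main() {
    assert_eq!(hostname_without_port("lemmy-alpha:8541"), "lemmy-alpha");
    assert_eq!(hostname_without_port("lemmy.ml"), "lemmy.ml");
}
```

The real method returns `Result<String, anyhow::Error>` and reports failure through `location_info!`, as shown in the hunk above; this is the same helper the inbox filter and `check_is_apub_id_valid` now call.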
@@ -53,7 +53,7 @@ fn test_valid_post_title() {
 #[test]
 fn test_slur_filter() {
   let test =
-    "coons test dindu ladyboy tranny retardeds. Capitalized Niggerz. This is a bunch of other safe text.";
+    "faggot test kike tranny cocksucker retardeds. Capitalized Niggerz. This is a bunch of other safe text.";
   let slur_free = "No slurs here";
   assert_eq!(
     remove_slurs(&test),
@@ -63,13 +63,13 @@ fn test_slur_filter() {
 
   let has_slurs_vec = vec![
     "Niggerz",
-    "coons",
-    "dindu",
-    "ladyboy",
+    "cocksucker",
+    "faggot",
+    "kike",
     "retardeds",
     "tranny",
   ];
-  let has_slurs_err_str = "No slurs - Niggerz, coons, dindu, ladyboy, retardeds, tranny";
+  let has_slurs_err_str = "No slurs - Niggerz, cocksucker, faggot, kike, retardeds, tranny";
 
   assert_eq!(slur_check(test), Err(has_slurs_vec));
   assert_eq!(slur_check(slur_free), Ok(()));
@@ -7,7 +7,7 @@ use regex::{Regex, RegexBuilder};
 
 lazy_static! {
   static ref EMAIL_REGEX: Regex = Regex::new(r"^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$").unwrap();
-  static ref SLUR_REGEX: Regex = RegexBuilder::new(r"(fag(g|got|tard)?|maricos?|cock\s?sucker(s|ing)?|\bn(i|1)g(\b|g?(a|er)?(s|z)?)\b|dindu(s?)|mudslime?s?|kikes?|mongoloids?|towel\s*heads?|\bspi(c|k)s?\b|\bchinks?|niglets?|beaners?|\bnips?\b|\bcoons?\b|jungle\s*bunn(y|ies?)|jigg?aboo?s?|\bpakis?\b|rag\s*heads?|gooks?|cunts?|bitch(es|ing|y)?|puss(y|ies?)|twats?|feminazis?|whor(es?|ing)|\bslut(s|t?y)?|\btr(a|@)nn?(y|ies?)|ladyboy(s?)|\b(b|re|r)tard(ed)?s?)").case_insensitive(true).build().unwrap();
+  static ref SLUR_REGEX: Regex = RegexBuilder::new(r"(fag(g|got|tard)?\b|cock\s?sucker(s|ing)?|\bn(i|1)g(\b|g?(a|er)?(s|z)?)\b|mudslime?s?|kikes?|\bspi(c|k)s?\b|\bchinks?|gooks?|bitch(es|ing|y)?|whor(es?|ing)|\btr(a|@)nn?(y|ies?)|\b(b|re|r)tard(ed)?s?)").case_insensitive(true).build().unwrap();
   static ref USERNAME_MATCHES_REGEX: Regex = Regex::new(r"/u/[a-zA-Z][0-9a-zA-Z_]*").unwrap();
   // TODO keep this old one, it didn't work with port well tho
   // static ref MENTIONS_REGEX: Regex = Regex::new(r"@(?P<name>[\w.]+)@(?P<domain>[a-zA-Z0-9._-]+\.[a-zA-Z0-9_-]+)").unwrap();
@@ -1,13 +1,21 @@
 -- Site aggregates
 drop table site_aggregates;
 drop trigger site_aggregates_site on site;
-drop trigger site_aggregates_user on user_;
-drop trigger site_aggregates_post on post;
-drop trigger site_aggregates_comment on comment;
-drop trigger site_aggregates_community on community;
+drop trigger site_aggregates_user_insert on user_;
+drop trigger site_aggregates_user_delete on user_;
+drop trigger site_aggregates_post_insert on post;
+drop trigger site_aggregates_post_delete on post;
+drop trigger site_aggregates_comment_insert on comment;
+drop trigger site_aggregates_comment_delete on comment;
+drop trigger site_aggregates_community_insert on community;
+drop trigger site_aggregates_community_delete on community;
 drop function
   site_aggregates_site,
-  site_aggregates_user,
-  site_aggregates_post,
-  site_aggregates_comment,
-  site_aggregates_community;
+  site_aggregates_user_insert,
+  site_aggregates_user_delete,
+  site_aggregates_post_insert,
+  site_aggregates_post_delete,
+  site_aggregates_comment_insert,
+  site_aggregates_comment_delete,
+  site_aggregates_community_insert,
+  site_aggregates_community_delete;
@@ -10,10 +10,10 @@ create table site_aggregates (
 
 insert into site_aggregates (site_id, users, posts, comments, communities)
   select id as site_id,
-    ( select coalesce(count(*), 0) from user_) as users,
-    ( select coalesce(count(*), 0) from post) as posts,
-    ( select coalesce(count(*), 0) from comment) as comments,
-    ( select coalesce(count(*), 0) from community) as communities
+    ( select coalesce(count(*), 0) from user_ where local = true) as users,
+    ( select coalesce(count(*), 0) from post where local = true) as posts,
+    ( select coalesce(count(*), 0) from comment where local = true) as comments,
+    ( select coalesce(count(*), 0) from community where local = true) as communities
   from site;
 
 -- initial site add
@@ -36,91 +36,134 @@ execute procedure site_aggregates_site();
 
 -- Add site aggregate triggers
 -- user
-create or replace function site_aggregates_user()
+create function site_aggregates_user_insert()
 returns trigger language plpgsql
 as $$
 begin
-  IF (TG_OP = 'INSERT') THEN
-    update site_aggregates
-    set users = users + 1;
-  ELSIF (TG_OP = 'DELETE') THEN
-    -- Join to site since the creator might not be there anymore
-    update site_aggregates sa
-    set users = users - 1
-    from site s
-    where sa.site_id = s.id;
-  END IF;
+  update site_aggregates
+  set users = users + 1;
   return null;
 end $$;
 
-create trigger site_aggregates_user
-after insert or delete on user_
+create function site_aggregates_user_delete()
+returns trigger language plpgsql
+as $$
+begin
+  -- Join to site since the creator might not be there anymore
+  update site_aggregates sa
+  set users = users - 1
+  from site s
+  where sa.site_id = s.id;
+  return null;
+end $$;
+
+create trigger site_aggregates_user_insert
+after insert on user_
 for each row
-execute procedure site_aggregates_user();
+when (NEW.local = true)
+execute procedure site_aggregates_user_insert();
+
+create trigger site_aggregates_user_delete
+after delete on user_
+for each row
+when (OLD.local = true)
+execute procedure site_aggregates_user_delete();
 
 -- post
-create function site_aggregates_post()
+create function site_aggregates_post_insert()
 returns trigger language plpgsql
 as $$
 begin
-  IF (TG_OP = 'INSERT') THEN
-    update site_aggregates
-    set posts = posts + 1;
-  ELSIF (TG_OP = 'DELETE') THEN
-    update site_aggregates sa
-    set posts = posts - 1
-    from site s
-    where sa.site_id = s.id;
-  END IF;
+  update site_aggregates
+  set posts = posts + 1;
   return null;
 end $$;
 
-create trigger site_aggregates_post
-after insert or delete on post
+create function site_aggregates_post_delete()
+returns trigger language plpgsql
+as $$
+begin
+  update site_aggregates sa
+  set posts = posts - 1
+  from site s
+  where sa.site_id = s.id;
+  return null;
+end $$;
+
+create trigger site_aggregates_post_insert
+after insert on post
 for each row
-execute procedure site_aggregates_post();
+when (NEW.local = true)
+execute procedure site_aggregates_post_insert();
+
+create trigger site_aggregates_post_delete
+after delete on post
+for each row
+when (OLD.local = true)
+execute procedure site_aggregates_post_delete();
 
 -- comment
-create function site_aggregates_comment()
+create function site_aggregates_comment_insert()
 returns trigger language plpgsql
 as $$
 begin
-  IF (TG_OP = 'INSERT') THEN
-    update site_aggregates
-    set comments = comments + 1;
-  ELSIF (TG_OP = 'DELETE') THEN
-    update site_aggregates sa
-    set comments = comments - 1
-    from site s
-    where sa.site_id = s.id;
-  END IF;
+  update site_aggregates
+  set comments = comments + 1;
   return null;
 end $$;
 
-create trigger site_aggregates_comment
-after insert or delete on comment
+create function site_aggregates_comment_delete()
+returns trigger language plpgsql
+as $$
+begin
+  update site_aggregates sa
+  set comments = comments - 1
+  from site s
+  where sa.site_id = s.id;
+  return null;
+end $$;
+
+create trigger site_aggregates_comment_insert
+after insert on comment
 for each row
-execute procedure site_aggregates_comment();
+when (NEW.local = true)
+execute procedure site_aggregates_comment_insert();
+
+create trigger site_aggregates_comment_delete
+after delete on comment
+for each row
+when (OLD.local = true)
+execute procedure site_aggregates_comment_delete();
 
 -- community
-create function site_aggregates_community()
+create function site_aggregates_community_insert()
 returns trigger language plpgsql
 as $$
 begin
-  IF (TG_OP = 'INSERT') THEN
-    update site_aggregates
-    set communities = communities + 1;
-  ELSIF (TG_OP = 'DELETE') THEN
-    update site_aggregates sa
-    set communities = communities - 1
-    from site s
-    where sa.site_id = s.id;
-  END IF;
+  update site_aggregates
+  set communities = communities + 1;
   return null;
 end $$;
 
-create trigger site_aggregates_community
-after insert or delete on community
-for each row
-execute procedure site_aggregates_community();
+create function site_aggregates_community_delete()
+returns trigger language plpgsql
+as $$
+begin
+  update site_aggregates sa
+  set communities = communities - 1
+  from site s
+  where sa.site_id = s.id;
+  return null;
+end $$;
+
+create trigger site_aggregates_community_insert
+after insert on community
+for each row
+when (NEW.local = true)
+execute procedure site_aggregates_community_insert();
+
+create trigger site_aggregates_community_delete
+after delete on community
+for each row
+when (OLD.local = true)
+execute procedure site_aggregates_community_delete();
@@ -66,10 +66,13 @@ as $$
 begin
   IF (TG_OP = 'INSERT') THEN
     update post_aggregates pa
-    set comments = comments + 1,
-    newest_comment_time = NEW.published
-    where pa.post_id = NEW.post_id
+    set comments = comments + 1
+    where pa.post_id = NEW.post_id;
     -- A 2 day necro-bump limit
+    update post_aggregates pa
+    set newest_comment_time = NEW.published
+    where pa.post_id = NEW.post_id
     and published > ('now'::timestamp - '2 days'::interval);
   ELSIF (TG_OP = 'DELETE') THEN
     -- Join to post because that post may not exist anymore
scripts/compilation_benchmark.sh (new executable file, 23 lines)
@@ -0,0 +1,23 @@
+#!/bin/bash
+set -e
+
+times=3
+duration=0
+for ((i=0; i < times; i++)) ; do
+  echo "Starting iteration $i"
+  echo "cargo clean"
+  # to benchmark incremental compilation time, do a full build with the same compiler version first,
+  # and use the following clean command:
+  #cargo clean -p lemmy_utils
+  cargo clean
+  echo "cargo build"
+  start=$(date +%s.%N)
+  RUSTC_WRAPPER='' cargo +1.47.0 build -q
+  end=$(date +%s.%N)
+  echo "Finished iteration $i after $(bc <<< "scale=0; $end - $start") seconds"
+  duration=$(bc <<< "$duration + $end - $start")
+done
+
+average=$(bc <<< "scale=0; $duration / $times")
+
+echo "Average compilation time over $times runs is $average seconds"
@@ -46,12 +46,13 @@ async fn node_info(context: web::Data<LemmyContext>) -> Result<HttpResponse, Err
     },
     protocols,
     usage: NodeInfoUsage {
-      // TODO get these again
-      users: NodeInfoUsers { total: 0 },
-      local_posts: 0,
-      local_comments: 0,
-      open_registrations: site_view.site.open_registration,
+      users: NodeInfoUsers {
+        total: site_view.counts.users,
+      },
+      local_posts: site_view.counts.posts,
+      local_comments: site_view.counts.comments,
     },
+    open_registrations: site_view.site.open_registration,
   };
 
   Ok(HttpResponse::Ok().json(json))
@@ -69,11 +70,13 @@ struct NodeInfoWellKnownLinks {
 }
 
 #[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "camelCase")]
 struct NodeInfo {
   pub version: String,
   pub software: NodeInfoSoftware,
   pub protocols: Vec<String>,
   pub usage: NodeInfoUsage,
+  pub open_registrations: bool,
 }
 
 #[derive(Serialize, Deserialize, Debug)]
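The `rename_all = "camelCase"` attribute matters here for protocol compatibility: NodeInfo 2.0 expects a top-level `openRegistrations` key. A small sketch of its effect on the new field (the `Example` struct is an illustration, assuming `serde` and `serde_json` as dependencies):

```rust
use serde::Serialize;

// With rename_all = "camelCase", the snake_case Rust field
// `open_registrations` serializes as `openRegistrations`.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Example {
    open_registrations: bool,
}

fn main() {
    let json = serde_json::to_string(&Example { open_registrations: true }).unwrap();
    assert_eq!(json, r#"{"openRegistrations":true}"#);
}
```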
@@ -88,7 +91,6 @@ struct NodeInfoUsage {
   pub users: NodeInfoUsers,
   pub local_posts: i64,
   pub local_comments: i64,
-  pub open_registrations: bool,
 }
 
 #[derive(Serialize, Deserialize, Debug)]