Merge remote-tracking branch 'origin/main' into combined_tables_2

Dessalines 2024-12-04 12:39:59 -05:00
commit e1affa8696
7 changed files with 98 additions and 36 deletions


@@ -2,6 +2,10 @@
 # See https://github.com/woodpecker-ci/woodpecker/issues/1677

 variables:
+  # When updating the rust version here, be sure to update versions in `docker/Dockerfile`
+  # as well. Otherwise release builds can fail if Lemmy or dependencies rely on new Rust
+  # features. In particular the ARM builder image needs to be updated manually in the repo below:
+  # https://github.com/raskyld/lemmy-cross-toolchains
   - &rust_image "rust:1.81"
   - &rust_nightly_image "rustlang/rust:nightly"
   - &install_pnpm "corepack enable pnpm"

Cargo.lock (generated)

@@ -779,6 +779,17 @@ dependencies = [
  "nom",
 ]

+[[package]]
+name = "cfb"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d38f2da7a0a2c4ccf0065be06397cc26a81f4e528be095826eee9d4adbb8c60f"
+dependencies = [
+ "byteorder",
+ "fnv",
+ "uuid",
+]
+
 [[package]]
 name = "cfg-if"
 version = "1.0.0"
@@ -2347,6 +2358,15 @@ dependencies = [
  "serde",
 ]

+[[package]]
+name = "infer"
+version = "0.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc150e5ce2330295b8616ce0e3f53250e53af31759a9dbedad1621ba29151847"
+dependencies = [
+ "cfb",
+]
+
 [[package]]
 name = "inout"
 version = "0.1.3"
@@ -2507,6 +2527,7 @@ dependencies = [
  "encoding_rs",
  "enum-map",
  "futures",
+ "infer",
  "jsonwebtoken",
  "lemmy_db_schema",
  "lemmy_db_views",
@@ -2879,7 +2900,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
 dependencies = [
  "cfg-if",
- "windows-targets 0.48.5",
+ "windows-targets 0.52.6",
 ]

 [[package]]


@@ -35,7 +35,7 @@ import {
   userBlockInstance,
 } from "./shared";
 import { AdminAllowInstanceParams } from "lemmy-js-client/dist/types/AdminAllowInstanceParams";
-import { EditCommunity, EditSite } from "lemmy-js-client";
+import { EditCommunity, EditSite, GetPosts } from "lemmy-js-client";

 beforeAll(setupLogins);
 afterAll(unfollows);
@@ -576,3 +576,29 @@ test("Remote mods can edit communities", async () => {
     "Example description",
   );
 });
+
+test("Community name with non-ascii chars", async () => {
+  const name = "овае_ядосва" + Math.random().toString().slice(2, 6);
+  let communityRes = await createCommunity(alpha, name);
+
+  let betaCommunity1 = await resolveCommunity(
+    beta,
+    communityRes.community_view.community.actor_id,
+  );
+  expect(betaCommunity1.community!.community.name).toBe(name);
+  let alphaCommunity2 = await getCommunityByName(alpha, name);
+  expect(alphaCommunity2.community_view.community.name).toBe(name);
+
+  let fediName = `${communityRes.community_view.community.name}@LEMMY-ALPHA:8541`;
+  let betaCommunity2 = await getCommunityByName(beta, fediName);
+  expect(betaCommunity2.community_view.community.name).toBe(name);
+
+  let postRes = await createPost(beta, betaCommunity1.community!.community.id);
+  let form: GetPosts = {
+    community_name: fediName,
+  };
+  let posts = await beta.getPosts(form);
+  expect(posts.posts[0].post.name).toBe(postRes.post_view.post.name);
+});


@@ -22,6 +22,7 @@ import {
   alphaImage,
   unfollows,
   saveUserSettingsBio,
+  getPersonDetails,
 } from "./shared";
 import {
   EditSite,
@@ -136,11 +137,9 @@ test("Requests with invalid auth should be treated as unauthenticated", async ()
 });

 test("Create user with Arabic name", async () => {
-  let user = await registerUser(
-    alpha,
-    alphaUrl,
-    "تجريب" + Math.random().toString().slice(2, 10), // less than actor_name_max_length
-  );
+  // less than actor_name_max_length
+  const name = "تجريب" + Math.random().toString().slice(2, 10);
+  let user = await registerUser(alpha, alphaUrl, name);

   let site = await getSite(user);
   expect(site.my_user).toBeDefined();
@@ -149,8 +148,11 @@ test("Create user with Arabic name", async () => {
   }
   apShortname = `${site.my_user.local_user_view.person.name}@lemmy-alpha:8541`;

-  let alphaPerson = (await resolvePerson(alpha, apShortname)).person;
-  expect(alphaPerson).toBeDefined();
+  let betaPerson1 = (await resolvePerson(beta, apShortname)).person;
+  expect(betaPerson1!.person.name).toBe(name);
+
+  let betaPerson2 = await getPersonDetails(beta, betaPerson1!.person.id);
+  expect(betaPerson2!.person_view.person.name).toBe(name);
 });

 test("Create user with accept-language", async () => {


@@ -66,6 +66,7 @@ enum-map = { workspace = true }
 urlencoding = { workspace = true }
 mime = { version = "0.3.17", optional = true }
 mime_guess = "2.0.5"
+infer = "0.16.0"
 webpage = { version = "2.0", default-features = false, features = [
   "serde",
 ], optional = true }
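The new `infer` dependency identifies a file's type from its magic bytes instead of trusting the server's Content-Type header or the URL's file extension. A minimal sketch of how that crate is typically used (not code from this commit; `sniff_mime` is a hypothetical helper name), assuming infer 0.16:

// Hypothetical helper, not part of this diff: sniff a mime type from raw bytes.
fn sniff_mime(bytes: &[u8]) -> Option<&'static str> {
  // infer::get matches the buffer against known magic numbers and returns
  // Some(Type) on a hit; Type::mime_type gives the canonical mime string.
  infer::get(bytes).map(|t| t.mime_type())
}

fn main() {
  // The 8-byte PNG signature is enough for infer to recognize the format.
  let png_header = [0x89, b'P', b'N', b'G', 0x0D, 0x0A, 0x1A, 0x0A];
  assert_eq!(sniff_mime(&png_header), Some("image/png"));
}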


@@ -23,6 +23,7 @@ use lemmy_utils::{
   REQWEST_TIMEOUT,
   VERSION,
 };
+use mime::{Mime, TEXT_HTML};
 use reqwest::{
   header::{CONTENT_TYPE, RANGE},
   Client,
@@ -63,47 +64,54 @@ pub async fn fetch_link_metadata(url: &Url, context: &LemmyContext) -> LemmyResu
     .await?
     .error_for_status()?;

-  // In some cases servers send a wrong mime type for images, which prevents thumbnail
-  // generation. To avoid this we also try to guess the mime type from file extension.
-  let content_type = mime_guess::from_path(url.path())
-    .first()
-    // If you can guess that its an image type, then return that first.
-    .filter(|guess| guess.type_() == mime::IMAGE)
-    // Otherwise, get the content type from the headers
-    .or(
-      response
-        .headers()
-        .get(CONTENT_TYPE)
-        .and_then(|h| h.to_str().ok())
-        .and_then(|h| h.parse().ok()),
-    );
+  let mut content_type: Option<Mime> = response
+    .headers()
+    .get(CONTENT_TYPE)
+    .and_then(|h| h.to_str().ok())
+    .and_then(|h| h.parse().ok())
+    // If we don't get a content_type from the response (e.g. if the server is down),
+    // then try to infer the content_type from the file extension.
+    .or(mime_guess::from_path(url.path()).first());

   let opengraph_data = {
-    // if the content type is not text/html, we don't need to parse it
     let is_html = content_type
       .as_ref()
       .map(|c| {
-        (c.type_() == mime::TEXT && c.subtype() == mime::HTML)
-          ||
-          // application/xhtml+xml is a subset of HTML
-          (c.type_() == mime::APPLICATION && c.subtype() == "xhtml")
+        // application/xhtml+xml is a subset of HTML
+        let application_xhtml: Mime = "application/xhtml+xml".parse::<Mime>().unwrap_or(TEXT_HTML);
+        let allowed_mime_types = [TEXT_HTML.essence_str(), application_xhtml.essence_str()];
+        allowed_mime_types.contains(&c.essence_str())
       })
-      .unwrap_or(false);
-    if !is_html {
-      Default::default()
-    } else {
+      .unwrap_or_default();
+
+    if is_html {
       // Can't use .text() here, because it only checks the content header, not the actual bytes
       // https://github.com/LemmyNet/lemmy/issues/1964
-      // So we want to do deep inspection of the actually returned bytes but need to be careful not
-      // spend too much time parsing binary data as HTML
+      // So we want to do deep inspection of the actually returned bytes but need to be careful
+      // not spend too much time parsing binary data as HTML

       // only take first bytes regardless of how many bytes the server returns
       let html_bytes = collect_bytes_until_limit(response, bytes_to_fetch).await?;
       extract_opengraph_data(&html_bytes, url)
         .map_err(|e| info!("{e}"))
         .unwrap_or_default()
+    } else {
+      let is_octet_type = content_type
+        .as_ref()
+        .map(|c| c.subtype() == "octet-stream")
+        .unwrap_or_default();
+
+      // Overwrite the content type if its an octet type
+      if is_octet_type {
+        // Don't need to fetch as much data for this as we do with opengraph
+        let octet_bytes = collect_bytes_until_limit(response, 512).await?;
+        content_type =
+          infer::get(&octet_bytes).map_or(content_type, |t| t.mime_type().parse().ok());
+      }
+
+      Default::default()
    }
   };

   Ok(LinkMetadata {
     opengraph_data,
     content_type: content_type.map(|c| c.to_string()),
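Taken together, the new lookup order is: use the Content-Type header when it parses, fall back to guessing from the URL's file extension, and, when the server only reports application/octet-stream, sniff the first 512 bytes with infer. A standalone sketch of that order follows (a hypothetical `resolve_content_type` helper, not the function from this diff; it assumes the mime, mime_guess, infer, and url crates are available):

use mime::Mime;
use url::Url;

// Hypothetical helper mirroring the fallback order described above.
fn resolve_content_type(header: Option<&str>, url: &Url, first_bytes: &[u8]) -> Option<Mime> {
  // 1. Prefer the Content-Type header, if it parses as a valid mime type.
  let mut content_type: Option<Mime> = header
    .and_then(|h| h.parse().ok())
    // 2. Otherwise guess from the file extension in the URL path.
    .or_else(|| mime_guess::from_path(url.path()).first());

  // 3. "application/octet-stream" carries no real information, so replace it
  //    with whatever the magic bytes say, when infer recognizes them.
  let is_octet_type = content_type
    .as_ref()
    .map(|c| c.subtype() == "octet-stream")
    .unwrap_or_default();
  if is_octet_type {
    content_type = infer::get(first_bytes)
      .and_then(|t| t.mime_type().parse().ok())
      .or(content_type);
  }

  content_type
}

Called with the header value, the request URL, and the first few hundred bytes of the body, this reproduces the resolution behavior of the rewritten block above.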


@@ -5,7 +5,7 @@ ARG RUST_RELEASE_MODE=debug

 ARG AMD_BUILDER_IMAGE=rust:${RUST_VERSION}
 # Repo: https://github.com/raskyld/lemmy-cross-toolchains
-ARG ARM_BUILDER_IMAGE="ghcr.io/raskyld/aarch64-lemmy-linux-gnu:v0.4.0"
+ARG ARM_BUILDER_IMAGE="ghcr.io/raskyld/aarch64-lemmy-linux-gnu:v0.5.0"

 ARG AMD_RUNNER_IMAGE=debian:bookworm-slim
 ARG ARM_RUNNER_IMAGE=debian:bookworm-slim