Merge branch 'main' into fix-import-blocked-objects

Dessalines 2024-05-14 22:45:59 -04:00 committed by GitHub
commit 75cadd8e2e
31 changed files with 914 additions and 595 deletions

Cargo.lock (generated): 605 lines changed. File diff suppressed because it is too large.


@@ -1,5 +1,5 @@
 [workspace.package]
-version = "0.19.4-beta.6"
+version = "0.19.4-beta.7"
 edition = "2021"
 description = "A link aggregator for the fediverse"
 license = "AGPL-3.0"
@@ -88,18 +88,18 @@ unused_self = "deny"
 unwrap_used = "deny"
 [workspace.dependencies]
-lemmy_api = { version = "=0.19.4-beta.6", path = "./crates/api" }
+lemmy_api = { version = "=0.19.4-beta.7", path = "./crates/api" }
-lemmy_api_crud = { version = "=0.19.4-beta.6", path = "./crates/api_crud" }
+lemmy_api_crud = { version = "=0.19.4-beta.7", path = "./crates/api_crud" }
-lemmy_apub = { version = "=0.19.4-beta.6", path = "./crates/apub" }
+lemmy_apub = { version = "=0.19.4-beta.7", path = "./crates/apub" }
-lemmy_utils = { version = "=0.19.4-beta.6", path = "./crates/utils", default-features = false }
+lemmy_utils = { version = "=0.19.4-beta.7", path = "./crates/utils", default-features = false }
-lemmy_db_schema = { version = "=0.19.4-beta.6", path = "./crates/db_schema" }
+lemmy_db_schema = { version = "=0.19.4-beta.7", path = "./crates/db_schema" }
-lemmy_api_common = { version = "=0.19.4-beta.6", path = "./crates/api_common" }
+lemmy_api_common = { version = "=0.19.4-beta.7", path = "./crates/api_common" }
-lemmy_routes = { version = "=0.19.4-beta.6", path = "./crates/routes" }
+lemmy_routes = { version = "=0.19.4-beta.7", path = "./crates/routes" }
-lemmy_db_views = { version = "=0.19.4-beta.6", path = "./crates/db_views" }
+lemmy_db_views = { version = "=0.19.4-beta.7", path = "./crates/db_views" }
-lemmy_db_views_actor = { version = "=0.19.4-beta.6", path = "./crates/db_views_actor" }
+lemmy_db_views_actor = { version = "=0.19.4-beta.7", path = "./crates/db_views_actor" }
-lemmy_db_views_moderator = { version = "=0.19.4-beta.6", path = "./crates/db_views_moderator" }
+lemmy_db_views_moderator = { version = "=0.19.4-beta.7", path = "./crates/db_views_moderator" }
-lemmy_federate = { version = "=0.19.4-beta.6", path = "./crates/federate" }
+lemmy_federate = { version = "=0.19.4-beta.7", path = "./crates/federate" }
-activitypub_federation = { version = "0.5.5", default-features = false, features = [
+activitypub_federation = { version = "0.5.6", default-features = false, features = [
   "actix-web",
 ] }
 diesel = "2.1.6"


@ -13,13 +13,13 @@ importers:
version: 29.5.12 version: 29.5.12
'@types/node': '@types/node':
specifier: ^20.12.4 specifier: ^20.12.4
version: 20.12.7 version: 20.12.4
'@typescript-eslint/eslint-plugin': '@typescript-eslint/eslint-plugin':
specifier: ^7.5.0 specifier: ^7.5.0
version: 7.5.0(@typescript-eslint/parser@7.5.0(eslint@8.57.0)(typescript@5.4.5))(eslint@8.57.0)(typescript@5.4.5) version: 7.5.0(@typescript-eslint/parser@7.5.0)(eslint@8.57.0)(typescript@5.4.4)
'@typescript-eslint/parser': '@typescript-eslint/parser':
specifier: ^7.5.0 specifier: ^7.5.0
version: 7.5.0(eslint@8.57.0)(typescript@5.4.5) version: 7.5.0(eslint@8.57.0)(typescript@5.4.4)
download-file-sync: download-file-sync:
specifier: ^1.0.4 specifier: ^1.0.4
version: 1.0.4 version: 1.0.4
@ -31,7 +31,7 @@ importers:
version: 5.1.3(eslint@8.57.0)(prettier@3.2.5) version: 5.1.3(eslint@8.57.0)(prettier@3.2.5)
jest: jest:
specifier: ^29.5.0 specifier: ^29.5.0
version: 29.7.0(@types/node@20.12.7) version: 29.7.0(@types/node@20.12.4)
lemmy-js-client: lemmy-js-client:
specifier: 0.19.4-alpha.18 specifier: 0.19.4-alpha.18
version: 0.19.4-alpha.18 version: 0.19.4-alpha.18
@ -40,10 +40,10 @@ importers:
version: 3.2.5 version: 3.2.5
ts-jest: ts-jest:
specifier: ^29.1.0 specifier: ^29.1.0
version: 29.1.2(@babel/core@7.23.9)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.23.9))(jest@29.7.0(@types/node@20.12.7))(typescript@5.4.5) version: 29.1.2(@babel/core@7.23.9)(jest@29.7.0)(typescript@5.4.4)
typescript: typescript:
specifier: ^5.4.4 specifier: ^5.4.4
version: 5.4.5 version: 5.4.4
packages: packages:
@ -398,8 +398,8 @@ packages:
'@types/json-schema@7.0.15': '@types/json-schema@7.0.15':
resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
'@types/node@20.12.7': '@types/node@20.12.4':
resolution: {integrity: sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==} resolution: {integrity: sha512-E+Fa9z3wSQpzgYQdYmme5X3OTuejnnTx88A6p6vkkJosR3KBz+HpE3kqNm98VE6cfLFcISx7zW7MsJkH6KwbTw==}
'@types/semver@7.5.8': '@types/semver@7.5.8':
resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==} resolution: {integrity: sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==}
@ -1539,8 +1539,8 @@ packages:
resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==}
engines: {node: '>=10'} engines: {node: '>=10'}
typescript@5.4.5: typescript@5.4.4:
resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==} resolution: {integrity: sha512-dGE2Vv8cpVvw28v8HCPqyb08EzbBURxDpuhJvTrusShUfGnhHBafDsLdS1EhhxyL6BJQE+2cT3dDPAv+MQ6oLw==}
engines: {node: '>=14.17'} engines: {node: '>=14.17'}
hasBin: true hasBin: true
@ -1857,7 +1857,7 @@ snapshots:
'@jest/console@29.7.0': '@jest/console@29.7.0':
dependencies: dependencies:
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/node': 20.12.7 '@types/node': 20.12.4
chalk: 4.1.2 chalk: 4.1.2
jest-message-util: 29.7.0 jest-message-util: 29.7.0
jest-util: 29.7.0 jest-util: 29.7.0
@ -1870,14 +1870,14 @@ snapshots:
'@jest/test-result': 29.7.0 '@jest/test-result': 29.7.0
'@jest/transform': 29.7.0 '@jest/transform': 29.7.0
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/node': 20.12.7 '@types/node': 20.12.4
ansi-escapes: 4.3.2 ansi-escapes: 4.3.2
chalk: 4.1.2 chalk: 4.1.2
ci-info: 3.9.0 ci-info: 3.9.0
exit: 0.1.2 exit: 0.1.2
graceful-fs: 4.2.11 graceful-fs: 4.2.11
jest-changed-files: 29.7.0 jest-changed-files: 29.7.0
jest-config: 29.7.0(@types/node@20.12.7) jest-config: 29.7.0(@types/node@20.12.4)
jest-haste-map: 29.7.0 jest-haste-map: 29.7.0
jest-message-util: 29.7.0 jest-message-util: 29.7.0
jest-regex-util: 29.6.3 jest-regex-util: 29.6.3
@ -1902,7 +1902,7 @@ snapshots:
dependencies: dependencies:
'@jest/fake-timers': 29.7.0 '@jest/fake-timers': 29.7.0
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/node': 20.12.7 '@types/node': 20.12.4
jest-mock: 29.7.0 jest-mock: 29.7.0
'@jest/expect-utils@29.7.0': '@jest/expect-utils@29.7.0':
@ -1920,7 +1920,7 @@ snapshots:
dependencies: dependencies:
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@sinonjs/fake-timers': 10.3.0 '@sinonjs/fake-timers': 10.3.0
'@types/node': 20.12.7 '@types/node': 20.12.4
jest-message-util: 29.7.0 jest-message-util: 29.7.0
jest-mock: 29.7.0 jest-mock: 29.7.0
jest-util: 29.7.0 jest-util: 29.7.0
@ -1942,7 +1942,7 @@ snapshots:
'@jest/transform': 29.7.0 '@jest/transform': 29.7.0
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@jridgewell/trace-mapping': 0.3.22 '@jridgewell/trace-mapping': 0.3.22
'@types/node': 20.12.7 '@types/node': 20.12.4
chalk: 4.1.2 chalk: 4.1.2
collect-v8-coverage: 1.0.2 collect-v8-coverage: 1.0.2
exit: 0.1.2 exit: 0.1.2
@ -2012,7 +2012,7 @@ snapshots:
'@jest/schemas': 29.6.3 '@jest/schemas': 29.6.3
'@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-lib-coverage': 2.0.6
'@types/istanbul-reports': 3.0.4 '@types/istanbul-reports': 3.0.4
'@types/node': 20.12.7 '@types/node': 20.12.4
'@types/yargs': 17.0.32 '@types/yargs': 17.0.32
chalk: 4.1.2 chalk: 4.1.2
@ -2080,7 +2080,7 @@ snapshots:
'@types/graceful-fs@4.1.9': '@types/graceful-fs@4.1.9':
dependencies: dependencies:
'@types/node': 20.12.7 '@types/node': 20.12.4
'@types/istanbul-lib-coverage@2.0.6': {} '@types/istanbul-lib-coverage@2.0.6': {}
@ -2099,7 +2099,7 @@ snapshots:
'@types/json-schema@7.0.15': {} '@types/json-schema@7.0.15': {}
'@types/node@20.12.7': '@types/node@20.12.4':
dependencies: dependencies:
undici-types: 5.26.5 undici-types: 5.26.5
@ -2113,13 +2113,13 @@ snapshots:
dependencies: dependencies:
'@types/yargs-parser': 21.0.3 '@types/yargs-parser': 21.0.3
'@typescript-eslint/eslint-plugin@7.5.0(@typescript-eslint/parser@7.5.0(eslint@8.57.0)(typescript@5.4.5))(eslint@8.57.0)(typescript@5.4.5)': '@typescript-eslint/eslint-plugin@7.5.0(@typescript-eslint/parser@7.5.0)(eslint@8.57.0)(typescript@5.4.4)':
dependencies: dependencies:
'@eslint-community/regexpp': 4.10.0 '@eslint-community/regexpp': 4.10.0
'@typescript-eslint/parser': 7.5.0(eslint@8.57.0)(typescript@5.4.5) '@typescript-eslint/parser': 7.5.0(eslint@8.57.0)(typescript@5.4.4)
'@typescript-eslint/scope-manager': 7.5.0 '@typescript-eslint/scope-manager': 7.5.0
'@typescript-eslint/type-utils': 7.5.0(eslint@8.57.0)(typescript@5.4.5) '@typescript-eslint/type-utils': 7.5.0(eslint@8.57.0)(typescript@5.4.4)
'@typescript-eslint/utils': 7.5.0(eslint@8.57.0)(typescript@5.4.5) '@typescript-eslint/utils': 7.5.0(eslint@8.57.0)(typescript@5.4.4)
'@typescript-eslint/visitor-keys': 7.5.0 '@typescript-eslint/visitor-keys': 7.5.0
debug: 4.3.4 debug: 4.3.4
eslint: 8.57.0 eslint: 8.57.0
@ -2127,22 +2127,20 @@ snapshots:
ignore: 5.3.1 ignore: 5.3.1
natural-compare: 1.4.0 natural-compare: 1.4.0
semver: 7.6.0 semver: 7.6.0
ts-api-utils: 1.3.0(typescript@5.4.5) ts-api-utils: 1.3.0(typescript@5.4.4)
optionalDependencies: typescript: 5.4.4
typescript: 5.4.5
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
'@typescript-eslint/parser@7.5.0(eslint@8.57.0)(typescript@5.4.5)': '@typescript-eslint/parser@7.5.0(eslint@8.57.0)(typescript@5.4.4)':
dependencies: dependencies:
'@typescript-eslint/scope-manager': 7.5.0 '@typescript-eslint/scope-manager': 7.5.0
'@typescript-eslint/types': 7.5.0 '@typescript-eslint/types': 7.5.0
'@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.5) '@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.4)
'@typescript-eslint/visitor-keys': 7.5.0 '@typescript-eslint/visitor-keys': 7.5.0
debug: 4.3.4 debug: 4.3.4
eslint: 8.57.0 eslint: 8.57.0
optionalDependencies: typescript: 5.4.4
typescript: 5.4.5
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
@ -2151,21 +2149,20 @@ snapshots:
'@typescript-eslint/types': 7.5.0 '@typescript-eslint/types': 7.5.0
'@typescript-eslint/visitor-keys': 7.5.0 '@typescript-eslint/visitor-keys': 7.5.0
'@typescript-eslint/type-utils@7.5.0(eslint@8.57.0)(typescript@5.4.5)': '@typescript-eslint/type-utils@7.5.0(eslint@8.57.0)(typescript@5.4.4)':
dependencies: dependencies:
'@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.5) '@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.4)
'@typescript-eslint/utils': 7.5.0(eslint@8.57.0)(typescript@5.4.5) '@typescript-eslint/utils': 7.5.0(eslint@8.57.0)(typescript@5.4.4)
debug: 4.3.4 debug: 4.3.4
eslint: 8.57.0 eslint: 8.57.0
ts-api-utils: 1.3.0(typescript@5.4.5) ts-api-utils: 1.3.0(typescript@5.4.4)
optionalDependencies: typescript: 5.4.4
typescript: 5.4.5
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
'@typescript-eslint/types@7.5.0': {} '@typescript-eslint/types@7.5.0': {}
'@typescript-eslint/typescript-estree@7.5.0(typescript@5.4.5)': '@typescript-eslint/typescript-estree@7.5.0(typescript@5.4.4)':
dependencies: dependencies:
'@typescript-eslint/types': 7.5.0 '@typescript-eslint/types': 7.5.0
'@typescript-eslint/visitor-keys': 7.5.0 '@typescript-eslint/visitor-keys': 7.5.0
@ -2174,20 +2171,19 @@ snapshots:
is-glob: 4.0.3 is-glob: 4.0.3
minimatch: 9.0.3 minimatch: 9.0.3
semver: 7.6.0 semver: 7.6.0
ts-api-utils: 1.3.0(typescript@5.4.5) ts-api-utils: 1.3.0(typescript@5.4.4)
optionalDependencies: typescript: 5.4.4
typescript: 5.4.5
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
'@typescript-eslint/utils@7.5.0(eslint@8.57.0)(typescript@5.4.5)': '@typescript-eslint/utils@7.5.0(eslint@8.57.0)(typescript@5.4.4)':
dependencies: dependencies:
'@eslint-community/eslint-utils': 4.4.0(eslint@8.57.0) '@eslint-community/eslint-utils': 4.4.0(eslint@8.57.0)
'@types/json-schema': 7.0.15 '@types/json-schema': 7.0.15
'@types/semver': 7.5.8 '@types/semver': 7.5.8
'@typescript-eslint/scope-manager': 7.5.0 '@typescript-eslint/scope-manager': 7.5.0
'@typescript-eslint/types': 7.5.0 '@typescript-eslint/types': 7.5.0
'@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.5) '@typescript-eslint/typescript-estree': 7.5.0(typescript@5.4.4)
eslint: 8.57.0 eslint: 8.57.0
semver: 7.6.0 semver: 7.6.0
transitivePeerDependencies: transitivePeerDependencies:
@ -2378,13 +2374,13 @@ snapshots:
convert-source-map@2.0.0: {} convert-source-map@2.0.0: {}
create-jest@29.7.0(@types/node@20.12.7): create-jest@29.7.0(@types/node@20.12.4):
dependencies: dependencies:
'@jest/types': 29.6.3 '@jest/types': 29.6.3
chalk: 4.1.2 chalk: 4.1.2
exit: 0.1.2 exit: 0.1.2
graceful-fs: 4.2.11 graceful-fs: 4.2.11
jest-config: 29.7.0(@types/node@20.12.7) jest-config: 29.7.0(@types/node@20.12.4)
jest-util: 29.7.0 jest-util: 29.7.0
prompts: 2.4.2 prompts: 2.4.2
transitivePeerDependencies: transitivePeerDependencies:
@ -2751,7 +2747,7 @@ snapshots:
'@jest/expect': 29.7.0 '@jest/expect': 29.7.0
'@jest/test-result': 29.7.0 '@jest/test-result': 29.7.0
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/node': 20.12.7 '@types/node': 20.12.4
chalk: 4.1.2 chalk: 4.1.2
co: 4.6.0 co: 4.6.0
dedent: 1.5.1 dedent: 1.5.1
@ -2771,16 +2767,16 @@ snapshots:
- babel-plugin-macros - babel-plugin-macros
- supports-color - supports-color
jest-cli@29.7.0(@types/node@20.12.7): jest-cli@29.7.0(@types/node@20.12.4):
dependencies: dependencies:
'@jest/core': 29.7.0 '@jest/core': 29.7.0
'@jest/test-result': 29.7.0 '@jest/test-result': 29.7.0
'@jest/types': 29.6.3 '@jest/types': 29.6.3
chalk: 4.1.2 chalk: 4.1.2
create-jest: 29.7.0(@types/node@20.12.7) create-jest: 29.7.0(@types/node@20.12.4)
exit: 0.1.2 exit: 0.1.2
import-local: 3.1.0 import-local: 3.1.0
jest-config: 29.7.0(@types/node@20.12.7) jest-config: 29.7.0(@types/node@20.12.4)
jest-util: 29.7.0 jest-util: 29.7.0
jest-validate: 29.7.0 jest-validate: 29.7.0
yargs: 17.7.2 yargs: 17.7.2
@ -2790,11 +2786,12 @@ snapshots:
- supports-color - supports-color
- ts-node - ts-node
jest-config@29.7.0(@types/node@20.12.7): jest-config@29.7.0(@types/node@20.12.4):
dependencies: dependencies:
'@babel/core': 7.23.9 '@babel/core': 7.23.9
'@jest/test-sequencer': 29.7.0 '@jest/test-sequencer': 29.7.0
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/node': 20.12.4
babel-jest: 29.7.0(@babel/core@7.23.9) babel-jest: 29.7.0(@babel/core@7.23.9)
chalk: 4.1.2 chalk: 4.1.2
ci-info: 3.9.0 ci-info: 3.9.0
@ -2814,8 +2811,6 @@ snapshots:
pretty-format: 29.7.0 pretty-format: 29.7.0
slash: 3.0.0 slash: 3.0.0
strip-json-comments: 3.1.1 strip-json-comments: 3.1.1
optionalDependencies:
'@types/node': 20.12.7
transitivePeerDependencies: transitivePeerDependencies:
- babel-plugin-macros - babel-plugin-macros
- supports-color - supports-color
@ -2844,7 +2839,7 @@ snapshots:
'@jest/environment': 29.7.0 '@jest/environment': 29.7.0
'@jest/fake-timers': 29.7.0 '@jest/fake-timers': 29.7.0
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/node': 20.12.7 '@types/node': 20.12.4
jest-mock: 29.7.0 jest-mock: 29.7.0
jest-util: 29.7.0 jest-util: 29.7.0
@ -2854,7 +2849,7 @@ snapshots:
dependencies: dependencies:
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/graceful-fs': 4.1.9 '@types/graceful-fs': 4.1.9
'@types/node': 20.12.7 '@types/node': 20.12.4
anymatch: 3.1.3 anymatch: 3.1.3
fb-watchman: 2.0.2 fb-watchman: 2.0.2
graceful-fs: 4.2.11 graceful-fs: 4.2.11
@ -2893,11 +2888,11 @@ snapshots:
jest-mock@29.7.0: jest-mock@29.7.0:
dependencies: dependencies:
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/node': 20.12.7 '@types/node': 20.12.4
jest-util: 29.7.0 jest-util: 29.7.0
jest-pnp-resolver@1.2.3(jest-resolve@29.7.0): jest-pnp-resolver@1.2.3(jest-resolve@29.7.0):
optionalDependencies: dependencies:
jest-resolve: 29.7.0 jest-resolve: 29.7.0
jest-regex-util@29.6.3: {} jest-regex-util@29.6.3: {}
@ -2928,7 +2923,7 @@ snapshots:
'@jest/test-result': 29.7.0 '@jest/test-result': 29.7.0
'@jest/transform': 29.7.0 '@jest/transform': 29.7.0
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/node': 20.12.7 '@types/node': 20.12.4
chalk: 4.1.2 chalk: 4.1.2
emittery: 0.13.1 emittery: 0.13.1
graceful-fs: 4.2.11 graceful-fs: 4.2.11
@ -2956,7 +2951,7 @@ snapshots:
'@jest/test-result': 29.7.0 '@jest/test-result': 29.7.0
'@jest/transform': 29.7.0 '@jest/transform': 29.7.0
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/node': 20.12.7 '@types/node': 20.12.4
chalk: 4.1.2 chalk: 4.1.2
cjs-module-lexer: 1.2.3 cjs-module-lexer: 1.2.3
collect-v8-coverage: 1.0.2 collect-v8-coverage: 1.0.2
@ -3002,7 +2997,7 @@ snapshots:
jest-util@29.7.0: jest-util@29.7.0:
dependencies: dependencies:
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/node': 20.12.7 '@types/node': 20.12.4
chalk: 4.1.2 chalk: 4.1.2
ci-info: 3.9.0 ci-info: 3.9.0
graceful-fs: 4.2.11 graceful-fs: 4.2.11
@ -3021,7 +3016,7 @@ snapshots:
dependencies: dependencies:
'@jest/test-result': 29.7.0 '@jest/test-result': 29.7.0
'@jest/types': 29.6.3 '@jest/types': 29.6.3
'@types/node': 20.12.7 '@types/node': 20.12.4
ansi-escapes: 4.3.2 ansi-escapes: 4.3.2
chalk: 4.1.2 chalk: 4.1.2
emittery: 0.13.1 emittery: 0.13.1
@ -3030,17 +3025,17 @@ snapshots:
jest-worker@29.7.0: jest-worker@29.7.0:
dependencies: dependencies:
'@types/node': 20.12.7 '@types/node': 20.12.4
jest-util: 29.7.0 jest-util: 29.7.0
merge-stream: 2.0.0 merge-stream: 2.0.0
supports-color: 8.1.1 supports-color: 8.1.1
jest@29.7.0(@types/node@20.12.7): jest@29.7.0(@types/node@20.12.4):
dependencies: dependencies:
'@jest/core': 29.7.0 '@jest/core': 29.7.0
'@jest/types': 29.6.3 '@jest/types': 29.6.3
import-local: 3.1.0 import-local: 3.1.0
jest-cli: 29.7.0(@types/node@20.12.7) jest-cli: 29.7.0(@types/node@20.12.4)
transitivePeerDependencies: transitivePeerDependencies:
- '@types/node' - '@types/node'
- babel-plugin-macros - babel-plugin-macros
@ -3362,26 +3357,23 @@ snapshots:
dependencies: dependencies:
is-number: 7.0.0 is-number: 7.0.0
ts-api-utils@1.3.0(typescript@5.4.5): ts-api-utils@1.3.0(typescript@5.4.4):
dependencies: dependencies:
typescript: 5.4.5 typescript: 5.4.4
ts-jest@29.1.2(@babel/core@7.23.9)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.23.9))(jest@29.7.0(@types/node@20.12.7))(typescript@5.4.5): ts-jest@29.1.2(@babel/core@7.23.9)(jest@29.7.0)(typescript@5.4.4):
dependencies: dependencies:
'@babel/core': 7.23.9
bs-logger: 0.2.6 bs-logger: 0.2.6
fast-json-stable-stringify: 2.1.0 fast-json-stable-stringify: 2.1.0
jest: 29.7.0(@types/node@20.12.7) jest: 29.7.0(@types/node@20.12.4)
jest-util: 29.7.0 jest-util: 29.7.0
json5: 2.2.3 json5: 2.2.3
lodash.memoize: 4.1.2 lodash.memoize: 4.1.2
make-error: 1.3.6 make-error: 1.3.6
semver: 7.5.4 semver: 7.5.4
typescript: 5.4.5 typescript: 5.4.4
yargs-parser: 21.1.1 yargs-parser: 21.1.1
optionalDependencies:
'@babel/core': 7.23.9
'@jest/types': 29.6.3
babel-jest: 29.7.0(@babel/core@7.23.9)
tslib@2.6.2: {} tslib@2.6.2: {}
@ -3395,7 +3387,7 @@ snapshots:
type-fest@0.21.3: {} type-fest@0.21.3: {}
typescript@5.4.5: {} typescript@5.4.4: {}
undici-types@5.26.5: {} undici-types@5.26.5: {}


@@ -29,7 +29,7 @@ pub async fn add_admin(
     .await?
     .ok_or(LemmyErrorType::ObjectNotLocal)?;
-  let added_admin = LocalUser::update(
+  LocalUser::update(
     &mut context.pool(),
     added_local_user.local_user.id,
     &LocalUserUpdateForm {
@@ -43,7 +43,7 @@ pub async fn add_admin(
   // Mod tables
   let form = ModAddForm {
     mod_person_id: local_user_view.person.id,
-    other_person_id: added_admin.person_id,
+    other_person_id: added_local_user.person.id,
     removed: Some(!data.added),
   };


@@ -141,11 +141,7 @@ pub async fn save_user_settings(
     ..Default::default()
   };
-  // Ignore errors, because 'no fields updated' will return an error.
-  // https://github.com/LemmyNet/lemmy/issues/4076
-  LocalUser::update(&mut context.pool(), local_user_id, &local_user_form)
-    .await
-    .ok();
+  LocalUser::update(&mut context.pool(), local_user_id, &local_user_form).await?;
   // Update the vote display modes
   let vote_display_modes_form = LocalUserVoteDisplayModeUpdateForm {


@@ -9,12 +9,10 @@ use lemmy_db_schema::{
   source::{
     email_verification::EmailVerification,
     local_user::{LocalUser, LocalUserUpdateForm},
-    person::Person,
   },
-  traits::Crud,
   RegistrationMode,
 };
-use lemmy_db_views::structs::SiteView;
+use lemmy_db_views::structs::{LocalUserView, SiteView};
 use lemmy_utils::error::{LemmyErrorType, LemmyResult};
 pub async fn verify_email(
@@ -38,7 +36,7 @@ pub async fn verify_email(
   };
   let local_user_id = verification.local_user_id;
-  let local_user = LocalUser::update(&mut context.pool(), local_user_id, &form).await?;
+  LocalUser::update(&mut context.pool(), local_user_id, &form).await?;
   EmailVerification::delete_old_tokens_for_local_user(&mut context.pool(), local_user_id).await?;
@@ -46,12 +44,16 @@ pub async fn verify_email(
   if site_view.local_site.registration_mode == RegistrationMode::RequireApplication
     && site_view.local_site.application_email_admins
   {
-    let person = Person::read(&mut context.pool(), local_user.person_id)
+    let local_user = LocalUserView::read(&mut context.pool(), local_user_id)
       .await?
       .ok_or(LemmyErrorType::CouldntFindPerson)?;
-    send_new_applicant_email_to_admins(&person.name, &mut context.pool(), context.settings())
-      .await?;
+    send_new_applicant_email_to_admins(
+      &local_user.person.name,
+      &mut context.pool(),
+      context.settings(),
+    )
+    .await?;
   }
   Ok(Json(SuccessResponse::default()))


@@ -40,7 +40,7 @@ pub struct Register {
   pub username: String,
   pub password: Sensitive<String>,
   pub password_verify: Sensitive<String>,
-  pub show_nsfw: bool,
+  pub show_nsfw: Option<bool>,
   /// email is mandatory if email verification is enabled on the server
   pub email: Option<Sensitive<String>>,
   /// The UUID of the captcha item.


@@ -142,12 +142,17 @@ pub async fn register(
     .map(|lang_str| lang_str.split('-').next().unwrap_or_default().to_string())
     .collect();
+  // Show nsfw content if param is true, or if content_warning exists
+  let show_nsfw = data
+    .show_nsfw
+    .unwrap_or(site_view.site.content_warning.is_some());
   // Create the local user
   let local_user_form = LocalUserInsertForm::builder()
     .person_id(inserted_person.id)
     .email(data.email.as_deref().map(str::to_lowercase))
     .password_encrypted(data.password.to_string())
-    .show_nsfw(Some(data.show_nsfw))
+    .show_nsfw(Some(show_nsfw))
     .accepted_application(accepted_application)
     .default_listing_type(Some(local_site.default_post_listing_type))
     .post_listing_mode(Some(local_site.default_post_listing_mode))
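A minimal plain-Rust sketch of the defaulting rule introduced above; effective_show_nsfw is a hypothetical helper, not part of the diff, and the Lemmy types are replaced by plain Options:

// Show NSFW content when the registering client asked for it, or when the
// site declares an instance-wide content warning.
fn effective_show_nsfw(requested: Option<bool>, content_warning: Option<&str>) -> bool {
  requested.unwrap_or(content_warning.is_some())
}

fn main() {
  assert!(effective_show_nsfw(Some(true), None));
  assert!(!effective_show_nsfw(None, None));
  assert!(effective_show_nsfw(None, Some("instance-wide content warning")));
  // An explicit opt-out still wins, even on a content-warning instance.
  assert!(!effective_show_nsfw(Some(false), Some("instance-wide content warning")));
}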


@ -0,0 +1,49 @@
{
"@context": ["https://www.w3.org/ns/activitystreams"],
"id": "https://pfefferle.org/lemmy-part-4/#activity#activity",
"type": "Announce",
"audience": "https://pfefferle.org/@pfefferle.org",
"published": "2024-05-03T12:32:29Z",
"updated": "2024-05-06T08:20:33Z",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
],
"cc": [],
"object": {
"id": "https://pfefferle.org/lemmy-part-4/#activity",
"type": "Update",
"audience": "https://pfefferle.org/@pfefferle.org",
"published": "2024-05-03T12:32:29Z",
"updated": "2024-05-06T08:20:33Z",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
],
"cc": [],
"object": {
"id": "https://pfefferle.org/lemmy-part-4/",
"type": "Article",
"attachment": [],
"attributedTo": "https://pfefferle.org/author/pfefferle/",
"audience": "https://pfefferle.org/@pfefferle.org",
"content": "\u003Cp\u003EIdentifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. \u003C/p\u003E",
"contentMap": {
"en": "\u003Cp\u003EIdentifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. \u003C/p\u003E"
},
"name": "Lemmy (Part 4)",
"published": "2024-05-03T12:32:29Z",
"summary": "Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant. Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object can considered to be relevant.Identifies one or more entities that represent the total population of entities for which the object [...]",
"tag": [],
"updated": "2024-05-06T08:20:33Z",
"url": "https://pfefferle.org/lemmy-part-4/",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/actors/1/followers"
],
"cc": []
},
"actor": "https://pfefferle.org/author/pfefferle/"
},
"actor": "https://pfefferle.org/@pfefferle.org"
}


@ -0,0 +1,66 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
"https://purl.archive.org/socialweb/webfinger",
{
"schema": "http://schema.org#",
"toot": "http://joinmastodon.org/ns#",
"webfinger": "https://webfinger.net/#",
"lemmy": "https://join-lemmy.org/ns#",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"PropertyValue": "schema:PropertyValue",
"value": "schema:value",
"Hashtag": "as:Hashtag",
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"featuredTags": {
"@id": "toot:featuredTags",
"@type": "@id"
},
"moderators": {
"@id": "lemmy:moderators",
"@type": "@id"
},
"postingRestrictedToMods": "lemmy:postingRestrictedToMods",
"discoverable": "toot:discoverable",
"indexable": "toot:indexable",
"resource": "webfinger:resource"
}
],
"id": "https://pfefferle.org/@pfefferle.org",
"type": "Group",
"attachment": [],
"attributedTo": "https://pfefferle.org/wp-json/activitypub/1.0/collections/moderators",
"name": "Matthias Pfefferle",
"icon": {
"type": "Image",
"url": "https://pfefferle.org/wp-content/uploads/2023/06/cropped-BeLItBV-_400x400.jpg"
},
"published": "2024-04-03T16:58:22Z",
"summary": "<p>Webworker, blogger und podcaster</p>\n",
"tag": [],
"url": "https://pfefferle.org/@pfefferle.org",
"inbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/inbox",
"outbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/outbox",
"following": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/following",
"followers": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/followers",
"preferredUsername": "pfefferle.org",
"endpoints": {
"sharedInbox": "https://pfefferle.org/wp-json/activitypub/1.0/inbox"
},
"publicKey": {
"id": "https://pfefferle.org/@pfefferle.org#main-key",
"owner": "https://pfefferle.org/@pfefferle.org",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuq8xeLMFcaCwPFBhgMRE\n/dDh2XKoNXFXnixctmK8BXSuuLMxucm3I/8NyhIvb3LqU+uP1fO8F0ecUbk2sN+x\nKag5vIV6yKXzJ8ILMWQ9AaELpXDmMZqL0zal0LUJRAOkDgPDovDAoq6tx++yDoV0\njdVbf9CoZKit1cz2ZrEuE5dswq3J/z9+c6POkhCkWEX5TPJzkOrmnjkvrXxGHUJ2\nA3+P+VaZhd5cmvqYosSpYNJshxCdev12pIF78OnYLiYiyXlgGHU+7uQR0M4tTcij\n6cUdLkms9m+b6H3ctXntPn410e5YLFPldjAYzQB5wHVdFZsWtyrbqfYdCa+KkKpA\nvwIDAQAB\n-----END PUBLIC KEY-----\n"
},
"manuallyApprovesFollowers": false,
"featured": "https://pfefferle.org/wp-json/activitypub/1.0/users/0/collections/featured",
"moderators": "https://pfefferle.org/wp-json/activitypub/1.0/collections/moderators",
"discoverable": true,
"indexable": true,
"webfinger": "pfefferle.org@pfefferle.org",
"postingRestrictedToMods": true
}


@ -0,0 +1,24 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
{
"Hashtag": "as:Hashtag"
}
],
"id": "https://pfefferle.org?c=148",
"type": "Note",
"attributedTo": "https://pfefferle.org/author/pfefferle/",
"content": "<p>Nice! Hello from WordPress!</p>",
"contentMap": {
"en": "<p>Nice! Hello from WordPress!</p>"
},
"inReplyTo": "https://socialhub.activitypub.rocks/ap/object/ce040f1ead95964f6dbbf1084b81432d",
"published": "2024-04-30T15:21:13Z",
"tag": [],
"url": "https://pfefferle.org?c=148",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/users/0/followers"
],
"cc": []
}


@ -0,0 +1,26 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
{
"Hashtag": "as:Hashtag"
}
],
"id": "https://pfefferle.org/this-is-a-test-federation/",
"type": "Article",
"attachment": [],
"attributedTo": "https://pfefferle.org/author/pfefferle/",
"content": "<p>with Discource!</p>",
"contentMap": {
"en": "<p>with Discource!</p>"
},
"name": "This is a test-federation",
"published": "2024-04-30T15:16:41Z",
"summary": "with Discource! [...]",
"tag": [],
"url": "https://pfefferle.org/this-is-a-test-federation/",
"to": [
"https://www.w3.org/ns/activitystreams#Public",
"https://pfefferle.org/wp-json/activitypub/1.0/users/1/followers"
],
"cc": []
}


@ -0,0 +1,74 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
"https://purl.archive.org/socialweb/webfinger",
{
"schema": "http://schema.org#",
"toot": "http://joinmastodon.org/ns#",
"webfinger": "https://webfinger.net/#",
"lemmy": "https://join-lemmy.org/ns#",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"PropertyValue": "schema:PropertyValue",
"value": "schema:value",
"Hashtag": "as:Hashtag",
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"featuredTags": {
"@id": "toot:featuredTags",
"@type": "@id"
},
"moderators": {
"@id": "lemmy:moderators",
"@type": "@id"
},
"postingRestrictedToMods": "lemmy:postingRestrictedToMods",
"discoverable": "toot:discoverable",
"indexable": "toot:indexable",
"resource": "webfinger:resource"
}
],
"id": "https://pfefferle.org/author/pfefferle/",
"type": "Person",
"attachment": [
{
"type": "PropertyValue",
"name": "Blog",
"value": "<a rel=\"me\" title=\"https://pfefferle.org/\" target=\"_blank\" href=\"https://pfefferle.org/\">pfefferle.org</a>"
},
{
"type": "PropertyValue",
"name": "Profile",
"value": "<a rel=\"me\" title=\"https://pfefferle.org/author/pfefferle/\" target=\"_blank\" href=\"https://pfefferle.org/author/pfefferle/\">pfefferle.org</a>"
}
],
"name": "Matthias Pfefferle",
"icon": {
"type": "Image",
"url": "https://secure.gravatar.com/avatar/a2bdca7870e859658cece96c044b3be5?s=120&#038;d=mm&#038;r=g"
},
"published": "2014-02-10T15:23:08Z",
"summary": "<p>Ich arbeite als Open Web Lead für Automattic.</p>\n",
"tag": [],
"url": "https://pfefferle.org/author/pfefferle/",
"inbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/inbox",
"outbox": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/outbox",
"following": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/following",
"followers": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/followers",
"preferredUsername": "matthias",
"endpoints": {
"sharedInbox": "https://pfefferle.org/wp-json/activitypub/1.0/inbox"
},
"publicKey": {
"id": "https://pfefferle.org/author/pfefferle/#main-key",
"owner": "https://pfefferle.org/author/pfefferle/",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvTA5RA40nOsso04RSwyX\nHXTojRPUMlIlArDcSy3M5GUJp9/xbxSUOdBjqd31KKB1GIi3vrLmD1Qi/ZqS95Qy\nw2Zd3xOsCg+o9bsyOG+O6Y8Lu+HEB5JKLUbNHdiSviakJ8wGadH9Wm4WIiN20y+q\n/u6lgxgiWfZ2CFCN6SOc28fUKi9NmKvXK+M12BhFfy1tC5KWXKDm0UbfI1+dmqhR\n3Ffe6vEsCI/YIVVdWxQ9kouOd0XSHOGdslktkepRO7IP9i9TdwyeCa0WWRoeO5Wa\ntVpc1Y0WuNbTM2ksIXTg0G+rO1/6KO/hrHnGu3RCfb/ZIHK5L/aWYb9B3PG3LyKV\n+wIDAQAB\n-----END PUBLIC KEY-----\n"
},
"manuallyApprovesFollowers": false,
"featured": "https://pfefferle.org/wp-json/activitypub/1.0/users/1/collections/featured",
"discoverable": true,
"indexable": true,
"webfinger": "matthias@pfefferle.org"
}


@@ -20,7 +20,8 @@ use lemmy_db_schema::{
 };
 use lemmy_utils::error::{LemmyErrorType, LemmyResult};
 use serde::{Deserialize, Serialize};
-use std::ops::Deref;
+use std::{ops::Deref, time::Duration};
+use tokio::time::timeout;
 use url::Url;
 mod comment;
@@ -30,13 +31,22 @@ mod post;
 pub mod routes;
 pub mod site;
+const INCOMING_ACTIVITY_TIMEOUT: Duration = Duration::from_secs(9);
 pub async fn shared_inbox(
   request: HttpRequest,
   body: Bytes,
   data: Data<LemmyContext>,
 ) -> LemmyResult<HttpResponse> {
-  receive_activity::<SharedInboxActivities, UserOrCommunity, LemmyContext>(request, body, &data)
+  let receive_fut =
+    receive_activity::<SharedInboxActivities, UserOrCommunity, LemmyContext>(request, body, &data);
+  // Set a timeout shorter than `REQWEST_TIMEOUT` for processing incoming activities. This is to
+  // avoid taking a long time to process an incoming activity when a required data fetch times out.
+  // In this case our own instance would timeout and be marked as dead by the sender. Better to
+  // consider the activity broken and move on.
+  timeout(INCOMING_ACTIVITY_TIMEOUT, receive_fut)
     .await
+    .map_err(|_| LemmyErrorType::InboxTimeout)?
 }
 /// Convert the data to json and turn it into an HTTP Response with the correct ActivityPub
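For reference, a minimal standalone sketch of the timeout pattern introduced above, assuming a plain binary with tokio's macros, rt-multi-thread and time features enabled; process_activity is a hypothetical stand-in for the real receive_activity future:

use std::time::Duration;
use tokio::time::timeout;

// Hypothetical stand-in for the incoming-activity handler.
async fn process_activity() -> Result<String, String> {
  Ok("activity handled".to_string())
}

#[tokio::main]
async fn main() -> Result<(), String> {
  // Same shape as the diff: cap the incoming-activity future at 9 seconds so a slow
  // upstream fetch cannot hold the inbox request open past the sender's own timeout.
  const INCOMING_ACTIVITY_TIMEOUT: Duration = Duration::from_secs(9);

  let receive_fut = process_activity();
  let handled = timeout(INCOMING_ACTIVITY_TIMEOUT, receive_fut)
    .await
    // Outer error: the timer elapsed (the diff maps this to LemmyErrorType::InboxTimeout).
    .map_err(|_| "inbox timeout".to_string())?;
  // Inner result: whatever the wrapped future itself returned.
  let _body = handled?;
  Ok(())
}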


@@ -100,7 +100,7 @@ impl Object for ApubSite {
       kind: ApplicationType::Application,
       id: self.id().into(),
       name: self.name.clone(),
-      preferred_username: data.domain().to_string(),
+      preferred_username: Some(data.domain().to_string()),
       content: self.sidebar.as_ref().map(|d| markdown_to_html(d)),
       source: self.sidebar.clone().map(Source::new),
       summary: self.description.clone(),


@@ -96,4 +96,10 @@ mod tests {
     test_json::<Report>("assets/mbin/activities/flag.json")?;
     Ok(())
   }
+
+  #[test]
+  fn test_parse_wordpress_activities() -> LemmyResult<()> {
+    test_json::<AnnounceActivity>("assets/wordpress/activities/announce.json")?;
+    Ok(())
+  }
 }


@@ -22,7 +22,7 @@ pub struct Instance {
   /// site name
   pub(crate) name: String,
   /// instance domain, necessary for mastodon authorized fetch
-  pub(crate) preferred_username: String,
+  pub(crate) preferred_username: Option<String>,
   pub(crate) inbox: Url,
   /// mandatory field in activitypub, lemmy currently serves an empty outbox
   pub(crate) outbox: Url,


@@ -206,4 +206,13 @@ mod tests {
     test_json::<Person>("assets/nodebb/objects/person.json")?;
     Ok(())
   }
+
+  #[test]
+  fn test_parse_object_wordpress() -> LemmyResult<()> {
+    test_json::<Group>("assets/wordpress/objects/group.json")?;
+    test_json::<Page>("assets/wordpress/objects/page.json")?;
+    test_json::<Person>("assets/wordpress/objects/person.json")?;
+    test_json::<Note>("assets/wordpress/objects/note.json")?;
+    Ok(())
+  }
 }


@@ -233,6 +233,10 @@ impl ActivityHandler for Page {
 #[async_trait::async_trait]
 impl InCommunity for Page {
   async fn community(&self, context: &Data<LemmyContext>) -> LemmyResult<ApubCommunity> {
+    if let Some(audience) = &self.audience {
+      return audience.dereference(context).await;
+    }
     let community = match &self.attributed_to {
       AttributedTo::Lemmy(_) => {
         let mut iter = self.to.iter().merge(self.cc.iter());
@@ -243,7 +247,7 @@ impl InCommunity for Page {
             break c;
           }
         } else {
-          Err(LemmyErrorType::NoCommunityFoundInCc)?
+          Err(LemmyErrorType::CouldntFindCommunity)?;
         }
       }
     }
@@ -251,11 +255,12 @@ impl InCommunity for Page {
         p.iter()
           .find(|a| a.kind == PersonOrGroupType::Group)
           .map(|a| ObjectId::<ApubCommunity>::from(a.id.clone().into_inner()))
-          .ok_or(LemmyErrorType::PageDoesNotSpecifyGroup)?
+          .ok_or(LemmyErrorType::CouldntFindCommunity)?
           .dereference(context)
           .await?
       }
     };
     if let Some(audience) = &self.audience {
       verify_community_matches(audience, community.actor_id.clone())?;
     }


@ -5,6 +5,12 @@
-- (even if only other columns are updated) because triggers can run after the deletion of referenced rows and -- (even if only other columns are updated) because triggers can run after the deletion of referenced rows and
-- before the automatic deletion of the row that references it. This is not a problem for insert or delete. -- before the automatic deletion of the row that references it. This is not a problem for insert or delete.
-- --
-- After a row update begins, a concurrent update on the same row can't begin until the whole
-- transaction that contains the first update is finished. To reduce this locking, statements in
-- triggers should be ordered based on the likelihood of concurrent writers. For example, updating
-- site_aggregates should be done last because the same row is updated for all local stuff. If
-- it were not last, then the locking period for concurrent writers would extend to include the
-- time consumed by statements that come after.
-- --
-- --
-- Create triggers for both post and comments -- Create triggers for both post and comments
@ -38,16 +44,18 @@ BEGIN
(thing_like).thing_id, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score = 1), 0) AS upvotes, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score != 1), 0) AS downvotes FROM select_old_and_new_rows AS old_and_new_rows GROUP BY (thing_like).thing_id) AS diff (thing_like).thing_id, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score = 1), 0) AS upvotes, coalesce(sum(count_diff) FILTER (WHERE (thing_like).score != 1), 0) AS downvotes FROM select_old_and_new_rows AS old_and_new_rows GROUP BY (thing_like).thing_id) AS diff
WHERE WHERE
a.thing_id = diff.thing_id a.thing_id = diff.thing_id
RETURNING AND (diff.upvotes, diff.downvotes) != (0, 0)
r.creator_id_from_thing_aggregates (a.*) AS creator_id, diff.upvotes - diff.downvotes AS score) RETURNING
UPDATE r.creator_id_from_thing_aggregates (a.*) AS creator_id, diff.upvotes - diff.downvotes AS score)
person_aggregates AS a UPDATE
SET person_aggregates AS a
thing_score = a.thing_score + diff.score FROM ( SET
SELECT thing_score = a.thing_score + diff.score FROM (
creator_id, sum(score) AS score FROM thing_diff GROUP BY creator_id) AS diff SELECT
WHERE creator_id, sum(score) AS score FROM thing_diff GROUP BY creator_id) AS diff
a.person_id = diff.creator_id; WHERE
a.person_id = diff.creator_id
AND diff.score != 0;
RETURN NULL; RETURN NULL;
END; END;
$$); $$);
@ -62,6 +70,21 @@ CALL r.post_or_comment ('post');
CALL r.post_or_comment ('comment'); CALL r.post_or_comment ('comment');
-- Create triggers that update counts in parent aggregates -- Create triggers that update counts in parent aggregates
CREATE FUNCTION r.parent_comment_ids (path ltree)
RETURNS SETOF int
LANGUAGE sql
IMMUTABLE parallel safe
BEGIN
ATOMIC
SELECT
comment_id::int
FROM
string_to_table (ltree2text (path), '.') AS comment_id
-- Skip first and last
LIMIT (nlevel (path) - 2) OFFSET 1;
END;
CALL r.create_triggers ('comment', $$ CALL r.create_triggers ('comment', $$
BEGIN BEGIN
UPDATE UPDATE
@ -76,60 +99,84 @@ BEGIN
r.is_counted (comment) r.is_counted (comment)
GROUP BY (comment).creator_id) AS diff GROUP BY (comment).creator_id) AS diff
WHERE WHERE
a.person_id = diff.creator_id; a.person_id = diff.creator_id
AND diff.comment_count != 0;
UPDATE UPDATE
site_aggregates AS a comment_aggregates AS a
SET SET
comments = a.comments + diff.comments child_count = a.child_count + diff.child_count
FROM ( FROM (
SELECT SELECT
coalesce(sum(count_diff), 0) AS comments parent_id,
FROM coalesce(sum(count_diff), 0) AS child_count
select_old_and_new_rows AS old_and_new_rows FROM (
WHERE -- For each inserted or deleted comment, this outputs 1 row for each parent comment.
r.is_counted (comment) -- For example, this:
AND (comment).local) AS diff; --
-- count_diff | (comment).path
-- ------------+----------------
-- 1 | 0.5.6.7
-- 1 | 0.5.6.7.8
--
-- becomes this:
--
-- count_diff | parent_id
-- ------------+-----------
-- 1 | 5
-- 1 | 6
-- 1 | 5
-- 1 | 6
-- 1 | 7
SELECT
count_diff,
parent_id
FROM
select_old_and_new_rows AS old_and_new_rows,
LATERAL r.parent_comment_ids ((comment).path) AS parent_id) AS expanded_old_and_new_rows
GROUP BY
parent_id) AS diff
WHERE
a.comment_id = diff.parent_id
AND diff.child_count != 0;
WITH post_diff AS ( WITH post_diff AS (
UPDATE UPDATE
post_aggregates AS a post_aggregates AS a
SET SET
comments = a.comments + diff.comments, comments = a.comments + diff.comments,
newest_comment_time = GREATEST (a.newest_comment_time, ( newest_comment_time = GREATEST (a.newest_comment_time, diff.newest_comment_time),
SELECT newest_comment_time_necro = GREATEST (a.newest_comment_time_necro, diff.newest_comment_time_necro)
published
FROM select_new_rows AS new_comment
WHERE
a.post_id = new_comment.post_id ORDER BY published DESC LIMIT 1)),
newest_comment_time_necro = GREATEST (a.newest_comment_time_necro, (
SELECT
published
FROM select_new_rows AS new_comment
WHERE
a.post_id = new_comment.post_id
-- Ignore comments from the post's creator
AND a.creator_id != new_comment.creator_id
-- Ignore comments on old posts
AND a.published > (new_comment.published - '2 days'::interval)
ORDER BY published DESC LIMIT 1))
FROM ( FROM (
SELECT SELECT
(comment).post_id, post.id AS post_id,
coalesce(sum(count_diff), 0) AS comments coalesce(sum(count_diff), 0) AS comments,
-- Old rows are excluded using `count_diff = 1`
max((comment).published) FILTER (WHERE count_diff = 1) AS newest_comment_time,
max((comment).published) FILTER (WHERE count_diff = 1
-- Ignore comments from the post's creator
AND post.creator_id != (comment).creator_id
-- Ignore comments on old posts
AND post.published > ((comment).published - '2 days'::interval)) AS newest_comment_time_necro,
r.is_counted (post.*) AS include_in_community_aggregates
FROM FROM
select_old_and_new_rows AS old_and_new_rows select_old_and_new_rows AS old_and_new_rows
LEFT JOIN post ON post.id = (comment).post_id
WHERE WHERE
r.is_counted (comment) r.is_counted (comment)
GROUP BY GROUP BY
(comment).post_id) AS diff post.id) AS diff
LEFT JOIN post ON post.id = diff.post_id
WHERE WHERE
a.post_id = diff.post_id a.post_id = diff.post_id
AND (diff.comments,
GREATEST (a.newest_comment_time, diff.newest_comment_time),
GREATEST (a.newest_comment_time_necro, diff.newest_comment_time_necro)) != (0,
a.newest_comment_time,
a.newest_comment_time_necro)
RETURNING RETURNING
a.community_id, a.community_id,
diff.comments, diff.comments,
r.is_counted (post.*) AS include_in_community_aggregates) diff.include_in_community_aggregates)
UPDATE UPDATE
community_aggregates AS a community_aggregates AS a
SET SET
@ -145,7 +192,23 @@ FROM (
GROUP BY GROUP BY
community_id) AS diff community_id) AS diff
WHERE WHERE
a.community_id = diff.community_id; a.community_id = diff.community_id
AND diff.comments != 0;
UPDATE
site_aggregates AS a
SET
comments = a.comments + diff.comments
FROM (
SELECT
coalesce(sum(count_diff), 0) AS comments
FROM
select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (comment)
AND (comment).local) AS diff
WHERE
diff.comments != 0;
RETURN NULL; RETURN NULL;
@ -167,20 +230,8 @@ BEGIN
r.is_counted (post) r.is_counted (post)
GROUP BY (post).creator_id) AS diff GROUP BY (post).creator_id) AS diff
WHERE WHERE
a.person_id = diff.creator_id; a.person_id = diff.creator_id
AND diff.post_count != 0;
UPDATE
site_aggregates AS a
SET
posts = a.posts + diff.posts
FROM (
SELECT
coalesce(sum(count_diff), 0) AS posts
FROM
select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (post)
AND (post).local) AS diff;
UPDATE UPDATE
community_aggregates AS a community_aggregates AS a
@ -197,7 +248,23 @@ FROM (
GROUP BY GROUP BY
(post).community_id) AS diff (post).community_id) AS diff
WHERE WHERE
a.community_id = diff.community_id; a.community_id = diff.community_id
AND diff.posts != 0;
UPDATE
site_aggregates AS a
SET
posts = a.posts + diff.posts
FROM (
SELECT
coalesce(sum(count_diff), 0) AS posts
FROM
select_old_and_new_rows AS old_and_new_rows
WHERE
r.is_counted (post)
AND (post).local) AS diff
WHERE
diff.posts != 0;
RETURN NULL; RETURN NULL;
@ -217,7 +284,9 @@ BEGIN
FROM select_old_and_new_rows AS old_and_new_rows FROM select_old_and_new_rows AS old_and_new_rows
WHERE WHERE
r.is_counted (community) r.is_counted (community)
AND (community).local) AS diff; AND (community).local) AS diff
WHERE
diff.communities != 0;
RETURN NULL; RETURN NULL;
@ -235,7 +304,9 @@ BEGIN
SELECT SELECT
coalesce(sum(count_diff), 0) AS users coalesce(sum(count_diff), 0) AS users
FROM select_old_and_new_rows AS old_and_new_rows FROM select_old_and_new_rows AS old_and_new_rows
WHERE (person).local) AS diff; WHERE (person).local) AS diff
WHERE
diff.users != 0;
RETURN NULL; RETURN NULL;
@ -270,7 +341,8 @@ BEGIN
GROUP BY GROUP BY
old_post.community_id) AS diff old_post.community_id) AS diff
WHERE WHERE
a.community_id = diff.community_id; a.community_id = diff.community_id
AND diff.comments != 0;
RETURN NULL; RETURN NULL;
END; END;
$$; $$;
@ -296,7 +368,8 @@ BEGIN
LEFT JOIN community ON community.id = (community_follower).community_id LEFT JOIN community ON community.id = (community_follower).community_id
LEFT JOIN person ON person.id = (community_follower).person_id GROUP BY (community_follower).community_id) AS diff LEFT JOIN person ON person.id = (community_follower).person_id GROUP BY (community_follower).community_id) AS diff
WHERE WHERE
a.community_id = diff.community_id; a.community_id = diff.community_id
AND (diff.subscribers, diff.subscribers_local) != (0, 0);
RETURN NULL; RETURN NULL;
@ -474,3 +547,24 @@ CREATE TRIGGER delete_follow
FOR EACH ROW FOR EACH ROW
EXECUTE FUNCTION r.delete_follow_before_person (); EXECUTE FUNCTION r.delete_follow_before_person ();
-- Triggers that change values before insert or update
CREATE FUNCTION r.comment_change_values ()
RETURNS TRIGGER
LANGUAGE plpgsql
AS $$
DECLARE
id text = NEW.id::text;
BEGIN
-- Make `path` end with `id` if it doesn't already
IF NOT (NEW.path ~ ('*.' || id)::lquery) THEN
NEW.path = NEW.path || id;
END IF;
RETURN NEW;
END
$$;
CREATE TRIGGER change_values
BEFORE INSERT OR UPDATE ON comment
FOR EACH ROW
EXECUTE FUNCTION r.comment_change_values ();
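The migration above adds r.parent_comment_ids, which expands a comment's ltree path into the ids of its ancestor comments (the worked example in the trigger comments shows paths 0.5.6.7 and 0.5.6.7.8 producing parents 5, 6 and 5, 6, 7). A minimal plain-Rust sketch of that expansion, with parent_comment_ids as a hypothetical helper outside the diff:

// Drop the leading "0" root and the comment's own id, keep the ids in between,
// which is what the SQL's `LIMIT (nlevel(path) - 2) OFFSET 1` does.
fn parent_comment_ids(path: &str) -> Vec<i32> {
  let ids: Vec<i32> = path
    .split('.')
    .filter_map(|segment| segment.parse().ok())
    .collect();
  if ids.len() <= 2 {
    // A top-level comment ("0.<id>") has no parent comments.
    return Vec::new();
  }
  ids[1..ids.len() - 1].to_vec()
}

fn main() {
  assert_eq!(parent_comment_ids("0.5.6.7"), vec![5, 6]);
  assert_eq!(parent_comment_ids("0.5.6.7.8"), vec![5, 6, 7]);
  assert_eq!(parent_comment_ids("0.9"), Vec::<i32>::new());
}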


@ -15,12 +15,7 @@ use crate::{
utils::{functions::coalesce, get_conn, naive_now, DbPool, DELETED_REPLACEMENT_TEXT}, utils::{functions::coalesce, get_conn, naive_now, DbPool, DELETED_REPLACEMENT_TEXT},
}; };
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use diesel::{ use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl};
dsl::{insert_into, sql_query},
result::Error,
ExpressionMethods,
QueryDsl,
};
use diesel_async::RunQueryDsl; use diesel_async::RunQueryDsl;
use diesel_ltree::Ltree; use diesel_ltree::Ltree;
use url::Url; use url::Url;
@ -72,81 +67,23 @@ impl Comment {
parent_path: Option<&Ltree>, parent_path: Option<&Ltree>,
) -> Result<Comment, Error> { ) -> Result<Comment, Error> {
let conn = &mut get_conn(pool).await?; let conn = &mut get_conn(pool).await?;
let comment_form = (comment_form, parent_path.map(|p| comment::path.eq(p)));
conn if let Some(timestamp) = timestamp {
.build_transaction() insert_into(comment::table)
.run(|conn| { .values(comment_form)
Box::pin(async move { .on_conflict(comment::ap_id)
// Insert, to get the id .filter_target(coalesce(comment::updated, comment::published).lt(timestamp))
let inserted_comment = if let Some(timestamp) = timestamp { .do_update()
insert_into(comment::table) .set(comment_form)
.values(comment_form) .get_result::<Self>(conn)
.on_conflict(comment::ap_id) .await
.filter_target(coalesce(comment::updated, comment::published).lt(timestamp)) } else {
.do_update() insert_into(comment::table)
.set(comment_form) .values(comment_form)
.get_result::<Self>(conn) .get_result::<Self>(conn)
.await? .await
} else { }
insert_into(comment::table)
.values(comment_form)
.get_result::<Self>(conn)
.await?
};
let comment_id = inserted_comment.id;
// You need to update the ltree column
let ltree = Ltree(if let Some(parent_path) = parent_path {
// The previous parent will already have 0 in it
// Append this comment id
format!("{}.{}", parent_path.0, comment_id)
} else {
// '0' is always the first path, append to that
format!("{}.{}", 0, comment_id)
});
let updated_comment = diesel::update(comment::table.find(comment_id))
.set(comment::path.eq(ltree))
.get_result::<Self>(conn)
.await?;
// Update the child count for the parent comment_aggregates
// You could do this with a trigger, but since you have to do this manually anyway,
// you can just have it here
if let Some(parent_path) = parent_path {
// You have to update counts for all parents, not just the immediate one
// TODO if the performance of this is terrible, it might be better to do this as part of a
// scheduled query... although the counts would often be wrong.
//
// The child_count query for reference:
// select c.id, c.path, count(c2.id) as child_count from comment c
// left join comment c2 on c2.path <@ c.path and c2.path != c.path
// group by c.id
let parent_id = parent_path.0.split('.').nth(1);
if let Some(parent_id) = parent_id {
let top_parent = format!("0.{}", parent_id);
let update_child_count_stmt = format!(
"
update comment_aggregates ca set child_count = c.child_count
from (
select c.id, c.path, count(c2.id) as child_count from comment c
join comment c2 on c2.path <@ c.path and c2.path != c.path
and c.path <@ '{top_parent}'
group by c.id
) as c
where ca.comment_id = c.id"
);
sql_query(update_child_count_stmt).execute(conn).await?;
}
}
Ok(updated_comment)
}) as _
})
.await
} }
pub async fn read_from_apub_id( pub async fn read_from_apub_id(


@@ -55,12 +55,17 @@ impl LocalUser {
     pool: &mut DbPool<'_>,
     local_user_id: LocalUserId,
     form: &LocalUserUpdateForm,
-  ) -> Result<LocalUser, Error> {
+  ) -> Result<usize, Error> {
     let conn = &mut get_conn(pool).await?;
-    diesel::update(local_user::table.find(local_user_id))
+    let res = diesel::update(local_user::table.find(local_user_id))
       .set(form)
-      .get_result::<Self>(conn)
-      .await
+      .execute(conn)
+      .await;
+    // Diesel will throw an error if the query is all Nones (not updating anything), ignore this.
+    match res {
+      Err(Error::QueryBuilderError(_)) => Ok(0),
+      other => other,
+    }
   }
   pub async fn delete(pool: &mut DbPool<'_>, id: LocalUserId) -> Result<usize, Error> {
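A minimal sketch of the error-swallowing rule above, using a hypothetical DbError stand-in for diesel::result::Error (the real match is on Error::QueryBuilderError, which diesel returns when the update form has no fields set):

#[derive(Debug, PartialEq)]
enum DbError {
  QueryBuilderError(String),
  Other(String),
}

// An "all None" update form produces a query-builder error, which is treated as
// "0 rows updated" instead of a failure; every other outcome passes through.
fn swallow_empty_update(res: Result<usize, DbError>) -> Result<usize, DbError> {
  match res {
    Err(DbError::QueryBuilderError(_)) => Ok(0),
    other => other,
  }
}

fn main() {
  assert_eq!(
    swallow_empty_update(Err(DbError::QueryBuilderError("no changes".into()))),
    Ok(0)
  );
  assert_eq!(swallow_empty_update(Ok(1)), Ok(1));
  assert!(swallow_empty_update(Err(DbError::Other("db down".into()))).is_err());
}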


@ -950,9 +950,8 @@ mod tests {
show_bot_accounts: Some(false), show_bot_accounts: Some(false),
..Default::default() ..Default::default()
}; };
let inserted_local_user = LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?; data.local_user_view.local_user.show_bot_accounts = false;
data.local_user_view.local_user = inserted_local_user;
let read_post_listing = PostQuery { let read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id), community_id: Some(data.inserted_community.id),
@ -986,9 +985,8 @@ mod tests {
show_bot_accounts: Some(true), show_bot_accounts: Some(true),
..Default::default() ..Default::default()
}; };
let inserted_local_user = LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?; data.local_user_view.local_user.show_bot_accounts = true;
data.local_user_view.local_user = inserted_local_user;
let post_listings_with_bots = PostQuery { let post_listings_with_bots = PostQuery {
community_id: Some(data.inserted_community.id), community_id: Some(data.inserted_community.id),
@ -1110,9 +1108,8 @@ mod tests {
show_bot_accounts: Some(false), show_bot_accounts: Some(false),
..Default::default() ..Default::default()
}; };
let inserted_local_user = LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?; data.local_user_view.local_user.show_bot_accounts = false;
data.local_user_view.local_user = inserted_local_user;
let read_post_listing = PostQuery { let read_post_listing = PostQuery {
community_id: Some(data.inserted_community.id), community_id: Some(data.inserted_community.id),
@@ -1533,9 +1530,8 @@ mod tests {
show_read_posts: Some(false), show_read_posts: Some(false),
..Default::default() ..Default::default()
}; };
let inserted_local_user = LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?;
LocalUser::update(pool, data.local_user_view.local_user.id, &local_user_form).await?; data.local_user_view.local_user.show_read_posts = false;
data.local_user_view.local_user = inserted_local_user;
// Mark a post as read // Mark a post as read
PostRead::mark_as_read( PostRead::mark_as_read(

@@ -43,21 +43,18 @@ async fn node_info(context: web::Data<LemmyContext>) -> Result<HttpResponse, Err
.map_err(|_| ErrorBadRequest(LemmyError::from(anyhow!("not_found"))))? .map_err(|_| ErrorBadRequest(LemmyError::from(anyhow!("not_found"))))?
.ok_or(ErrorBadRequest(LemmyError::from(anyhow!("not_found"))))?; .ok_or(ErrorBadRequest(LemmyError::from(anyhow!("not_found"))))?;
let protocols = if site_view.local_site.federation_enabled {
Some(vec!["activitypub".to_string()])
} else {
None
};
// Since there are 3 registration options, // Since there are 3 registration options,
// we need to set open_registrations as true if RegistrationMode is not Closed. // we need to set open_registrations as true if RegistrationMode is not Closed.
let open_registrations = Some(site_view.local_site.registration_mode != RegistrationMode::Closed); let open_registrations = Some(site_view.local_site.registration_mode != RegistrationMode::Closed);
let json = NodeInfo { let json = NodeInfo {
version: Some("2.0".to_string()), version: Some("2.1".to_string()),
software: Some(NodeInfoSoftware { software: Some(NodeInfoSoftware {
name: Some("lemmy".to_string()), name: Some("lemmy".to_string()),
version: Some(VERSION.to_string()), version: Some(VERSION.to_string()),
repository: Some("https://github.com/LemmyNet/lemmy".to_string()),
homepage: Some("https://join-lemmy.org/".to_string()),
}), }),
protocols, protocols: Some(vec!["activitypub".to_string()]),
usage: Some(NodeInfoUsage { usage: Some(NodeInfoUsage {
users: Some(NodeInfoUsers { users: Some(NodeInfoUsers {
total: Some(site_view.counts.users), total: Some(site_view.counts.users),
@@ -68,6 +65,11 @@ async fn node_info(context: web::Data<LemmyContext>) -> Result<HttpResponse, Err
local_comments: Some(site_view.counts.comments), local_comments: Some(site_view.counts.comments),
}), }),
open_registrations, open_registrations,
services: Some(NodeInfoServices {
inbound: Some(vec![]),
outbound: Some(vec![]),
}),
metadata: Some(vec![]),
}; };
Ok(HttpResponse::Ok().json(json)) Ok(HttpResponse::Ok().json(json))
@@ -84,6 +86,7 @@ struct NodeInfoWellKnownLinks {
pub href: Url, pub href: Url,
} }
/// Nodeinfo spec: http://nodeinfo.diaspora.software/docson/index.html#/ns/schema/2.1
#[derive(Serialize, Deserialize, Debug, Default)] #[derive(Serialize, Deserialize, Debug, Default)]
#[serde(rename_all = "camelCase", default)] #[serde(rename_all = "camelCase", default)]
pub struct NodeInfo { pub struct NodeInfo {
@@ -92,6 +95,9 @@ pub struct NodeInfo {
pub protocols: Option<Vec<String>>, pub protocols: Option<Vec<String>>,
pub usage: Option<NodeInfoUsage>, pub usage: Option<NodeInfoUsage>,
pub open_registrations: Option<bool>, pub open_registrations: Option<bool>,
/// These fields are required by the spec for no reason
pub services: Option<NodeInfoServices>,
pub metadata: Option<Vec<String>>,
} }
#[derive(Serialize, Deserialize, Debug, Default)] #[derive(Serialize, Deserialize, Debug, Default)]
@@ -99,6 +105,8 @@ pub struct NodeInfo {
pub struct NodeInfoSoftware { pub struct NodeInfoSoftware {
pub name: Option<String>, pub name: Option<String>,
pub version: Option<String>, pub version: Option<String>,
pub repository: Option<String>,
pub homepage: Option<String>,
} }
#[derive(Serialize, Deserialize, Debug, Default)] #[derive(Serialize, Deserialize, Debug, Default)]
@@ -116,3 +124,10 @@ pub struct NodeInfoUsers {
pub active_halfyear: Option<i64>, pub active_halfyear: Option<i64>,
pub active_month: Option<i64>, pub active_month: Option<i64>,
} }
#[derive(Serialize, Deserialize, Debug, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct NodeInfoServices {
pub inbound: Option<Vec<String>>,
pub outbound: Option<Vec<String>>,
}
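A small serialization check can make the new 2.1 fields concrete (illustrative only, not part of the diff; it assumes serde_json, which the crate already uses elsewhere): with rename_all = "camelCase" the registration flag is emitted as openRegistrations, and the services member, while carrying no information Lemmy cares about, serializes as two empty arrays so 2.1 validators accept the document.

    // Illustrative: the empty services block that the 2.1 schema insists on.
    let services = NodeInfoServices {
        inbound: Some(vec![]),
        outbound: Some(vec![]),
    };
    assert_eq!(
        serde_json::to_string(&services).unwrap(),
        r#"{"inbound":[],"outbound":[]}"#
    );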

@@ -99,8 +99,6 @@ pub enum LemmyErrorType {
PersonIsBannedFromSite(String), PersonIsBannedFromSite(String),
InvalidVoteValue, InvalidVoteValue,
PageDoesNotSpecifyCreator, PageDoesNotSpecifyCreator,
PageDoesNotSpecifyGroup,
NoCommunityFoundInCc,
NoEmailSetup, NoEmailSetup,
LocalSiteNotSetup, LocalSiteNotSetup,
EmailSmtpServerNeedsAPort, EmailSmtpServerNeedsAPort,
@@ -177,6 +175,7 @@ pub enum LemmyErrorType {
InvalidBotAction, InvalidBotAction,
CantBlockLocalInstance, CantBlockLocalInstance,
UrlWithoutDomain, UrlWithoutDomain,
InboxTimeout,
Unknown(String), Unknown(String),
} }

@@ -1 +1 @@
Subproject commit a4681f70a4ddf077951ed2dcc8cf90bb243d4828 Subproject commit f0ab81deea347c433277a90ae752b10f68473719

@@ -0,0 +1,3 @@
SELECT
1;

@@ -0,0 +1,4 @@
-- This migration exists to trigger re-execution of replaceable_schema
SELECT
1;

@@ -4,39 +4,39 @@ set -e
echo "Do not stop in the middle of this upgrade, wait until you see the message: Upgrade complete." echo "Do not stop in the middle of this upgrade, wait until you see the message: Upgrade complete."
echo "Stopping lemmy and all services..." echo "Stopping lemmy and all services..."
sudo docker-compose stop sudo docker compose stop
echo "Make sure postgres is started..." echo "Make sure postgres is started..."
sudo docker-compose up -d postgres sudo docker compose up -d postgres
echo "Waiting..." echo "Waiting..."
sleep 20s sleep 20s
echo "Exporting the Database to 15_16.dump.sql ..." echo "Exporting the Database to 15_16.dump.sql ..."
sudo docker-compose exec -T postgres pg_dumpall -c -U lemmy > 15_16_dump.sql sudo docker compose exec -T postgres pg_dumpall -c -U lemmy | sudo tee 15_16_dump.sql > /dev/null
echo "Done." echo "Done."
echo "Stopping postgres..." echo "Stopping postgres..."
sudo docker-compose stop postgres sudo docker compose stop postgres
echo "Waiting..." echo "Waiting..."
sleep 20s sleep 20s
echo "Removing the old postgres folder" echo "Removing the old postgres folder"
sudo rm -rf volumes/postgres sudo rm -rf volumes/postgres
echo "Updating docker-compose to use postgres version 16." echo "Updating docker compose to use postgres version 16."
sed -i "s/image: postgres:.*/image: postgres:16-alpine/" ./docker-compose.yml sudo sed -i "s/image: .*postgres:.*/image: docker.io/postgres:16-alpine/" ./docker-compose.yml
echo "Starting up new postgres..." echo "Starting up new postgres..."
sudo docker-compose up -d postgres sudo docker compose up -d postgres
echo "Waiting..." echo "Waiting..."
sleep 20s sleep 20s
echo "Importing the database...." echo "Importing the database...."
cat 15_16_dump.sql | sudo docker-compose exec -T postgres psql -U lemmy sudo cat 15_16_dump.sql | sudo docker compose exec -T postgres psql -U lemmy
echo "Done." echo "Done."
echo "Starting up lemmy..." echo "Starting up lemmy..."
sudo docker-compose up -d sudo docker compose up -d
echo "A copy of your old database is at 15_16.dump.sql . You can delete this file if the upgrade went smoothly." echo "A copy of your old database is at 15_16.dump.sql . You can delete this file if the upgrade went smoothly."
echo "Upgrade complete." echo "Upgrade complete."

@@ -262,12 +262,22 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimitCell) {
// User // User
.service( .service(
// Account action, I don't like that it's in /user maybe /accounts // Account action, I don't like that it's in /user maybe /accounts
// Handle /user/register separately to add the register() rate limitter // Handle /user/register separately to add the register() rate limiter
web::resource("/user/register") web::resource("/user/register")
.guard(guard::Post()) .guard(guard::Post())
.wrap(rate_limit.register()) .wrap(rate_limit.register())
.route(web::post().to(register)), .route(web::post().to(register)),
) )
// User
.service(
// Handle /user/login separately to add the register() rate limiter
// TODO: pretty annoying way to apply rate limits for register and login, we should
// group them under a common path so that rate limit is only applied once (eg under /account).
web::resource("/user/login")
.guard(guard::Post())
.wrap(rate_limit.register())
.route(web::post().to(login)),
)
.service( .service(
// Handle captcha separately // Handle captcha separately
web::resource("/user/get_captcha") web::resource("/user/get_captcha")
@@ -306,7 +316,6 @@ pub fn config(cfg: &mut web::ServiceConfig, rate_limit: &RateLimitCell) {
.route("/banned", web::get().to(list_banned_users)) .route("/banned", web::get().to(list_banned_users))
.route("/block", web::post().to(block_person)) .route("/block", web::post().to(block_person))
// TODO Account actions. I don't like that they're in /user maybe /accounts // TODO Account actions. I don't like that they're in /user maybe /accounts
.route("/login", web::post().to(login))
.route("/logout", web::post().to(logout)) .route("/logout", web::post().to(logout))
.route("/delete_account", web::post().to(delete_account)) .route("/delete_account", web::post().to(delete_account))
.route("/password_reset", web::post().to(reset_password)) .route("/password_reset", web::post().to(reset_password))

@@ -160,10 +160,10 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
rate_limit_cell.clone(), rate_limit_cell.clone(),
); );
if !args.disable_scheduled_tasks { let scheduled_tasks = (!args.disable_scheduled_tasks).then(|| {
// Schedules various cleanup tasks for the DB // Schedules various cleanup tasks for the DB
let _scheduled_tasks = tokio::task::spawn(scheduled_tasks::setup(context.clone())); tokio::task::spawn(scheduled_tasks::setup(context.clone()))
} });
if let Some(prometheus) = SETTINGS.prometheus.clone() { if let Some(prometheus) = SETTINGS.prometheus.clone() {
serve_prometheus(prometheus, context.clone())?; serve_prometheus(prometheus, context.clone())?;
@@ -218,7 +218,7 @@ pub async fn start_lemmy_server(args: CmdArgs) -> LemmyResult<()> {
let mut interrupt = tokio::signal::unix::signal(SignalKind::interrupt())?; let mut interrupt = tokio::signal::unix::signal(SignalKind::interrupt())?;
let mut terminate = tokio::signal::unix::signal(SignalKind::terminate())?; let mut terminate = tokio::signal::unix::signal(SignalKind::terminate())?;
if server.is_some() || federate.is_some() { if server.is_some() || federate.is_some() || scheduled_tasks.is_some() {
tokio::select! { tokio::select! {
_ = tokio::signal::ctrl_c() => { _ = tokio::signal::ctrl_c() => {
tracing::warn!("Received ctrl-c, shutting down gracefully..."); tracing::warn!("Received ctrl-c, shutting down gracefully...");