Merge branch 'develop' into stable

Floatingghost 2025-01-05 16:23:34 +00:00
commit bdffb55013
25 changed files with 392 additions and 103 deletions

View file

@@ -4,6 +4,27 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## 2025.01
## Added
- New config option `:instance, :cleanup_attachments_delay`
- It is now possible to display custom source URLs in akkoma-fe; the settings are part of the frontend configuration (see the sketch below)
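The custom source URL settings live in the akkoma-fe frontend configuration; a minimal sketch of what that could look like (the commit URL prefixes below are placeholders, not shipped defaults):

```elixir
config :pleroma, :frontend_configurations,
  pleroma_fe: %{
    # URL prefixes prepended to the commit hashes shown by akkoma-fe
    backendCommitUrl: "https://akkoma.dev/AkkomaGang/akkoma/commit/",
    frontendCommitUrl: "https://akkoma.dev/AkkomaGang/akkoma-fe/commit/"
  }
```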
## Fixed
- Media proxy no longer attempts to proxy embedded images
- Fix significant unnecessary overhead of attachment cleanup; it no longer attempts to clean up attachments of deleted remote posts
- Fix “Delete & Redraft” often losing attachments if attachment cleanup was enabled
- ObjectAge policy no longer lets unlisted posts slip through
- ObjectAge policy no longer leaks belated DMs and follower-only posts
- The NodeInfo endpoint now uses the correct content type
## Changed
- Anonymous objects now federate completely without an id, adopting a proposed AP spec errata and restoring federation with e.g. IceShrimp.NET and fedify-based implementations
## 3.13.3
## BREAKING

View file

@@ -255,6 +255,7 @@ config :pleroma, :instance,
  external_user_synchronization: true,
  extended_nickname_format: true,
  cleanup_attachments: false,
+ cleanup_attachments_delay: 1800,
  multi_factor_authentication: [
    totp: [
      # digits 6 or 8
@@ -302,6 +303,7 @@ config :pleroma, :markup,
  allow_headings: false,
  allow_tables: false,
  allow_fonts: false,
+ allow_math: true,
  scrub_policy: [
    Pleroma.HTML.Scrubber.Default,
    Pleroma.HTML.Transform.MediaProxy

View file

@@ -1184,7 +1184,7 @@ config :pleroma, :config_description, [
        logoMask: true,
        minimalScopesMode: false,
        noAttachmentLinks: false,
-       nsfwCensorImage: "/static/img/nsfw.74818f9.png",
+       nsfwCensorImage: "",
        postContentType: "text/plain",
        redirectRootLogin: "/main/friends",
        redirectRootNoLogin: "/main/all",
@@ -1194,7 +1194,9 @@
        showInstanceSpecificPanel: false,
        subjectLineBehavior: "email",
        theme: "pleroma-dark",
-       webPushNotifications: false
+       webPushNotifications: false,
+       backendCommitUrl: "",
+       frontendCommitUrl: ""
      }
    ],
    children: [
@@ -1285,7 +1287,7 @@
        type: {:string, :image},
        description:
          "URL of the image to use for hiding NSFW media attachments in the timeline",
-       suggestions: ["/static/img/nsfw.74818f9.png"]
+       suggestions: [""]
      },
      %{
        key: :postContentType,
@@ -1398,6 +1400,18 @@
        label: "Stop Gifs",
        type: :boolean,
        description: "Whether to pause animated images until they're hovered on"
+     },
+     %{
+       key: :backendCommitUrl,
+       label: "Backend Commit URL",
+       type: :string,
+       description: "URL prefix for backend commit hashes"
+     },
+     %{
+       key: :frontendCommitUrl,
+       label: "Frontend Commit URL",
+       type: :string,
+       description: "URL prefix for frontend commit hashes"
      }
    ]
  },

View file

@@ -58,6 +58,7 @@ To add configuration to your config file, you can copy it from the base config.
* `registration_reason_length`: Maximum registration reason length (default: `500`).
* `external_user_synchronization`: Enabling following/followers counters synchronization for external users.
* `cleanup_attachments`: Remove attachments along with statuses. Does not affect duplicate files and attachments without status. Enabling this will increase load to database when deleting statuses on larger instances.
+* `cleanup_attachments_delay`: How many seconds to wait after post deletion before attempting deletion of attachments; useful for “delete & redraft” functionality (default: `1800`); see the example below.
* `show_reactions`: Let favourites and emoji reactions be viewed through the API (default: `true`).
* `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day).
* `local_bubble`: Array of domains representing instances closely related to yours. Used to populate the `bubble` timeline. e.g `["example.com"]`, (default: `[]`)
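A minimal sketch of the attachment cleanup settings in your config file (`cleanup_attachments` defaults to `false` and must be enabled for the delay to have any effect; `1800` is the default delay):

```elixir
# Remove attachments of deleted local posts, but wait 30 minutes (1800 s)
# so "Delete & Redraft" can still re-use the files.
config :pleroma, :instance,
  cleanup_attachments: true,
  cleanup_attachments_delay: 1800
```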

View file

@@ -35,32 +35,24 @@ sudo useradd -r -s /bin/false -m -d /var/lib/akkoma -U akkoma
### Install Elixir and Erlang
+#### Using `apt`
If your distribution packages a recent enough version of Elixir, you can install it directly from the distro repositories and skip to the next section of the guide:
```shell
sudo apt install elixir erlang-dev erlang-nox
```
-Otherwise use [asdf](https://github.com/asdf-vm/asdf) to install the latest versions of Elixir and Erlang.
+#### Using `asdf`
+If your distribution does not have a recent version of Elixir in its repositories, you can use [asdf](https://asdf-vm.com/) to install a newer version of Elixir and Erlang.
First, install some dependencies needed to build Elixir and Erlang:
```shell
sudo apt install curl unzip build-essential autoconf m4 libncurses5-dev libssh-dev unixodbc-dev xsltproc libxml2-utils libncurses-dev
```
-Then login to the `akkoma` user and install asdf:
-```shell
-git clone https://github.com/asdf-vm/asdf.git ~/.asdf --branch v0.11.3
-```
-Add the following lines to `~/.bashrc`:
-```shell
-. "$HOME/.asdf/asdf.sh"
-# asdf completions
-. "$HOME/.asdf/completions/asdf.bash"
-```
-Restart the shell:
+Then login to the `akkoma` user.
+Install asdf by following steps 1 to 3 on [their website](https://asdf-vm.com/guide/getting-started.html), then restart the shell to load asdf:
```shell
exec $SHELL
```
@@ -69,15 +61,15 @@ Next install Erlang:
```shell
asdf plugin add erlang https://github.com/asdf-vm/asdf-erlang.git
export KERL_CONFIGURE_OPTIONS="--disable-debug --without-javac"
-asdf install erlang 25.3.2.5
-asdf global erlang 25.3.2.5
+asdf install erlang 26.2.5.4
+asdf global erlang 26.2.5.4
```
Now install Elixir:
```shell
asdf plugin-add elixir https://github.com/asdf-vm/asdf-elixir.git
-asdf install elixir 1.15.4-otp-25
-asdf global elixir 1.15.4-otp-25
+asdf install elixir 1.17.3-otp-26
+asdf global elixir 1.17.3-otp-26
```
Confirm that Elixir is installed correctly by checking the version:

View file

@@ -1,8 +1,8 @@
## Required dependencies
* PostgreSQL 12+
-* Elixir 1.14+ (currently tested up to 1.16)
-* Erlang OTP 25+ (currently tested up to OTP26)
+* Elixir 1.14+ (currently tested up to 1.17)
+* Erlang OTP 25+ (currently tested up to OTP27)
* git
* file / libmagic
* gcc (clang might also work)

View file

@@ -11,11 +11,13 @@
#
daemon="/usr/local/bin/elixir"
-daemon_flags="--detached -S /usr/local/bin/mix phx.server"
+daemon_flags="-S /usr/local/bin/mix phx.server"
daemon_user="_akkoma"
+daemon_execdir="/home/_akkoma/akkoma"
. /etc/rc.d/rc.subr
+rc_bg="YES"
rc_reload=NO
pexp="phx.server"
@@ -24,7 +26,7 @@ rc_check() {
}
rc_start() {
-	${rcexec} "cd akkoma; ${daemon} ${daemon_flags}"
+	rc_exec "${daemon} ${daemon_flags}"
}
rc_stop() {
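Assuming the updated script is installed as `/etc/rc.d/akkoma` (the path is an assumption, not part of the diff), the service is managed with the usual OpenBSD tooling:

```shell
rcctl enable akkoma
rcctl start akkoma
```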

View file

@@ -343,10 +343,16 @@ defmodule Mix.Tasks.Pleroma.Database do
    %{:num_rows => del_hashtags} =
      """
-     DELETE FROM hashtags AS ht
-     WHERE NOT EXISTS (
-       SELECT 1 FROM hashtags_objects hto
-       WHERE ht.id = hto.hashtag_id)
+     DELETE FROM hashtags
+     USING hashtags AS ht
+     LEFT JOIN hashtags_objects hto
+       ON ht.id = hto.hashtag_id
+     LEFT JOIN user_follows_hashtag ufht
+       ON ht.id = ufht.hashtag_id
+     WHERE
+       hashtags.id = ht.id
+       AND hto.hashtag_id is NULL
+       AND ufht.hashtag_id is NULL
      """
      |> Repo.query!()

View file

@@ -84,8 +84,14 @@ defmodule Pleroma.Emails.Mailer do
      cacerts: os_cacerts,
      versions: [:"tlsv1.2", :"tlsv1.3"],
      verify: :verify_peer,
-     # some versions have supposedly issues verifying wildcard certs without this
      server_name_indication: relay,
+     # This allows wildcard certificates to be verified properly.
+     # The :https parameter simply means to use the HTTPS wildcard format
+     # (as opposed to, say, LDAP). SMTP servers tend to use the same type of
+     # certs as HTTPS ones so this should work for most.
+     customize_hostname_check: [
+       match_fun: :public_key.pkix_verify_hostname_match_fun(:https)
+     ],
      # the default of 10 is too restrictive
      depth: 32
    ]
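For context, these TLS options are applied to SMTP delivery configured roughly as follows; a minimal sketch with a placeholder relay and credentials (option names follow the Swoosh SMTP adapter):

```elixir
config :pleroma, Pleroma.Emails.Mailer,
  enabled: true,
  adapter: Swoosh.Adapters.SMTP,
  # the relay hostname is also what server_name_indication and the
  # wildcard-aware hostname check above verify against
  relay: "smtp.example.com",
  username: "akkoma@example.com",
  password: "change-me",
  port: 587,
  auth: :always,
  tls: :always
```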

View file

@@ -9,7 +9,6 @@ defmodule Pleroma.Object do
  import Ecto.Changeset
  alias Pleroma.Activity
- alias Pleroma.Config
  alias Pleroma.Hashtag
  alias Pleroma.Object
  alias Pleroma.Object.Fetcher
@@ -241,23 +240,11 @@
    with {:ok, _obj} = swap_object_with_tombstone(object),
         deleted_activity = Activity.delete_all_by_object_ap_id(id),
         {:ok, _} <- invalid_object_cache(object) do
-     cleanup_attachments(
-       Config.get([:instance, :cleanup_attachments]),
-       %{object: object}
-     )
+     AttachmentsCleanupWorker.enqueue_if_needed(object.data)
      {:ok, object, deleted_activity}
    end
  end
- @spec cleanup_attachments(boolean(), %{required(:object) => map()}) ::
-         {:ok, Oban.Job.t() | nil}
- def cleanup_attachments(true, %{object: _} = params) do
-   AttachmentsCleanupWorker.enqueue("cleanup_attachments", params)
- end
- def cleanup_attachments(_, _), do: {:ok, nil}
  def prune(%Object{data: %{"id" => _id}} = object) do
    with {:ok, object} <- Repo.delete(object),
         {:ok, _} <- invalid_object_cache(object) do

View file

@@ -95,21 +95,29 @@ defmodule Pleroma.Search.DatabaseSearch do
    )
  end
- def maybe_restrict_local(q, user) do
+ def should_restrict_local(user) do
    limit = Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
    case {limit, user} do
-     {:all, _} -> restrict_local(q)
-     {:unauthenticated, %User{}} -> q
-     {:unauthenticated, _} -> restrict_local(q)
-     {false, _} -> q
+     {:all, _} -> true
+     {:unauthenticated, %User{}} -> false
+     {:unauthenticated, _} -> true
+     {false, _} -> false
    end
  end
+ def maybe_restrict_local(q, user) do
+   case should_restrict_local(user) do
+     true -> restrict_local(q)
+     false -> q
+   end
+ end
  defp restrict_local(q), do: where(q, local: true)
  def maybe_fetch(activities, user, search_query) do
-   with true <- Regex.match?(~r/https?:/, search_query),
+   with false <- should_restrict_local(user),
+        true <- Regex.match?(~r/https?:/, search_query),
         {:ok, object} <- Fetcher.fetch_object_from_id(search_query),
         %Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
         true <- Visibility.visible_for_user?(activity, user) do
View file

@@ -194,31 +194,24 @@ defmodule Pleroma.User.SigningKey do
  """
  def fetch_remote_key(key_id) do
    Logger.debug("Fetching remote key: #{key_id}")
-   resp = Pleroma.Object.Fetcher.fetch_and_contain_remote_object_from_id(key_id)
-   case resp do
-     {:ok, _body} ->
-       case handle_signature_response(resp) do
-         {:ok, ap_id, public_key_pem} ->
-           Logger.debug("Fetched remote key: #{ap_id}")
-           # fetch the user
-           {:ok, user} = User.get_or_fetch_by_ap_id(ap_id)
-           # store the key
-           key = %__MODULE__{
-             user_id: user.id,
-             public_key: public_key_pem,
-             key_id: key_id
-           }
-           Repo.insert(key, on_conflict: :replace_all, conflict_target: :key_id)
-         e ->
-           Logger.debug("Failed to fetch remote key: #{inspect(e)}")
-           {:error, "Could not fetch key"}
-       end
-     _ ->
-       Logger.debug("Failed to fetch remote key: #{inspect(resp)}")
-       {:error, "Could not fetch key"}
+   with {:ok, _body} = resp <-
+          Pleroma.Object.Fetcher.fetch_and_contain_remote_object_from_id(key_id),
+        {:ok, ap_id, public_key_pem} <- handle_signature_response(resp) do
+     Logger.debug("Fetched remote key: #{ap_id}")
+     # fetch the user
+     {:ok, user} = User.get_or_fetch_by_ap_id(ap_id)
+     # store the key
+     key = %__MODULE__{
+       user_id: user.id,
+       public_key: public_key_pem,
+       key_id: key_id
+     }
+     Repo.insert(key, on_conflict: :replace_all, conflict_target: :key_id)
+   else
+     e ->
+       Logger.debug("Failed to fetch remote key: #{inspect(e)}")
+       {:error, "Could not fetch key"}
    end
  end

View file

@@ -34,16 +34,34 @@ defmodule Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy do
    end
  end
+ @spec delete_and_count(list(), term()) :: {integer(), list()}
+ defp delete_and_count(list, element), do: delete_and_count(list, element, {0, [], list})
+ defp delete_and_count([], _element, {0, _nlist, olist}), do: {0, olist}
+ defp delete_and_count([], _element, {count, nlist, _olist}), do: {count, Enum.reverse(nlist)}
+ defp delete_and_count([h | r], h, {count, nlist, olist}),
+   do: delete_and_count(r, h, {count + 1, nlist, olist})
+ defp delete_and_count([h | r], element, {count, nlist, olist}),
+   do: delete_and_count(r, element, {count, [h | nlist], olist})
+ defp insert_if_needed(list, oldcount, element) do
+   if oldcount <= 0 || Enum.member?(list, element) do
+     list
+   else
+     [element | list]
+   end
+ end
  defp check_delist(message, actions) do
    if :delist in actions do
      with %User{} = user <- User.get_cached_by_ap_id(message["actor"]) do
-       to =
-         List.delete(message["to"] || [], Pleroma.Constants.as_public()) ++
-           [user.follower_address]
-       cc =
-         List.delete(message["cc"] || [], user.follower_address) ++
-           [Pleroma.Constants.as_public()]
+       {pubcnt, to} = delete_and_count(message["to"] || [], Pleroma.Constants.as_public())
+       {flwcnt, cc} = delete_and_count(message["cc"] || [], user.follower_address)
+       cc = insert_if_needed(cc, pubcnt, Pleroma.Constants.as_public())
+       to = insert_if_needed(to, flwcnt, user.follower_address)
        message =
          message
@@ -65,8 +83,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy do
  defp check_strip_followers(message, actions) do
    if :strip_followers in actions do
      with %User{} = user <- User.get_cached_by_ap_id(message["actor"]) do
-       to = List.delete(message["to"] || [], user.follower_address)
-       cc = List.delete(message["cc"] || [], user.follower_address)
+       {_, to} = delete_and_count(message["to"] || [], user.follower_address)
+       {_, cc} = delete_and_count(message["cc"] || [], user.follower_address)
        message =
          message
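A quick sketch of how the new `delete_and_count/2` helper behaves; it is private to the policy, so the calls below are illustrative only and the addresses are example values:

```elixir
# removes every occurrence of the element and reports how many were removed
delete_and_count(
  ["https://www.w3.org/ns/activitystreams#Public", "https://a.example/users/b/followers"],
  "https://www.w3.org/ns/activitystreams#Public"
)
#=> {1, ["https://a.example/users/b/followers"]}

# nothing to remove: the count is 0 and the original list comes back unchanged
delete_and_count(["https://a.example/users/b/followers"], "https://www.w3.org/ns/activitystreams#Public")
#=> {0, ["https://a.example/users/b/followers"]}
```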

View file

@@ -950,8 +950,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
      "icon" => %{"url" => "#{URI.encode(url)}", "type" => "Image"},
      "name" => ":" <> name <> ":",
      "type" => "Emoji",
-     "updated" => "1970-01-01T00:00:00Z",
-     "id" => nil
+     "updated" => "1970-01-01T00:00:00Z"
    }
  end

View file

@@ -52,11 +52,11 @@ defmodule Pleroma.Web.MediaProxy do
  @spec url_proxiable?(String.t()) :: boolean()
  def url_proxiable?(url) do
-   not local?(url) and not whitelisted?(url) and not blocked?(url)
+   not local?(url) and not whitelisted?(url) and not blocked?(url) and http_scheme?(url)
  end
  def preview_url(url, preview_params \\ []) do
-   if preview_enabled?() do
+   if preview_enabled?() and url_proxiable?(url) do
      encode_preview_url(url, preview_params)
    else
      url(url)
@@ -71,6 +71,8 @@
  def local?(url), do: String.starts_with?(url, Endpoint.url())
+ def http_scheme?(url), do: String.starts_with?(url, ["http:", "https:"])
  def whitelisted?(url) do
    %{host: domain} = URI.parse(url)
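In effect the new `http_scheme?/1` guard keeps non-HTTP(S) sources, such as inline `data:` URIs, away from the proxy; an illustrative sketch (example URLs):

```elixir
Pleroma.Web.MediaProxy.url_proxiable?("data:image/png;base64,iVBORw0KGgo=")
#=> false (no http(s) scheme, so MediaProxy.url/1 returns the URL unchanged)

Pleroma.Web.MediaProxy.url_proxiable?("https://remote.example/media/image.png")
#=> true, as long as the host is not local, whitelisted or blocked
```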

View file

@@ -31,7 +31,7 @@ defmodule Pleroma.Web.Nodeinfo.NodeinfoController do
    conn
    |> put_resp_header(
      "content-type",
-     "application/json; profile=http://nodeinfo.diaspora.software/ns/schema/2.0#; charset=utf-8"
+     "application/json; profile=\"http://nodeinfo.diaspora.software/ns/schema/#{version}#\"; charset=utf-8"
    )
    |> json(Nodeinfo.get_nodeinfo(version))
  end

View file

@@ -5,30 +5,65 @@
defmodule Pleroma.Workers.AttachmentsCleanupWorker do
  import Ecto.Query
+ alias Pleroma.Config
  alias Pleroma.Object
  alias Pleroma.Repo
  use Pleroma.Workers.WorkerHelper, queue: "attachments_cleanup"
+ @doc """
+ Takes object data and if necessary enqueues a job,
+ deleting all attachments of the post eligible for cleanup
+ """
+ @spec enqueue_if_needed(map()) :: {:ok, Oban.Job.t()} | {:ok, :skip} | {:error, any()}
+ def enqueue_if_needed(%{
+       "actor" => actor,
+       "attachment" => [_ | _] = attachments
+     }) do
+   with true <- Config.get([:instance, :cleanup_attachments]),
+        true <- URI.parse(actor).host == Pleroma.Web.Endpoint.host(),
+        [_ | _] <- attachments do
+     enqueue(
+       "cleanup_attachments",
+       %{"actor" => actor, "attachments" => attachments},
+       schedule_in: Config.get!([:instance, :cleanup_attachments_delay])
+     )
+   else
+     _ -> {:ok, :skip}
+   end
+ end
+ def enqueue_if_needed(_), do: {:ok, :skip}
  @impl Oban.Worker
  def perform(%Job{
        args: %{
          "op" => "cleanup_attachments",
-         "object" => %{"data" => %{"attachment" => [_ | _] = attachments, "actor" => actor}}
+         "attachments" => [_ | _] = attachments,
+         "actor" => actor
        }
      }) do
-   if Pleroma.Config.get([:instance, :cleanup_attachments], false) do
-     attachments
-     |> Enum.flat_map(fn item -> Enum.map(item["url"], & &1["href"]) end)
-     |> fetch_objects
-     |> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
-     |> filter_objects
-     |> do_clean
-   end
+   attachments
+   |> Enum.flat_map(fn item -> Enum.map(item["url"], & &1["href"]) end)
+   |> fetch_objects
+   |> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
+   |> filter_objects
+   |> do_clean
    {:ok, :success}
  end
+ # Left over already enqueued jobs in the old format
+ # This function clause can be deleted once sufficient time passed after 3.14
+ def perform(%Job{
+       args: %{
+         "op" => "cleanup_attachments",
+         "object" => %{"data" => data}
+       }
+     }) do
+   enqueue_if_needed(data)
+ end
  def perform(%Job{args: %{"op" => "cleanup_attachments", "object" => _object}}), do: {:ok, :skip}
  defp do_clean({object_ids, attachment_urls}) do

View file

@@ -8,13 +8,14 @@ defmodule Pleroma.Repo.Migrations.MoveSigningKeys do
    # we do not handle remote users here!
    # because we want to store a key id -> user id mapping, and we don't
    # currently store key ids for remote users...
-   query =
-     from(u in User)
-     |> where(local: true)
-   Repo.stream(query, timeout: :infinity)
+   # Also this MUST use select, else the migration will fail in future installs with new user fields!
+   from(u in Pleroma.User,
+     where: u.local == true,
+     select: {u.id, u.keys, u.ap_id}
+   )
+   |> Repo.stream(timeout: :infinity)
    |> Enum.each(fn
-     %User{id: user_id, keys: private_key, local: true, ap_id: ap_id} ->
+     {user_id, private_key, ap_id} ->
        IO.puts("Migrating user #{user_id}")
        # we can precompute the public key here...
        # we do use it on every user view which makes it a bit of a dos attack vector

View file

@@ -124,6 +124,119 @@ defmodule Pleroma.HTML.Scrubber.Default do
    Meta.allow_tag_with_these_attributes(:font, ["face"])
  end
if Pleroma.Config.get!([:markup, :allow_math]) do
Meta.allow_tag_with_these_attributes("annotation", ["encoding"])
Meta.allow_tag_with_these_attributes(:"annotation-xml", ["encoding"])
Meta.allow_tag_with_these_attributes(:math, [
"display",
"displaystyle",
"mathvariant",
"scriptlevel"
])
basic_math_tags = [
"maction",
"merror",
:mi,
"mmultiscripts",
:mn,
"mphantom",
"mprescripts",
"mroot",
"mrow",
"ms",
"msqrt",
"mstyle",
"msub",
"msubsup",
"msup",
"mtable",
"mtext",
"mtr",
"semantics"
]
for tag <- basic_math_tags do
Meta.allow_tag_with_these_attributes(unquote(tag), [
"mathvariant",
"displaystyle",
"scriptlevel"
])
end
Meta.allow_tag_with_these_attributes("mfrac", [
"displaystyle",
"linethickness",
"mathvariant",
"scriptlevel"
])
Meta.allow_tag_with_these_attributes(:mo, [
"displaystyle",
"form",
"largeop",
"lspace",
"mathvariant",
"minsize",
"movablelimits",
"rspace",
"scriptlevel",
"stretchy",
"symmetric"
])
Meta.allow_tag_with_these_attributes("mover", [
"accent",
"displaystyle",
"mathvariant",
"scriptlevel"
])
Meta.allow_tag_with_these_attributes("mpadded", [
"depth",
"displaystyle",
"height",
"lspace",
"mathvariant",
"scriptlevel",
"voffset",
"width"
])
Meta.allow_tag_with_these_attributes("mspace", [
"depth",
"displaystyle",
"height",
"mathvariant",
"scriptlevel",
"width"
])
Meta.allow_tag_with_these_attributes("mtd", [
"columnspan",
"displaystyle",
"mathvariant",
"rowspan",
"scriptlevel"
])
Meta.allow_tag_with_these_attributes("munder", [
"accentunder",
"displaystyle",
"mathvariant",
"scriptlevel"
])
Meta.allow_tag_with_these_attributes("munderover", [
"accent",
"accentunder",
"displaystyle",
"mathvariant",
"scriptlevel"
])
end
  Meta.allow_tag_with_these_attributes(:center, [])
  Meta.allow_tag_with_these_attributes(:small, [])

View file

@@ -79,7 +79,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.ObjectAgePolicyTest do
      {:ok, data} = ObjectAgePolicy.filter(data)
-     assert Visibility.get_visibility(%{data: data}) == "unlisted"
+     assert Visibility.get_visibility(%{data: data}) == "direct"
    end
    test "it delists an old post" do

View file

@@ -698,7 +698,6 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier.NoteHandlingTest do
      assert Transmogrifier.take_emoji_tags(user) == [
        %{
          "icon" => %{"type" => "Image", "url" => "https://example.org/firefox.png"},
-         "id" => nil,
          "name" => ":firefox:",
          "type" => "Emoji",
          "updated" => "1970-01-01T00:00:00Z"

View file

@@ -47,7 +47,6 @@ defmodule Pleroma.Web.ActivityPub.UserViewTest do
      "tag" => [
        %{
          "icon" => %{"type" => "Image", "url" => "/test"},
-         "id" => nil,
          "name" => ":bib:",
          "type" => "Emoji",
          "updated" => "1970-01-01T00:00:00Z"

View file

@@ -588,6 +588,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusViewTest do
  end
  test "put the url advertised in the Activity in to the url attribute" do
+   Pleroma.Config.put([:instance, :limit_to_local_content], false)
    id = "https://wedistribute.org/wp-json/pterotype/v1/object/85810"
    [activity] = Activity.search(nil, id)

View file

@@ -37,6 +37,10 @@ defmodule Pleroma.Web.MediaProxyTest do
      assert MediaProxy.url(local_root) == local_root
    end
+   test "ignores data url" do
+     assert MediaProxy.url("data:image/png;base64,") == "data:image/png;base64,"
+   end
    test "encodes and decodes URL" do
      url = "https://pleroma.soykaf.com/static/logo.png"
      encoded = MediaProxy.url(url)

View file

@@ -0,0 +1,86 @@
# Akkoma: Magically expressive social media
# Copyright © 2024 Akkoma Authors <https://akkoma.dev/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.AttachmentsCleanupWorkerTest do
use Pleroma.DataCase, async: false
use Oban.Testing, repo: Pleroma.Repo
import Pleroma.Factory
alias Pleroma.Object
alias Pleroma.Workers.AttachmentsCleanupWorker
alias Pleroma.Tests.ObanHelpers
setup do
clear_config([:instance, :cleanup_attachments], true)
file = %Plug.Upload{
content_type: "image/jpeg",
path: Path.absname("test/fixtures/image.jpg"),
filename: "an_image.jpg"
}
user = insert(:user)
{:ok, %Pleroma.Object{} = attachment} =
Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id)
{:ok, attachment: attachment, user: user}
end
test "does not enqueue remote post" do
remote_data = %{
"id" => "https://remote.example/obj/123",
"actor" => "https://remote.example/user/1",
"content" => "content",
"attachment" => [
%{
"type" => "Document",
"mediaType" => "image/png",
"name" => "marvellous image",
"url" => "https://remote.example/files/image.png"
}
]
}
assert {:ok, :skip} = AttachmentsCleanupWorker.enqueue_if_needed(remote_data)
end
test "enqueues local post", %{attachment: attachment, user: user} do
local_url = Pleroma.Web.Endpoint.url()
local_data = %{
"id" => local_url <> "/obj/123",
"actor" => user.ap_id,
"content" => "content",
"attachment" => [attachment.data]
}
assert {:ok, %Oban.Job{}} = AttachmentsCleanupWorker.enqueue_if_needed(local_data)
end
test "doesn't delete immediately", %{attachment: attachment, user: user} do
delay = 6000
clear_config([:instance, :cleanup_attachments_delay], delay)
note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})
uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])
%{"url" => [%{"href" => href}]} = attachment.data
path = "#{uploads_dir}/#{Path.basename(href)}"
assert File.exists?(path)
Object.delete(note)
Process.sleep(2000)
assert File.exists?(path)
ObanHelpers.perform(all_enqueued(worker: Pleroma.Workers.AttachmentsCleanupWorker))
assert Object.get_by_id(note.id).data["deleted"]
assert Object.get_by_id(attachment.id) == nil
refute File.exists?(path)
end
end