# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.SideEffects do
  @moduledoc """
  This module looks at an inserted object and executes the side effects that it
  implies. For example, a `Like` activity will increase the like count on the
  liked object, a `Follow` activity will add the user to the follower
  collection, and so on.
  """
  alias Pleroma.Activity
  alias Pleroma.FollowingRelationship
  alias Pleroma.Notification
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Builder
  alias Pleroma.Web.ActivityPub.Pipeline
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.Push
  alias Pleroma.Web.Streamer
  alias Pleroma.Workers.PollWorker

  require Pleroma.Constants
  require Logger

  @logger Pleroma.Config.get([:side_effects, :logger], Logger)

  @behaviour Pleroma.Web.ActivityPub.SideEffects.Handling

  defp ap_streamer, do: Pleroma.Config.get([:side_effects, :ap_streamer], ActivityPub)
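
  # The two config lookups above (@logger and ap_streamer/0) exist so that tests can
  # swap in doubles; a hypothetical override (module names are placeholders, not
  # shipped code) would look roughly like:
  #
  #     config :pleroma, :side_effects,
  #       ap_streamer: MyApp.StreamerStub,
  #       logger: MyApp.LoggerStub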

  @impl true
  def handle(object, meta \\ [])
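
  # Illustrative sketch of how these handlers are normally reached: not by calling
  # this module directly, but through the common pipeline, e.g. (assuming a liking
  # `user` and a `post` object; the variable names are invented for the example)
  #
  #     {:ok, like_data, _meta} = Builder.like(user, post)
  #     {:ok, _activity, _meta} = Pipeline.common_pipeline(like_data, local: true)
  #
  # By the time the pipeline returns, the `Like` clause below has already bumped the
  # like counter on `post` and created the notification.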

  # Tasks this handles:
  # - Marks the pending follow as accepted
  # - Updates the follow notification type
  @impl true
  def handle(
        %{
          data: %{
            "actor" => actor,
            "type" => "Accept",
            "object" => follow_activity_id
          }
        } = object,
        meta
      ) do
    with %Activity{actor: follower_id} = follow_activity <-
           Activity.get_by_ap_id(follow_activity_id),
         %User{} = followed <- User.get_cached_by_ap_id(actor),
         %User{} = follower <- User.get_cached_by_ap_id(follower_id),
         {:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "accept"),
         {:ok, _follower, followed} <-
           FollowingRelationship.update(follower, followed, :follow_accept) do
      Notification.update_notification_type(followed, follow_activity)
    end

    {:ok, object, meta}
  end

  # Tasks this handles:
  # - Rejects all existing follow activities for this person
  # - Updates the follow state
  # - Dismisses the notification
  @impl true
  def handle(
        %{
          data: %{
            "actor" => actor,
            "type" => "Reject",
            "object" => follow_activity_id
          }
        } = object,
        meta
      ) do
    with %Activity{actor: follower_id} = follow_activity <-
           Activity.get_by_ap_id(follow_activity_id),
         %User{} = followed <- User.get_cached_by_ap_id(actor),
         %User{} = follower <- User.get_cached_by_ap_id(follower_id),
         {:ok, _follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "reject") do
      FollowingRelationship.update(follower, followed, :follow_reject)
      Notification.dismiss(follow_activity)
    end

    {:ok, object, meta}
  end

  # Tasks this handles:
  # - Follows if possible
  # - Sends a notification
  # - Generates accept or reject if appropriate
  @impl true
  def handle(
        %{
          data: %{
            "id" => follow_id,
            "type" => "Follow",
            "object" => followed_user,
            "actor" => following_user
          }
        } = object,
        meta
      ) do
    with %User{} = follower <- User.get_cached_by_ap_id(following_user),
         %User{} = followed <- User.get_cached_by_ap_id(followed_user),
         {_, {:ok, _, _}, _, _} <-
           {:following, User.follow(follower, followed, :follow_pending), follower, followed} do
      if followed.local && !followed.is_locked do
        {:ok, accept_data, _} = Builder.accept(followed, object)
        {:ok, _activity, _} = Pipeline.common_pipeline(accept_data, local: true)
      end
    else
      {:following, {:error, _}, _follower, followed} ->
        {:ok, reject_data, _} = Builder.reject(followed, object)
        {:ok, _activity, _} = Pipeline.common_pipeline(reject_data, local: true)

      _ ->
        nil
    end

    {:ok, notifications} = Notification.create_notifications(object, do_send: false)

    meta =
      meta
      |> add_notifications(notifications)

    updated_object = Activity.get_by_ap_id(follow_id)

    {:ok, updated_object, meta}
  end
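
  # For orientation, the shape of the Follow activity data the clause above matches
  # (the URLs are made-up examples):
  #
  #     %{
  #       "type" => "Follow",
  #       "id" => "https://example.com/activities/1",
  #       "actor" => "https://example.com/users/alice",
  #       "object" => "https://other.example/users/bob"
  #     }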

  # Tasks this handles:
  # - Unfollow and block
  @impl true
  def handle(
        %{data: %{"type" => "Block", "object" => blocked_user, "actor" => blocking_user}} =
          object,
        meta
      ) do
    with %User{} = blocker <- User.get_cached_by_ap_id(blocking_user),
         %User{} = blocked <- User.get_cached_by_ap_id(blocked_user) do
      User.block(blocker, blocked)
    end

    {:ok, object, meta}
  end

  # Tasks this handles:
  # - Update the user
  # - Update a non-user object (Note, Question, etc.)
  #
  # For a local user, we also get a changeset with the full information, so we
  # can update non-federating, non-ActivityPub settings as well.
  @impl true
  def handle(%{data: %{"type" => "Update", "object" => updated_object}} = object, meta) do
    updated_object_id = updated_object["id"]

    with {_, true} <- {:has_id, is_binary(updated_object_id)},
         %{"type" => type} <- updated_object,
         {_, is_user} <- {:is_user, type in Pleroma.Constants.actor_types()} do
      if is_user do
        handle_update_user(object, meta)
      else
        handle_update_object(object, meta)
      end
    else
      _ ->
        {:ok, object, meta}
    end
  end
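
  # The "changeset with the full information" mentioned above is expected to arrive
  # through `meta`; a hypothetical local call would look roughly like
  #
  #     handle(update_activity, user_update_changeset: changeset)
  #
  # where `changeset` is whatever the local profile-update flow built for the user.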

  # Tasks this handles:
  # - Add like to object
  # - Set up notification
  @impl true
  def handle(%{data: %{"type" => "Like"}} = object, meta) do
    liked_object = Object.get_by_ap_id(object.data["object"])
    Utils.add_like_to_object(object, liked_object)

    Notification.create_notifications(object)

    {:ok, object, meta}
  end

  # Tasks this handles:
  # - Actually create object
  # - Rollback if we couldn't create it
  # - Increase the user note count
  # - Increase the reply count
  # - Set up ActivityExpiration
  # - Set up notifications
  # - Index incoming posts for search (if needed)
  @impl true
  def handle(%{data: %{"type" => "Create"}} = activity, meta) do
    with {:ok, object, meta} <- handle_object_creation(meta[:object_data], activity, meta),
         %User{} = user <- User.get_cached_by_ap_id(activity.data["actor"]) do
      {:ok, notifications} = Notification.create_notifications(activity, do_send: false)
      {:ok, _user} = ActivityPub.increase_note_count_if_public(user, object)
      {:ok, _user} = ActivityPub.update_last_status_at_if_public(user, object)

      if in_reply_to = object.data["type"] != "Answer" && object.data["inReplyTo"] do
        Object.increase_replies_count(in_reply_to)
      end

      reply_depth = (meta[:depth] || 0) + 1

      # FIXME: Force inReplyTo to replies
      if Pleroma.Web.Federator.allowed_thread_distance?(reply_depth) and
           object.data["replies"] != nil do
        for reply_id <- object.data["replies"] do
          Pleroma.Workers.RemoteFetcherWorker.enqueue("fetch_remote", %{
            "id" => reply_id,
            "depth" => reply_depth
          })
        end
      end

      ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
        Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
      end)

      Pleroma.Search.add_to_index(Map.put(activity, :object, object))

      meta =
        meta
        |> add_notifications(notifications)

      ap_streamer().stream_out(activity)

      {:ok, activity, meta}
    else
      e -> Repo.rollback(e)
    end
  end
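
  # The `Create` clause relies on the pipeline having stashed the validated object
  # payload under `meta[:object_data]`; a stripped-down example of what then reaches
  # `handle_object_creation/3` (all values invented):
  #
  #     %{
  #       "type" => "Note",
  #       "id" => "https://example.com/objects/1",
  #       "actor" => "https://example.com/users/alice",
  #       "content" => "Hello world",
  #       "inReplyTo" => nil
  #     }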

  # Tasks this handles:
  # - Add announce to object
  # - Set up notification
  # - Stream out the announce
  @impl true
  def handle(%{data: %{"type" => "Announce"}} = object, meta) do
    announced_object = Object.get_by_ap_id(object.data["object"])
    user = User.get_cached_by_ap_id(object.data["actor"])

    Utils.add_announce_to_object(object, announced_object)

    if !User.is_internal_user?(user) do
      Notification.create_notifications(object)

      ap_streamer().stream_out(object)
    end

    {:ok, object, meta}
  end

  @impl true
  def handle(%{data: %{"type" => "Undo", "object" => undone_object}} = object, meta) do
    with undone_object <- Activity.get_by_ap_id(undone_object),
         :ok <- handle_undoing(undone_object) do
      {:ok, object, meta}
    end
  end

  # Tasks this handles:
  # - Add reaction to object
  # - Set up notification
  @impl true
  def handle(%{data: %{"type" => "EmojiReact"}} = object, meta) do
    reacted_object = Object.get_by_ap_id(object.data["object"])
    Utils.add_emoji_reaction_to_object(object, reacted_object)

    Notification.create_notifications(object)

    {:ok, object, meta}
  end

  # Tasks this handles:
  # - Deletes and unpins the create activity
  # - Replace object with Tombstone
  # - Set up notification
  # - Reduce the user note count
  # - Reduce the reply count
  # - Stream out the activity
  # - Removes posts from search index (if needed)
  @impl true
  def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object, meta) do
    deleted_object =
      Object.normalize(deleted_object, fetch: false) ||
        User.get_cached_by_ap_id(deleted_object)

    result =
      case deleted_object do
        %Object{} ->
          with {:ok, deleted_object, _activity} <- Object.delete(deleted_object),
               {_, actor} when is_binary(actor) <- {:actor, deleted_object.data["actor"]},
               %User{} = user <- User.get_cached_by_ap_id(actor) do
            User.remove_pinned_object_id(user, deleted_object.data["id"])

            {:ok, user} = ActivityPub.decrease_note_count_if_public(user, deleted_object)

            if in_reply_to = deleted_object.data["inReplyTo"] do
              Object.decrease_replies_count(in_reply_to)
            end

            ap_streamer().stream_out(object)
            ap_streamer().stream_out_participations(deleted_object, user)
            :ok
          else
            {:actor, _} ->
              @logger.error("The object doesn't have an actor: #{inspect(deleted_object)}")
              :no_object_actor
          end

        %User{} ->
          with {:ok, _} <- User.delete(deleted_object) do
            :ok
          end
      end

    if result == :ok do
      Notification.create_notifications(object)

      # Only remove from index when deleting actual objects, not users or anything else
      with %Pleroma.Object{} <- deleted_object do
        Pleroma.Search.remove_from_index(deleted_object)
      end

      {:ok, object, meta}
    else
      {:error, result}
    end
  end

  # Tasks this handles:
  # - adds pin to user
  # - removes the expiration job for the pinned activity, if it was set to expire
  @impl true
  def handle(%{data: %{"type" => "Add"} = data} = object, meta) do
    with %User{} = user <- User.get_cached_by_ap_id(data["actor"]),
         {:ok, _user} <- User.add_pinned_object_id(user, data["object"]) do
      # if the pinned activity was scheduled for deletion, remove that job
      if expiration = Pleroma.Workers.PurgeExpiredActivity.get_expiration(meta[:activity_id]) do
        Oban.cancel_job(expiration.id)
      end

      {:ok, object, meta}
    else
      nil ->
        {:error, :user_not_found}

      {:error, changeset} ->
        if changeset.errors[:pinned_objects] do
          {:error, :pinned_statuses_limit_reached}
        else
          changeset.errors
        end
    end
  end
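
  # Rough shape of a pin (`Add`) activity as handled above, with invented values;
  # the "target" field is assumed to be the user's featured collection:
  #
  #     %{
  #       "type" => "Add",
  #       "actor" => "https://example.com/users/alice",
  #       "object" => "https://example.com/objects/1",
  #       "target" => "https://example.com/users/alice/collections/featured"
  #     }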

  # Tasks this handles:
  # - removes pin from user
  # - removes corresponding Add activity
  # - if the activity had an expiration, recreates the expiration job
  @impl true
  def handle(%{data: %{"type" => "Remove"} = data} = object, meta) do
    with %User{} = user <- User.get_cached_by_ap_id(data["actor"]),
         {:ok, _user} <- User.remove_pinned_object_id(user, data["object"]) do
      data["object"]
      |> Activity.add_by_params_query(user.ap_id, user.featured_address)
      |> Repo.delete_all()

      # if the pinned activity was scheduled for deletion, reschedule it for deletion
      if meta[:expires_at] do
        # MRF.ActivityExpirationPolicy used UTC timestamps for expires_at in the original implementation
        {:ok, expires_at} =
          Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime.cast(meta[:expires_at])

        Pleroma.Workers.PurgeExpiredActivity.enqueue(%{
          activity_id: meta[:activity_id],
          expires_at: expires_at
        })
      end

      {:ok, object, meta}
    else
      nil -> {:error, :user_not_found}
      error -> error
    end
  end

  # Nothing to do
  @impl true
  def handle(object, meta) do
    {:ok, object, meta}
  end

  defp handle_update_user(
         %{data: %{"type" => "Update", "object" => updated_object}} = object,
         meta
       ) do
    if changeset = Keyword.get(meta, :user_update_changeset) do
      changeset
      |> User.update_and_set_cache()
    else
      {:ok, new_user_data} = ActivityPub.user_data_from_user_object(updated_object)

      User.get_by_ap_id(updated_object["id"])
      |> User.remote_user_changeset(new_user_data)
      |> User.update_and_set_cache()
    end

    {:ok, object, meta}
  end

  defp handle_update_object(
         %{data: %{"type" => "Update", "object" => updated_object}} = object,
         meta
       ) do
    orig_object_ap_id = updated_object["id"]
    orig_object = Object.get_by_ap_id(orig_object_ap_id)
    orig_object_data = orig_object.data

    updated_object =
      if meta[:local] do
        # If this is a local Update, we don't run it through the transmogrifier,
        # so we use the embedded object as-is.
        updated_object
      else
        meta[:object_data]
      end

    if orig_object_data["type"] in Pleroma.Constants.updatable_object_types() do
      %{
        updated_data: updated_object_data,
        updated: updated,
        used_history_in_new_object?: used_history_in_new_object?
      } = Object.Updater.make_new_object_data_from_update_object(orig_object_data, updated_object)

      changeset =
        orig_object
        |> Repo.preload(:hashtags)
        |> Object.change(%{data: updated_object_data})

      with {:ok, new_object} <- Repo.update(changeset),
           {:ok, _} <- Object.invalid_object_cache(new_object),
           {:ok, _} <- Object.set_cache(new_object),
           # The metadata/utils.ex uses the object id for the cache.
           {:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(new_object.id) do
        if used_history_in_new_object? do
          with create_activity when not is_nil(create_activity) <-
                 Pleroma.Activity.get_create_by_object_ap_id(orig_object_ap_id),
               {:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(create_activity.id) do
            nil
          else
            _ -> nil
          end
        end

        if updated do
          object
          |> Activity.normalize()
          |> ActivityPub.notify_and_stream()
        end
      end
    end

    {:ok, object, meta}
  end

  def handle_object_creation(%{"type" => "Question"} = object, activity, meta) do
    with {:ok, object, meta} <- Pipeline.common_pipeline(object, meta) do
      PollWorker.schedule_poll_end(activity)
      {:ok, object, meta}
    end
  end

  def handle_object_creation(%{"type" => "Answer"} = object_map, _activity, meta) do
    with {:ok, object, meta} <- Pipeline.common_pipeline(object_map, meta) do
      Object.increase_vote_count(
        object.data["inReplyTo"],
        object.data["name"],
        object.data["actor"]
      )

      {:ok, object, meta}
    end
  end

  def handle_object_creation(%{"type" => objtype} = object, _activity, meta)
      when objtype in ~w[Audio Video Event Article Note Page] do
    with {:ok, object, meta} <- Pipeline.common_pipeline(object, meta) do
      {:ok, object, meta}
    end
  end

  # Nothing to do
  def handle_object_creation(object, _activity, meta) do
    {:ok, object, meta}
  end

  defp undo_like(nil, object), do: delete_object(object)

  defp undo_like(%Object{} = liked_object, object) do
    with {:ok, _} <- Utils.remove_like_from_object(object, liked_object) do
      delete_object(object)
    end
  end

  def handle_undoing(%{data: %{"type" => "Like"}} = object) do
    object.data["object"]
    |> Object.get_by_ap_id()
    |> undo_like(object)
  end

  def handle_undoing(%{data: %{"type" => "EmojiReact"}} = object) do
    with %Object{} = reacted_object <- Object.get_by_ap_id(object.data["object"]),
         {:ok, _} <- Utils.remove_emoji_reaction_from_object(object, reacted_object),
         {:ok, _} <- Repo.delete(object) do
      :ok
    end
  end

  def handle_undoing(%{data: %{"type" => "Announce"}} = object) do
    with %Object{} = announced_object <- Object.get_by_ap_id(object.data["object"]),
         {:ok, _} <- Utils.remove_announce_from_object(object, announced_object),
         {:ok, _} <- Repo.delete(object) do
      :ok
    end
  end

  def handle_undoing(
        %{data: %{"type" => "Block", "actor" => blocker, "object" => blocked}} = object
      ) do
    with %User{} = blocker <- User.get_cached_by_ap_id(blocker),
         %User{} = blocked <- User.get_cached_by_ap_id(blocked),
         {:ok, _} <- User.unblock(blocker, blocked),
         {:ok, _} <- Repo.delete(object) do
      :ok
    end
  end

  def handle_undoing(object), do: {:error, ["don't know how to handle", object]}

  @spec delete_object(Object.t()) :: :ok | {:error, Ecto.Changeset.t()}
  defp delete_object(object) do
    with {:ok, _} <- Repo.delete(object), do: :ok
  end

  defp send_notifications(meta) do
    Keyword.get(meta, :notifications, [])
    |> Enum.each(fn notification ->
      Streamer.stream(["user", "user:notification"], notification)
      Push.send(notification)
    end)

    meta
  end

  defp send_streamables(meta) do
    Keyword.get(meta, :streamables, [])
    |> Enum.each(fn {topics, items} ->
      Streamer.stream(topics, items)
    end)

    meta
  end

  defp add_notifications(meta, notifications) do
    existing = Keyword.get(meta, :notifications, [])

    meta
    |> Keyword.put(:notifications, notifications ++ existing)
  end
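
  # How these helpers fit together, as a sketch: a handler builds notifications with
  # `do_send: false`, parks them in `meta` via `add_notifications/2`, and once the
  # surrounding transaction has committed the pipeline calls
  # `handle_after_transaction/1`, which is when `send_notifications/1` and
  # `send_streamables/1` actually stream and push them out.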

  @impl true
  def handle_after_transaction(meta) do
    meta
    |> send_notifications()
    |> send_streamables()
  end
end