# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.Transmogrifier do
  @moduledoc """
  A module to handle conversion from the internal format to wire ActivityPub and back.
  """
  alias Pleroma.Activity
  alias Pleroma.FollowingRelationship
  alias Pleroma.Object
  alias Pleroma.Object.Containment
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.ObjectValidator
  alias Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator
  alias Pleroma.Web.ActivityPub.Pipeline
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.Federator
  alias Pleroma.Workers.TransmogrifierWorker

  import Ecto.Query

  require Logger
  require Pleroma.Constants

  @doc """
  Modifies an incoming AP object (Mastodon format) to our internal format.
  """
  def fix_object(object, options \\ []) do
    object
    |> strip_internal_fields
    |> fix_actor
    |> fix_url
    |> fix_attachments
    |> fix_context
    |> fix_in_reply_to(options)
    |> fix_emoji
    |> fix_tag
    |> fix_content_map
    |> fix_addressing
    |> fix_summary
    |> fix_type(options)
  end

  def fix_summary(%{"summary" => nil} = object) do
    Map.put(object, "summary", "")
  end

  def fix_summary(%{"summary" => _} = object) do
    # summary is present, nothing to do
    object
  end

  def fix_summary(object), do: Map.put(object, "summary", "")

  def fix_addressing_list(map, field) do
    cond do
      is_binary(map[field]) ->
        Map.put(map, field, [map[field]])

      is_nil(map[field]) ->
        Map.put(map, field, [])

      true ->
        map
    end
  end
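
  # Illustrative sketch of the normalisation above (hypothetical addresses):
  #
  #   fix_addressing_list(%{"to" => "https://example.com/users/alice"}, "to")
  #   #=> %{"to" => ["https://example.com/users/alice"]}
  #
  #   fix_addressing_list(%{"cc" => nil}, "cc")
  #   #=> %{"cc" => []}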

  def fix_explicit_addressing(
        %{"to" => to, "cc" => cc} = object,
        explicit_mentions,
        follower_collection
      ) do
    explicit_to = Enum.filter(to, fn x -> x in explicit_mentions end)

    explicit_cc = Enum.filter(to, fn x -> x not in explicit_mentions end)

    final_cc =
      (cc ++ explicit_cc)
      |> Enum.reject(fn x -> String.ends_with?(x, "/followers") and x != follower_collection end)
      |> Enum.uniq()

    object
    |> Map.put("to", explicit_to)
    |> Map.put("cc", final_cc)
  end

  def fix_explicit_addressing(object, _explicit_mentions, _followers_collection), do: object

  # if directMessage flag is set to true, leave the addressing alone
  def fix_explicit_addressing(%{"directMessage" => true} = object), do: object

  def fix_explicit_addressing(object) do
    explicit_mentions = Utils.determine_explicit_mentions(object)

    %User{follower_address: follower_collection} =
      object
      |> Containment.get_actor()
      |> User.get_cached_by_ap_id()

    explicit_mentions =
      explicit_mentions ++
        [
          Pleroma.Constants.as_public(),
          follower_collection
        ]

    fix_explicit_addressing(object, explicit_mentions, follower_collection)
  end

  # if as:Public is addressed, then make sure the followers collection is also addressed
  # so that the activities will be delivered to local users.
  def fix_implicit_addressing(%{"to" => to, "cc" => cc} = object, followers_collection) do
    recipients = to ++ cc

    if followers_collection not in recipients do
      cond do
        Pleroma.Constants.as_public() in cc ->
          to = to ++ [followers_collection]
          Map.put(object, "to", to)

        Pleroma.Constants.as_public() in to ->
          cc = cc ++ [followers_collection]
          Map.put(object, "cc", cc)

        true ->
          object
      end
    else
      object
    end
  end

  def fix_implicit_addressing(object, _), do: object
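
  # Sketch of the implicit addressing above, with made-up addresses: when as:Public
  # is in "to", the followers collection is appended to "cc" (and vice versa), e.g.
  #
  #   fix_implicit_addressing(
  #     %{"to" => ["https://www.w3.org/ns/activitystreams#Public"], "cc" => []},
  #     "https://example.com/users/alice/followers"
  #   )
  #   #=> %{
  #   #     "to" => ["https://www.w3.org/ns/activitystreams#Public"],
  #   #     "cc" => ["https://example.com/users/alice/followers"]
  #   #   }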

  def fix_addressing(object) do
    {:ok, %User{} = user} = User.get_or_fetch_by_ap_id(object["actor"])
    followers_collection = User.ap_followers(user)

    object
    |> fix_addressing_list("to")
    |> fix_addressing_list("cc")
    |> fix_addressing_list("bto")
    |> fix_addressing_list("bcc")
    |> fix_explicit_addressing()
    |> fix_implicit_addressing(followers_collection)
  end

  def fix_actor(%{"attributedTo" => actor} = object) do
    Map.put(object, "actor", Containment.get_actor(%{"actor" => actor}))
  end

  def fix_in_reply_to(object, options \\ [])

  def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
      when not is_nil(in_reply_to) do
    in_reply_to_id = prepare_in_reply_to(in_reply_to)
    object = Map.put(object, "inReplyToAtomUri", in_reply_to_id)
    depth = (options[:depth] || 0) + 1

    if Federator.allowed_thread_distance?(depth) do
      with {:ok, replied_object} <- get_obj_helper(in_reply_to_id, options),
           %Activity{} <- Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
        object
        |> Map.put("inReplyTo", replied_object.data["id"])
        |> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
        |> Map.put("conversation", replied_object.data["context"] || object["conversation"])
        |> Map.put("context", replied_object.data["context"] || object["conversation"])
      else
        e ->
          Logger.error("Couldn't fetch #{inspect(in_reply_to_id)}, error: #{inspect(e)}")
          object
      end
    else
      object
    end
  end

  def fix_in_reply_to(object, _options), do: object

  defp prepare_in_reply_to(in_reply_to) do
    cond do
      is_bitstring(in_reply_to) ->
        in_reply_to

      is_map(in_reply_to) && is_bitstring(in_reply_to["id"]) ->
        in_reply_to["id"]

      is_list(in_reply_to) && is_bitstring(Enum.at(in_reply_to, 0)) ->
        Enum.at(in_reply_to, 0)

      true ->
        ""
    end
  end
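
  # The helper above accepts the common wire shapes for inReplyTo, for example
  # (hypothetical IDs):
  #
  #   prepare_in_reply_to("https://example.com/objects/1")            #=> "https://example.com/objects/1"
  #   prepare_in_reply_to(%{"id" => "https://example.com/objects/1"}) #=> "https://example.com/objects/1"
  #   prepare_in_reply_to(["https://example.com/objects/1"])          #=> "https://example.com/objects/1"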

  def fix_context(object) do
    context = object["context"] || object["conversation"] || Utils.generate_context_id()

    object
    |> Map.put("context", context)
    |> Map.put("conversation", context)
  end

  defp add_if_present(map, _key, nil), do: map

  defp add_if_present(map, key, value) do
    Map.put(map, key, value)
  end
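
  # A small illustration of the helper above:
  #
  #   add_if_present(%{}, "mediaType", nil)         #=> %{}
  #   add_if_present(%{}, "mediaType", "image/png") #=> %{"mediaType" => "image/png"}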

  def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachment) do
    attachments =
      Enum.map(attachment, fn data ->
        url =
          cond do
            is_list(data["url"]) -> List.first(data["url"])
            is_map(data["url"]) -> data["url"]
            true -> nil
          end

        media_type =
          cond do
            is_map(url) && is_binary(url["mediaType"]) -> url["mediaType"]
            is_binary(data["mediaType"]) -> data["mediaType"]
            is_binary(data["mimeType"]) -> data["mimeType"]
            true -> nil
          end

        href =
          cond do
            is_map(url) && is_binary(url["href"]) -> url["href"]
            is_binary(data["url"]) -> data["url"]
            is_binary(data["href"]) -> data["href"]
          end

        attachment_url =
          %{"href" => href}
          |> add_if_present("mediaType", media_type)
          |> add_if_present("type", Map.get(url || %{}, "type"))

        %{"url" => [attachment_url]}
        |> add_if_present("mediaType", media_type)
        |> add_if_present("type", data["type"])
        |> add_if_present("name", data["name"])
      end)

    Map.put(object, "attachment", attachments)
  end

  def fix_attachments(%{"attachment" => attachment} = object) when is_map(attachment) do
    object
    |> Map.put("attachment", [attachment])
    |> fix_attachments()
  end

  def fix_attachments(object), do: object
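
  # A sketch of the normalisation above with a made-up attachment:
  #
  #   fix_attachments(%{
  #     "attachment" => %{"url" => "https://example.com/img.png", "mimeType" => "image/png"}
  #   })
  #   #=> %{
  #   #     "attachment" => [
  #   #       %{
  #   #         "mediaType" => "image/png",
  #   #         "url" => [%{"href" => "https://example.com/img.png", "mediaType" => "image/png"}]
  #   #       }
  #   #     ]
  #   #   }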

  def fix_url(%{"url" => url} = object) when is_map(url) do
    Map.put(object, "url", url["href"])
  end

  def fix_url(%{"type" => object_type, "url" => url} = object)
      when object_type in ["Video", "Audio"] and is_list(url) do
    first_element = Enum.at(url, 0)

    link_element = Enum.find(url, fn x -> is_map(x) and x["mimeType"] == "text/html" end)

    object
    |> Map.put("attachment", [first_element])
    |> Map.put("url", link_element["href"])
  end

  def fix_url(%{"type" => object_type, "url" => url} = object)
      when object_type != "Video" and is_list(url) do
    first_element = Enum.at(url, 0)

    url_string =
      cond do
        is_bitstring(first_element) -> first_element
        is_map(first_element) -> first_element["href"] || ""
        true -> ""
      end

    Map.put(object, "url", url_string)
  end

  def fix_url(object), do: object

  def fix_emoji(%{"tag" => tags} = object) when is_list(tags) do
    emoji =
      tags
      |> Enum.filter(fn data -> data["type"] == "Emoji" and data["icon"] end)
      |> Enum.reduce(%{}, fn data, mapping ->
        name = String.trim(data["name"], ":")

        Map.put(mapping, name, data["icon"]["url"])
      end)

    # we merge Mastodon and Pleroma emoji into a single mapping, to allow for both wire formats
    emoji = Map.merge(object["emoji"] || %{}, emoji)

    Map.put(object, "emoji", emoji)
  end

  def fix_emoji(%{"tag" => %{"type" => "Emoji"} = tag} = object) do
    name = String.trim(tag["name"], ":")
    emoji = %{name => tag["icon"]["url"]}

    Map.put(object, "emoji", emoji)
  end

  def fix_emoji(object), do: object
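
  # For instance (hypothetical emoji tag), an incoming Emoji tag becomes an entry in the
  # internal "emoji" mapping:
  #
  #   fix_emoji(%{
  #     "tag" => [
  #       %{"type" => "Emoji", "name" => ":blank:", "icon" => %{"url" => "https://example.com/blank.png"}}
  #     ]
  #   })
  #   # puts %{"blank" => "https://example.com/blank.png"} under the object's "emoji" key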

  def fix_tag(%{"tag" => tag} = object) when is_list(tag) do
    tags =
      tag
      |> Enum.filter(fn data -> data["type"] == "Hashtag" and data["name"] end)
      |> Enum.map(fn data -> String.slice(data["name"], 1..-1) end)

    Map.put(object, "tag", tag ++ tags)
  end

  def fix_tag(%{"tag" => %{"type" => "Hashtag", "name" => hashtag} = tag} = object) do
    combined = [tag, String.slice(hashtag, 1..-1)]

    Map.put(object, "tag", combined)
  end

  def fix_tag(%{"tag" => %{} = tag} = object), do: Map.put(object, "tag", [tag])

  def fix_tag(object), do: object
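
  # As an illustration (made-up tag), the bare hashtag name is appended alongside the
  # original Hashtag object:
  #
  #   fix_tag(%{"tag" => [%{"type" => "Hashtag", "name" => "#cats"}]})
  #   #=> %{"tag" => [%{"type" => "Hashtag", "name" => "#cats"}, "cats"]}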

  # content map usually only has one language so this will do for now.
  def fix_content_map(%{"contentMap" => content_map} = object) do
    content_groups = Map.to_list(content_map)
    {_, content} = Enum.at(content_groups, 0)

    Map.put(object, "content", content)
  end

  def fix_content_map(object), do: object
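
  # e.g. (hypothetical payload) fix_content_map(%{"contentMap" => %{"en" => "hello"}})
  # copies "hello" into the object's "content" field.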

  def fix_type(object, options \\ [])

  def fix_type(%{"inReplyTo" => reply_id, "name" => _} = object, options)
      when is_binary(reply_id) do
    with true <- Federator.allowed_thread_distance?(options[:depth]),
         {:ok, %{data: %{"type" => "Question"} = _} = _} <- get_obj_helper(reply_id, options) do
      Map.put(object, "type", "Answer")
    else
      _ -> object
    end
  end

  def fix_type(object, _), do: object

  defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do
    with true <- id =~ "follows",
         %User{local: true} = follower <- User.get_cached_by_ap_id(follower_id),
         %Activity{} = activity <- Utils.fetch_latest_follow(follower, followed) do
      {:ok, activity}
    else
      _ -> {:error, nil}
    end
  end

  defp mastodon_follow_hack(_, _), do: {:error, nil}

  defp get_follow_activity(follow_object, followed) do
    with object_id when not is_nil(object_id) <- Utils.get_ap_id(follow_object),
         {_, %Activity{} = activity} <- {:activity, Activity.get_by_ap_id(object_id)} do
      {:ok, activity}
    else
      # Can't find the activity. This might be a Mastodon 2.3 "Accept"
      {:activity, nil} ->
        mastodon_follow_hack(follow_object, followed)

      _ ->
        {:error, nil}
    end
  end

  # Reduce the object list to find the reported user.
  defp get_reported(objects) do
    Enum.reduce_while(objects, nil, fn ap_id, _ ->
      with %User{} = user <- User.get_cached_by_ap_id(ap_id) do
        {:halt, user}
      else
        _ -> {:cont, nil}
      end
    end)
  end

  def handle_incoming(data, options \\ [])

  # Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
  # with nil ID.
  def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do
    with context <- data["context"] || Utils.generate_context_id(),
         content <- data["content"] || "",
         %User{} = actor <- User.get_cached_by_ap_id(actor),
         # Reduce the object list to find the reported user.
         %User{} = account <- get_reported(objects),
         # Remove the reported user from the object list.
         statuses <- Enum.filter(objects, fn ap_id -> ap_id != account.ap_id end) do
      %{
        actor: actor,
        context: context,
        account: account,
        statuses: statuses,
        content: content,
        additional: %{"cc" => [account.ap_id]}
      }
      |> ActivityPub.flag()
    end
  end

  # disallow objects with bogus IDs
  def handle_incoming(%{"id" => nil}, _options), do: :error
  def handle_incoming(%{"id" => ""}, _options), do: :error
  # length of https:// = 8, should validate better, but good enough for now.
  def handle_incoming(%{"id" => id}, _options) when is_binary(id) and byte_size(id) < 8,
    do: :error

  # TODO: validate those with an Ecto schema
  # - tags
  # - emoji
  def handle_incoming(
        %{"type" => "Create", "object" => %{"type" => objtype} = object} = data,
        options
      )
      when objtype in ["Article", "Event", "Note", "Video", "Page", "Question", "Answer", "Audio"] do
    actor = Containment.get_actor(data)

    data =
      Map.put(data, "actor", actor)
      |> fix_addressing

    with nil <- Activity.get_create_by_object_ap_id(object["id"]),
         {:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do
      object = fix_object(object, options)

      params = %{
        to: data["to"],
        object: object,
        actor: user,
        context: object["conversation"],
        local: false,
        published: data["published"],
        additional:
          Map.take(data, [
            "cc",
            "directMessage",
            "id"
          ])
      }

      with {:ok, created_activity} <- ActivityPub.create(params) do
        reply_depth = (options[:depth] || 0) + 1

        if Federator.allowed_thread_distance?(reply_depth) do
          for reply_id <- replies(object) do
            Pleroma.Workers.RemoteFetcherWorker.enqueue("fetch_remote", %{
              "id" => reply_id,
              "depth" => reply_depth
            })
          end
        end

        {:ok, created_activity}
      end
    else
      %Activity{} = activity -> {:ok, activity}
      _e -> :error
    end
  end

  def handle_incoming(
        %{"type" => "Listen", "object" => %{"type" => "Audio"} = object} = data,
        options
      ) do
    actor = Containment.get_actor(data)

    data =
      Map.put(data, "actor", actor)
      |> fix_addressing

    with {:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do
      reply_depth = (options[:depth] || 0) + 1
      options = Keyword.put(options, :depth, reply_depth)
      object = fix_object(object, options)

      params = %{
        to: data["to"],
        object: object,
        actor: user,
        context: nil,
        local: false,
        published: data["published"],
        additional: Map.take(data, ["cc", "id"])
      }

      ActivityPub.listen(params)
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data,
        _options
      ) do
    with %User{local: true} = followed <-
           User.get_cached_by_ap_id(Containment.get_actor(%{"actor" => followed})),
         {:ok, %User{} = follower} <-
           User.get_or_fetch_by_ap_id(Containment.get_actor(%{"actor" => follower})),
         {:ok, activity} <- ActivityPub.follow(follower, followed, id, false) do
      with deny_follow_blocked <- Pleroma.Config.get([:user, :deny_follow_blocked]),
           {_, false} <- {:user_blocked, User.blocks?(followed, follower) && deny_follow_blocked},
           {_, false} <- {:user_locked, User.locked?(followed)},
           {_, {:ok, follower}} <- {:follow, User.follow(follower, followed)},
           {_, {:ok, _}} <-
             {:follow_state_update, Utils.update_follow_state_for_all(activity, "accept")},
           {:ok, _relationship} <-
             FollowingRelationship.update(follower, followed, :follow_accept) do
        ActivityPub.accept(%{
          to: [follower.ap_id],
          actor: followed,
          object: data,
          local: true
        })
      else
        {:user_blocked, true} ->
          {:ok, _} = Utils.update_follow_state_for_all(activity, "reject")
          {:ok, _relationship} = FollowingRelationship.update(follower, followed, :follow_reject)

          ActivityPub.reject(%{
            to: [follower.ap_id],
            actor: followed,
            object: data,
            local: true
          })

        {:follow, {:error, _}} ->
          {:ok, _} = Utils.update_follow_state_for_all(activity, "reject")
          {:ok, _relationship} = FollowingRelationship.update(follower, followed, :follow_reject)

          ActivityPub.reject(%{
            to: [follower.ap_id],
            actor: followed,
            object: data,
            local: true
          })

        {:user_locked, true} ->
          {:ok, _relationship} = FollowingRelationship.update(follower, followed, :follow_pending)
          :noop
      end

      {:ok, activity}
    else
      _e ->
        :error
    end
  end

  def handle_incoming(
        %{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => id} = data,
        _options
      ) do
    with actor <- Containment.get_actor(data),
         {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
         {:ok, follow_activity} <- get_follow_activity(follow_object, followed),
         {:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "accept"),
         %User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
         {:ok, _relationship} <- FollowingRelationship.update(follower, followed, :follow_accept) do
      ActivityPub.accept(%{
        to: follow_activity.data["to"],
        type: "Accept",
        actor: followed,
        object: follow_activity.data["id"],
        local: false,
        activity_id: id
      })
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => id} = data,
        _options
      ) do
    with actor <- Containment.get_actor(data),
         {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
         {:ok, follow_activity} <- get_follow_activity(follow_object, followed),
         {:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "reject"),
         %User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
         {:ok, _relationship} <- FollowingRelationship.update(follower, followed, :follow_reject),
         {:ok, activity} <-
           ActivityPub.reject(%{
             to: follow_activity.data["to"],
             type: "Reject",
             actor: followed,
             object: follow_activity.data["id"],
             local: false,
             activity_id: id
           }) do
      {:ok, activity}
    else
      _e -> :error
    end
  end

  @misskey_reactions %{
    "like" => "👍",
    "love" => "❤️",
    "laugh" => "😆",
    "hmm" => "🤔",
    "surprise" => "😮",
    "congrats" => "🎉",
    "angry" => "💢",
    "confused" => "😥",
    "rip" => "😇",
    "pudding" => "🍮",
    "star" => "⭐"
  }

  @doc "Rewrite Misskey likes into EmojiReacts"
  def handle_incoming(
        %{
          "type" => "Like",
          "_misskey_reaction" => reaction
        } = data,
        options
      ) do
    data
    |> Map.put("type", "EmojiReact")
    |> Map.put("content", @misskey_reactions[reaction] || reaction)
    |> handle_incoming(options)
  end
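
  # For example, a Misskey-style like such as
  #   %{"type" => "Like", "_misskey_reaction" => "pudding", ...}
  # is re-dispatched through handle_incoming/2 as
  #   %{"type" => "EmojiReact", "content" => "🍮", ...}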

  def handle_incoming(%{"type" => "Like"} = data, _options) do
    with {_, {:ok, cast_data_sym}} <-
           {:casting_data,
            data |> LikeValidator.cast_data() |> Ecto.Changeset.apply_action(:insert)},
         cast_data = ObjectValidator.stringify_keys(Map.from_struct(cast_data_sym)),
         :ok <- ObjectValidator.fetch_actor_and_object(cast_data),
         {_, {:ok, cast_data}} <- {:ensure_context_presence, ensure_context_presence(cast_data)},
         {_, {:ok, cast_data}} <-
           {:ensure_recipients_presence, ensure_recipients_presence(cast_data)},
         {_, {:ok, activity, _meta}} <-
           {:common_pipeline, Pipeline.common_pipeline(cast_data, local: false)} do
      {:ok, activity}
    else
      e -> {:error, e}
    end
  end

  def handle_incoming(
        %{
          "type" => "EmojiReact",
          "object" => object_id,
          "actor" => _actor,
          "id" => id,
          "content" => emoji
        } = data,
        _options
      ) do
    with actor <- Containment.get_actor(data),
         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
         {:ok, object} <- get_obj_helper(object_id),
         {:ok, activity, _object} <-
           ActivityPub.react_with_emoji(actor, object, emoji, activity_id: id, local: false) do
      {:ok, activity}
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data,
        _options
      ) do
    with actor <- Containment.get_actor(data),
         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
         {:ok, object} <- get_embedded_obj_helper(object_id, actor),
         public <- Visibility.is_public?(data),
         {:ok, activity, _object} <- ActivityPub.announce(actor, object, id, false, public) do
      {:ok, activity}
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{"type" => "Update", "object" => %{"type" => object_type} = object, "actor" => actor_id} =
          data,
        _options
      )
      when object_type in [
             "Person",
             "Application",
             "Service",
             "Organization"
           ] do
    with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do
      {:ok, new_user_data} = ActivityPub.user_data_from_user_object(object)

      actor
      |> User.upgrade_changeset(new_user_data, true)
      |> User.update_and_set_cache()

      ActivityPub.update(%{
        local: false,
        to: data["to"] || [],
        cc: data["cc"] || [],
        object: object,
        actor: actor_id,
        activity_id: data["id"]
      })
    else
      e ->
        Logger.error(e)
        :error
    end
  end

  # TODO: We presently assume that any actor on the same origin domain as the object being
  # deleted has the rights to delete that object. A better way to validate whether or not
  # the object should be deleted is to refetch the object URI, which should return either
  # an error or a tombstone. This would allow us to verify that a deletion actually took
  # place.
  def handle_incoming(
        %{"type" => "Delete", "object" => object_id, "actor" => actor, "id" => id} = data,
        _options
      ) do
    object_id = Utils.get_ap_id(object_id)

    with actor <- Containment.get_actor(data),
         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
         {:ok, object} <- get_obj_helper(object_id),
         :ok <- Containment.contain_origin(actor.ap_id, object.data),
         {:ok, activity} <-
           ActivityPub.delete(object, local: false, activity_id: id, actor: actor.ap_id) do
      {:ok, activity}
    else
      nil ->
        case User.get_cached_by_ap_id(object_id) do
          %User{ap_id: ^actor} = user ->
            User.delete(user)

          nil ->
            :error
        end

      _e ->
        :error
    end
  end

  def handle_incoming(
        %{
          "type" => "Undo",
          "object" => %{"type" => "Announce", "object" => object_id},
          "actor" => _actor,
          "id" => id
        } = data,
        _options
      ) do
    with actor <- Containment.get_actor(data),
         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
         {:ok, object} <- get_obj_helper(object_id),
         {:ok, activity, _} <- ActivityPub.unannounce(actor, object, id, false) do
      {:ok, activity}
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{
          "type" => "Undo",
          "object" => %{"type" => "Follow", "object" => followed},
          "actor" => follower,
          "id" => id
        } = _data,
        _options
      ) do
    with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
         {:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
         {:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
      User.unfollow(follower, followed)
      {:ok, activity}
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{
          "type" => "Undo",
          "object" => %{"type" => "EmojiReact", "id" => reaction_activity_id},
          "actor" => _actor,
          "id" => id
        } = data,
        _options
      ) do
    with actor <- Containment.get_actor(data),
         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
         {:ok, activity, _} <-
           ActivityPub.unreact_with_emoji(actor, reaction_activity_id,
             activity_id: id,
             local: false
           ) do
      {:ok, activity}
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{
          "type" => "Undo",
          "object" => %{"type" => "Block", "object" => blocked},
          "actor" => blocker,
          "id" => id
        } = _data,
        _options
      ) do
    with %User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
         {:ok, %User{} = blocker} <- User.get_or_fetch_by_ap_id(blocker),
         {:ok, activity} <- ActivityPub.unblock(blocker, blocked, id, false) do
      User.unblock(blocker, blocked)
      {:ok, activity}
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data,
        _options
      ) do
    with %User{local: true} = blocked = User.get_cached_by_ap_id(blocked),
         {:ok, %User{} = blocker} = User.get_or_fetch_by_ap_id(blocker),
         {:ok, activity} <- ActivityPub.block(blocker, blocked, id, false) do
      User.unfollow(blocker, blocked)
      User.block(blocker, blocked)
      {:ok, activity}
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{
          "type" => "Undo",
          "object" => %{"type" => "Like", "object" => object_id},
          "actor" => _actor,
          "id" => id
        } = data,
        _options
      ) do
    with actor <- Containment.get_actor(data),
         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
         {:ok, object} <- get_obj_helper(object_id),
         {:ok, activity, _, _} <- ActivityPub.unlike(actor, object, id, false) do
      {:ok, activity}
    else
      _e -> :error
    end
  end

  # For Undos that don't have the complete object attached, try to find it in our database.
  def handle_incoming(
        %{
          "type" => "Undo",
          "object" => object
        } = activity,
        options
      )
      when is_binary(object) do
    with %Activity{data: data} <- Activity.get_by_ap_id(object) do
      activity
      |> Map.put("object", data)
      |> handle_incoming(options)
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{
          "type" => "Move",
          "actor" => origin_actor,
          "object" => origin_actor,
          "target" => target_actor
        },
        _options
      ) do
    with %User{} = origin_user <- User.get_cached_by_ap_id(origin_actor),
         {:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_actor),
         true <- origin_actor in target_user.also_known_as do
      ActivityPub.move(origin_user, target_user, false)
    else
      _e -> :error
    end
  end

  def handle_incoming(_, _), do: :error

  @spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil
  def get_obj_helper(id, options \\ []) do
    case Object.normalize(id, true, options) do
      %Object{} = object -> {:ok, object}
      _ -> nil
    end
  end

  @spec get_embedded_obj_helper(String.t() | Object.t(), User.t()) :: {:ok, Object.t()} | nil
  def get_embedded_obj_helper(%{"attributedTo" => attributed_to, "id" => object_id} = data, %User{
        ap_id: ap_id
      })
      when attributed_to == ap_id do
    with {:ok, activity} <-
           handle_incoming(%{
             "type" => "Create",
             "to" => data["to"],
             "cc" => data["cc"],
             "actor" => attributed_to,
             "object" => data
           }) do
      {:ok, Object.normalize(activity)}
    else
      _ -> get_obj_helper(object_id)
    end
  end

  def get_embedded_obj_helper(object_id, _) do
    get_obj_helper(object_id)
  end

  def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do
    with false <- String.starts_with?(in_reply_to, "http"),
         {:ok, %{data: replied_to_object}} <- get_obj_helper(in_reply_to) do
      Map.put(object, "inReplyTo", replied_to_object["external_url"] || in_reply_to)
    else
      _e -> object
    end
  end

  def set_reply_to_uri(obj), do: obj

  @doc """
  Serialized Mastodon-compatible `replies` collection containing _self-replies_.
  Based on Mastodon's ActivityPub::NoteSerializer#replies.
  """
  def set_replies(obj_data) do
    replies_uris =
      with limit when limit > 0 <-
             Pleroma.Config.get([:activitypub, :note_replies_output_limit], 0),
           %Object{} = object <- Object.get_cached_by_ap_id(obj_data["id"]) do
        object
        |> Object.self_replies()
        |> select([o], fragment("?->>'id'", o.data))
        |> limit(^limit)
        |> Repo.all()
      else
        _ -> []
      end

    set_replies(obj_data, replies_uris)
  end

  defp set_replies(obj, []) do
    obj
  end

  defp set_replies(obj, replies_uris) do
    replies_collection = %{
      "type" => "Collection",
      "items" => replies_uris
    }

    Map.merge(obj, %{"replies" => replies_collection})
  end

  def replies(%{"replies" => %{"first" => %{"items" => items}}}) when not is_nil(items) do
    items
  end

  def replies(%{"replies" => %{"items" => items}}) when not is_nil(items) do
    items
  end

  def replies(_), do: []
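
  # Illustrative (hypothetical) incoming reply collections handled by the clauses above:
  #
  #   replies(%{"replies" => %{"first" => %{"items" => ["https://example.com/objects/1"]}}})
  #   #=> ["https://example.com/objects/1"]
  #
  #   replies(%{"replies" => %{"items" => ["https://example.com/objects/2"]}})
  #   #=> ["https://example.com/objects/2"]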

  # Prepares the object of an outgoing create activity.
  def prepare_object(object) do
    object
    |> set_sensitive
    |> add_hashtags
    |> add_mention_tags
    |> add_emoji_tags
    |> add_attributed_to
    |> prepare_attachments
    |> set_conversation
    |> set_reply_to_uri
    |> set_replies
    |> strip_internal_fields
    |> strip_internal_tags
    |> set_type
  end

  #  @doc
  #  """
  #  internal -> Mastodon
  #  """

  def prepare_outgoing(%{"type" => activity_type, "object" => object_id} = data)
      when activity_type in ["Create", "Listen"] do
    object =
      object_id
      |> Object.normalize()
      |> Map.get(:data)
      |> prepare_object

    data =
      data
      |> Map.put("object", object)
      |> Map.merge(Utils.make_json_ld_header())
      |> Map.delete("bcc")

    {:ok, data}
  end

  def prepare_outgoing(%{"type" => "Announce", "actor" => ap_id, "object" => object_id} = data) do
    object =
      object_id
      |> Object.normalize()

    data =
      if Visibility.is_private?(object) && object.data["actor"] == ap_id do
        data |> Map.put("object", object |> Map.get(:data) |> prepare_object)
      else
        data |> maybe_fix_object_url
      end

    data =
      data
      |> strip_internal_fields
      |> Map.merge(Utils.make_json_ld_header())
      |> Map.delete("bcc")

    {:ok, data}
  end

  # Mastodon Accept/Reject requires a non-normalized object containing the actor URIs,
  # because of course it does.
  def prepare_outgoing(%{"type" => "Accept"} = data) do
    with follow_activity <- Activity.normalize(data["object"]) do
      object = %{
        "actor" => follow_activity.actor,
        "object" => follow_activity.data["object"],
        "id" => follow_activity.data["id"],
        "type" => "Follow"
      }

      data =
        data
        |> Map.put("object", object)
        |> Map.merge(Utils.make_json_ld_header())

      {:ok, data}
    end
  end

  def prepare_outgoing(%{"type" => "Reject"} = data) do
    with follow_activity <- Activity.normalize(data["object"]) do
      object = %{
        "actor" => follow_activity.actor,
        "object" => follow_activity.data["object"],
        "id" => follow_activity.data["id"],
        "type" => "Follow"
      }

      data =
        data
        |> Map.put("object", object)
        |> Map.merge(Utils.make_json_ld_header())

      {:ok, data}
    end
  end

  def prepare_outgoing(%{"type" => _type} = data) do
    data =
      data
      |> strip_internal_fields
      |> maybe_fix_object_url
      |> Map.merge(Utils.make_json_ld_header())

    {:ok, data}
  end

  def maybe_fix_object_url(%{"object" => object} = data) when is_binary(object) do
    with false <- String.starts_with?(object, "http"),
         {:fetch, {:ok, relative_object}} <- {:fetch, get_obj_helper(object)},
         %{data: %{"external_url" => external_url}} when not is_nil(external_url) <-
           relative_object do
      Map.put(data, "object", external_url)
    else
      {:fetch, e} ->
        Logger.error("Couldn't fetch #{object} #{inspect(e)}")
        data

      _ ->
        data
    end
  end

  def maybe_fix_object_url(data), do: data

  def add_hashtags(object) do
    tags =
      (object["tag"] || [])
      |> Enum.map(fn
        # Expand internal representation tags into AS2 tags.
        tag when is_binary(tag) ->
          %{
            "href" => Pleroma.Web.Endpoint.url() <> "/tags/#{tag}",
            "name" => "##{tag}",
            "type" => "Hashtag"
          }

        # Do not process tags which are already AS2 tag objects.
        tag when is_map(tag) ->
          tag
      end)

    Map.put(object, "tag", tags)
  end

  def add_mention_tags(object) do
    {enabled_receivers, disabled_receivers} = Utils.get_notified_from_object(object)
    potential_receivers = enabled_receivers ++ disabled_receivers
    mentions = Enum.map(potential_receivers, &build_mention_tag/1)

    tags = object["tag"] || []
    Map.put(object, "tag", tags ++ mentions)
  end

  defp build_mention_tag(%{ap_id: ap_id, nickname: nickname} = _) do
    %{"type" => "Mention", "href" => ap_id, "name" => "@#{nickname}"}
  end

  def take_emoji_tags(%User{emoji: emoji}) do
    emoji
    |> Enum.flat_map(&Map.to_list/1)
    |> Enum.map(&build_emoji_tag/1)
  end

  # TODO: we should probably send mtime instead of unix epoch time for updated
  def add_emoji_tags(%{"emoji" => emoji} = object) do
    tags = object["tag"] || []

    out = Enum.map(emoji, &build_emoji_tag/1)

    Map.put(object, "tag", tags ++ out)
  end

  def add_emoji_tags(object), do: object

  defp build_emoji_tag({name, url}) do
    %{
      "icon" => %{"url" => url, "type" => "Image"},
      "name" => ":" <> name <> ":",
      "type" => "Emoji",
      "updated" => "1970-01-01T00:00:00Z",
      "id" => url
    }
  end
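
  # For instance (hypothetical emoji), build_emoji_tag({"blank", "https://example.com/blank.png"})
  # yields an AS2 Emoji tag:
  #
  #   %{
  #     "icon" => %{"url" => "https://example.com/blank.png", "type" => "Image"},
  #     "name" => ":blank:",
  #     "type" => "Emoji",
  #     "updated" => "1970-01-01T00:00:00Z",
  #     "id" => "https://example.com/blank.png"
  #   }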

  def set_conversation(object) do
    Map.put(object, "conversation", object["context"])
  end

  def set_sensitive(object) do
    tags = object["tag"] || []
    Map.put(object, "sensitive", "nsfw" in tags)
  end

  def set_type(%{"type" => "Answer"} = object) do
    Map.put(object, "type", "Note")
  end

  def set_type(object), do: object

  def add_attributed_to(object) do
    attributed_to = object["attributedTo"] || object["actor"]
    Map.put(object, "attributedTo", attributed_to)
  end

  def prepare_attachments(object) do
    attachments =
      (object["attachment"] || [])
      |> Enum.map(fn data ->
        [%{"mediaType" => media_type, "href" => href} | _] = data["url"]
        %{"url" => href, "mediaType" => media_type, "name" => data["name"], "type" => "Document"}
      end)

    Map.put(object, "attachment", attachments)
  end

  def strip_internal_fields(object) do
    object
    |> Map.drop(Pleroma.Constants.object_internal_fields())
  end

  defp strip_internal_tags(%{"tag" => tags} = object) do
    tags = Enum.filter(tags, fn x -> is_map(x) end)

    Map.put(object, "tag", tags)
  end

  defp strip_internal_tags(object), do: object

  def perform(:user_upgrade, user) do
    # we pass a fake user so that the followers collection is stripped away
    old_follower_address = User.ap_followers(%User{nickname: user.nickname})

    from(
      a in Activity,
      where: ^old_follower_address in a.recipients,
      update: [
        set: [
          recipients:
            fragment(
              "array_replace(?,?,?)",
              a.recipients,
              ^old_follower_address,
              ^user.follower_address
            )
        ]
      ]
    )
    |> Repo.update_all([])
  end

  def upgrade_user_from_ap_id(ap_id) do
    with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
         {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
         already_ap <- User.ap_enabled?(user),
         {:ok, user} <- upgrade_user(user, data) do
      if not already_ap do
        TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
      end

      {:ok, user}
    else
      %User{} = user -> {:ok, user}
      e -> e
    end
  end

  defp upgrade_user(user, data) do
    user
    |> User.upgrade_changeset(data, true)
    |> User.update_and_set_cache()
  end

  def maybe_fix_user_url(%{"url" => url} = data) when is_map(url) do
    Map.put(data, "url", url["href"])
  end

  def maybe_fix_user_url(data), do: data

  def maybe_fix_user_object(data), do: maybe_fix_user_url(data)

  defp ensure_context_presence(%{"context" => context} = data) when is_binary(context),
    do: {:ok, data}

  defp ensure_context_presence(%{"object" => object} = data) when is_binary(object) do
    with %{data: %{"context" => context}} when is_binary(context) <- Object.normalize(object) do
      {:ok, Map.put(data, "context", context)}
    else
      _ ->
        {:error, :no_context}
    end
  end

  defp ensure_context_presence(_) do
    {:error, :no_context}
  end

  defp ensure_recipients_presence(%{"to" => [_ | _], "cc" => [_ | _]} = data),
    do: {:ok, data}

  defp ensure_recipients_presence(%{"object" => object} = data) do
    case Object.normalize(object) do
      %{data: %{"actor" => actor}} ->
        data =
          data
          |> Map.put("to", [actor])
          |> Map.put("cc", data["cc"] || [])

        {:ok, data}

      nil ->
        {:error, :no_object}

      _ ->
        {:error, :no_actor}
    end
  end

  defp ensure_recipients_presence(_) do
    {:error, :no_object}
  end
end