2018-12-23 13:04:54 -07:00
|
|
|
# Pleroma: A lightweight social networking server
|
2021-01-12 23:49:20 -07:00
|
|
|
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 13:04:54 -07:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2018-02-15 12:00:06 -07:00
|
|
|
defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|
|
|
@moduledoc """
|
|
|
|
A module to handle coding from internal to wire ActivityPub and back.
|
|
|
|
"""
|
2019-02-09 08:16:26 -07:00
|
|
|
alias Pleroma.Activity
|
2020-06-16 08:50:33 -06:00
|
|
|
alias Pleroma.EctoType.ActivityPub.ObjectValidators
|
2020-06-05 08:48:02 -06:00
|
|
|
alias Pleroma.Maps
|
2019-02-09 08:16:26 -07:00
|
|
|
alias Pleroma.Object
|
2019-04-17 05:52:01 -06:00
|
|
|
alias Pleroma.Object.Containment
|
2019-02-09 08:16:26 -07:00
|
|
|
alias Pleroma.Repo
|
2019-03-04 19:52:23 -07:00
|
|
|
alias Pleroma.User
|
2019-02-09 08:16:26 -07:00
|
|
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
2020-05-11 07:06:23 -06:00
|
|
|
alias Pleroma.Web.ActivityPub.Builder
|
2019-10-23 03:52:27 -06:00
|
|
|
alias Pleroma.Web.ActivityPub.ObjectValidator
|
|
|
|
alias Pleroma.Web.ActivityPub.Pipeline
|
2019-02-09 08:16:26 -07:00
|
|
|
alias Pleroma.Web.ActivityPub.Utils
|
2019-02-22 05:29:52 -07:00
|
|
|
alias Pleroma.Web.ActivityPub.Visibility
|
2022-08-27 12:05:48 -06:00
|
|
|
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
|
2019-06-29 11:04:50 -06:00
|
|
|
alias Pleroma.Web.Federator
|
2019-08-31 10:08:56 -06:00
|
|
|
alias Pleroma.Workers.TransmogrifierWorker
|
2018-02-15 12:00:06 -07:00
|
|
|
|
2018-02-21 14:21:40 -07:00
|
|
|
import Ecto.Query
|
|
|
|
|
2019-07-28 20:43:19 -06:00
|
|
|
require Pleroma.Constants
|
2024-04-13 15:25:31 -06:00
|
|
|
require Logger
|
2018-02-23 07:00:41 -07:00
|
|
|
|
2018-02-15 12:00:06 -07:00
|
|
|
@doc """
|
|
|
|
Modifies an incoming AP object (mastodon format) to our internal format.
|
|
|
|
"""
|
2019-06-29 11:04:50 -06:00
|
|
|
def fix_object(object, options \\ []) do
|
2018-02-15 12:00:06 -07:00
|
|
|
object
|
2020-12-21 12:54:26 -07:00
|
|
|
|> strip_internal_fields()
|
|
|
|
|> fix_actor()
|
|
|
|
|> fix_url()
|
|
|
|
|> fix_attachments()
|
|
|
|
|> fix_context()
|
2019-06-29 11:04:50 -06:00
|
|
|
|> fix_in_reply_to(options)
|
2022-07-26 11:28:47 -06:00
|
|
|
|> fix_quote_url(options)
|
2020-12-21 12:54:26 -07:00
|
|
|
|> fix_emoji()
|
|
|
|
|> fix_tag()
|
|
|
|
|> fix_content_map()
|
|
|
|
|> fix_addressing()
|
|
|
|
|> fix_summary()
|
2019-04-09 18:32:04 -06:00
|
|
|
end
|
|
|
|
|
|
|
|
def fix_summary(%{"summary" => nil} = object) do
|
2019-09-10 07:43:10 -06:00
|
|
|
Map.put(object, "summary", "")
|
2019-04-09 18:32:04 -06:00
|
|
|
end
|
|
|
|
|
|
|
|
def fix_summary(%{"summary" => _} = object) do
|
|
|
|
# summary is present, nothing to do
|
|
|
|
object
|
|
|
|
end
|
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
def fix_summary(object), do: Map.put(object, "summary", "")
|
2018-08-14 11:05:11 -06:00
|
|
|
|
2024-01-23 12:10:01 -07:00
|
|
|
# Coerces an addressing field into a list of id strings: lists are filtered
# down to their binary entries, a lone string becomes a one-element list,
# anything else becomes [].
defp fix_addressing_list(addrs) when is_list(addrs), do: Enum.filter(addrs, &is_binary/1)
defp fix_addressing_list(addrs) when is_binary(addrs), do: [addrs]
defp fix_addressing_list(_), do: []
|
2019-03-19 11:30:25 -06:00
|
|
|
|
2024-01-23 12:10:01 -07:00
|
|
|
# Due to JSON-LD compaction, plain "Public" and "as:Public" are equivalent to
# the full public URI, but to simplify later checks we only want to deal with
# one representation internally.
defp normalise_addressing_public_list(map, all_fields)

# Canonicalise the public-address aliases in every listed field of a map.
defp normalise_addressing_public_list(%{} = map, fields) when is_list(fields) do
  Enum.reduce(fields, map, fn field, acc -> normalise_public_field(acc, field) end)
end

# Non-map input (or a non-list field spec) passes through untouched.
defp normalise_addressing_public_list(map, _), do: map

# Canonicalise a single addressing field; absent fields are left alone.
defp normalise_public_field(map, field) do
  case map[field] do
    nil ->
      map

    value ->
      canonical =
        value
        |> fix_addressing_list()
        |> Enum.map(&canonical_public_uri/1)

      Map.put(map, field, canonical)
  end
end

defp canonical_public_uri("Public"), do: Pleroma.Constants.as_public()
defp canonical_public_uri("as:Public"), do: Pleroma.Constants.as_public()
defp canonical_public_uri(other), do: other

# Canonicalise the public-address aliases in all four addressing fields.
defp normalise_addressing_public(map) do
  normalise_addressing_public_list(map, ["to", "cc", "bto", "bcc"])
end
|
|
|
|
|
2020-09-10 03:09:11 -06:00
|
|
|
# if directMessage flag is set to true, leave the addressing alone
def fix_explicit_addressing(%{"directMessage" => true} = object, _follower_collection),
  do: object

# Splits "to" into explicitly mentioned recipients and everything else; the
# remainder is demoted to "cc". Foreign follower collections (anything ending
# in "/followers" that is not the author's own) are stripped from "cc".
def fix_explicit_addressing(%{"to" => to, "cc" => cc} = object, follower_collection) do
  explicit_mentions =
    Utils.determine_explicit_mentions(object) ++
      [Pleroma.Constants.as_public(), follower_collection]

  {explicit_to, demoted_to_cc} = Enum.split_with(to, &(&1 in explicit_mentions))

  final_cc =
    (cc ++ demoted_to_cc)
    # drop nil entries before the string check below
    |> Enum.filter(& &1)
    |> Enum.reject(fn x -> String.ends_with?(x, "/followers") and x != follower_collection end)
    |> Enum.uniq()

  Map.merge(object, %{"to" => explicit_to, "cc" => final_cc})
end
|
|
|
|
|
2024-01-23 12:10:01 -07:00
|
|
|
# Replaces `field` in `map` with its normalised (list-of-strings) form.
def fix_addressing_list_key(map, field) do
  fixed_value = fix_addressing_list(map[field])
  Map.put(map, field, fixed_value)
end
|
|
|
|
|
2018-12-23 08:35:49 -07:00
|
|
|
# Normalises all four addressing fields of an incoming object and then
# rewrites explicit/implicit addressing relative to the author's follower
# collection.
#
# Raises a MatchError if the actor cannot be resolved to a user — objects
# without a resolvable actor are not processable.
def fix_addressing(object) do
  {:ok, %User{follower_address: follower_collection}} =
    object
    |> Containment.get_actor()
    |> User.get_or_fetch_by_ap_id()

  object
  |> fix_addressing_list_key("to")
  |> fix_addressing_list_key("cc")
  |> fix_addressing_list_key("bto")
  |> fix_addressing_list_key("bcc")
  |> fix_explicit_addressing(follower_collection)
  |> CommonFixes.fix_implicit_addressing(follower_collection)
end
|
|
|
|
|
2018-07-12 10:37:42 -06:00
|
|
|
def fix_actor(%{"attributedTo" => actor} = object) do
|
2020-06-25 16:07:43 -06:00
|
|
|
actor = Containment.get_actor(%{"actor" => actor})
|
|
|
|
|
|
|
|
# TODO: Remove actor field for Objects
|
|
|
|
object
|
|
|
|
|> Map.put("actor", actor)
|
|
|
|
|> Map.put("attributedTo", actor)
|
2018-07-12 10:37:42 -06:00
|
|
|
end
|
|
|
|
|
2019-06-29 11:04:50 -06:00
|
|
|
def fix_in_reply_to(object, options \\ [])

# Resolves "inReplyTo": if the thread-distance limit permits, fetch the
# replied-to object and rewrite "inReplyTo" to its canonical id, adopting its
# "context". Legacy OStatus fields ("conversation", "inReplyToAtomUri") are
# dropped once a parent is found.
def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
    when not is_nil(in_reply_to) do
  in_reply_to_id = prepare_in_reply_to(in_reply_to)
  # depth counts how far we have walked up the thread while fetching parents
  depth = (options[:depth] || 0) + 1

  if Federator.allowed_thread_distance?(depth) do
    with {:ok, replied_object} <- get_obj_helper(in_reply_to_id, options),
         %Activity{} <- Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
      object
      |> Map.put("inReplyTo", replied_object.data["id"])
      |> Map.put("context", replied_object.data["context"] || object["conversation"])
      |> Map.drop(["conversation", "inReplyToAtomUri"])
    else
      # Parent unavailable (fetch failed or no Create activity): keep as-is.
      _ ->
        object
    end
  else
    # Past the configured federation depth limit — do not fetch the parent.
    object
  end
end

def fix_in_reply_to(object, _options), do: object
|
2018-02-25 02:56:01 -07:00
|
|
|
|
2022-07-26 11:28:47 -06:00
|
|
|
def fix_quote_url(object, options \\ [])

# Canonical representation is "quoteUri". If within the allowed thread depth,
# fetch the quoted object and rewrite "quoteUri" to its canonical id.
def fix_quote_url(%{"quoteUri" => quote_url} = object, options)
    when not is_nil(quote_url) do
  depth = (options[:depth] || 0) + 1

  if Federator.allowed_thread_distance?(depth) do
    with {:ok, quoted_object} <- get_obj_helper(quote_url, options),
         %Activity{} <- Activity.get_create_by_object_ap_id(quoted_object.data["id"]) do
      object
      |> Map.put("quoteUri", quoted_object.data["id"])
    else
      # Quote could not be resolved — log and keep the raw URI.
      e ->
        Logger.warning("Couldn't fetch quote@#{inspect(quote_url)}, error: #{inspect(e)}")
        object
    end
  else
    object
  end
end

# Soapbox quote field — rename to "quoteUri" and re-enter the first clause.
def fix_quote_url(%{"quoteUrl" => quote_url} = object, options) do
  object
  |> Map.put("quoteUri", quote_url)
  |> Map.delete("quoteUrl")
  |> fix_quote_url(options)
end

# Old Fedibird (bug)
# https://github.com/fedibird/mastodon/issues/9
def fix_quote_url(%{"quoteURL" => quote_url} = object, options) do
  object
  |> Map.put("quoteUri", quote_url)
  |> Map.delete("quoteURL")
  |> fix_quote_url(options)
end

# Misskey's historical quote field.
def fix_quote_url(%{"_misskey_quote" => quote_url} = object, options) do
  object
  |> Map.put("quoteUri", quote_url)
  |> Map.delete("_misskey_quote")
  |> fix_quote_url(options)
end

def fix_quote_url(object, _), do: object
|
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
# Extracts a single reply-target id from the various shapes "inReplyTo"
# arrives in: a plain id string, an embedded object carrying an "id", or a
# list whose first entry is an id string. Anything else maps to "".
defp prepare_in_reply_to(in_reply_to) when is_bitstring(in_reply_to), do: in_reply_to

defp prepare_in_reply_to(%{"id" => id}) when is_bitstring(id), do: id

defp prepare_in_reply_to([first | _]) when is_bitstring(first), do: first

defp prepare_in_reply_to(_), do: ""
|
|
|
|
|
2018-02-19 02:39:03 -07:00
|
|
|
# Ensures the object carries a "context", falling back to the legacy
# "conversation" field or generating a fresh context id; the legacy field
# itself is removed afterwards.
def fix_context(object) do
  context = object["context"] || object["conversation"] || Utils.generate_context_id()

  object
  |> Map.put("context", context)
  |> Map.delete("conversation")
end
|
|
|
|
|
2018-09-21 04:57:31 -06:00
|
|
|
def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachment) do
|
2018-03-30 07:01:53 -06:00
|
|
|
attachments =
|
2019-09-10 07:43:10 -06:00
|
|
|
Enum.map(attachment, fn data ->
|
2020-03-19 12:10:03 -06:00
|
|
|
url =
|
|
|
|
cond do
|
|
|
|
is_list(data["url"]) -> List.first(data["url"])
|
|
|
|
is_map(data["url"]) -> data["url"]
|
|
|
|
true -> nil
|
|
|
|
end
|
|
|
|
|
|
|
|
media_type =
|
|
|
|
cond do
|
2021-05-31 02:41:31 -06:00
|
|
|
is_map(url) && MIME.extensions(url["mediaType"]) != [] ->
|
|
|
|
url["mediaType"]
|
|
|
|
|
|
|
|
is_bitstring(data["mediaType"]) && MIME.extensions(data["mediaType"]) != [] ->
|
|
|
|
data["mediaType"]
|
|
|
|
|
|
|
|
is_bitstring(data["mimeType"]) && MIME.extensions(data["mimeType"]) != [] ->
|
|
|
|
data["mimeType"]
|
|
|
|
|
|
|
|
true ->
|
|
|
|
nil
|
2020-03-19 12:10:03 -06:00
|
|
|
end
|
|
|
|
|
|
|
|
href =
|
|
|
|
cond do
|
|
|
|
is_map(url) && is_binary(url["href"]) -> url["href"]
|
|
|
|
is_binary(data["url"]) -> data["url"]
|
|
|
|
is_binary(data["href"]) -> data["href"]
|
2020-05-25 05:13:42 -06:00
|
|
|
true -> nil
|
2020-03-19 12:10:03 -06:00
|
|
|
end
|
|
|
|
|
2020-05-25 05:13:42 -06:00
|
|
|
if href do
|
|
|
|
attachment_url =
|
2020-06-22 21:30:34 -06:00
|
|
|
%{
|
|
|
|
"href" => href,
|
|
|
|
"type" => Map.get(url || %{}, "type", "Link")
|
|
|
|
}
|
2020-05-25 05:13:42 -06:00
|
|
|
|> Maps.put_if_present("mediaType", media_type)
|
2021-05-12 18:03:10 -06:00
|
|
|
|> Maps.put_if_present("width", (url || %{})["width"] || data["width"])
|
|
|
|
|> Maps.put_if_present("height", (url || %{})["height"] || data["height"])
|
2018-12-23 06:28:17 -07:00
|
|
|
|
2020-06-22 21:30:34 -06:00
|
|
|
%{
|
|
|
|
"url" => [attachment_url],
|
|
|
|
"type" => data["type"] || "Document"
|
|
|
|
}
|
2020-05-25 05:13:42 -06:00
|
|
|
|> Maps.put_if_present("mediaType", media_type)
|
|
|
|
|> Maps.put_if_present("name", data["name"])
|
2020-11-11 12:39:02 -07:00
|
|
|
|> Maps.put_if_present("blurhash", data["blurhash"])
|
2020-05-25 05:13:42 -06:00
|
|
|
else
|
|
|
|
nil
|
|
|
|
end
|
2018-03-30 07:01:53 -06:00
|
|
|
end)
|
2020-05-25 05:13:42 -06:00
|
|
|
|> Enum.filter(& &1)
|
2018-02-17 10:38:58 -07:00
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
Map.put(object, "attachment", attachments)
|
2018-02-15 12:00:06 -07:00
|
|
|
end
|
|
|
|
|
2018-09-21 04:57:31 -06:00
|
|
|
def fix_attachments(%{"attachment" => attachment} = object) when is_map(attachment) do
|
2019-09-10 22:23:33 -06:00
|
|
|
object
|
|
|
|
|> Map.put("attachment", [attachment])
|
|
|
|
|> fix_attachments()
|
2018-09-21 04:57:31 -06:00
|
|
|
end
|
|
|
|
|
2018-09-26 13:01:33 -06:00
|
|
|
def fix_attachments(object), do: object
|
2018-09-21 04:57:31 -06:00
|
|
|
|
2018-11-01 02:56:37 -06:00
|
|
|
def fix_url(%{"url" => url} = object) when is_map(url) do
|
2019-09-10 07:43:10 -06:00
|
|
|
Map.put(object, "url", url["href"])
|
2018-11-01 02:56:37 -06:00
|
|
|
end
|
|
|
|
|
2020-08-19 18:00:04 -06:00
|
|
|
def fix_url(%{"url" => url} = object) when is_list(url) do
|
2018-11-01 02:56:37 -06:00
|
|
|
first_element = Enum.at(url, 0)
|
|
|
|
|
|
|
|
url_string =
|
|
|
|
cond do
|
|
|
|
is_bitstring(first_element) -> first_element
|
|
|
|
is_map(first_element) -> first_element["href"] || ""
|
|
|
|
true -> ""
|
|
|
|
end
|
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
Map.put(object, "url", url_string)
|
2018-11-01 02:56:37 -06:00
|
|
|
end
|
|
|
|
|
|
|
|
def fix_url(object), do: object
|
|
|
|
|
2018-09-21 06:36:29 -06:00
|
|
|
def fix_emoji(%{"tag" => tags} = object) when is_list(tags) do
|
2018-03-30 07:01:53 -06:00
|
|
|
emoji =
|
2019-09-10 07:43:10 -06:00
|
|
|
tags
|
2020-09-14 16:06:42 -06:00
|
|
|
|> Enum.filter(fn data -> is_map(data) and data["type"] == "Emoji" and data["icon"] end)
|
2018-03-30 07:01:53 -06:00
|
|
|
|> Enum.reduce(%{}, fn data, mapping ->
|
2018-09-21 06:36:29 -06:00
|
|
|
name = String.trim(data["name"], ":")
|
2018-03-13 01:05:43 -06:00
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
Map.put(mapping, name, data["icon"]["url"])
|
2018-03-30 07:01:53 -06:00
|
|
|
end)
|
2018-03-13 01:05:43 -06:00
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
Map.put(object, "emoji", emoji)
|
2018-03-13 01:05:43 -06:00
|
|
|
end
|
|
|
|
|
2018-09-21 06:36:29 -06:00
|
|
|
def fix_emoji(%{"tag" => %{"type" => "Emoji"} = tag} = object) do
|
|
|
|
name = String.trim(tag["name"], ":")
|
|
|
|
emoji = %{name => tag["icon"]["url"]}
|
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
Map.put(object, "emoji", emoji)
|
2018-09-21 06:36:29 -06:00
|
|
|
end
|
|
|
|
|
2018-09-26 13:01:33 -06:00
|
|
|
def fix_emoji(object), do: object
|
2018-09-21 06:36:29 -06:00
|
|
|
|
2018-09-21 06:46:49 -06:00
|
|
|
def fix_tag(%{"tag" => tag} = object) when is_list(tag) do
|
2020-12-28 05:02:16 -07:00
|
|
|
tags =
|
2018-09-21 06:46:49 -06:00
|
|
|
tag
|
2018-03-30 07:01:53 -06:00
|
|
|
|> Enum.filter(fn data -> data["type"] == "Hashtag" and data["name"] end)
|
2020-12-21 12:54:26 -07:00
|
|
|
|> Enum.map(fn
|
|
|
|
%{"name" => "#" <> hashtag} -> String.downcase(hashtag)
|
|
|
|
%{"name" => hashtag} -> String.downcase(hashtag)
|
2020-10-19 07:40:50 -06:00
|
|
|
end)
|
2018-03-24 15:39:37 -06:00
|
|
|
|
2020-12-28 05:02:16 -07:00
|
|
|
Map.put(object, "tag", tag ++ tags)
|
2018-03-24 15:39:37 -06:00
|
|
|
end
|
|
|
|
|
2020-10-19 07:46:24 -06:00
|
|
|
def fix_tag(%{"tag" => %{} = tag} = object) do
|
|
|
|
object
|
|
|
|
|> Map.put("tag", [tag])
|
|
|
|
|> fix_tag
|
2018-09-26 02:21:58 -06:00
|
|
|
end
|
|
|
|
|
2018-09-26 13:01:33 -06:00
|
|
|
def fix_tag(object), do: object
|
2018-09-21 06:46:49 -06:00
|
|
|
|
2018-06-18 15:51:22 -06:00
|
|
|
# content map usually only has one language so this will do for now.
# Copies the first entry of "contentMap" into "content"; an empty map leaves
# the object untouched.
def fix_content_map(%{"contentMap" => content_map} = object) when is_map(content_map) do
  case Map.to_list(content_map) do
    [] -> object
    [{_lang, content} | _] -> Map.put(object, "content", content)
  end
end

def fix_content_map(object), do: object
|
|
|
|
|
2020-09-28 11:34:27 -06:00
|
|
|
defp fix_type(%{"type" => "Note", "inReplyTo" => reply_id, "name" => _} = object, options)
|
|
|
|
when is_binary(reply_id) do
|
|
|
|
options = Keyword.put(options, :fetch, true)
|
2019-06-29 11:04:50 -06:00
|
|
|
|
2020-09-28 11:34:27 -06:00
|
|
|
with %Object{data: %{"type" => "Question"}} <- Object.normalize(reply_id, options) do
|
2019-05-22 12:17:57 -06:00
|
|
|
Map.put(object, "type", "Answer")
|
|
|
|
else
|
2019-09-10 07:43:10 -06:00
|
|
|
_ -> object
|
2019-05-22 12:17:57 -06:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-09-28 11:34:27 -06:00
|
|
|
defp fix_type(object, _options), do: object
|
2019-05-22 12:17:57 -06:00
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
# Reduce the object list to find the reported user.
# Returns the first ap_id in `objects` that resolves to a known user, or nil.
defp get_reported(objects) do
  Enum.find_value(objects, fn ap_id ->
    case User.get_cached_by_ap_id(ap_id) do
      %User{} = user -> user
      _ -> nil
    end
  end)
end
|
|
|
|
|
2024-01-23 12:10:01 -07:00
|
|
|
# Public entry point for incoming federation payloads. Canonicalises the
# public-address aliases on the activity and (when present) its embedded
# object before dispatching to the type-specific handlers.
def handle_incoming(data, options \\ []) do
  data
  |> normalise_addressing_public()
  |> normalise_embedded_object_addressing()
  |> handle_incoming_normalised(options)
end

# Canonicalise addressing on the embedded "object", when there is one.
defp normalise_embedded_object_addressing(%{"object" => object} = data)
     when not is_nil(object) do
  Map.put(data, "object", normalise_addressing_public(object))
end

defp normalise_embedded_object_addressing(data), do: data
|
|
|
|
|
|
|
|
defp handle_incoming_normalised(data, options)

# Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
# with nil ID.
defp handle_incoming_normalised(
       %{"type" => "Flag", "object" => objects, "actor" => actor} = data,
       _options
     ) do
  with context <- data["context"] || Utils.generate_context_id(),
       content <- data["content"] || "",
       %User{} = actor <- User.get_cached_by_ap_id(actor),
       # Reduce the object list to find the reported user.
       %User{} = account <- get_reported(objects),
       # Remove the reported user from the object list.
       statuses <- Enum.filter(objects, fn ap_id -> ap_id != account.ap_id end) do
    %{
      actor: actor,
      context: context,
      account: account,
      statuses: statuses,
      content: content,
      # the reported account is cc'd so their instance is notified
      additional: %{"cc" => [account.ap_id]}
    }
    |> ActivityPub.flag()
  end
end

# disallow objects with bogus IDs
defp handle_incoming_normalised(%{"id" => nil}, _options), do: :error
defp handle_incoming_normalised(%{"id" => ""}, _options), do: :error
# length of https:// = 8, should validate better, but good enough for now.
defp handle_incoming_normalised(%{"id" => id}, _options)
     when is_binary(id) and byte_size(id) < 8,
     do: :error
|
|
|
|
|
|
|
|
# Rewrite misskey likes into EmojiReacts
defp handle_incoming_normalised(
       %{
         "type" => "Like",
         "content" => reaction
       } = data,
       options
     ) do
  # A Like carrying emoji content is an emoji reaction; a Like with
  # non-emoji content is treated as a plain Like (content dropped).
  if Pleroma.Emoji.is_unicode_emoji?(reaction) || Pleroma.Emoji.matches_shortcode?(reaction) do
    data
    |> Map.put("type", "EmojiReact")
    |> handle_incoming(options)
  else
    data
    |> Map.delete("content")
    |> handle_incoming(options)
  end
end

# Create of a content object: normalise the embedded object, then validate
# and persist through the common pipeline (deduplicating by object id).
defp handle_incoming_normalised(
       %{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data,
       options
     )
     when objtype in ~w{Question Answer Audio Video Event Article Note Page} do
  # bump the recursion depth for any parent/quote fetches the fixes trigger
  fetch_options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)

  object =
    data["object"]
    |> strip_internal_fields()
    |> fix_type(fetch_options)
    |> fix_in_reply_to(fetch_options)
    |> fix_quote_url(fetch_options)

  # Only change the Create's context if the object's context has been modified.
  data =
    if data["object"]["context"] != object["context"] do
      data
      |> Map.put("object", object)
      |> Map.put("context", object["context"])
    else
      Map.put(data, "object", object)
    end

  options = Keyword.put(options, :local, false)

  with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
       # nil means we have no Create for this object yet — proceed
       nil <- Activity.get_create_by_object_ap_id(obj_id),
       {:ok, activity, _} <- Pipeline.common_pipeline(data, options) do
    {:ok, activity}
  else
    # already created: return the existing activity instead of failing
    %Activity{} = activity -> {:ok, activity}
    e -> e
  end
end
|
|
|
|
|
2024-01-23 12:10:01 -07:00
|
|
|
defp handle_incoming_normalised(%{"type" => type} = data, _options)
|
|
|
|
when type in ~w{Like EmojiReact Announce Add Remove} do
|
2020-05-04 09:18:38 -06:00
|
|
|
with :ok <- ObjectValidator.fetch_actor_and_object(data),
|
2022-06-07 19:42:44 -06:00
|
|
|
{:ok, activity, _meta} <- Pipeline.common_pipeline(data, local: false) do
|
2018-02-17 12:13:12 -07:00
|
|
|
{:ok, activity}
|
|
|
|
else
|
2022-06-07 19:42:44 -06:00
|
|
|
e ->
|
|
|
|
{:error, e}
|
2018-02-17 12:13:12 -07:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2024-01-23 12:10:01 -07:00
|
|
|
defp handle_incoming_normalised(
|
|
|
|
%{"type" => type} = data,
|
|
|
|
_options
|
|
|
|
)
|
|
|
|
when type in ~w{Update Block Follow Accept Reject} do
|
2020-06-22 05:16:05 -06:00
|
|
|
with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
|
2020-08-11 06:02:09 -06:00
|
|
|
{:ok, activity, _} <-
|
|
|
|
Pipeline.common_pipeline(data, local: false) do
|
2020-06-22 05:16:05 -06:00
|
|
|
{:ok, activity}
|
2018-02-25 08:14:25 -07:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2024-01-23 12:10:01 -07:00
|
|
|
defp handle_incoming_normalised(
|
|
|
|
%{"type" => "Delete"} = data,
|
|
|
|
_options
|
|
|
|
) do
|
2020-08-11 06:02:09 -06:00
|
|
|
with {:ok, activity, _} <-
|
|
|
|
Pipeline.common_pipeline(data, local: false) do
|
2018-03-03 10:37:40 -07:00
|
|
|
{:ok, activity}
|
2020-05-11 07:06:23 -06:00
|
|
|
else
|
2021-01-12 01:30:22 -07:00
|
|
|
{:error, {:validate, _}} = e ->
|
2020-05-11 07:06:23 -06:00
|
|
|
# Check if we have a create activity for this
|
2020-06-16 08:50:33 -06:00
|
|
|
with {:ok, object_id} <- ObjectValidators.ObjectID.cast(data["object"]),
|
2020-05-11 07:06:23 -06:00
|
|
|
%Activity{data: %{"actor" => actor}} <-
|
|
|
|
Activity.create_by_object_ap_id(object_id) |> Repo.one(),
|
|
|
|
# We have one, insert a tombstone and retry
|
|
|
|
{:ok, tombstone_data, _} <- Builder.tombstone(actor, object_id),
|
|
|
|
{:ok, _tombstone} <- Object.create(tombstone_data) do
|
|
|
|
handle_incoming(data)
|
|
|
|
else
|
|
|
|
_ -> e
|
|
|
|
end
|
2018-03-03 10:37:40 -07:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2024-01-23 12:10:01 -07:00
|
|
|
# Undo of a Follow targeting a local user: unfollow and record the activity.
defp handle_incoming_normalised(
       %{
         "type" => "Undo",
         "object" => %{"type" => "Follow", "object" => followed},
         "actor" => follower,
         "id" => id
       } = _data,
       _options
     ) do
  with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
       {:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
       {:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
    User.unfollow(follower, followed)
    {:ok, activity}
  else
    _e -> :error
  end
end

# Undo of a reaction/announce/block with the full object embedded.
defp handle_incoming_normalised(
       %{
         "type" => "Undo",
         "object" => %{"type" => type}
       } = data,
       _options
     )
     when type in ["Like", "EmojiReact", "Announce", "Block"] do
  with {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
    {:ok, activity}
  end
end

# For Undos that don't have the complete object attached, try to find it in our database.
defp handle_incoming_normalised(
       %{
         "type" => "Undo",
         "object" => object
       } = activity,
       options
     )
     when is_binary(object) do
  with %Activity{data: data} <- Activity.get_by_ap_id(object) do
    activity
    |> Map.put("object", data)
    |> handle_incoming(options)
  else
    _e -> :error
  end
end

# Account migration. Note the repeated `origin_actor` binding: the Move's
# object must equal its actor, and the target must list the origin in its
# `also_known_as` aliases.
defp handle_incoming_normalised(
       %{
         "type" => "Move",
         "actor" => origin_actor,
         "object" => origin_actor,
         "target" => target_actor
       },
       _options
     ) do
  with %User{} = origin_user <- User.get_cached_by_ap_id(origin_actor),
       # Use a dramatically shortened maximum age before refresh here because it is reasonable
       # for a user to
       # 1. Add the alias to their new account and then
       # 2. Press the button on their new account
       # within a very short period of time and expect it to work
       {:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_actor, maximum_age: 5),
       true <- origin_actor in target_user.also_known_as do
    ActivityPub.move(origin_user, target_user, false)
  else
    _e -> :error
  end
end

# Anything else is not an activity we can process.
defp handle_incoming_normalised(_, _), do: :error
|
2018-02-17 06:55:44 -07:00
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
@spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil
# Resolves `id` to a local or freshly fetched object; returns nil when it
# cannot be normalised.
def get_obj_helper(id, options \\ []) do
  id
  |> Object.normalize(Keyword.put(options, :fetch, true))
  |> then(fn
    %Object{} = object -> {:ok, object}
    _ -> nil
  end)
end
|
|
|
|
|
2019-10-02 05:18:51 -06:00
|
|
|
@spec get_embedded_obj_helper(String.t() | Object.t(), User.t()) :: {:ok, Object.t()} | nil
# When the embedded object is attributed to the very actor that delivered it,
# the embedded copy can be ingested directly via a synthetic Create; on any
# failure (or for other actors) fall back to fetching the object by id.
def get_embedded_obj_helper(%{"attributedTo" => attributed_to, "id" => object_id} = data, %User{
      ap_id: ap_id
    })
    when attributed_to == ap_id do
  with {:ok, activity} <-
         handle_incoming(%{
           "type" => "Create",
           "to" => data["to"],
           "cc" => data["cc"],
           "actor" => attributed_to,
           "object" => data
         }) do
    {:ok, Object.normalize(activity, fetch: false)}
  else
    _ -> get_obj_helper(object_id)
  end
end

def get_embedded_obj_helper(object_id, _) do
  get_obj_helper(object_id)
end
|
|
|
|
|
2019-03-04 20:36:19 -07:00
|
|
|
def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do
|
|
|
|
with false <- String.starts_with?(in_reply_to, "http"),
|
|
|
|
{:ok, %{data: replied_to_object}} <- get_obj_helper(in_reply_to) do
|
|
|
|
Map.put(object, "inReplyTo", replied_to_object["external_url"] || in_reply_to)
|
2018-03-23 09:07:02 -06:00
|
|
|
else
|
|
|
|
_e -> object
|
|
|
|
end
|
|
|
|
end
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2018-03-23 09:07:02 -06:00
|
|
|
def set_reply_to_uri(obj), do: obj
|
|
|
|
|
2022-07-25 10:30:06 -06:00
|
|
|
def set_quote_url(%{"quoteUri" => quote} = object) when is_binary(quote) do
|
|
|
|
Map.put(object, "quoteUrl", quote)
|
|
|
|
end
|
|
|
|
|
|
|
|
def set_quote_url(obj), do: obj
|
|
|
|
|
2020-01-22 11:10:17 -07:00
|
|
|
@doc """
|
|
|
|
Serialized Mastodon-compatible `replies` collection containing _self-replies_.
|
|
|
|
Based on Mastodon's ActivityPub::NoteSerializer#replies.
|
|
|
|
"""
|
2020-02-09 00:17:21 -07:00
|
|
|
def set_replies(obj_data) do
|
2020-01-22 11:10:17 -07:00
|
|
|
replies_uris =
|
2020-02-09 04:09:01 -07:00
|
|
|
with limit when limit > 0 <-
|
|
|
|
Pleroma.Config.get([:activitypub, :note_replies_output_limit], 0),
|
2020-02-09 00:17:21 -07:00
|
|
|
%Object{} = object <- Object.get_cached_by_ap_id(obj_data["id"]) do
|
|
|
|
object
|
|
|
|
|> Object.self_replies()
|
|
|
|
|> select([o], fragment("?->>'id'", o.data))
|
2020-01-22 11:10:17 -07:00
|
|
|
|> limit(^limit)
|
|
|
|
|> Repo.all()
|
2020-02-09 04:09:01 -07:00
|
|
|
else
|
|
|
|
_ -> []
|
2020-01-22 11:10:17 -07:00
|
|
|
end
|
|
|
|
|
2020-02-09 04:09:01 -07:00
|
|
|
set_replies(obj_data, replies_uris)
|
2020-01-22 11:10:17 -07:00
|
|
|
end
|
|
|
|
|
2020-02-09 04:09:01 -07:00
|
|
|
# No self-replies: leave the object untouched.
defp set_replies(obj, []), do: obj

# Attach the collected self-reply ids as an unpaged Collection.
defp set_replies(obj, replies_uris) do
  Map.put(obj, "replies", %{"type" => "Collection", "items" => replies_uris})
end
|
|
|
|
|
2020-02-09 04:09:01 -07:00
|
|
|
def replies(%{"replies" => %{"first" => %{"items" => items}}}) when not is_nil(items) do
|
|
|
|
items
|
|
|
|
end
|
2020-02-08 09:58:02 -07:00
|
|
|
|
2020-02-09 04:09:01 -07:00
|
|
|
def replies(%{"replies" => %{"items" => items}}) when not is_nil(items) do
|
|
|
|
items
|
2020-01-25 00:47:30 -07:00
|
|
|
end
|
|
|
|
|
|
|
|
def replies(_), do: []
|
|
|
|
|
2018-03-23 09:07:02 -06:00
|
|
|
# Prepares the object of an outgoing create activity.
# Translates the internal representation back into Mastodon-compatible wire
# format: materialises hashtag/mention/emoji tags, attachment shapes,
# reply/quote URIs and the replies collection, then strips fields internal
# to this server. Edit-history entries receive the same treatment.
def prepare_object(object) do
  object
  |> add_hashtags
  |> add_mention_tags
  |> add_emoji_tags
  |> add_attributed_to
  |> prepare_attachments
  |> set_conversation
  |> set_reply_to_uri
  |> set_quote_url()
  |> set_replies
  |> strip_internal_fields
  |> strip_internal_tags
  |> set_type
  |> maybe_process_history
end
|
|
|
|
|
|
|
|
defp maybe_process_history(%{"formerRepresentations" => %{"orderedItems" => history}} = object) do
|
|
|
|
processed_history =
|
|
|
|
Enum.map(
|
|
|
|
history,
|
|
|
|
fn
|
|
|
|
item when is_map(item) -> prepare_object(item)
|
|
|
|
item -> item
|
|
|
|
end
|
|
|
|
)
|
|
|
|
|
|
|
|
put_in(object, ["formerRepresentations", "orderedItems"], processed_history)
|
|
|
|
end
|
|
|
|
|
|
|
|
defp maybe_process_history(object) do
|
|
|
|
object
|
2018-02-24 12:16:41 -07:00
|
|
|
end
|
|
|
|
|
2018-05-04 16:03:14 -06:00
|
|
|
# @doc
|
|
|
|
# """
|
|
|
|
# internal -> Mastodon
|
|
|
|
# """
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2019-09-27 06:40:31 -06:00
|
|
|
def prepare_outgoing(%{"type" => activity_type, "object" => object_id} = data)
|
2022-07-19 08:07:45 -06:00
|
|
|
when activity_type in ["Create"] do
|
2018-03-30 07:01:53 -06:00
|
|
|
object =
|
2019-05-01 03:11:17 -06:00
|
|
|
object_id
|
2021-01-04 05:38:31 -07:00
|
|
|
|> Object.normalize(fetch: false)
|
2019-05-01 03:11:17 -06:00
|
|
|
|> Map.get(:data)
|
2018-03-30 07:01:53 -06:00
|
|
|
|> prepare_object
|
|
|
|
|
|
|
|
data =
|
|
|
|
data
|
|
|
|
|> Map.put("object", object)
|
2018-11-08 08:39:38 -07:00
|
|
|
|> Map.merge(Utils.make_json_ld_header())
|
2019-05-01 03:11:17 -06:00
|
|
|
|> Map.delete("bcc")
|
2018-02-17 06:11:20 -07:00
|
|
|
|
|
|
|
{:ok, data}
|
|
|
|
end
|
|
|
|
|
2022-09-06 13:24:02 -06:00
|
|
|
def prepare_outgoing(%{"type" => "Update", "object" => %{"type" => objtype} = object} = data)
|
|
|
|
when objtype in Pleroma.Constants.updatable_object_types() do
|
|
|
|
object =
|
|
|
|
object
|
|
|
|
|> prepare_object
|
|
|
|
|
|
|
|
data =
|
|
|
|
data
|
|
|
|
|> Map.put("object", object)
|
|
|
|
|> Map.merge(Utils.make_json_ld_header())
|
|
|
|
|> Map.delete("bcc")
|
|
|
|
|
|
|
|
{:ok, data}
|
|
|
|
end
|
|
|
|
|
2019-10-02 04:14:08 -06:00
|
|
|
def prepare_outgoing(%{"type" => "Announce", "actor" => ap_id, "object" => object_id} = data) do
|
|
|
|
object =
|
|
|
|
object_id
|
2021-01-04 05:38:31 -07:00
|
|
|
|> Object.normalize(fetch: false)
|
2019-10-02 04:14:08 -06:00
|
|
|
|
|
|
|
data =
|
|
|
|
if Visibility.is_private?(object) && object.data["actor"] == ap_id do
|
|
|
|
data |> Map.put("object", object |> Map.get(:data) |> prepare_object)
|
|
|
|
else
|
|
|
|
data |> maybe_fix_object_url
|
|
|
|
end
|
|
|
|
|
|
|
|
data =
|
|
|
|
data
|
|
|
|
|> strip_internal_fields
|
|
|
|
|> Map.merge(Utils.make_json_ld_header())
|
|
|
|
|> Map.delete("bcc")
|
|
|
|
|
|
|
|
{:ok, data}
|
|
|
|
end
|
|
|
|
|
2018-05-26 12:03:23 -06:00
|
|
|
# Mastodon Accept/Reject requires a non-normalized object containing the actor URIs,
# because of course it does.
#
# The Accept and Reject clauses were byte-for-byte duplicates, so they share
# one guarded clause. The original `with follow_activity <- ...` was a plain
# binding in disguise (a bare `<-` always matches), so it is written as one.
def prepare_outgoing(%{"type" => type} = data) when type in ["Accept", "Reject"] do
  # NOTE(review): if Activity.normalize/1 returns nil the field access below
  # raises — exactly as the original code did inside its `with` body.
  follow_activity = Activity.normalize(data["object"])

  # Rebuild the embedded Follow with raw actor/object URIs.
  object = %{
    "actor" => follow_activity.actor,
    "object" => follow_activity.data["object"],
    "id" => follow_activity.data["id"],
    "type" => "Follow"
  }

  data =
    data
    |> Map.put("object", object)
    |> Map.merge(Utils.make_json_ld_header())

  {:ok, data}
end
|
|
|
|
|
2018-05-04 15:16:02 -06:00
|
|
|
def prepare_outgoing(%{"type" => _type} = data) do
|
2018-03-30 07:01:53 -06:00
|
|
|
data =
|
|
|
|
data
|
2019-03-09 04:12:15 -07:00
|
|
|
|> strip_internal_fields
|
2018-03-30 07:01:53 -06:00
|
|
|
|> maybe_fix_object_url
|
2018-11-08 08:39:38 -07:00
|
|
|
|> Map.merge(Utils.make_json_ld_header())
|
2018-02-17 08:08:55 -07:00
|
|
|
|
|
|
|
{:ok, data}
|
|
|
|
end
|
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
def maybe_fix_object_url(%{"object" => object} = data) when is_binary(object) do
|
|
|
|
with false <- String.starts_with?(object, "http"),
|
|
|
|
{:fetch, {:ok, relative_object}} <- {:fetch, get_obj_helper(object)},
|
|
|
|
%{data: %{"external_url" => external_url}} when not is_nil(external_url) <-
|
|
|
|
relative_object do
|
|
|
|
Map.put(data, "object", external_url)
|
2018-03-13 11:46:37 -06:00
|
|
|
else
|
2023-12-27 19:39:17 -07:00
|
|
|
{:fetch, _} ->
|
2019-09-10 07:43:10 -06:00
|
|
|
data
|
|
|
|
|
|
|
|
_ ->
|
|
|
|
data
|
2018-03-13 11:46:37 -06:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
def maybe_fix_object_url(data), do: data
|
|
|
|
|
2018-02-18 05:51:03 -07:00
|
|
|
# Expands internal string hashtags into AS2 Hashtag objects; tags that are
# already maps pass through untouched.
def add_hashtags(object) do
  expand = fn
    # Expand internal representation tags into AS2 tags.
    tag when is_binary(tag) ->
      %{
        "type" => "Hashtag",
        "name" => "##{tag}",
        "href" => Pleroma.Web.Endpoint.url() <> "/tags/#{tag}"
      }

    # Do not process tags which are already AS2 tag objects.
    tag when is_map(tag) ->
      tag
  end

  Map.put(object, "tag", Enum.map(object["tag"] || [], expand))
end
|
|
|
|
|
2020-05-25 07:08:43 -06:00
|
|
|
# TODO These should be added on our side on insertion, it doesn't make much
# sense to regenerate these all the time
# Appends a Mention tag for every addressed user found in "to" ++ "cc".
def add_mention_tags(object) do
  recipients = (object["to"] || []) ++ (object["cc"] || [])
  mentioned = User.get_users_from_set(recipients, local_only: false)
  mention_tags = Enum.map(mentioned, &build_mention_tag/1)

  Map.put(object, "tag", (object["tag"] || []) ++ mention_tags)
end
|
|
|
|
|
2019-09-11 14:19:06 -06:00
|
|
|
# Builds a single AS2 Mention tag from a user's ap_id and nickname.
defp build_mention_tag(%{ap_id: ap_id, nickname: nickname}) do
  %{"href" => ap_id, "name" => "@#{nickname}", "type" => "Mention"}
end
|
2019-02-12 06:59:34 -07:00
|
|
|
|
2019-10-16 12:59:21 -06:00
|
|
|
# Builds AS2 Emoji tags from a user's `emoji` map ({name, url} pairs).
def take_emoji_tags(%User{emoji: emoji}) do
  Enum.map(emoji, &build_emoji_tag/1)
end
|
|
|
|
|
2018-03-13 01:05:43 -06:00
|
|
|
# TODO: we should probably send mtime instead of unix epoch time for updated
# Appends an AS2 Emoji tag for every custom emoji used by the object.
def add_emoji_tags(%{"emoji" => emoji} = object) do
  emoji_tags = Enum.map(emoji, &build_emoji_tag/1)

  Map.put(object, "tag", (object["tag"] || []) ++ emoji_tags)
end

# Objects without an "emoji" key are left unchanged.
def add_emoji_tags(object), do: object
|
2019-02-12 06:59:34 -07:00
|
|
|
|
2019-09-11 14:19:06 -06:00
|
|
|
# Builds a single AS2 Emoji tag from a {shortcode, url} pair.
defp build_emoji_tag({name, url}) do
  %{
    "id" => url,
    "type" => "Emoji",
    "name" => ":" <> name <> ":",
    "icon" => %{"type" => "Image", "url" => "#{URI.encode(url)}"},
    # fixed epoch timestamp; see the mtime TODO on add_emoji_tags/1
    "updated" => "1970-01-01T00:00:00Z"
  }
end
|
|
|
|
|
2018-02-18 05:58:52 -07:00
|
|
|
# Mirrors the object's "context" into the OStatus-era "conversation" field.
def set_conversation(object), do: Map.put(object, "conversation", object["context"])
|
|
|
|
|
2019-05-22 12:17:57 -06:00
|
|
|
def set_type(%{"type" => "Answer"} = object) do
|
|
|
|
Map.put(object, "type", "Note")
|
|
|
|
end
|
|
|
|
|
|
|
|
def set_type(object), do: object
|
|
|
|
|
2018-02-17 06:11:20 -07:00
|
|
|
# Ensures "attributedTo" is set, falling back to the object's "actor".
def add_attributed_to(object) do
  Map.put(object, "attributedTo", object["attributedTo"] || object["actor"])
end
|
2018-02-17 10:38:58 -07:00
|
|
|
|
|
|
|
# Flattens internal attachment records (nested "url" list) into AS2 Document
# objects, carrying over optional width/height/blurhash when present.
def prepare_attachments(object) do
  # Anything that is not a non-empty list is treated as "no attachments".
  raw_attachments =
    case Map.get(object, "attachment", []) do
      [_ | _] = list -> list
      _ -> []
    end

  documents =
    Enum.map(raw_attachments, fn record ->
      # The first "url" entry carries the canonical href/mediaType.
      [%{"mediaType" => media_type, "href" => href} = url | _] = record["url"]

      %{
        "type" => "Document",
        "url" => href,
        "mediaType" => media_type,
        "name" => record["name"]
      }
      |> Maps.put_if_present("width", url["width"])
      |> Maps.put_if_present("height", url["height"])
      |> Maps.put_if_present("blurhash", record["blurhash"])
    end)

  Map.put(object, "attachment", documents)
end
|
2018-02-21 14:21:40 -07:00
|
|
|
|
2019-09-12 10:59:13 -06:00
|
|
|
# Removes Pleroma-internal bookkeeping fields before federating the object.
def strip_internal_fields(object),
  do: Map.drop(object, Pleroma.Constants.object_internal_fields())
|
|
|
|
|
|
|
|
defp strip_internal_tags(%{"tag" => tags} = object) do
|
2019-09-10 07:43:10 -06:00
|
|
|
tags = Enum.filter(tags, fn x -> is_map(x) end)
|
2018-11-10 05:08:53 -07:00
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
Map.put(object, "tag", tags)
|
2018-11-10 05:08:53 -07:00
|
|
|
end
|
|
|
|
|
|
|
|
defp strip_internal_tags(object), do: object
|
|
|
|
|
2019-04-04 03:10:43 -06:00
|
|
|
# Rewrites the old (pre-upgrade) follower-collection address to the user's
# current follower_address across all activity recipient lists.
def perform(:user_upgrade, user) do
  # we pass a fake user so that the followers collection is stripped away
  old_follower_address = User.ap_followers(%User{nickname: user.nickname})

  rewrite_query =
    from(a in Activity,
      where: ^old_follower_address in a.recipients,
      update: [
        set: [
          recipients:
            fragment(
              "array_replace(?,?,?)",
              a.recipients,
              ^old_follower_address,
              ^user.follower_address
            )
        ]
      ]
    )

  Repo.update_all(rewrite_query, [])
end
|
|
|
|
|
2019-04-04 03:10:43 -06:00
|
|
|
# Re-fetches a remote user's profile, updates the cached record, and enqueues
# background jobs (pin fetches, follower-address rewrite via :user_upgrade).
# Returns {:ok, user} on success, or the failing step's error value.
def upgrade_user_from_ap_id(ap_id) do
  with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
       {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
       {:ok, user} <- update_user(user, data) do
    ActivityPub.enqueue_pin_fetches(user)
    TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
    {:ok, user}
  else
    # First clause matched a *local* user: nothing to upgrade, return as-is.
    %User{} = user -> {:ok, user}
    e -> e
  end
end
|
2018-02-24 09:36:02 -07:00
|
|
|
|
2020-04-11 12:44:52 -06:00
|
|
|
# Applies remote profile data to the user and refreshes the cache.
defp update_user(user, data) do
  User.update_and_set_cache(User.remote_user_changeset(user, data))
end
|
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
def maybe_fix_user_url(%{"url" => url} = data) when is_map(url) do
|
|
|
|
Map.put(data, "url", url["href"])
|
2018-05-19 01:30:02 -06:00
|
|
|
end
|
|
|
|
|
2019-09-10 07:43:10 -06:00
|
|
|
def maybe_fix_user_url(data), do: data
|
|
|
|
|
|
|
|
def maybe_fix_user_object(data), do: maybe_fix_user_url(data)
|
2018-02-15 12:00:06 -07:00
|
|
|
end
|