akkoma/lib/pleroma/web/activity_pub/transmogrifier.ex

# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.Transmogrifier do
@moduledoc """
A module to handle conversion from the internal representation to the ActivityPub wire format and back.
"""
alias Pleroma.User
alias Pleroma.Object
alias Pleroma.Activity
alias Pleroma.Repo
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Utils
import Ecto.Query
require Logger
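# Rough overview (illustrative only): incoming federation payloads go through
# handle_incoming/1, which normalizes the embedded object via fix_object/1 and
# hands it off to the ActivityPub module; outgoing payloads go through
# prepare_outgoing/1, which restores the wire-format fields other servers
# (notably Mastodon) expect.
#
#     # hypothetical inbound "Create" payload, already decoded from JSON
#     {:ok, activity} = Transmogrifier.handle_incoming(params)
#
#     # converting an activity back to the wire format before delivery
#     {:ok, json_map} = Transmogrifier.prepare_outgoing(activity.data)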
def get_actor(%{"actor" => actor}) when is_binary(actor) do
actor
end
def get_actor(%{"actor" => actor}) when is_list(actor) do
if is_binary(Enum.at(actor, 0)) do
Enum.at(actor, 0)
else
Enum.find(actor, fn %{"type" => type} -> type in ["Person", "Service", "Application"] end)
|> Map.get("id")
end
end
def get_actor(%{"actor" => %{"id" => id}}) when is_bitstring(id) do
id
end
def get_actor(%{"actor" => nil, "attributedTo" => actor}) when not is_nil(actor) do
get_actor(%{"actor" => actor})
end
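# Illustrative shapes accepted by get_actor/1 (values are hypothetical):
#
#     get_actor(%{"actor" => "https://example.com/users/alice"})
#     #=> "https://example.com/users/alice"
#
#     get_actor(%{"actor" => [%{"type" => "Person", "id" => "https://example.com/users/alice"}]})
#     #=> "https://example.com/users/alice"
#
#     get_actor(%{"actor" => %{"id" => "https://example.com/users/alice"}})
#     #=> "https://example.com/users/alice"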
@doc """
Checks that an imported AP object's actor matches the domain it came from.
"""
def contain_origin(_id, %{"actor" => nil}), do: :error
def contain_origin(id, %{"actor" => _actor} = params) do
id_uri = URI.parse(id)
actor_uri = URI.parse(get_actor(params))
if id_uri.host == actor_uri.host do
:ok
else
:error
end
end
def contain_origin_from_id(_id, %{"id" => nil}), do: :error
def contain_origin_from_id(id, %{"id" => other_id} = _params) do
id_uri = URI.parse(id)
other_uri = URI.parse(other_id)
if id_uri.host == other_uri.host do
:ok
else
:error
end
end
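# Illustrative use of the origin checks above (hosts are hypothetical); only
# the host component of the URIs is compared.
#
#     contain_origin("https://example.com/objects/1", %{"actor" => "https://example.com/users/alice"})
#     #=> :ok
#
#     contain_origin_from_id("https://example.com/objects/1", %{"id" => "https://evil.example/objects/1"})
#     #=> :error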
@doc """
Modifies an incoming AP object (mastodon format) to our internal format.
"""
def fix_object(object) do
object
|> fix_actor
|> fix_url
|> fix_attachments
|> fix_context
|> fix_in_reply_to
|> fix_emoji
|> fix_tag
|> fix_content_map
|> fix_likes
|> fix_addressing
end
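# Sketch of the normalization fix_object/1 performs, on a hypothetical,
# heavily abbreviated Mastodon-style Note:
#
#     fix_object(%{
#       "attributedTo" => "https://example.com/users/alice",
#       "contentMap" => %{"en" => "hi"},
#       "to" => "https://www.w3.org/ns/activitystreams#Public"
#     })
#
# would, among other things, set "actor", copy "hi" into "content", wrap the
# "to" value in a list and generate a "context"/"conversation" pair.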
def fix_addressing_list(map, field) do
if is_binary(map[field]) do
map
|> Map.put(field, [map[field]])
else
map
end
end
def fix_explicit_addressing(%{"to" => to, "cc" => cc} = object, explicit_mentions) do
explicit_to =
to
|> Enum.filter(fn x -> x in explicit_mentions end)
explicit_cc =
to
|> Enum.filter(fn x -> x not in explicit_mentions end)
final_cc =
(cc ++ explicit_cc)
|> Enum.uniq()
object
|> Map.put("to", explicit_to)
|> Map.put("cc", final_cc)
end
def fix_explicit_addressing(object, _explicit_mentions), do: object
# If the directMessage flag is set to true, leave the addressing alone
def fix_explicit_addressing(%{"directMessage" => true} = object), do: object
def fix_explicit_addressing(object) do
explicit_mentions =
object
|> Utils.determine_explicit_mentions()
explicit_mentions = explicit_mentions ++ ["https://www.w3.org/ns/activitystreams#Public"]
object
|> fix_explicit_addressing(explicit_mentions)
end
def fix_addressing(object) do
object
|> fix_addressing_list("to")
|> fix_addressing_list("cc")
|> fix_addressing_list("bto")
|> fix_addressing_list("bcc")
|> fix_explicit_addressing
end
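# Illustrative effect of the addressing fixes above (addresses hypothetical):
# a bare string recipient becomes a one-element list, and recipients in "to"
# that are not explicit mentions are moved over to "cc".
#
#     fix_addressing_list(%{"to" => "https://example.com/users/bob"}, "to")
#     #=> %{"to" => ["https://example.com/users/bob"]}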
def fix_actor(%{"attributedTo" => actor} = object) do
object
|> Map.put("actor", get_actor(%{"actor" => actor}))
end
def fix_likes(%{"likes" => likes} = object)
when is_bitstring(likes) do
# Check for standardisation
# This is what Peertube does
# curl -H 'Accept: application/activity+json' $likes | jq .totalItems
object
|> Map.put("likes", [])
|> Map.put("like_count", 0)
end
def fix_likes(object) do
object
end
def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
when not is_nil(in_reply_to) do
in_reply_to_id =
cond do
is_bitstring(in_reply_to) ->
in_reply_to
is_map(in_reply_to) && is_bitstring(in_reply_to["id"]) ->
in_reply_to["id"]
is_list(in_reply_to) && is_bitstring(Enum.at(in_reply_to, 0)) ->
Enum.at(in_reply_to, 0)
# Maybe I should output an error too?
true ->
""
end
case fetch_obj_helper(in_reply_to_id) do
{:ok, replied_object} ->
with %Activity{} = activity <-
Activity.get_create_activity_by_object_ap_id(replied_object.data["id"]) do
object
|> Map.put("inReplyTo", replied_object.data["id"])
|> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
|> Map.put("inReplyToStatusId", activity.id)
|> Map.put("conversation", replied_object.data["context"] || object["conversation"])
|> Map.put("context", replied_object.data["context"] || object["conversation"])
else
e ->
Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
object
end
e ->
Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
object
end
end
def fix_in_reply_to(object), do: object
def fix_context(object) do
context = object["context"] || object["conversation"] || Utils.generate_context_id()
object
|> Map.put("context", context)
|> Map.put("conversation", context)
end
def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachment) do
attachments =
attachment
|> Enum.map(fn data ->
media_type = data["mediaType"] || data["mimeType"]
href = data["url"] || data["href"]
url = [%{"type" => "Link", "mediaType" => media_type, "href" => href}]
data
|> Map.put("mediaType", media_type)
|> Map.put("url", url)
end)
object
|> Map.put("attachment", attachments)
end
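# Illustrative attachment normalization (values hypothetical): the incoming
# "url"/"href" and "mediaType"/"mimeType" variants are unified into a list of
# Link maps.
#
#     fix_attachments(%{"attachment" => [%{"mimeType" => "image/png", "href" => "https://example.com/a.png"}]})
#
# yields an attachment whose "mediaType" is "image/png" and whose "url" is
# [%{"type" => "Link", "mediaType" => "image/png", "href" => "https://example.com/a.png"}].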
def fix_attachments(%{"attachment" => attachment} = object) when is_map(attachment) do
Map.put(object, "attachment", [attachment])
|> fix_attachments()
end
def fix_attachments(object), do: object
def fix_url(%{"url" => url} = object) when is_map(url) do
object
|> Map.put("url", url["href"])
end
def fix_url(%{"type" => "Video", "url" => url} = object) when is_list(url) do
first_element = Enum.at(url, 0)
link_element =
url
|> Enum.filter(fn x -> is_map(x) end)
|> Enum.filter(fn x -> x["mimeType"] == "text/html" end)
|> Enum.at(0)
object
|> Map.put("attachment", [first_element])
|> Map.put("url", link_element["href"])
end
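# Illustrative Peertube-style "url" list handled by the Video clause above
# (values hypothetical): the first entry is kept as an attachment and the
# "text/html" link becomes the object's "url".
#
#     fix_url(%{"type" => "Video", "url" => [
#       %{"mimeType" => "video/mp4", "href" => "https://example.com/v.mp4"},
#       %{"mimeType" => "text/html", "href" => "https://example.com/videos/1"}
#     ]})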
def fix_url(%{"type" => object_type, "url" => url} = object)
when object_type != "Video" and is_list(url) do
first_element = Enum.at(url, 0)
url_string =
cond do
is_bitstring(first_element) -> first_element
is_map(first_element) -> first_element["href"] || ""
true -> ""
end
object
|> Map.put("url", url_string)
end
def fix_url(object), do: object
def fix_emoji(%{"tag" => tags} = object) when is_list(tags) do
emoji = tags |> Enum.filter(fn data -> data["type"] == "Emoji" and data["icon"] end)
emoji =
emoji
|> Enum.reduce(%{}, fn data, mapping ->
name = String.trim(data["name"], ":")
mapping |> Map.put(name, data["icon"]["url"])
end)
# we merge mastodon and pleroma emoji into a single mapping, to allow for both wire formats
emoji = Map.merge(object["emoji"] || %{}, emoji)
object
|> Map.put("emoji", emoji)
end
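# Illustrative emoji extraction (values hypothetical): Emoji tag entries are
# folded into the flat "emoji" => %{name => icon_url} mapping used internally.
#
#     fix_emoji(%{"tag" => [%{"type" => "Emoji", "name" => ":blob:", "icon" => %{"url" => "https://example.com/blob.png"}}]})
#
# puts %{"blob" => "https://example.com/blob.png"} under "emoji".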
def fix_emoji(%{"tag" => %{"type" => "Emoji"} = tag} = object) do
name = String.trim(tag["name"], ":")
emoji = %{name => tag["icon"]["url"]}
object
|> Map.put("emoji", emoji)
end
def fix_emoji(object), do: object
def fix_tag(%{"tag" => tag} = object) when is_list(tag) do
tags =
tag
|> Enum.filter(fn data -> data["type"] == "Hashtag" and data["name"] end)
|> Enum.map(fn data -> String.slice(data["name"], 1..-1) end)
combined = tag ++ tags
object
|> Map.put("tag", combined)
end
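# Illustrative tag normalization (values hypothetical): Hashtag maps are kept
# as-is and a plain string form with the leading "#" stripped is appended.
#
#     fix_tag(%{"tag" => [%{"type" => "Hashtag", "name" => "#cats"}]})
#     #=> %{"tag" => [%{"type" => "Hashtag", "name" => "#cats"}, "cats"]}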
def fix_tag(%{"tag" => %{"type" => "Hashtag", "name" => hashtag} = tag} = object) do
combined = [tag, String.slice(hashtag, 1..-1)]
object
|> Map.put("tag", combined)
end
def fix_tag(object), do: object
# content map usually only has one language so this will do for now.
def fix_content_map(%{"contentMap" => content_map} = object) do
content_groups = Map.to_list(content_map)
{_, content} = Enum.at(content_groups, 0)
object
|> Map.put("content", content)
end
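# Illustrative contentMap handling (values hypothetical): only the first
# language entry is used.
#
#     fix_content_map(%{"contentMap" => %{"en" => "hello"}})
#     #=> %{"content" => "hello", "contentMap" => %{"en" => "hello"}}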
def fix_content_map(object), do: object
defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do
with true <- id =~ "follows",
%User{local: true} = follower <- User.get_cached_by_ap_id(follower_id),
%Activity{} = activity <- Utils.fetch_latest_follow(follower, followed) do
{:ok, activity}
else
_ -> {:error, nil}
end
end
defp mastodon_follow_hack(_, _), do: {:error, nil}
defp get_follow_activity(follow_object, followed) do
with object_id when not is_nil(object_id) <- Utils.get_ap_id(follow_object),
{_, %Activity{} = activity} <- {:activity, Activity.get_by_ap_id(object_id)} do
{:ok, activity}
else
# Can't find the activity. This might be a Mastodon 2.3 "Accept"
{:activity, nil} ->
mastodon_follow_hack(follow_object, followed)
_ ->
{:error, nil}
end
end
# disallow objects with bogus IDs
def handle_incoming(%{"id" => nil}), do: :error
def handle_incoming(%{"id" => ""}), do: :error
# length of https:// = 8, should validate better, but good enough for now.
def handle_incoming(%{"id" => id}) when not (is_binary(id) and byte_size(id) > 8), do: :error
# TODO: validate those with an Ecto schema
# - tags
# - emoji
def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = object} = data)
when objtype in ["Article", "Note", "Video", "Page"] do
actor = get_actor(data)
data =
Map.put(data, "actor", actor)
|> fix_addressing
with nil <- Activity.get_create_activity_by_object_ap_id(object["id"]),
%User{} = user <- User.get_or_fetch_by_ap_id(data["actor"]) do
object = fix_object(data["object"])
params = %{
to: data["to"],
object: object,
actor: user,
context: object["conversation"],
local: false,
published: data["published"],
additional:
Map.take(data, [
"cc",
"directMessage",
"id"
])
}
ActivityPub.create(params)
else
%Activity{} = activity -> {:ok, activity}
_e -> :error
end
end
def handle_incoming(
%{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data
) do
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
%User{} = follower <- User.get_or_fetch_by_ap_id(follower),
{:ok, activity} <- ActivityPub.follow(follower, followed, id, false) do
if not User.locked?(followed) do
ActivityPub.accept(%{
to: [follower.ap_id],
actor: followed.ap_id,
object: data,
local: true
})
User.follow(follower, followed)
end
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data
) do
with actor <- get_actor(data),
%User{} = followed <- User.get_or_fetch_by_ap_id(actor),
{:ok, follow_activity} <- get_follow_activity(follow_object, followed),
{:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "accept"),
%User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
{:ok, activity} <-
ActivityPub.accept(%{
to: follow_activity.data["to"],
type: "Accept",
actor: followed.ap_id,
object: follow_activity.data["id"],
local: false
}) do
if not User.following?(follower, followed) do
{:ok, _follower} = User.follow(follower, followed)
end
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data
) do
with actor <- get_actor(data),
%User{} = followed <- User.get_or_fetch_by_ap_id(actor),
{:ok, follow_activity} <- get_follow_activity(follow_object, followed),
{:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "reject"),
%User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
{:ok, activity} <-
ActivityPub.accept(%{
to: follow_activity.data["to"],
type: "Reject",
actor: followed.ap_id,
object: follow_activity.data["id"],
local: false
}) do
User.unfollow(follower, followed)
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data
) do
with actor <- get_actor(data),
%User{} = actor <- User.get_or_fetch_by_ap_id(actor),
{:ok, object} <- get_obj_helper(object_id) || fetch_obj_helper(object_id),
{:ok, activity, _object} <- ActivityPub.like(actor, object, id, false) do
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data
) do
with actor <- get_actor(data),
%User{} = actor <- User.get_or_fetch_by_ap_id(actor),
{:ok, object} <- get_obj_helper(object_id) || fetch_obj_helper(object_id),
public <- ActivityPub.is_public?(data),
{:ok, activity, _object} <- ActivityPub.announce(actor, object, id, false, public) do
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{"type" => "Update", "object" => %{"type" => object_type} = object, "actor" => actor_id} =
data
)
when object_type in ["Person", "Application", "Service", "Organization"] do
with %User{ap_id: ^actor_id} = actor <- User.get_by_ap_id(object["id"]) do
{:ok, new_user_data} = ActivityPub.user_data_from_user_object(object)
banner = new_user_data[:info]["banner"]
locked = new_user_data[:info]["locked"] || false
update_data =
new_user_data
|> Map.take([:name, :bio, :avatar])
|> Map.put(:info, %{"banner" => banner, "locked" => locked})
actor
|> User.upgrade_changeset(update_data)
|> User.update_and_set_cache()
ActivityPub.update(%{
local: false,
to: data["to"] || [],
cc: data["cc"] || [],
object: object,
actor: actor_id
})
else
e ->
Logger.error(inspect(e))
:error
end
end
# TODO: We presently assume that any actor on the same origin domain as the object being
# deleted has the rights to delete that object. A better way to validate whether or not
# the object should be deleted is to refetch the object URI, which should return either
# an error or a tombstone. This would allow us to verify that a deletion actually took
# place.
def handle_incoming(
%{"type" => "Delete", "object" => object_id, "actor" => _actor, "id" => _id} = data
) do
object_id = Utils.get_ap_id(object_id)
with actor <- get_actor(data),
%User{} = actor <- User.get_or_fetch_by_ap_id(actor),
{:ok, object} <- get_obj_helper(object_id) || fetch_obj_helper(object_id),
:ok <- contain_origin(actor.ap_id, object.data),
{:ok, activity} <- ActivityPub.delete(object, false) do
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{
"type" => "Undo",
"object" => %{"type" => "Announce", "object" => object_id},
"actor" => _actor,
"id" => id
} = data
) do
with actor <- get_actor(data),
%User{} = actor <- User.get_or_fetch_by_ap_id(actor),
{:ok, object} <- get_obj_helper(object_id) || fetch_obj_helper(object_id),
{:ok, activity, _} <- ActivityPub.unannounce(actor, object, id, false) do
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{
"type" => "Undo",
"object" => %{"type" => "Follow", "object" => followed},
"actor" => follower,
"id" => id
} = _data
) do
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
%User{} = follower <- User.get_or_fetch_by_ap_id(follower),
{:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
User.unfollow(follower, followed)
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{
"type" => "Undo",
"object" => %{"type" => "Block", "object" => blocked},
"actor" => blocker,
"id" => id
} = _data
) do
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
%User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
%User{} = blocker <- User.get_or_fetch_by_ap_id(blocker),
{:ok, activity} <- ActivityPub.unblock(blocker, blocked, id, false) do
User.unblock(blocker, blocked)
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data
) do
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
%User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
%User{} = blocker <- User.get_or_fetch_by_ap_id(blocker),
{:ok, activity} <- ActivityPub.block(blocker, blocked, id, false) do
User.unfollow(blocker, blocked)
User.block(blocker, blocked)
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{
"type" => "Undo",
"object" => %{"type" => "Like", "object" => object_id},
"actor" => _actor,
"id" => id
} = data
) do
with actor <- get_actor(data),
%User{} = actor <- User.get_or_fetch_by_ap_id(actor),
{:ok, object} <- get_obj_helper(object_id) || fetch_obj_helper(object_id),
{:ok, activity, _, _} <- ActivityPub.unlike(actor, object, id, false) do
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(_), do: :error
def fetch_obj_helper(id) when is_bitstring(id), do: ActivityPub.fetch_object_from_id(id)
def fetch_obj_helper(obj) when is_map(obj), do: ActivityPub.fetch_object_from_id(obj["id"])
def get_obj_helper(id) do
if object = Object.normalize(id), do: {:ok, object}, else: nil
end
def set_reply_to_uri(%{"inReplyTo" => inReplyTo} = object) do
with false <- String.starts_with?(inReplyTo, "http"),
{:ok, %{data: replied_to_object}} <- get_obj_helper(inReplyTo) do
Map.put(object, "inReplyTo", replied_to_object["external_url"] || inReplyTo)
else
_e -> object
end
end
def set_reply_to_uri(obj), do: obj
# Prepares the object of an outgoing create activity.
def prepare_object(object) do
object
|> set_sensitive
|> add_hashtags
|> add_mention_tags
|> add_emoji_tags
|> add_attributed_to
|> add_likes
|> prepare_attachments
|> set_conversation
|> set_reply_to_uri
|> strip_internal_fields
|> strip_internal_tags
end
# @doc
# """
# internal -> Mastodon
# """
def prepare_outgoing(%{"type" => "Create", "object" => object} = data) do
object =
object
|> prepare_object
data =
data
|> Map.put("object", object)
|> Map.merge(Utils.make_json_ld_header())
{:ok, data}
end
# Mastodon Accept/Reject requires a non-normalized object containing the actor URIs,
# because of course it does.
def prepare_outgoing(%{"type" => "Accept"} = data) do
with follow_activity <- Activity.normalize(data["object"]) do
object = %{
"actor" => follow_activity.actor,
"object" => follow_activity.data["object"],
"id" => follow_activity.data["id"],
"type" => "Follow"
}
data =
data
|> Map.put("object", object)
|> Map.merge(Utils.make_json_ld_header())
{:ok, data}
end
end
def prepare_outgoing(%{"type" => "Reject"} = data) do
with follow_activity <- Activity.normalize(data["object"]) do
object = %{
"actor" => follow_activity.actor,
"object" => follow_activity.data["object"],
"id" => follow_activity.data["id"],
"type" => "Follow"
}
data =
data
|> Map.put("object", object)
|> Map.merge(Utils.make_json_ld_header())
{:ok, data}
end
end
def prepare_outgoing(%{"type" => _type} = data) do
data =
data
|> maybe_fix_object_url
|> Map.merge(Utils.make_json_ld_header())
{:ok, data}
end
def maybe_fix_object_url(data) do
if is_binary(data["object"]) and not String.starts_with?(data["object"], "http") do
case fetch_obj_helper(data["object"]) do
{:ok, relative_object} ->
if relative_object.data["external_url"] do
_data =
data
|> Map.put("object", relative_object.data["external_url"])
else
data
end
e ->
Logger.error("Couldn't fetch #{data["object"]} #{inspect(e)}")
data
end
else
data
end
end
def add_hashtags(object) do
tags =
(object["tag"] || [])
|> Enum.map(fn tag ->
%{
"href" => Pleroma.Web.Endpoint.url() <> "/tags/#{tag}",
"name" => "##{tag}",
"type" => "Hashtag"
}
end)
object
|> Map.put("tag", tags)
end
def add_mention_tags(object) do
mentions =
object
|> Utils.get_notified_from_object()
|> Enum.map(fn user ->
%{"type" => "Mention", "href" => user.ap_id, "name" => "@#{user.nickname}"}
end)
tags = object["tag"] || []
object
|> Map.put("tag", tags ++ mentions)
end
# TODO: we should probably send mtime instead of unix epoch time for updated
def add_emoji_tags(object) do
tags = object["tag"] || []
emoji = object["emoji"] || []
out =
emoji
|> Enum.map(fn {name, url} ->
%{
"icon" => %{"url" => url, "type" => "Image"},
"name" => ":" <> name <> ":",
"type" => "Emoji",
"updated" => "1970-01-01T00:00:00Z",
"id" => url
}
end)
object
|> Map.put("tag", tags ++ out)
end
def set_conversation(object) do
Map.put(object, "conversation", object["context"])
end
def set_sensitive(object) do
tags = object["tag"] || []
Map.put(object, "sensitive", "nsfw" in tags)
end
def add_attributed_to(object) do
attributedTo = object["attributedTo"] || object["actor"]
object
|> Map.put("attributedTo", attributedTo)
end
def add_likes(%{"id" => id, "like_count" => likes} = object) do
likes = %{
"id" => "#{id}/likes",
"first" => "#{id}/likes?page=1",
"type" => "OrderedCollection",
"totalItems" => likes
}
object
|> Map.put("likes", likes)
end
def add_likes(object) do
object
end
def prepare_attachments(object) do
attachments =
(object["attachment"] || [])
|> Enum.map(fn data ->
[%{"mediaType" => media_type, "href" => href} | _] = data["url"]
%{"url" => href, "mediaType" => media_type, "name" => data["name"], "type" => "Document"}
end)
object
|> Map.put("attachment", attachments)
end
defp strip_internal_fields(object) do
object
|> Map.drop([
"like_count",
"announcements",
"announcement_count",
"emoji",
"context_id"
])
end
defp strip_internal_tags(%{"tag" => tags} = object) do
tags =
tags
|> Enum.filter(fn x -> is_map(x) end)
object
|> Map.put("tag", tags)
end
defp strip_internal_tags(object), do: object
defp user_upgrade_task(user) do
old_follower_address = User.ap_followers(user)
q =
from(
u in User,
where: ^old_follower_address in u.following,
update: [
set: [
following:
fragment(
"array_replace(?,?,?)",
u.following,
^old_follower_address,
^user.follower_address
)
]
]
)
Repo.update_all(q, [])
maybe_retire_websub(user.ap_id)
# Only do this for recent activities, don't go through the whole db.
# Only look at the last 1000 activities.
since = (Repo.aggregate(Activity, :max, :id) || 0) - 1_000
q =
from(
a in Activity,
where: ^old_follower_address in a.recipients,
where: a.id > ^since,
update: [
set: [
recipients:
fragment(
"array_replace(?,?,?)",
a.recipients,
^old_follower_address,
^user.follower_address
)
]
]
)
Repo.update_all(q, [])
end
def upgrade_user_from_ap_id(ap_id, async \\ true) do
with %User{local: false} = user <- User.get_by_ap_id(ap_id),
{:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id) do
already_ap = User.ap_enabled?(user)
{:ok, user} =
User.upgrade_changeset(user, data)
|> Repo.update()
if !already_ap do
# This could potentially take a long time, do it in the background
if async do
Task.start(fn ->
user_upgrade_task(user)
end)
else
user_upgrade_task(user)
end
end
{:ok, user}
else
e -> e
end
end
def maybe_retire_websub(ap_id) do
# some sanity checks
if is_binary(ap_id) && String.length(ap_id) > 8 do
q =
from(
ws in Pleroma.Web.Websub.WebsubClientSubscription,
where: fragment("? like ?", ws.topic, ^"#{ap_id}%")
)
Repo.delete_all(q)
end
end
def maybe_fix_user_url(data) do
if is_map(data["url"]) do
Map.put(data, "url", data["url"]["href"])
else
data
end
end
def maybe_fix_user_object(data) do
data
|> maybe_fix_user_url
end
end