2018-02-15 12:00:06 -07:00
|
|
|
defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|
|
|
@moduledoc """
|
|
|
|
A module to handle conversion between the internal representation and the wire (Mastodon-flavoured) ActivityPub format, in both directions.
|
|
|
|
"""
|
|
|
|
alias Pleroma.User
|
2018-02-17 12:13:12 -07:00
|
|
|
alias Pleroma.Object
|
2018-02-18 03:24:54 -07:00
|
|
|
alias Pleroma.Activity
|
2018-02-21 14:21:40 -07:00
|
|
|
alias Pleroma.Repo
|
2018-02-15 12:00:06 -07:00
|
|
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
|
|
|
|
2018-02-21 14:21:40 -07:00
|
|
|
import Ecto.Query
|
|
|
|
|
2018-02-23 07:00:41 -07:00
|
|
|
require Logger
|
|
|
|
|
2018-02-15 12:00:06 -07:00
|
|
|
@doc """
|
|
|
|
Modifies an incoming AP object (mastodon format) to our internal format.
|
|
|
|
"""
|
|
|
|
# Normalizes an incoming (Mastodon-format) AP object to the internal
# representation: mirrors "attributedTo" into "actor", then applies each
# fixup pass. Pass order matters: fix_context seeds "context" from
# "conversation" before fix_in_reply_to may override both from the
# replied-to object.
def fix_object(object) do
  object
  |> Map.put("actor", object["attributedTo"])
  |> fix_attachments()
  |> fix_context()
  |> fix_in_reply_to()
  |> fix_emoji()
  |> fix_tag()
end
|
|
|
|
|
2018-03-30 07:01:53 -06:00
|
|
|
# Resolves the "inReplyTo" id: fetches the replied-to object (possibly over
# the network) and fills in the internal reply/conversation fields from it.
# On fetch failure the object is returned unchanged.
def fix_in_reply_to(%{"inReplyTo" => in_reply_to_id} = object)
    when not is_nil(in_reply_to_id) do
  case ActivityPub.fetch_object_from_id(in_reply_to_id) do
    {:ok, replied_object} ->
      activity = Activity.get_create_activity_by_object_ap_id(replied_object.data["id"])

      # NOTE(review): if no Create activity exists for the replied-to object,
      # `activity` is nil and `activity.id` below will raise — confirm whether
      # fetch_object_from_id guarantees the Create activity exists.
      object
      |> Map.put("inReplyTo", replied_object.data["id"])
      |> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
      |> Map.put("inReplyToStatusId", activity.id)
      |> Map.put("conversation", replied_object.data["context"] || object["conversation"])
      |> Map.put("context", replied_object.data["context"] || object["conversation"])

    e ->
      Logger.error("Couldn't fetch #{object["inReplyTo"]} #{inspect(e)}")
      object
  end
end
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2018-02-25 02:56:01 -07:00
|
|
|
def fix_in_reply_to(object), do: object
|
|
|
|
|
2018-02-19 02:39:03 -07:00
|
|
|
# Mirrors the OStatus-style "conversation" value into the internal
# "context" key (overwriting any existing "context").
def fix_context(object) do
  Map.put(object, "context", object["conversation"])
end
|
|
|
|
|
|
|
|
# Rewrites each attachment's bare "url" string into the internal
# one-element list of AS2 Link maps. Missing "attachment" becomes [].
def fix_attachments(object) do
  fixed =
    for attachment <- object["attachment"] || [] do
      link = %{
        "type" => "Link",
        "mediaType" => attachment["mediaType"],
        "href" => attachment["url"]
      }

      Map.put(attachment, "url", [link])
    end

  Map.put(object, "attachment", fixed)
end
|
|
|
|
|
2018-03-13 01:05:43 -06:00
|
|
|
# Builds the internal "emoji" mapping (shortcode => icon URL) from incoming
# "Emoji" tag entries, and merges it with any mapping already present so
# both the Mastodon (tag-based) and Pleroma (map-based) wire formats work.
def fix_emoji(object) do
  tags = object["tag"] || []

  emoji =
    tags
    |> Enum.filter(fn data -> data["type"] == "Emoji" and data["icon"] end)
    |> Map.new(fn data ->
      # Shortcodes arrive wrapped in colons (":blob:"). Trimming colons from
      # both ends replaces the old `String.slice(name, 1..-2)`, which used a
      # deprecated negative-step range and ate the last character of a name
      # that lacked its trailing colon (":blob" -> "blo").
      {String.trim(data["name"], ":"), data["icon"]["url"]}
    end)

  # we merge mastodon and pleroma emoji into a single mapping, to allow for both wire formats
  emoji = Map.merge(object["emoji"] || %{}, emoji)

  Map.put(object, "emoji", emoji)
end
|
|
|
|
|
2018-03-24 15:39:37 -06:00
|
|
|
# Appends plain hashtag-name strings (leading "#" stripped) for every
# Hashtag entry, keeping the original tag maps in place — the internal
# format carries both.
def fix_tag(object) do
  existing = object["tag"] || []

  names =
    for tag <- existing,
        tag["type"] == "Hashtag",
        tag["name"] do
      String.slice(tag["name"], 1..-1)
    end

  Map.put(object, "tag", existing ++ names)
end
|
|
|
|
|
2018-02-15 12:00:06 -07:00
|
|
|
# TODO: validate those with an Ecto schema
|
|
|
|
# - tags
|
|
|
|
# - emoji
|
|
|
|
# Incoming Create of a Note: fix the object to the internal format and
# insert it, deduplicating on the object's AP id.
def handle_incoming(%{"type" => "Create", "object" => %{"type" => "Note"} = object} = data) do
  # First clause is a dedup check: nil means we have no Create for this
  # object id yet; an existing %Activity{} falls through to `else`.
  with nil <- Activity.get_create_activity_by_object_ap_id(object["id"]),
       %User{} = user <- User.get_or_fetch_by_ap_id(data["actor"]) do
    object = fix_object(data["object"])

    params = %{
      to: data["to"],
      object: object,
      actor: user,
      # fix_object has already run, so "conversation" carries the context.
      context: object["conversation"],
      local: false,
      published: data["published"],
      additional:
        Map.take(data, [
          "cc",
          "id"
        ])
    }

    ActivityPub.create(params)
  else
    # Dedup hit: return the activity we already have.
    %Activity{} = activity -> {:ok, activity}
    _e -> :error
  end
end
|
|
|
|
|
2018-03-30 07:01:53 -06:00
|
|
|
# Incoming Follow of a local user: record the Follow activity and
# immediately auto-Accept it back to the follower.
def handle_incoming(
      %{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data
    ) do
  # Cache-only lookup: only locally-known, local users can be followed here.
  with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
       %User{} = follower <- User.get_or_fetch_by_ap_id(follower),
       {:ok, activity} <- ActivityPub.follow(follower, followed, id, false) do
    # Accept is sent before the local follow state is mutated.
    ActivityPub.accept(%{to: [follower.ap_id], actor: followed.ap_id, object: data, local: true})

    User.follow(follower, followed)
    {:ok, activity}
  else
    _e -> :error
  end
end
|
|
|
|
|
2018-03-30 07:01:53 -06:00
|
|
|
# Incoming Like: resolve the liking account and the liked object (local
# lookup first, remote fetch as fallback), then record the favourite.
# local: false marks the resulting activity as remote-originated.
def handle_incoming(
      %{"type" => "Like", "object" => object_id, "actor" => actor, "id" => id} = _data
    ) do
  with %User{} = user <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <-
         get_obj_helper(object_id) || ActivityPub.fetch_object_from_id(object_id),
       {:ok, activity, _object} <- ActivityPub.like(user, object, id, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
|
|
|
|
|
2018-03-30 07:01:53 -06:00
|
|
|
# Incoming Announce (boost/repeat): resolve the announcing account and the
# announced object, then record the repeat locally.
def handle_incoming(
      %{"type" => "Announce", "object" => object_id, "actor" => actor, "id" => id} = _data
    ) do
  with %User{} = user <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <-
         get_obj_helper(object_id) || ActivityPub.fetch_object_from_id(object_id),
       {:ok, activity, _object} <- ActivityPub.announce(user, object, id, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
|
|
|
|
|
2018-03-30 07:01:53 -06:00
|
|
|
# Incoming profile Update for a Person: refresh our copy of the remote
# user's profile and re-emit an Update activity locally.
def handle_incoming(
      %{"type" => "Update", "object" => %{"type" => "Person"} = object, "actor" => actor_id} =
        data
    ) do
  # The pin on actor_id ensures the updated object actually belongs to the
  # actor that sent the Update.
  with %User{ap_id: ^actor_id} = actor <- User.get_by_ap_id(object["id"]) do
    {:ok, new_user_data} = ActivityPub.user_data_from_user_object(object)

    banner = new_user_data[:info]["banner"]

    # Only name/bio/avatar are taken from the wire; info is merged so other
    # locally-tracked info keys survive, with the banner refreshed.
    update_data =
      new_user_data
      |> Map.take([:name, :bio, :avatar])
      |> Map.put(:info, Map.merge(actor.info, %{"banner" => banner}))

    actor
    |> User.upgrade_changeset(update_data)
    |> User.update_and_set_cache()

    ActivityPub.update(%{
      local: false,
      to: data["to"] || [],
      cc: data["cc"] || [],
      object: object,
      actor: actor_id
    })
  else
    e ->
      # NOTE(review): when the user lookup returns nil, `e` is nil and
      # Logger.error(nil) will raise — consider Logger.error(inspect(e)).
      Logger.error(e)
      :error
  end
end
|
|
|
|
|
2018-03-03 10:37:40 -07:00
|
|
|
# TODO: Make secure.
|
2018-03-30 07:01:53 -06:00
|
|
|
# Incoming Delete of an object.
# NOTE(review): per the TODO above, the sending actor is fetched but never
# checked against the object's owner — any actor can currently delete any
# resolvable object.
def handle_incoming(
      %{"type" => "Delete", "object" => object_id, "actor" => actor, "id" => _id} = _data
    ) do
  # "object" may arrive embedded ({"id": ...}) or as a bare id string.
  object_id =
    case object_id do
      %{"id" => id} -> id
      id -> id
    end

  with %User{} = _actor <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <-
         get_obj_helper(object_id) || ActivityPub.fetch_object_from_id(object_id),
       {:ok, activity} <- ActivityPub.delete(object, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
|
|
|
|
|
2018-05-08 21:50:19 -06:00
|
|
|
# Incoming Undo of an Announce: retract a previously-federated repeat.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Announce", "object" => object_id},
        "actor" => actor,
        "id" => id
      } = _data
    ) do
  with %User{} = user <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <-
         get_obj_helper(object_id) || ActivityPub.fetch_object_from_id(object_id),
       {:ok, activity, _, _} <- ActivityPub.unannounce(user, object, id, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
|
|
|
|
|
2018-05-17 21:55:00 -06:00
|
|
|
# Incoming Undo of a Follow aimed at a local user: remove both the
# federated unfollow activity and the local follow state.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Follow", "object" => followed},
        "actor" => follower,
        "id" => id
      } = _data
    ) do
  with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
       %User{} = follower <- User.get_or_fetch_by_ap_id(follower),
       {:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
    User.unfollow(follower, followed)
    {:ok, activity}
  else
    # `_e` (was `e`): binding was unused, which triggered a compiler warning.
    _e -> :error
  end
end
|
|
|
|
|
2018-05-26 02:24:50 -06:00
|
|
|
# Compile-time snapshot of the :activitypub config used to gate Block
# handling. NOTE(review): Application.get_env in the module body freezes
# the value at compile time; a runtime read would pick up config changes —
# confirm compile-time capture is intended before changing it.
@ap_config Application.get_env(:pleroma, :activitypub)
@accept_blocks Keyword.get(@ap_config, :accept_blocks)
|
|
|
|
|
2018-05-21 02:35:43 -06:00
|
|
|
# Incoming Undo of a Block against a local user. Only honored when the
# :accept_blocks config flag is set.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Block", "object" => blocked},
        "actor" => blocker,
        "id" => id
      } = _data
    ) do
  with true <- @accept_blocks,
       %User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
       %User{} = blocker <- User.get_or_fetch_by_ap_id(blocker),
       {:ok, activity} <- ActivityPub.unblock(blocker, blocked, id, false) do
    User.unblock(blocker, blocked)
    {:ok, activity}
  else
    # `_e` (was `e`): binding was unused, which triggered a compiler warning.
    _e -> :error
  end
end
|
|
|
|
|
2018-05-19 19:23:52 -06:00
|
|
|
# Incoming Block of a local user: drop any follow relationship and record
# the block. Only honored when the :accept_blocks config flag is set.
def handle_incoming(
      %{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data
    ) do
  # `<-` (the originals used `=`) so a failed user lookup falls through to
  # `else` and returns :error instead of raising a MatchError; this matches
  # every other handle_incoming clause, including Undo/Block.
  with true <- @accept_blocks,
       %User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
       %User{} = blocker <- User.get_or_fetch_by_ap_id(blocker),
       {:ok, activity} <- ActivityPub.block(blocker, blocked, id, false) do
    User.unfollow(blocker, blocked)
    User.block(blocker, blocked)
    {:ok, activity}
  else
    _e -> :error
  end
end
|
2018-05-19 19:23:52 -06:00
|
|
|
|
2018-05-19 07:22:43 -06:00
|
|
|
# Incoming Undo of a Like: retract a previously-federated favourite.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Like", "object" => object_id},
        "actor" => actor,
        "id" => id
      } = _data
    ) do
  with %User{} = user <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <-
         get_obj_helper(object_id) || ActivityPub.fetch_object_from_id(object_id),
       {:ok, activity, _, _} <- ActivityPub.unlike(user, object, id, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
|
|
|
|
|
2018-02-17 12:13:12 -07:00
|
|
|
# TODO
|
|
|
|
# Accept
|
|
|
|
|
2018-02-17 06:55:44 -07:00
|
|
|
def handle_incoming(_), do: :error
|
|
|
|
|
2018-02-18 03:24:54 -07:00
|
|
|
# Looks up an already-known object by AP id. Returns {:ok, object} or nil —
# nil rather than an :error tuple so callers can chain with `||` to a
# remote fetch.
def get_obj_helper(id) do
  case Object.get_by_ap_id(id) do
    nil -> nil
    object -> {:ok, object}
  end
end
|
|
|
|
|
2018-03-23 09:07:02 -06:00
|
|
|
# For outgoing objects: when "inReplyTo" is a non-http (internal) id of an
# object we know, swap in that object's external_url so remote servers get
# a resolvable reference.
def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) do
  if String.starts_with?(in_reply_to, "http") do
    object
  else
    case get_obj_helper(in_reply_to) do
      {:ok, %{data: replied_to}} ->
        Map.put(object, "inReplyTo", replied_to["external_url"] || in_reply_to)

      _ ->
        object
    end
  end
end
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2018-03-23 09:07:02 -06:00
|
|
|
def set_reply_to_uri(obj), do: obj
|
|
|
|
|
|
|
|
# Prepares the object of an outgoing create activity.
|
2018-02-24 12:16:41 -07:00
|
|
|
# Converts an internal object to the outgoing (Mastodon-compatible) wire
# format by applying each outbound transformation pass in turn.
def prepare_object(object) do
  object
  |> set_sensitive()
  |> add_hashtags()
  |> add_mention_tags()
  |> add_emoji_tags()
  |> add_attributed_to()
  |> prepare_attachments()
  |> set_conversation()
  |> set_reply_to_uri()
end
|
|
|
|
|
2018-05-04 16:03:14 -06:00
|
|
|
# @doc
|
|
|
|
# """
|
|
|
|
# internal -> Mastodon
|
|
|
|
# """
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2018-02-24 12:16:41 -07:00
|
|
|
# internal -> Mastodon: prepares a Create/Note activity for the wire by
# converting its object and attaching the AS2 @context.
def prepare_outgoing(%{"type" => "Create", "object" => %{"type" => "Note"} = object} = data) do
  prepared_object = prepare_object(object)

  data =
    data
    |> Map.put("object", prepared_object)
    |> Map.put("@context", "https://www.w3.org/ns/activitystreams")

  {:ok, data}
end
|
|
|
|
|
2018-05-04 15:16:02 -06:00
|
|
|
# internal -> Mastodon: any other activity type only needs its object URL
# fixed up and the AS2 @context attached.
def prepare_outgoing(%{"type" => _type} = data) do
  {:ok,
   data
   |> maybe_fix_object_url()
   |> Map.put("@context", "https://www.w3.org/ns/activitystreams")}
end
|
|
|
|
|
2018-03-13 11:46:37 -06:00
|
|
|
# Outgoing activities should reference their object by URL: when "object"
# is a non-http internal id, resolve it and substitute the object's
# external_url if one exists. On fetch failure, log and pass data through.
def maybe_fix_object_url(data) do
  object = data["object"]

  if is_binary(object) and not String.starts_with?(object, "http") do
    case ActivityPub.fetch_object_from_id(object) do
      {:ok, relative_object} ->
        if url = relative_object.data["external_url"] do
          Map.put(data, "object", url)
        else
          data
        end

      e ->
        Logger.error("Couldn't fetch #{data["object"]} #{inspect(e)}")
        data
    end
  else
    data
  end
end
|
|
|
|
|
2018-02-18 05:51:03 -07:00
|
|
|
# Expands plain hashtag strings into full AS2 Hashtag tag objects pointing
# at this instance's /tags/ pages. Replaces the whole "tag" list.
def add_hashtags(object) do
  tags =
    for tag <- object["tag"] || [] do
      %{
        "href" => Pleroma.Web.Endpoint.url() <> "/tags/#{tag}",
        "name" => "##{tag}",
        "type" => "Hashtag"
      }
    end

  Map.put(object, "tag", tags)
end
|
|
|
|
|
2018-02-17 06:11:20 -07:00
|
|
|
# Appends an AS2 Mention tag for every locally-known recipient in
# "to"/"cc". Recipients we don't have cached are silently skipped.
# NOTE: "to" must be present — a missing "to" raises, as before.
def add_mention_tags(object) do
  recipients = object["to"] ++ (object["cc"] || [])

  mentions =
    recipients
    |> Enum.map(&User.get_cached_by_ap_id/1)
    |> Enum.filter(& &1)
    |> Enum.map(fn user ->
      %{"type" => "Mention", "href" => user.ap_id, "name" => "@#{user.nickname}"}
    end)

  existing_tags = object["tag"] || []

  Map.put(object, "tag", existing_tags ++ mentions)
end
|
|
|
|
|
2018-03-13 01:05:43 -06:00
|
|
|
# TODO: we should probably send mtime instead of unix epoch time for updated
|
|
|
|
# Expands the internal "emoji" mapping (shortcode => URL) into AS2 Emoji
# tag objects appended to the tag list. "updated" is a fixed epoch
# timestamp (see TODO above about sending real mtimes).
def add_emoji_tags(object) do
  existing_tags = object["tag"] || []

  emoji_tags =
    for {name, url} <- object["emoji"] || [] do
      %{
        "icon" => %{"url" => url, "type" => "Image"},
        "name" => ":" <> name <> ":",
        "type" => "Emoji",
        "updated" => "1970-01-01T00:00:00Z",
        "id" => url
      }
    end

  Map.put(object, "tag", existing_tags ++ emoji_tags)
end
|
|
|
|
|
2018-02-18 05:58:52 -07:00
|
|
|
# Mirrors the internal "context" back out as the OStatus-compatible
# "conversation" field.
def set_conversation(object) do
  conversation = object["context"]
  Map.put(object, "conversation", conversation)
end
|
|
|
|
|
2018-02-18 06:07:13 -07:00
|
|
|
# Marks the object sensitive when a literal "nsfw" string appears in its
# tag list.
def set_sensitive(object) do
  tags = object["tag"] || []
  sensitive? = Enum.member?(tags, "nsfw")
  Map.put(object, "sensitive", sensitive?)
end
|
|
|
|
|
2018-02-17 06:11:20 -07:00
|
|
|
# Ensures "attributedTo" is set, falling back to "actor" when absent.
def add_attributed_to(object) do
  attributed_to = object["attributedTo"] || object["actor"]
  Map.put(object, "attributedTo", attributed_to)
end
|
2018-02-17 10:38:58 -07:00
|
|
|
|
|
|
|
# Flattens each internal attachment (url as a list of Link maps) back to
# the Mastodon wire shape: a Document with a plain url string. Uses the
# first Link entry; raises if "url" is not a non-empty list, as before.
def prepare_attachments(object) do
  attachments =
    for data <- object["attachment"] || [] do
      [%{"mediaType" => media_type, "href" => href} | _] = data["url"]

      %{"url" => href, "mediaType" => media_type, "name" => data["name"], "type" => "Document"}
    end

  Map.put(object, "attachment", attachments)
end
|
2018-02-21 14:21:40 -07:00
|
|
|
|
2018-02-24 02:51:15 -07:00
|
|
|
# After a user is upgraded to ActivityPub, rewrite their old OStatus-style
# follower address to the new follower_address everywhere it appears, then
# retire websub subscriptions. Both rewrites are single UPDATE statements
# using Postgres array_replace.
defp user_upgrade_task(user) do
  old_follower_address = User.ap_followers(user)

  # Rewrite the old follower address inside every user's `following` array.
  q =
    from(
      u in User,
      where: ^old_follower_address in u.following,
      update: [
        set: [
          following:
            fragment(
              "array_replace(?,?,?)",
              u.following,
              ^old_follower_address,
              ^user.follower_address
            )
        ]
      ]
    )

  Repo.update_all(q, [])

  maybe_retire_websub(user.ap_id)

  # Only do this for recent activities, don't go through the whole db.
  # Only look at the last 1000 activities.
  since = (Repo.aggregate(Activity, :max, :id) || 0) - 1_000

  # Same rewrite for activity recipient lists, bounded by the id window.
  q =
    from(
      a in Activity,
      where: ^old_follower_address in a.recipients,
      where: a.id > ^since,
      update: [
        set: [
          recipients:
            fragment(
              "array_replace(?,?,?)",
              a.recipients,
              ^old_follower_address,
              ^user.follower_address
            )
        ]
      ]
    )

  Repo.update_all(q, [])
end
|
|
|
|
|
|
|
|
# Upgrades a known remote (non-AP) user to ActivityPub by fetching fresh AP
# user data and updating our record. `async` controls whether the expensive
# follower/activity rewrite runs in a background Task or inline.
# Returns {:ok, user}, or passes through the failing lookup/fetch result.
def upgrade_user_from_ap_id(ap_id, async \\ true) do
  with %User{local: false} = user <- User.get_by_ap_id(ap_id),
       {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id) do
    # Merge fetched info over the existing info map so local keys survive.
    data =
      data
      |> Map.put(:info, Map.merge(user.info, data[:info]))

    already_ap = User.ap_enabled?(user)

    {:ok, user} =
      User.upgrade_changeset(user, data)
      |> Repo.update()

    # Only rewrite follower addresses on the first upgrade to AP.
    if !already_ap do
      # This could potentially take a long time, do it in the background
      if async do
        Task.start(fn ->
          user_upgrade_task(user)
        end)
      else
        user_upgrade_task(user)
      end
    end

    {:ok, user}
  else
    e -> e
  end
end
|
2018-02-24 09:36:02 -07:00
|
|
|
|
|
|
|
# Deletes all websub client subscriptions whose topic starts with the
# user's AP id — used once a user has been upgraded to ActivityPub.
def maybe_retire_websub(ap_id) do
  # some sanity checks — guard against deleting everything on a bogus/empty
  # ap_id, since the LIKE pattern below is a prefix match.
  if is_binary(ap_id) && String.length(ap_id) > 8 do
    q =
      from(
        ws in Pleroma.Web.Websub.WebsubClientSubscription,
        where: fragment("? like ?", ws.topic, ^"#{ap_id}%")
      )

    Repo.delete_all(q)
  end
end
|
2018-05-19 01:30:02 -06:00
|
|
|
|
|
|
|
# Some implementations send a user's "url" as a map with an "href";
# flatten it to the plain href string (nil if the map has no "href").
def maybe_fix_user_url(data) do
  case data["url"] do
    %{} = url_map -> Map.put(data, "url", url_map["href"])
    _ -> data
  end
end
|
|
|
|
|
|
|
|
# Applies all incoming user-object fixups (currently just url flattening).
def maybe_fix_user_object(data) do
  data
  |> maybe_fix_user_url
end
|
2018-02-15 12:00:06 -07:00
|
|
|
end
|