# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.ActivityPub do
  alias Pleroma.Activity
  alias Pleroma.Instances
  alias Pleroma.Notification
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.Upload
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.MRF
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.Federator
  alias Pleroma.Web.OStatus
  alias Pleroma.Web.WebFinger

  import Ecto.Query
  import Pleroma.Web.ActivityPub.Utils
  import Pleroma.Web.ActivityPub.Visibility

  require Logger

  @httpoison Application.get_env(:pleroma, :httpoison)

  # For Announce activities, we filter the recipients based on following status for any actors
  # that match actual users. See issue #164 for more information about why this is necessary.
  defp get_recipients(%{"type" => "Announce"} = data) do
    to = data["to"] || []
    cc = data["cc"] || []
    actor = User.get_cached_by_ap_id(data["actor"])

    recipients =
      (to ++ cc)
      |> Enum.filter(fn recipient ->
        case User.get_cached_by_ap_id(recipient) do
          nil ->
            true

          user ->
            User.following?(user, actor)
        end
      end)

    {recipients, to, cc}
  end

  defp get_recipients(%{"type" => "Create"} = data) do
    to = data["to"] || []
    cc = data["cc"] || []
    actor = data["actor"] || []
    recipients = (to ++ cc ++ [actor]) |> Enum.uniq()
    {recipients, to, cc}
  end

  defp get_recipients(data) do
    to = data["to"] || []
    cc = data["cc"] || []
    recipients = to ++ cc
    {recipients, to, cc}
  end

  defp check_actor_is_active(actor) do
    if not is_nil(actor) do
      with user <- User.get_cached_by_ap_id(actor),
           false <- user.info.deactivated do
        :ok
      else
        _e -> :reject
      end
    else
      :ok
    end
  end

  defp check_remote_limit(%{"object" => %{"content" => content}}) when not is_nil(content) do
    limit = Pleroma.Config.get([:instance, :remote_limit])
    String.length(content) <= limit
  end

  defp check_remote_limit(_), do: true

  def increase_note_count_if_public(actor, object) do
    if is_public?(object), do: User.increase_note_count(actor), else: {:ok, actor}
  end

  def decrease_note_count_if_public(actor, object) do
    if is_public?(object), do: User.decrease_note_count(actor), else: {:ok, actor}
  end

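  # insert/2 is the common persistence path for all activity types: it deduplicates
  # against already-known activities, applies defaults and MRF policies, stores the
  # embedded object, and then triggers rich-media fetching, notifications and streaming.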
  def insert(map, local \\ true) when is_map(map) do
    with nil <- Activity.normalize(map),
         map <- lazy_put_activity_defaults(map),
         :ok <- check_actor_is_active(map["actor"]),
         {_, true} <- {:remote_limit_error, check_remote_limit(map)},
         {:ok, map} <- MRF.filter(map),
         :ok <- insert_full_object(map) do
      {recipients, _, _} = get_recipients(map)

      {:ok, activity} =
        Repo.insert(%Activity{
          data: map,
          local: local,
          actor: map["actor"],
          recipients: recipients
        })

      Task.start(fn ->
        Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
      end)

      Notification.create_notifications(activity)
      stream_out(activity)
      {:ok, activity}
    else
      %Activity{} = activity -> {:ok, activity}
      error -> {:error, error}
    end
  end

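  # Pushes the activity onto the relevant streaming channels: "user" and "list" for
  # Create/Announce/Delete, the public/hashtag/media channels for public Creates, and
  # "direct" for activities that are neither public nor addressed to the actor's followers.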
  def stream_out(activity) do
    public = "https://www.w3.org/ns/activitystreams#Public"

    if activity.data["type"] in ["Create", "Announce", "Delete"] do
      Pleroma.Web.Streamer.stream("user", activity)
      Pleroma.Web.Streamer.stream("list", activity)

      if Enum.member?(activity.data["to"], public) do
        Pleroma.Web.Streamer.stream("public", activity)

        if activity.local do
          Pleroma.Web.Streamer.stream("public:local", activity)
        end

        if activity.data["type"] in ["Create"] do
          activity.data["object"]
          |> Map.get("tag", [])
          |> Enum.filter(fn tag -> is_bitstring(tag) end)
          |> Enum.each(fn tag -> Pleroma.Web.Streamer.stream("hashtag:" <> tag, activity) end)

          if activity.data["object"]["attachment"] != [] do
            Pleroma.Web.Streamer.stream("public:media", activity)

            if activity.local do
              Pleroma.Web.Streamer.stream("public:local:media", activity)
            end
          end
        end
      else
        if !Enum.member?(activity.data["cc"] || [], public) &&
             !Enum.member?(
               activity.data["to"],
               User.get_by_ap_id(activity.data["actor"]).follower_address
             ),
           do: Pleroma.Web.Streamer.stream("direct", activity)
      end
    end
  end

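  # Builds and persists a "Create" activity, bumps the actor's note count for public
  # posts, and federates it. An illustrative call shape (all values are hypothetical):
  #
  #   ActivityPub.create(%{
  #     to: ["https://www.w3.org/ns/activitystreams#Public"],
  #     actor: user,
  #     context: "https://example.social/contexts/abc",
  #     object: note_object,
  #     additional: %{"cc" => [user.follower_address]}
  #   })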
  def create(%{to: to, actor: actor, context: context, object: object} = params) do
    additional = params[:additional] || %{}
    # only accept false as false value
    local = !(params[:local] == false)
    published = params[:published]

    with create_data <-
           make_create_data(
             %{to: to, actor: actor, published: published, context: context, object: object},
             additional
           ),
         {:ok, activity} <- insert(create_data, local),
         # Changing note count prior to enqueuing federation task in order to avoid
         # race conditions on updating user.info
         {:ok, _actor} <- increase_note_count_if_public(actor, activity),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

  def accept(%{to: to, actor: actor, object: object} = params) do
    # only accept false as false value
    local = !(params[:local] == false)

    with data <- %{"to" => to, "type" => "Accept", "actor" => actor.ap_id, "object" => object},
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

  def reject(%{to: to, actor: actor, object: object} = params) do
    # only accept false as false value
    local = !(params[:local] == false)

    with data <- %{"to" => to, "type" => "Reject", "actor" => actor.ap_id, "object" => object},
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

  def update(%{to: to, cc: cc, actor: actor, object: object} = params) do
    # only accept false as false value
    local = !(params[:local] == false)

    with data <- %{
           "to" => to,
           "cc" => cc,
           "type" => "Update",
           "actor" => actor,
           "object" => object
         },
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

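  # like/4 is idempotent: if the user has already liked the object, the existing Like
  # activity is returned instead of inserting a duplicate.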
  # TODO: This is weird, maybe we shouldn't check here if we can make the activity.
  def like(
        %User{ap_id: ap_id} = user,
        %Object{data: %{"id" => _}} = object,
        activity_id \\ nil,
        local \\ true
      ) do
    with nil <- get_existing_like(ap_id, object),
         like_data <- make_like_data(user, object, activity_id),
         {:ok, activity} <- insert(like_data, local),
         {:ok, object} <- add_like_to_object(activity, object),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    else
      %Activity{} = activity -> {:ok, activity, object}
      error -> {:error, error}
    end
  end

  def unlike(
        %User{} = actor,
        %Object{} = object,
        activity_id \\ nil,
        local \\ true
      ) do
    with %Activity{} = like_activity <- get_existing_like(actor.ap_id, object),
         unlike_data <- make_unlike_data(actor, like_activity, activity_id),
         {:ok, unlike_activity} <- insert(unlike_data, local),
         {:ok, _activity} <- Repo.delete(like_activity),
         {:ok, object} <- remove_like_from_object(like_activity, object),
         :ok <- maybe_federate(unlike_activity) do
      {:ok, unlike_activity, like_activity, object}
    else
      _e -> {:ok, object}
    end
  end

  def announce(
        %User{ap_id: _} = user,
        %Object{data: %{"id" => _}} = object,
        activity_id \\ nil,
        local \\ true,
        public \\ true
      ) do
    with true <- is_public?(object),
         announce_data <- make_announce_data(user, object, activity_id, public),
         {:ok, activity} <- insert(announce_data, local),
         {:ok, object} <- add_announce_to_object(activity, object),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    else
      error -> {:error, error}
    end
  end

  def unannounce(
        %User{} = actor,
        %Object{} = object,
        activity_id \\ nil,
        local \\ true
      ) do
    with %Activity{} = announce_activity <- get_existing_announce(actor.ap_id, object),
         unannounce_data <- make_unannounce_data(actor, announce_activity, activity_id),
         {:ok, unannounce_activity} <- insert(unannounce_data, local),
         :ok <- maybe_federate(unannounce_activity),
         {:ok, _activity} <- Repo.delete(announce_activity),
         {:ok, object} <- remove_announce_from_object(announce_activity, object) do
      {:ok, unannounce_activity, object}
    else
      _e -> {:ok, object}
    end
  end

  def follow(follower, followed, activity_id \\ nil, local \\ true) do
    with data <- make_follow_data(follower, followed, activity_id),
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

  def unfollow(follower, followed, activity_id \\ nil, local \\ true) do
    with %Activity{} = follow_activity <- fetch_latest_follow(follower, followed),
         {:ok, follow_activity} <- update_follow_state(follow_activity, "cancelled"),
         unfollow_data <- make_unfollow_data(follower, followed, follow_activity, activity_id),
         {:ok, activity} <- insert(unfollow_data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

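  # Deletes the object, emits a "Delete" activity addressed to the object's original
  # audience, and decrements the actor's note count for public objects before federating.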
  def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ true) do
    user = User.get_cached_by_ap_id(actor)
    to = (object.data["to"] || []) ++ (object.data["cc"] || [])

    with {:ok, object, activity} <- Object.delete(object),
         data <- %{
           "type" => "Delete",
           "actor" => actor,
           "object" => id,
           "to" => to,
           "deleted_activity_id" => activity && activity.id
         },
         {:ok, activity} <- insert(data, local),
         # Changing note count prior to enqueuing federation task in order to avoid
         # race conditions on updating user.info
         {:ok, _actor} <- decrease_note_count_if_public(user, object),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

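  # Blocking behaviour is configurable: :unfollow_blocked controls whether an existing
  # follow is cancelled first, and :outgoing_blocks controls whether a "Block" activity
  # is created and federated at all (see the :activitypub application config).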
  def block(blocker, blocked, activity_id \\ nil, local \\ true) do
    ap_config = Application.get_env(:pleroma, :activitypub)
    unfollow_blocked = Keyword.get(ap_config, :unfollow_blocked)
    outgoing_blocks = Keyword.get(ap_config, :outgoing_blocks)

    with true <- unfollow_blocked do
      follow_activity = fetch_latest_follow(blocker, blocked)

      if follow_activity do
        unfollow(blocker, blocked, nil, local)
      end
    end

    with true <- outgoing_blocks,
         block_data <- make_block_data(blocker, blocked, activity_id),
         {:ok, activity} <- insert(block_data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    else
      _e -> {:ok, nil}
    end
  end

  def unblock(blocker, blocked, activity_id \\ nil, local \\ true) do
    with %Activity{} = block_activity <- fetch_latest_block(blocker, blocked),
         unblock_data <- make_unblock_data(blocker, blocked, block_activity, activity_id),
         {:ok, activity} <- insert(unblock_data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

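  # Creates a "Flag" (report) activity and emails every superuser about it. When the
  # :forward option is not explicitly set to false, the reported account's AP id is
  # added to "cc" so the report can federate to the remote instance.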
  def flag(
        %{
          actor: actor,
          context: context,
          account: account,
          statuses: statuses,
          content: content
        } = params
      ) do
    # only accept false as false value
    local = !(params[:local] == false)
    forward = !(params[:forward] == false)

    additional = params[:additional] || %{}

    params = %{
      actor: actor,
      context: context,
      account: account,
      statuses: statuses,
      content: content
    }

    additional =
      if forward do
        Map.merge(additional, %{"to" => [], "cc" => [account.ap_id]})
      else
        Map.merge(additional, %{"to" => [], "cc" => []})
      end

    with flag_data <- make_flag_data(params, additional),
         {:ok, activity} <- insert(flag_data, local),
         :ok <- maybe_federate(activity) do
      Enum.each(User.all_superusers(), fn superuser ->
        superuser
        |> Pleroma.AdminEmail.report(actor, account, statuses, content)
        |> Pleroma.Mailer.deliver_async()
      end)

      {:ok, activity}
    end
  end

  def fetch_activities_for_context(context, opts \\ %{}) do
    public = ["https://www.w3.org/ns/activitystreams#Public"]

    recipients =
      if opts["user"], do: [opts["user"].ap_id | opts["user"].following] ++ public, else: public

    query = from(activity in Activity)

    query =
      query
      |> restrict_blocked(opts)
      |> restrict_recipients(recipients, opts["user"])

    query =
      from(
        activity in query,
        where:
          fragment(
            "?->>'type' = ? and ?->>'context' = ?",
            activity.data,
            "Create",
            activity.data,
            ^context
          ),
        order_by: [desc: :id]
      )

    Repo.all(query)
  end

  def fetch_public_activities(opts \\ %{}) do
    q = fetch_activities_query(["https://www.w3.org/ns/activitystreams#Public"], opts)

    q
    |> restrict_unlisted()
    |> Repo.all()
    |> Enum.reverse()
  end

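  # Visibility filtering delegates to the activity_visibility/3 SQL function defined in
  # the database; only the values below are accepted, either singly or as a list.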
  @valid_visibilities ~w[direct unlisted public private]

  defp restrict_visibility(query, %{visibility: visibility})
       when is_list(visibility) do
    if Enum.all?(visibility, &(&1 in @valid_visibilities)) do
      query =
        from(
          a in query,
          where:
            fragment(
              "activity_visibility(?, ?, ?) = ANY (?)",
              a.actor,
              a.recipients,
              a.data,
              ^visibility
            )
        )

      Ecto.Adapters.SQL.to_sql(:all, Repo, query)

      query
    else
      Logger.error("Could not restrict visibility to #{visibility}")
    end
  end

  defp restrict_visibility(query, %{visibility: visibility})
       when visibility in @valid_visibilities do
    query =
      from(
        a in query,
        where:
          fragment("activity_visibility(?, ?, ?) = ?", a.actor, a.recipients, a.data, ^visibility)
      )

    Ecto.Adapters.SQL.to_sql(:all, Repo, query)

    query
  end

  defp restrict_visibility(_query, %{visibility: visibility})
       when visibility not in @valid_visibilities do
    Logger.error("Could not restrict visibility to #{visibility}")
  end

  defp restrict_visibility(query, _visibility), do: query

  def fetch_user_activities(user, reading_user, params \\ %{}) do
    params =
      params
      |> Map.put("type", ["Create", "Announce"])
      |> Map.put("actor_id", user.ap_id)
      |> Map.put("whole_db", true)
      |> Map.put("pinned_activity_ids", user.info.pinned_activities)

    recipients =
      if reading_user do
        ["https://www.w3.org/ns/activitystreams#Public"] ++
          [reading_user.ap_id | reading_user.following]
      else
        ["https://www.w3.org/ns/activitystreams#Public"]
      end

    fetch_activities(recipients, params)
    |> Enum.reverse()
  end

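  # The restrict_* helpers below each narrow the activity query according to one
  # optional key in the opts map and fall through unchanged when the key is absent.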
  defp restrict_since(query, %{"since_id" => ""}), do: query

  defp restrict_since(query, %{"since_id" => since_id}) do
    from(activity in query, where: activity.id > ^since_id)
  end

  defp restrict_since(query, _), do: query

  defp restrict_tag_reject(query, %{"tag_reject" => tag_reject})
       when is_list(tag_reject) and tag_reject != [] do
    from(
      activity in query,
      where: fragment(~s(\(not \(? #> '{"object","tag"}'\) \\?| ?\)), activity.data, ^tag_reject)
    )
  end

  defp restrict_tag_reject(query, _), do: query

  defp restrict_tag_all(query, %{"tag_all" => tag_all})
       when is_list(tag_all) and tag_all != [] do
    from(
      activity in query,
      where: fragment(~s(\(? #> '{"object","tag"}'\) \\?& ?), activity.data, ^tag_all)
    )
  end

  defp restrict_tag_all(query, _), do: query

  defp restrict_tag(query, %{"tag" => tag}) when is_list(tag) do
    from(
      activity in query,
      where: fragment(~s(\(? #> '{"object","tag"}'\) \\?| ?), activity.data, ^tag)
    )
  end

  defp restrict_tag(query, %{"tag" => tag}) when is_binary(tag) do
    from(
      activity in query,
      where: fragment(~s(? <@ (? #> '{"object","tag"}'\)), ^tag, activity.data)
    )
  end

  defp restrict_tag(query, _), do: query

  defp restrict_to_cc(query, recipients_to, recipients_cc) do
    from(
      activity in query,
      where:
        fragment(
          "(?->'to' \\?| ?) or (?->'cc' \\?| ?)",
          activity.data,
          ^recipients_to,
          activity.data,
          ^recipients_cc
        )
    )
  end

  defp restrict_recipients(query, [], _user), do: query

  defp restrict_recipients(query, recipients, nil) do
    from(activity in query, where: fragment("? && ?", ^recipients, activity.recipients))
  end

  defp restrict_recipients(query, recipients, user) do
    from(
      activity in query,
      where: fragment("? && ?", ^recipients, activity.recipients),
      or_where: activity.actor == ^user.ap_id
    )
  end

  defp restrict_limit(query, %{"limit" => limit}) do
    from(activity in query, limit: ^limit)
  end

  defp restrict_limit(query, _), do: query

  defp restrict_local(query, %{"local_only" => true}) do
    from(activity in query, where: activity.local == true)
  end

  defp restrict_local(query, _), do: query

  defp restrict_max(query, %{"max_id" => ""}), do: query

  defp restrict_max(query, %{"max_id" => max_id}) do
    from(activity in query, where: activity.id < ^max_id)
  end

  defp restrict_max(query, _), do: query

  defp restrict_actor(query, %{"actor_id" => actor_id}) do
    from(activity in query, where: activity.actor == ^actor_id)
  end

  defp restrict_actor(query, _), do: query

  defp restrict_type(query, %{"type" => type}) when is_binary(type) do
    from(activity in query, where: fragment("?->>'type' = ?", activity.data, ^type))
  end

  defp restrict_type(query, %{"type" => type}) do
    from(activity in query, where: fragment("?->>'type' = ANY(?)", activity.data, ^type))
  end

  defp restrict_type(query, _), do: query

  defp restrict_favorited_by(query, %{"favorited_by" => ap_id}) do
    from(
      activity in query,
      where: fragment(~s(? <@ (? #> '{"object","likes"}'\)), ^ap_id, activity.data)
    )
  end

  defp restrict_favorited_by(query, _), do: query

  defp restrict_media(query, %{"only_media" => val}) when val == "true" or val == "1" do
    from(
      activity in query,
      where: fragment(~s(not (? #> '{"object","attachment"}' = ?\)), activity.data, ^[])
    )
  end

  defp restrict_media(query, _), do: query

  defp restrict_replies(query, %{"exclude_replies" => val}) when val == "true" or val == "1" do
    from(
      activity in query,
      where: fragment("?->'object'->>'inReplyTo' is null", activity.data)
    )
  end

  defp restrict_replies(query, _), do: query

  defp restrict_reblogs(query, %{"exclude_reblogs" => val}) when val == "true" or val == "1" do
    from(activity in query, where: fragment("?->>'type' != 'Announce'", activity.data))
  end

  defp restrict_reblogs(query, _), do: query

  defp restrict_muted(query, %{"with_muted" => val}) when val in [true, "true", "1"], do: query

  defp restrict_muted(query, %{"muting_user" => %User{info: info}}) do
    mutes = info.mutes

    from(
      activity in query,
      where: fragment("not (? = ANY(?))", activity.actor, ^mutes),
      where: fragment("not (?->'to' \\?| ?)", activity.data, ^mutes)
    )
  end

  defp restrict_muted(query, _), do: query

  defp restrict_blocked(query, %{"blocking_user" => %User{info: info}}) do
    blocks = info.blocks || []
    domain_blocks = info.domain_blocks || []

    from(
      activity in query,
      where: fragment("not (? = ANY(?))", activity.actor, ^blocks),
      where: fragment("not (?->'to' \\?| ?)", activity.data, ^blocks),
      where: fragment("not (split_part(?, '/', 3) = ANY(?))", activity.actor, ^domain_blocks)
    )
  end

  defp restrict_blocked(query, _), do: query

  defp restrict_unlisted(query) do
    from(
      activity in query,
      where:
        fragment(
          "not (coalesce(?->'cc', '{}'::jsonb) \\?| ?)",
          activity.data,
          ^["https://www.w3.org/ns/activitystreams#Public"]
        )
    )
  end

  defp restrict_pinned(query, %{"pinned" => "true", "pinned_activity_ids" => ids}) do
    from(activity in query, where: activity.id in ^ids)
  end

  defp restrict_pinned(query, _), do: query

  defp restrict_muted_reblogs(query, %{"muting_user" => %User{info: info}}) do
    muted_reblogs = info.muted_reblogs || []

    from(
      activity in query,
      where: fragment("not ?->>'type' = 'Announce'", activity.data),
      where: fragment("not ? = ANY(?)", activity.actor, ^muted_reblogs)
    )
  end

  defp restrict_muted_reblogs(query, _), do: query

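  # Composes the base query (20 results, newest first) with every restrict_* filter.
  # An illustrative opts map (keys are the ones handled above, values hypothetical):
  #
  #   fetch_activities_query(
  #     ["https://www.w3.org/ns/activitystreams#Public"],
  #     %{"type" => ["Create", "Announce"], "local_only" => true, "limit" => 40}
  #   )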
  def fetch_activities_query(recipients, opts \\ %{}) do
    base_query =
      from(
        activity in Activity,
        limit: 20,
        order_by: [fragment("? desc nulls last", activity.id)]
      )

    base_query
    |> restrict_recipients(recipients, opts["user"])
    |> restrict_tag(opts)
    |> restrict_tag_reject(opts)
    |> restrict_tag_all(opts)
    |> restrict_since(opts)
    |> restrict_local(opts)
    |> restrict_limit(opts)
    |> restrict_max(opts)
    |> restrict_actor(opts)
    |> restrict_type(opts)
    |> restrict_favorited_by(opts)
    |> restrict_blocked(opts)
    |> restrict_muted(opts)
    |> restrict_media(opts)
    |> restrict_visibility(opts)
    |> restrict_replies(opts)
    |> restrict_reblogs(opts)
    |> restrict_pinned(opts)
    |> restrict_muted_reblogs(opts)
  end

  def fetch_activities(recipients, opts \\ %{}) do
    fetch_activities_query(recipients, opts)
    |> Repo.all()
    |> Enum.reverse()
  end

  def fetch_activities_bounded(recipients_to, recipients_cc, opts \\ %{}) do
    fetch_activities_query([], opts)
    |> restrict_to_cc(recipients_to, recipients_cc)
    |> Repo.all()
    |> Enum.reverse()
  end

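  # Stores an uploaded file through Pleroma.Upload and persists the resulting
  # attachment as an Object, tagging it with the uploading actor when one is given.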
  def upload(file, opts \\ []) do
    with {:ok, data} <- Upload.store(file, opts) do
      obj_data =
        if opts[:actor] do
          Map.put(data, "actor", opts[:actor])
        else
          data
        end

      Repo.insert(%Object{data: obj_data})
    end
  end

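  # Maps a remote ActivityPub actor document onto the fields of a local User record
  # (avatar, banner, lock status, nickname derived from preferredUsername@host, etc.).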
  def user_data_from_user_object(data) do
    avatar =
      data["icon"]["url"] &&
        %{
          "type" => "Image",
          "url" => [%{"href" => data["icon"]["url"]}]
        }

    banner =
      data["image"]["url"] &&
        %{
          "type" => "Image",
          "url" => [%{"href" => data["image"]["url"]}]
        }

    locked = data["manuallyApprovesFollowers"] || false
    data = Transmogrifier.maybe_fix_user_object(data)

    user_data = %{
      ap_id: data["id"],
      info: %{
        "ap_enabled" => true,
        "source_data" => data,
        "banner" => banner,
        "locked" => locked
      },
      avatar: avatar,
      name: data["name"],
      follower_address: data["followers"],
      bio: data["summary"]
    }

    # nickname can be nil because of virtual actors
    user_data =
      if data["preferredUsername"] do
        Map.put(
          user_data,
          :nickname,
          "#{data["preferredUsername"]}@#{URI.parse(data["id"]).host}"
        )
      else
        Map.put(user_data, :nickname, nil)
      end

    {:ok, user_data}
  end

  def fetch_and_prepare_user_from_ap_id(ap_id) do
    with {:ok, data} <- fetch_and_contain_remote_object_from_id(ap_id) do
      user_data_from_user_object(data)
    else
      e -> Logger.error("Could not decode user at fetch #{ap_id}, #{inspect(e)}")
    end
  end

  def make_user_from_ap_id(ap_id) do
    if _user = User.get_by_ap_id(ap_id) do
      Transmogrifier.upgrade_user_from_ap_id(ap_id)
    else
      with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do
        User.insert_or_update_user(data)
      else
        e -> {:error, e}
      end
    end
  end

  def make_user_from_nickname(nickname) do
    with {:ok, %{"ap_id" => ap_id}} when not is_nil(ap_id) <- WebFinger.finger(nickname) do
      make_user_from_ap_id(ap_id)
    else
      _e -> {:error, "No AP id in WebFinger"}
    end
  end

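  # Public activities are delivered everywhere; non-public deliveries are skipped for
  # inboxes on hosts listed in the :quarantined_instances config.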
  def should_federate?(inbox, public) do
    if public do
      true
    else
      inbox_info = URI.parse(inbox)
      !Enum.member?(Pleroma.Config.get([:instance, :quarantined_instances], []), inbox_info.host)
    end
  end

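  # Federation fan-out: collect the remote recipients (Salmon users plus remote followers
  # when the activity is addressed to them), resolve each to a sharedInbox or inbox URL,
  # deduplicate, drop inboxes that should not be federated to or are currently marked
  # unreachable, and enqueue one delivery job per inbox via the Federator.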
  def publish(actor, activity) do
    remote_followers =
      if actor.follower_address in activity.recipients do
        {:ok, followers} = User.get_followers(actor)
        followers |> Enum.filter(&(!&1.local))
      else
        []
      end

    public = is_public?(activity)

    {:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
    json = Jason.encode!(data)

    (Pleroma.Web.Salmon.remote_users(activity) ++ remote_followers)
    |> Enum.filter(fn user -> User.ap_enabled?(user) end)
    |> Enum.map(fn %{info: %{source_data: data}} ->
      (is_map(data["endpoints"]) && Map.get(data["endpoints"], "sharedInbox")) || data["inbox"]
    end)
    |> Enum.uniq()
    |> Enum.filter(fn inbox -> should_federate?(inbox, public) end)
    |> Instances.filter_reachable()
    |> Enum.each(fn {inbox, unreachable_since} ->
      Federator.publish_single_ap(%{
        inbox: inbox,
        json: json,
        actor: actor,
        id: activity.data["id"],
        unreachable_since: unreachable_since
      })
    end)
  end

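  # Delivers one activity to one inbox: the request is signed with an HTTP signature
  # covering the host, Date, Digest and Content-Length of the JSON payload, and the
  # target instance is marked reachable or unreachable based on the response.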
  def publish_one(%{inbox: inbox, json: json, actor: actor, id: id} = params) do
    Logger.info("Federating #{id} to #{inbox}")
    host = URI.parse(inbox).host

    digest = "SHA-256=" <> (:crypto.hash(:sha256, json) |> Base.encode64())

    date =
      NaiveDateTime.utc_now()
      |> Timex.format!("{WDshort}, {0D} {Mshort} {YYYY} {h24}:{m}:{s} GMT")

    signature =
      Pleroma.Web.HTTPSignatures.sign(actor, %{
        host: host,
        "content-length": byte_size(json),
        digest: digest,
        date: date
      })

    with {:ok, %{status: code}} when code in 200..299 <-
           result =
             @httpoison.post(
               inbox,
               json,
               [
                 {"Content-Type", "application/activity+json"},
                 {"Date", date},
                 {"signature", signature},
                 {"digest", digest}
               ]
             ) do
      if !Map.has_key?(params, :unreachable_since) || params[:unreachable_since],
        do: Instances.set_reachable(inbox)

      result
    else
      {_post_result, response} ->
        unless params[:unreachable_since], do: Instances.set_unreachable(inbox)
        {:error, response}
    end
  end

  # TODO:
  # This will create a Create activity, which we need internally at the moment.
  def fetch_object_from_id(id) do
    if object = Object.get_cached_by_ap_id(id) do
      {:ok, object}
    else
      with {:ok, data} <- fetch_and_contain_remote_object_from_id(id),
           nil <- Object.normalize(data),
           params <- %{
             "type" => "Create",
             "to" => data["to"],
             "cc" => data["cc"],
             "actor" => data["actor"] || data["attributedTo"],
             "object" => data
           },
           :ok <- Transmogrifier.contain_origin(id, params),
           {:ok, activity} <- Transmogrifier.handle_incoming(params) do
        {:ok, Object.normalize(activity.data["object"])}
      else
        {:error, {:reject, nil}} ->
          {:reject, nil}

        object = %Object{} ->
          {:ok, object}

        _e ->
          Logger.info("Couldn't get object via AP, trying out OStatus fetching...")

          case OStatus.fetch_activity_from_url(id) do
            {:ok, [activity | _]} -> {:ok, Object.normalize(activity.data["object"])}
            e -> e
          end
      end
    end
  end

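  # Fetches a remote object over HTTP with the ActivityPub Accept header and applies
  # the containment check, rejecting documents that fail the origin check for their id.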
  def fetch_and_contain_remote_object_from_id(id) do
    Logger.info("Fetching object #{id} via AP")

    with true <- String.starts_with?(id, "http"),
         {:ok, %{body: body, status: code}} when code in 200..299 <-
           @httpoison.get(
             id,
             [{:Accept, "application/activity+json"}]
           ),
         {:ok, data} <- Jason.decode(body),
         :ok <- Transmogrifier.contain_origin_from_id(id, data) do
      {:ok, data}
    else
      e ->
        {:error, e}
    end
  end

  # filter out broken threads
  def contain_broken_threads(%Activity{} = activity, %User{} = user) do
    entire_thread_visible_for_user?(activity, user)
  end

  # do post-processing on a specific activity
  def contain_activity(%Activity{} = activity, %User{} = user) do
    contain_broken_threads(activity, user)
  end

  # do post-processing on a timeline
  def contain_timeline(timeline, user) do
    timeline
    |> Enum.filter(fn activity ->
      contain_activity(activity, user)
    end)
  end
end