# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Federator do
  alias Pleroma.Activity
  alias Pleroma.Object.Containment
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.Federator.Publisher
  alias Pleroma.Web.Federator.RetryQueue
  alias Pleroma.Web.OStatus
  alias Pleroma.Web.Websub

  require Logger
def init do
2019-01-28 08:17:17 -07:00
# 1 minute
2019-03-04 18:30:19 -07:00
Process.sleep(1000 * 60)
2019-01-28 08:17:17 -07:00
refresh_subscriptions()
2018-05-07 10:11:37 -06:00
end
@doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)"
# credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
2019-06-30 06:58:50 -06:00
def allowed_incoming_reply_depth?(depth) do
max_replies_depth = Pleroma.Config.get([:instance, :federation_incoming_replies_max_depth])
if max_replies_depth do
(depth || 1) <= max_replies_depth
else
true
end
end
2019-01-28 08:17:17 -07:00
# Client API
def incoming_doc(doc) do
PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_doc, doc])
2019-01-28 08:17:17 -07:00
end
def incoming_ap_doc(params) do
PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_ap_doc, params])
2019-01-28 08:17:17 -07:00
end
def publish(activity, priority \\ 1) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish, activity], priority)
2019-01-28 08:17:17 -07:00
end
def verify_websub(websub) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:verify_websub, websub])
2019-01-28 08:17:17 -07:00
end
def request_subscription(sub) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:request_subscription, sub])
2019-01-28 08:17:17 -07:00
end
def refresh_subscriptions do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:refresh_subscriptions])
2019-01-28 08:17:17 -07:00
end
2018-03-30 07:01:53 -06:00
2019-01-28 08:17:17 -07:00
# Job Worker Callbacks
def perform(:refresh_subscriptions) do
2017-05-10 10:44:06 -06:00
Logger.debug("Federator running refresh subscriptions")
Websub.refresh_subscriptions()
2018-03-30 07:01:53 -06:00
2017-05-10 10:44:06 -06:00
spawn(fn ->
2018-03-30 07:01:53 -06:00
# 6 hours
Process.sleep(1000 * 60 * 60 * 6)
2019-01-28 08:17:17 -07:00
refresh_subscriptions()
2017-05-10 10:44:06 -06:00
end)
end
2019-01-28 08:17:17 -07:00
def perform(:request_subscription, websub) do
2017-08-02 04:34:48 -06:00
Logger.debug("Refreshing #{websub.topic}")
2018-03-30 07:01:53 -06:00
with {:ok, websub} <- Websub.request_subscription(websub) do
2017-08-02 04:34:48 -06:00
Logger.debug("Successfully refreshed #{websub.topic}")
else
_e -> Logger.debug("Couldn't refresh #{websub.topic}")
end
end
2019-01-28 08:17:17 -07:00
def perform(:publish, activity) do
2017-05-05 04:07:38 -06:00
Logger.debug(fn -> "Running publish for #{activity.data["id"]}" end)
2018-03-30 07:01:53 -06:00
with %User{} = actor <- User.get_cached_by_ap_id(activity.data["actor"]),
{:ok, actor} <- User.ensure_keys_present(actor) do
2019-05-11 21:57:10 -06:00
Publisher.publish(actor, activity)
end
end
2019-01-28 08:17:17 -07:00
def perform(:verify_websub, websub) do
2018-03-30 07:01:53 -06:00
Logger.debug(fn ->
"Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
end)
Websub.verify(websub)
end
2019-01-28 08:17:17 -07:00
def perform(:incoming_doc, doc) do
Logger.info("Got document, trying to parse")
OStatus.handle_incoming(doc)
end
2019-01-28 08:17:17 -07:00
def perform(:incoming_ap_doc, params) do
2018-03-19 11:28:06 -06:00
Logger.info("Handling incoming AP activity")
2018-03-30 07:01:53 -06:00
params = Utils.normalize_params(params)
# NOTE: we use the actor ID to do the containment, this is fine because an
# actor shouldn't be acting on objects outside their own AP server.
2018-02-21 00:51:03 -07:00
with {:ok, _user} <- ap_enabled_actor(params["actor"]),
nil <- Activity.normalize(params["id"]),
:ok <- Containment.contain_origin_from_id(params["actor"], params),
{:ok, activity} <- Transmogrifier.handle_incoming(params) do
{:ok, activity}
2018-02-21 00:51:03 -07:00
else
%Activity{} ->
Logger.info("Already had #{params["id"]}")
:error
2018-03-30 07:01:53 -06:00
_e ->
2018-02-21 00:51:03 -07:00
# Just drop those for now
Logger.info("Unhandled activity")
2019-05-13 14:37:38 -06:00
Logger.info(Jason.encode!(params, pretty: true))
:error
2018-02-21 00:51:03 -07:00
end
end
2019-01-28 08:17:17 -07:00
def perform(
2018-08-26 12:17:13 -06:00
:publish_single_websub,
2018-12-09 02:12:48 -07:00
%{xml: _xml, topic: _topic, callback: _callback, secret: _secret} = params
2018-08-26 12:17:13 -06:00
) do
case Websub.publish_one(params) do
{:ok, _} ->
:ok
{:error, _} ->
2018-11-19 09:08:41 -07:00
RetryQueue.enqueue(params, Websub)
2017-06-23 08:37:34 -06:00
end
end
2019-01-28 08:17:17 -07:00
def perform(type, _) do
2017-05-05 04:07:38 -06:00
Logger.debug(fn -> "Unknown task: #{type}" end)
2018-03-19 11:47:51 -06:00
{:error, "Don't know what to do with this"}
end
2018-02-21 00:51:03 -07:00
def ap_enabled_actor(id) do
2019-04-22 01:20:43 -06:00
user = User.get_cached_by_ap_id(id)
2018-03-30 07:01:53 -06:00
2018-02-21 00:51:03 -07:00
if User.ap_enabled?(user) do
{:ok, user}
else
ActivityPub.make_user_from_ap_id(id)
end
end
end