# Pleroma/lib/pleroma/web/federator/federator.ex

# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Federator do
  alias Pleroma.Activity
  alias Pleroma.Object.Containment
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.Federator.Publisher
  alias Pleroma.Web.Federator.RetryQueue
  alias Pleroma.Web.OStatus
  alias Pleroma.Web.Websub

  require Logger
def init do
2019-01-28 16:17:17 +01:00
# 1 minute
2019-03-05 02:30:19 +01:00
Process.sleep(1000 * 60)
2019-01-28 16:17:17 +01:00
refresh_subscriptions()
2018-05-07 18:11:37 +02:00
end
@doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)"
# credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
2019-06-30 14:58:50 +02:00
def allowed_incoming_reply_depth?(depth) do
max_replies_depth = Pleroma.Config.get([:instance, :federation_incoming_replies_max_depth])
if max_replies_depth do
(depth || 1) <= max_replies_depth
else
true
end
end
2019-01-28 16:17:17 +01:00
# Client API
def incoming_doc(doc) do
PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_doc, doc])
2019-01-28 16:17:17 +01:00
end
def incoming_ap_doc(params) do
PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_ap_doc, params])
2019-01-28 16:17:17 +01:00
end
def publish(activity, priority \\ 1) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish, activity], priority)
2019-01-28 16:17:17 +01:00
end
def verify_websub(websub) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:verify_websub, websub])
2019-01-28 16:17:17 +01:00
end
def request_subscription(sub) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:request_subscription, sub])
2019-01-28 16:17:17 +01:00
end
def refresh_subscriptions do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:refresh_subscriptions])
2019-01-28 16:17:17 +01:00
end
2018-03-30 15:01:53 +02:00
2019-01-28 16:17:17 +01:00
# Job Worker Callbacks
def perform(:refresh_subscriptions) do
2017-05-10 18:44:06 +02:00
Logger.debug("Federator running refresh subscriptions")
Websub.refresh_subscriptions()
2018-03-30 15:01:53 +02:00
2017-05-10 18:44:06 +02:00
spawn(fn ->
2018-03-30 15:01:53 +02:00
# 6 hours
Process.sleep(1000 * 60 * 60 * 6)
2019-01-28 16:17:17 +01:00
refresh_subscriptions()
2017-05-10 18:44:06 +02:00
end)
end
2019-01-28 16:17:17 +01:00
def perform(:request_subscription, websub) do
2017-08-02 12:34:48 +02:00
Logger.debug("Refreshing #{websub.topic}")
2018-03-30 15:01:53 +02:00
with {:ok, websub} <- Websub.request_subscription(websub) do
2017-08-02 12:34:48 +02:00
Logger.debug("Successfully refreshed #{websub.topic}")
else
_e -> Logger.debug("Couldn't refresh #{websub.topic}")
end
end
2019-01-28 16:17:17 +01:00
def perform(:publish, activity) do
2017-05-05 12:07:38 +02:00
Logger.debug(fn -> "Running publish for #{activity.data["id"]}" end)
2018-03-30 15:01:53 +02:00
with %User{} = actor <- User.get_cached_by_ap_id(activity.data["actor"]),
{:ok, actor} <- User.ensure_keys_present(actor) do
2019-05-12 05:57:10 +02:00
Publisher.publish(actor, activity)
end
end
2019-01-28 16:17:17 +01:00
def perform(:verify_websub, websub) do
2018-03-30 15:01:53 +02:00
Logger.debug(fn ->
"Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
end)
Websub.verify(websub)
end
2019-01-28 16:17:17 +01:00
def perform(:incoming_doc, doc) do
Logger.info("Got document, trying to parse")
OStatus.handle_incoming(doc)
end
2019-01-28 16:17:17 +01:00
def perform(:incoming_ap_doc, params) do
2018-03-19 18:28:06 +01:00
Logger.info("Handling incoming AP activity")
2018-03-30 15:01:53 +02:00
params = Utils.normalize_params(params)
# NOTE: we use the actor ID to do the containment, this is fine because an
# actor shouldn't be acting on objects outside their own AP server.
2018-02-21 08:51:03 +01:00
with {:ok, _user} <- ap_enabled_actor(params["actor"]),
nil <- Activity.normalize(params["id"]),
:ok <- Containment.contain_origin_from_id(params["actor"], params),
{:ok, activity} <- Transmogrifier.handle_incoming(params) do
{:ok, activity}
2018-02-21 08:51:03 +01:00
else
%Activity{} ->
Logger.info("Already had #{params["id"]}")
:error
2018-03-30 15:01:53 +02:00
_e ->
2018-02-21 08:51:03 +01:00
# Just drop those for now
Logger.info("Unhandled activity")
2019-05-13 22:37:38 +02:00
Logger.info(Jason.encode!(params, pretty: true))
:error
2018-02-21 08:51:03 +01:00
end
end
2019-01-28 16:17:17 +01:00
def perform(
2018-08-26 20:17:13 +02:00
:publish_single_websub,
2018-12-09 10:12:48 +01:00
%{xml: _xml, topic: _topic, callback: _callback, secret: _secret} = params
2018-08-26 20:17:13 +02:00
) do
case Websub.publish_one(params) do
{:ok, _} ->
:ok
{:error, _} ->
2018-11-19 17:08:41 +01:00
RetryQueue.enqueue(params, Websub)
2017-06-23 16:37:34 +02:00
end
end
2019-01-28 16:17:17 +01:00
def perform(type, _) do
2017-05-05 12:07:38 +02:00
Logger.debug(fn -> "Unknown task: #{type}" end)
2018-03-19 18:47:51 +01:00
{:error, "Don't know what to do with this"}
end
2018-02-21 08:51:03 +01:00
def ap_enabled_actor(id) do
2019-04-22 09:20:43 +02:00
user = User.get_cached_by_ap_id(id)
2018-03-30 15:01:53 +02:00
2018-02-21 08:51:03 +01:00
if User.ap_enabled?(user) do
{:ok, user}
else
ActivityPub.make_user_from_ap_id(id)
end
end
end