defmodule Pleroma.Web.Federator do
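  @moduledoc """
  Queues and runs federation jobs: publishing local activities via Websub,
  Salmon and ActivityPub, and handling incoming OStatus and ActivityPub
  documents. Jobs are kept in separate `in` and `out` queues, each limited
  to `@max_jobs` concurrently running tasks.
  """
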
  use GenServer

  alias Pleroma.User
  alias Pleroma.Activity
  alias Pleroma.Web.{WebFinger, Websub}
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Transmogrifier

  require Logger

  @websub Application.get_env(:pleroma, :websub)
  @ostatus Application.get_env(:pleroma, :ostatus)
  @httpoison Application.get_env(:pleroma, :httpoison)
  @instance Application.get_env(:pleroma, :instance)
  @federating Keyword.get(@instance, :federating)

  @max_jobs 20
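
  # Starts the Federator and schedules an initial :refresh_subscriptions job
  # one minute after boot.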
  def start_link do
    spawn(fn ->
      Process.sleep(1000 * 60 * 1) # 1 minute
      enqueue(:refresh_subscriptions, nil)
    end)

    GenServer.start_link(__MODULE__, %{
      in: {:sets.new(), []},
      out: {:sets.new(), []}
    }, name: __MODULE__)
  end
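
  # Refreshes all Websub subscriptions, then re-enqueues itself to run again
  # in six hours.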
  def handle(:refresh_subscriptions, _) do
    Logger.debug("Federator running refresh subscriptions")
    Websub.refresh_subscriptions()

    spawn(fn ->
      Process.sleep(1000 * 60 * 60 * 6) # 6 hours
      enqueue(:refresh_subscriptions, nil)
    end)
  end
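
  # Re-requests a single Websub subscription and logs the result.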
  def handle(:request_subscription, websub) do
    Logger.debug("Refreshing #{websub.topic}")

    with {:ok, websub} <- Websub.request_subscription(websub) do
      Logger.debug("Successfully refreshed #{websub.topic}")
    else
      _e -> Logger.debug("Couldn't refresh #{websub.topic}")
    end
  end
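
  # Publishes an activity: public activities additionally go out via Websub
  # and Salmon; everything is sent out via ActivityPub.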
  def handle(:publish, activity) do
    Logger.debug(fn -> "Running publish for #{activity.data["id"]}" end)

    with actor when not is_nil(actor) <- User.get_cached_by_ap_id(activity.data["actor"]) do
      {:ok, actor} = WebFinger.ensure_keys_present(actor)

      if ActivityPub.is_public?(activity) do
        Logger.info(fn -> "Sending #{activity.data["id"]} out via WebSub" end)
        Websub.publish(Pleroma.Web.OStatus.feed_path(actor), actor, activity)

        Logger.info(fn -> "Sending #{activity.data["id"]} out via Salmon" end)
        Pleroma.Web.Salmon.publish(actor, activity)
      end

      Logger.info(fn -> "Sending #{activity.data["id"]} out via AP" end)
      Pleroma.Web.ActivityPub.ActivityPub.publish(actor, activity)
    end
  end
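
  # Verifies a Websub subscription through the configured @websub module.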
  def handle(:verify_websub, websub) do
    Logger.debug(fn ->
      "Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
    end)

    @websub.verify(websub)
  end
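
  # Handles an incoming OStatus document through the configured @ostatus module.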
  def handle(:incoming_doc, doc) do
    Logger.info("Got document, trying to parse")
    @ostatus.handle_incoming(doc)
  end
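
  # Handles an incoming ActivityPub activity: the actor must be AP-enabled and
  # the activity must not already exist locally; anything else is logged and
  # dropped.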
  def handle(:incoming_ap_doc, params) do
    Logger.info("Handling incoming AP activity")

    with {:ok, _user} <- ap_enabled_actor(params["actor"]),
         nil <- Activity.get_by_ap_id(params["id"]),
         {:ok, activity} <- Transmogrifier.handle_incoming(params) do
      {:ok, activity}
    else
      %Activity{} ->
        Logger.info("Already had #{params["id"]}")

      _e ->
        # Just drop those for now
        Logger.info("Unhandled activity")
        Logger.info(Poison.encode!(params, pretty: 2))
    end
  end
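
  # Hands a single ActivityPub delivery off to ActivityPub.publish_one/1.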
  def handle(:publish_single_ap, params) do
    ActivityPub.publish_one(params)
  end
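
  # Signs the payload with the subscription secret and pushes it to a single
  # Websub subscriber callback.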
  def handle(:publish_single_websub, %{xml: xml, topic: topic, callback: callback, secret: secret}) do
    signature = @websub.sign(secret || "", xml)
    Logger.debug(fn -> "Pushing #{topic} to #{callback}" end)

    with {:ok, %{status_code: code}} <- @httpoison.post(callback, xml, [
           {"Content-Type", "application/atom+xml"},
           {"X-Hub-Signature", "sha1=#{signature}"}
         ], timeout: 10000, recv_timeout: 20000, hackney: [pool: :default]) do
      Logger.debug(fn -> "Pushed to #{callback}, code #{code}" end)
    else
      e -> Logger.debug(fn -> "Couldn't push to #{callback}, #{inspect(e)}" end)
    end
  end
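
  # Fallback for unknown job types.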
  def handle(type, _) do
    Logger.debug(fn -> "Unknown task: #{type}" end)
    {:error, "Don't know what to do with this"}
  end
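
  # Queues a federation job. Does nothing when federation is disabled; in the
  # test environment the job runs synchronously.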
  def enqueue(type, payload, priority \\ 1) do
    if @federating do
      if Mix.env == :test do
        handle(type, payload)
      else
        GenServer.cast(__MODULE__, {:enqueue, type, payload, priority})
      end
    end
  end
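
  # Starts the next queued job in a monitored Task when fewer than @max_jobs
  # are running; returns the updated {running_jobs, queue} pair.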
  def maybe_start_job(running_jobs, queue) do
    if (:sets.size(running_jobs) < @max_jobs) && queue != [] do
      {{type, payload}, queue} = queue_pop(queue)
      {:ok, pid} = Task.start(fn -> handle(type, payload) end)
      mref = Process.monitor(pid)
      {:sets.add_element(mref, running_jobs), queue}
    else
      {running_jobs, queue}
    end
  end
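
  # Incoming documents are queued on the `in` side; all other jobs (see the
  # next clause) go on the `out` side.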
  def handle_cast({:enqueue, type, payload, _priority}, state)
      when type in [:incoming_doc, :incoming_ap_doc] do
    %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}} = state
    i_queue = enqueue_sorted(i_queue, {type, payload}, 1)
    {i_running_jobs, i_queue} = maybe_start_job(i_running_jobs, i_queue)
    {:noreply, %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}}}
  end

  def handle_cast({:enqueue, type, payload, _priority}, state) do
    %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}} = state
    o_queue = enqueue_sorted(o_queue, {type, payload}, 1)
    {o_running_jobs, o_queue} = maybe_start_job(o_running_jobs, o_queue)
    {:noreply, %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}}}
  end

  def handle_cast(m, state) do
    IO.inspect("Unknown: #{inspect(m)}, #{inspect(state)}")
    {:noreply, state}
  end
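
  # A monitored job finished or crashed: drop its monitor ref from both running
  # sets and try to start the next job on each queue.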
  def handle_info({:DOWN, ref, :process, _pid, _reason}, state) do
    %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}} = state
    i_running_jobs = :sets.del_element(ref, i_running_jobs)
    o_running_jobs = :sets.del_element(ref, o_running_jobs)
    {i_running_jobs, i_queue} = maybe_start_job(i_running_jobs, i_queue)
    {o_running_jobs, o_queue} = maybe_start_job(o_running_jobs, o_queue)

    {:noreply, %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}}}
  end
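
  # Inserts an element into the queue, keeping it sorted by ascending priority.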
  def enqueue_sorted(queue, element, priority) do
    [%{item: element, priority: priority} | queue]
    |> Enum.sort_by(fn %{priority: priority} -> priority end)
  end
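
  # Pops the first element off the queue.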
  def queue_pop([%{item: element} | queue]) do
    {element, queue}
  end
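
  # Returns an AP-enabled user for the given AP id, fetching the remote actor
  # over ActivityPub when the locally known user is not AP-enabled.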
  def ap_enabled_actor(id) do
    user = User.get_by_ap_id(id)

    if User.ap_enabled?(user) do
      {:ok, user}
    else
      ActivityPub.make_user_from_ap_id(id)
    end
  end
end