# pleroma/lib/pleroma/web/federator/federator.ex
defmodule Pleroma.Web.Federator do
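  @moduledoc """
  Background worker for federation jobs.

  State holds two priority queues (:in for incoming documents, :out for
  everything else); each queue runs at most @max_jobs concurrent tasks,
  tracked by their monitor references. Callers hand work to the federator
  with e.g. `Pleroma.Web.Federator.enqueue(:publish, activity)`.
  """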

  use GenServer
  alias Pleroma.User
  alias Pleroma.Web.{WebFinger, Websub}
  alias Pleroma.Web.ActivityPub.ActivityPub
  require Logger

  @websub Application.get_env(:pleroma, :websub)
  @ostatus Application.get_env(:pleroma, :ostatus)
  @httpoison Application.get_env(:pleroma, :httpoison)

  @max_jobs 10

  def start_link do
    spawn(fn ->
      # 1 minute
      Process.sleep(1000 * 60 * 1)
      enqueue(:refresh_subscriptions, nil)
    end)

    GenServer.start_link(
      __MODULE__,
      %{
        in: {:sets.new(), []},
        out: {:sets.new(), []}
      },
      name: __MODULE__
    )
  end
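
  # Job handlers. Each clause does one unit of federation work and is run
  # inside a Task started by maybe_start_job/2 (or synchronously in tests).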
  def handle(:refresh_subscriptions, _) do
    Logger.debug("Federator running refresh subscriptions")
    Websub.refresh_subscriptions()

    spawn(fn ->
      # 6 hours
      Process.sleep(1000 * 60 * 60 * 6)
      enqueue(:refresh_subscriptions, nil)
    end)
  end

  def handle(:request_subscription, websub) do
    Logger.debug("Refreshing #{websub.topic}")

    with {:ok, websub} <- Websub.request_subscription(websub) do
      Logger.debug("Successfully refreshed #{websub.topic}")
    else
      _e -> Logger.debug("Couldn't refresh #{websub.topic}")
    end
  end

  def handle(:publish, activity) do
    Logger.debug(fn -> "Running publish for #{activity.data["id"]}" end)

    with actor when not is_nil(actor) <- User.get_cached_by_ap_id(activity.data["actor"]) do
      {:ok, actor} = WebFinger.ensure_keys_present(actor)

      if ActivityPub.is_public?(activity) do
        Logger.info(fn -> "Sending #{activity.data["id"]} out via websub" end)
        Websub.publish(Pleroma.Web.OStatus.feed_path(actor), actor, activity)

        Logger.info(fn -> "Sending #{activity.data["id"]} out via salmon" end)
        Pleroma.Web.Salmon.publish(actor, activity)
      end

      Logger.info(fn -> "Sending #{activity.data["id"]} out via AP" end)
      Pleroma.Web.ActivityPub.ActivityPub.publish(actor, activity)
    end
  end

  def handle(:verify_websub, websub) do
    Logger.debug(fn ->
      "Running websub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
    end)

    @websub.verify(websub)
  end

  def handle(:incoming_doc, doc) do
    Logger.debug("Got document, trying to parse")
    @ostatus.handle_incoming(doc)
  end

  def handle(:publish_single_ap, params) do
    ActivityPub.publish_one(params)
  end

  def handle(:publish_single_websub, %{xml: xml, topic: topic, callback: callback, secret: secret}) do
    signature = @websub.sign(secret || "", xml)
    Logger.debug(fn -> "Pushing #{topic} to #{callback}" end)

    with {:ok, %{status_code: code}} <-
           @httpoison.post(
             callback,
             xml,
             [
               {"Content-Type", "application/atom+xml"},
               {"X-Hub-Signature", "sha1=#{signature}"}
             ],
             timeout: 10000,
             recv_timeout: 20000
           ) do
      Logger.debug(fn -> "Pushed to #{callback}, code #{code}" end)
    else
      e -> Logger.debug(fn -> "Couldn't push to #{callback}, #{inspect(e)}" end)
    end
  end

  def handle(type, _) do
    Logger.debug(fn -> "Unknown task: #{type}" end)
    {:error, "Don't know what to do with this"}
  end

  # In tests, run jobs synchronously; otherwise queue them on the GenServer.
  def enqueue(type, payload, priority \\ 1) do
    if Mix.env() == :test do
      handle(type, payload)
    else
      GenServer.cast(__MODULE__, {:enqueue, type, payload, priority})
    end
  end
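
  # Start the next job from the queue if fewer than @max_jobs are running,
  # tracking the spawned Task by its monitor reference.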
  def maybe_start_job(running_jobs, queue) do
    if :sets.size(running_jobs) < @max_jobs && queue != [] do
      {{type, payload}, queue} = queue_pop(queue)
      {:ok, pid} = Task.start(fn -> handle(type, payload) end)
      mref = Process.monitor(pid)
      {:sets.add_element(mref, running_jobs), queue}
    else
      {running_jobs, queue}
    end
  end
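
  # :incoming_doc jobs go on the :in queue; every other job type goes on :out.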
  def handle_cast({:enqueue, type, payload, priority}, state) when type in [:incoming_doc] do
    %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}} = state
    i_queue = enqueue_sorted(i_queue, {type, payload}, priority)
    {i_running_jobs, i_queue} = maybe_start_job(i_running_jobs, i_queue)
    {:noreply, %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}}}
  end

  def handle_cast({:enqueue, type, payload, priority}, state) do
    %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}} = state
    o_queue = enqueue_sorted(o_queue, {type, payload}, priority)
    {o_running_jobs, o_queue} = maybe_start_job(o_running_jobs, o_queue)
    {:noreply, %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}}}
  end

  def handle_cast(m, state) do
    Logger.debug(fn -> "Unknown: #{inspect(m)}, #{inspect(state)}" end)
    {:noreply, state}
  end
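
  # A monitored job exited: drop its ref from both running sets and pull the
  # next queued job (if any) for each queue.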
  def handle_info({:DOWN, ref, :process, _pid, _reason}, state) do
    %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}} = state
    i_running_jobs = :sets.del_element(ref, i_running_jobs)
    o_running_jobs = :sets.del_element(ref, o_running_jobs)
    {i_running_jobs, i_queue} = maybe_start_job(i_running_jobs, i_queue)
    {o_running_jobs, o_queue} = maybe_start_job(o_running_jobs, o_queue)
    {:noreply, %{in: {i_running_jobs, i_queue}, out: {o_running_jobs, o_queue}}}
  end
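
  # Minimal priority queue helpers: lower priority values sort first and are
  # popped first.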
  def enqueue_sorted(queue, element, priority) do
    [%{item: element, priority: priority} | queue]
    |> Enum.sort_by(fn %{priority: priority} -> priority end)
  end

  def queue_pop([%{item: element} | queue]) do
    {element, queue}
  end
end