pleroma/lib/pleroma/web/activity_pub/activity_pub.ex

defmodule Pleroma.Web.ActivityPub.ActivityPub do
  alias Pleroma.{Activity, Repo, Object, Upload, User, Notification}
  alias Pleroma.Web.ActivityPub.{Transmogrifier, MRF}
  alias Pleroma.Web.WebFinger
  alias Pleroma.Web.Federator
  alias Pleroma.Web.OStatus
  import Ecto.Query
  import Pleroma.Web.ActivityPub.Utils
  require Logger

  @httpoison Application.get_env(:pleroma, :httpoison)
  @instance Application.get_env(:pleroma, :instance)

  def get_recipients(data) do
    (data["to"] || []) ++ (data["cc"] || [])
  end
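
  # Deduplicates on AP id, fills in activity defaults, runs the map through MRF,
  # stores the embedded object, then persists the activity, creates notifications
  # and streams it out.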
  def insert(map, local \\ true) when is_map(map) do
    with nil <- Activity.get_by_ap_id(map["id"]),
         map <- lazy_put_activity_defaults(map),
         {:ok, map} <- MRF.filter(map),
         :ok <- insert_full_object(map) do
      {:ok, activity} =
        Repo.insert(%Activity{
          data: map,
          local: local,
          actor: map["actor"],
          recipients: get_recipients(map)
        })

      Notification.create_notifications(activity)
      stream_out(activity)
      {:ok, activity}
    else
      %Activity{} = activity -> {:ok, activity}
      error -> {:error, error}
    end
  end
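
  # Pushes "Create" and "Announce" activities to the streaming API: always to the
  # author's "user" stream, to "public" when addressed to the public collection,
  # and additionally to "public:local" for local activities.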
  def stream_out(activity) do
    if activity.data["type"] in ["Create", "Announce"] do
      Pleroma.Web.Streamer.stream("user", activity)

      if Enum.member?(activity.data["to"], "https://www.w3.org/ns/activitystreams#Public") do
        Pleroma.Web.Streamer.stream("public", activity)

        if activity.local do
          Pleroma.Web.Streamer.stream("public:local", activity)
        end
      end
    end
  end
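
  # Builds and inserts a "Create" activity wrapping the given object, federates it
  # if needed and increases the actor's note count.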
  def create(%{to: to, actor: actor, context: context, object: object} = params) do
    additional = params[:additional] || %{}
    # only accept false as false value
    local = !(params[:local] == false)
    published = params[:published]

    with create_data <-
           make_create_data(
             %{to: to, actor: actor, published: published, context: context, object: object},
             additional
           ),
         {:ok, activity} <- insert(create_data, local),
         :ok <- maybe_federate(activity),
         {:ok, _actor} <- User.increase_note_count(actor) do
      {:ok, activity}
    end
  end
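
  # Builds and inserts an "Accept" activity (e.g. for follow requests) and
  # federates it if needed.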
  def accept(%{to: to, actor: actor, object: object} = params) do
    # only accept false as false value
    local = !(params[:local] == false)

    with data <- %{"to" => to, "type" => "Accept", "actor" => actor, "object" => object},
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end
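
  # Builds and inserts an "Update" activity for the given object and federates it
  # if needed.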
  def update(%{to: to, cc: cc, actor: actor, object: object} = params) do
    # only accept false as false value
    local = !(params[:local] == false)

    with data <- %{
           "to" => to,
           "cc" => cc,
           "type" => "Update",
           "actor" => actor,
           "object" => object
         },
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end
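
  # Creates a "Like" activity for the given object unless the user has already
  # liked it, records the like on the object and federates the activity.
  # unlike/2 removes an existing like again.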
  # TODO: This is weird, maybe we shouldn't check here if we can make the activity.
  def like(
        %User{ap_id: ap_id} = user,
        %Object{data: %{"id" => _}} = object,
        activity_id \\ nil,
        local \\ true
      ) do
    with nil <- get_existing_like(ap_id, object),
         like_data <- make_like_data(user, object, activity_id),
         {:ok, activity} <- insert(like_data, local),
         {:ok, object} <- add_like_to_object(activity, object),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    else
      %Activity{} = activity -> {:ok, activity, object}
      error -> {:error, error}
    end
  end
  def unlike(%User{} = actor, %Object{} = object) do
    with %Activity{} = activity <- get_existing_like(actor.ap_id, object),
         {:ok, _activity} <- Repo.delete(activity),
         {:ok, object} <- remove_like_from_object(activity, object) do
      {:ok, object}
    else
      _e -> {:ok, object}
    end
  end
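
  # Creates an "Announce" (repeat) activity for a public object, records the
  # announce on the object and federates the activity.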
  def announce(
        %User{ap_id: _} = user,
        %Object{data: %{"id" => _}} = object,
        activity_id \\ nil,
        local \\ true
      ) do
    with true <- is_public?(object),
         announce_data <- make_announce_data(user, object, activity_id),
         {:ok, activity} <- insert(announce_data, local),
         {:ok, object} <- add_announce_to_object(activity, object),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    else
      error -> {:error, error}
    end
  end
  def follow(follower, followed, activity_id \\ nil, local \\ true) do
    with data <- make_follow_data(follower, followed, activity_id),
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end
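
  # Builds an "Undo" for the latest follow activity between the two users,
  # inserts it and federates it if needed.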
  def unfollow(follower, followed, local \\ true) do
    with %Activity{} = follow_activity <- fetch_latest_follow(follower, followed),
         unfollow_data <- make_unfollow_data(follower, followed, follow_activity),
         {:ok, activity} <- insert(unfollow_data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end
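
  # Deletes the object and its non-Create activities, then inserts and federates
  # a "Delete" activity addressed to the actor's followers and the public
  # collection, and decreases the actor's note count.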
  def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ true) do
    user = User.get_cached_by_ap_id(actor)

    data = %{
      "type" => "Delete",
      "actor" => actor,
      "object" => id,
      "to" => [user.follower_address, "https://www.w3.org/ns/activitystreams#Public"]
    }

    with Repo.delete(object),
         Repo.delete_all(Activity.all_non_create_by_object_ap_id_q(id)),
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity),
         {:ok, _actor} <- User.decrease_note_count(user) do
      {:ok, activity}
    end
  end
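
  # Fetches the "Create" activities of a conversation context that are visible to
  # the given user (or only public ones when no user is given), newest first.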
  def fetch_activities_for_context(context, opts \\ %{}) do
    public = ["https://www.w3.org/ns/activitystreams#Public"]

    recipients =
      if opts["user"], do: [opts["user"].ap_id | opts["user"].following] ++ public, else: public

    query = from(activity in Activity)

    query =
      query
      |> restrict_blocked(opts)
      |> restrict_recipients(recipients, opts["user"])

    query =
      from(
        activity in query,
        where:
          fragment(
            "?->>'type' = ? and ?->>'context' = ?",
            activity.data,
            "Create",
            activity.data,
            ^context
          ),
        order_by: [desc: :id]
      )

    Repo.all(query)
  end
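
  # Fetches activities addressed to the public collection, excluding unlisted ones.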
  def fetch_public_activities(opts \\ %{}) do
    q = fetch_activities_query(["https://www.w3.org/ns/activitystreams#Public"], opts)

    q
    |> restrict_unlisted()
    |> Repo.all()
    |> Enum.reverse()
  end
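
  # The restrict_* helpers below each narrow the activity query according to one
  # option (pagination ids, tag, recipients, actor, type, media, blocks, ...) and
  # pass the query through unchanged when that option is absent.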
  defp restrict_since(query, %{"since_id" => since_id}) do
    from(activity in query, where: activity.id > ^since_id)
  end

  defp restrict_since(query, _), do: query

  defp restrict_tag(query, %{"tag" => tag}) do
    from(
      activity in query,
      where: fragment("? <@ (? #> '{\"object\",\"tag\"}')", ^tag, activity.data)
    )
  end

  defp restrict_tag(query, _), do: query

  defp restrict_recipients(query, [], _user), do: query

  defp restrict_recipients(query, recipients, nil) do
    from(activity in query, where: fragment("? && ?", ^recipients, activity.recipients))
  end

  defp restrict_recipients(query, recipients, user) do
    from(
      activity in query,
      where: fragment("? && ?", ^recipients, activity.recipients),
      or_where: activity.actor == ^user.ap_id
    )
  end

  defp restrict_limit(query, %{"limit" => limit}) do
    from(activity in query, limit: ^limit)
  end

  defp restrict_limit(query, _), do: query

  defp restrict_local(query, %{"local_only" => true}) do
    from(activity in query, where: activity.local == true)
  end

  defp restrict_local(query, _), do: query

  defp restrict_max(query, %{"max_id" => max_id}) do
    from(activity in query, where: activity.id < ^max_id)
  end

  defp restrict_max(query, _), do: query

  defp restrict_actor(query, %{"actor_id" => actor_id}) do
    from(activity in query, where: activity.actor == ^actor_id)
  end

  defp restrict_actor(query, _), do: query

  defp restrict_type(query, %{"type" => type}) when is_binary(type) do
    restrict_type(query, %{"type" => [type]})
  end

  defp restrict_type(query, %{"type" => type}) do
    from(activity in query, where: fragment("?->>'type' = ANY(?)", activity.data, ^type))
  end

  defp restrict_type(query, _), do: query

  defp restrict_favorited_by(query, %{"favorited_by" => ap_id}) do
    from(
      activity in query,
      where: fragment("? <@ (? #> '{\"object\",\"likes\"}')", ^ap_id, activity.data)
    )
  end

  defp restrict_favorited_by(query, _), do: query

  defp restrict_media(query, %{"only_media" => val}) when val == "true" or val == "1" do
    from(
      activity in query,
      where: fragment("not (? #> '{\"object\",\"attachment\"}' = ?)", activity.data, ^[])
    )
  end

  defp restrict_media(query, _), do: query

  # Only search through last 100_000 activities by default
  defp restrict_recent(query, %{"whole_db" => true}), do: query

  defp restrict_recent(query, _) do
    since = (Repo.aggregate(Activity, :max, :id) || 0) - 100_000

    from(activity in query, where: activity.id > ^since)
  end

  defp restrict_blocked(query, %{"blocking_user" => %User{info: info}}) do
    blocks = info["blocks"] || []

    from(
      activity in query,
      where: fragment("not (? = ANY(?))", activity.actor, ^blocks),
      where: fragment("not (?->'to' \\?| ?)", activity.data, ^blocks)
    )
  end

  defp restrict_blocked(query, _), do: query

  defp restrict_unlisted(query) do
    from(
      activity in query,
      where:
        fragment(
          "not (?->'cc' \\?| ?)",
          activity.data,
          ^["https://www.w3.org/ns/activitystreams#Public"]
        )
    )
  end
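
  # Builds the base timeline query (20 activities, newest first) and applies the
  # restrict_* filters derived from opts; fetch_activities/2 runs the query and
  # returns the results in chronological order.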
  def fetch_activities_query(recipients, opts \\ %{}) do
    base_query =
      from(
        activity in Activity,
        limit: 20,
        order_by: [fragment("? desc nulls last", activity.id)]
      )

    base_query
    |> restrict_recipients(recipients, opts["user"])
    |> restrict_tag(opts)
    |> restrict_since(opts)
    |> restrict_local(opts)
    |> restrict_limit(opts)
    |> restrict_max(opts)
    |> restrict_actor(opts)
    |> restrict_type(opts)
    |> restrict_favorited_by(opts)
    |> restrict_recent(opts)
    |> restrict_blocked(opts)
    |> restrict_media(opts)
  end

  def fetch_activities(recipients, opts \\ %{}) do
    fetch_activities_query(recipients, opts)
    |> Repo.all()
    |> Enum.reverse()
  end
  def upload(file) do
    data = Upload.store(file)
    Repo.insert(%Object{data: data})
  end
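
  # Extracts the fields needed for a local User record from a remote ActivityPub
  # actor document (avatar, banner, nickname, follower address, bio, ...).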
  def user_data_from_user_object(data) do
    avatar =
      data["icon"]["url"] &&
        %{
          "type" => "Image",
          "url" => [%{"href" => data["icon"]["url"]}]
        }

    banner =
      data["image"]["url"] &&
        %{
          "type" => "Image",
          "url" => [%{"href" => data["image"]["url"]}]
        }

    user_data = %{
      ap_id: data["id"],
      info: %{
        "ap_enabled" => true,
        "source_data" => data,
        "banner" => banner
      },
      avatar: avatar,
      nickname: "#{data["preferredUsername"]}@#{URI.parse(data["id"]).host}",
      name: data["name"],
      follower_address: data["followers"],
      bio: data["summary"]
    }

    {:ok, user_data}
  end
  def fetch_and_prepare_user_from_ap_id(ap_id) do
    with {:ok, %{status_code: 200, body: body}} <-
           @httpoison.get(ap_id, Accept: "application/activity+json"),
         {:ok, data} <- Jason.decode(body) do
      user_data_from_user_object(data)
    else
      e -> Logger.error("Could not decode user at fetch #{ap_id}, #{inspect(e)}")
    end
  end
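
  # If the user already exists locally, upgrades them to ActivityPub via the
  # Transmogrifier; otherwise fetches their actor document and inserts a new user.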
  def make_user_from_ap_id(ap_id) do
    if _user = User.get_by_ap_id(ap_id) do
      Transmogrifier.upgrade_user_from_ap_id(ap_id)
    else
      with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do
        User.insert_or_update_user(data)
      else
        e -> {:error, e}
      end
    end
  end
  def make_user_from_nickname(nickname) do
    with {:ok, %{"ap_id" => ap_id}} when not is_nil(ap_id) <- WebFinger.finger(nickname) do
      make_user_from_ap_id(ap_id)
    else
      _e -> {:error, "No AP id in WebFinger"}
    end
  end
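
  # Collects the remote inboxes (using the shared inbox when one is advertised) of
  # all AP-enabled recipients, prepares the outgoing JSON and enqueues one delivery
  # job per inbox.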
  def publish(actor, activity) do
    followers =
      if actor.follower_address in activity.recipients do
        {:ok, followers} = User.get_followers(actor)
        followers |> Enum.filter(&(!&1.local))
      else
        []
      end

    remote_inboxes =
      (Pleroma.Web.Salmon.remote_users(activity) ++ followers)
      |> Enum.filter(fn user -> User.ap_enabled?(user) end)
      |> Enum.map(fn %{info: %{"source_data" => data}} ->
        (data["endpoints"] && data["endpoints"]["sharedInbox"]) || data["inbox"]
      end)
      |> Enum.uniq()

    {:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
    json = Jason.encode!(data)

    Enum.each(remote_inboxes, fn inbox ->
      Federator.enqueue(:publish_single_ap, %{
        inbox: inbox,
        json: json,
        actor: actor,
        id: activity.data["id"]
      })
    end)
  end
  def publish_one(%{inbox: inbox, json: json, actor: actor, id: id}) do
    Logger.info("Federating #{id} to #{inbox}")
    host = URI.parse(inbox).host

    signature =
      Pleroma.Web.HTTPSignatures.sign(actor, %{host: host, "content-length": byte_size(json)})

    @httpoison.post(
      inbox,
      json,
      [{"Content-Type", "application/activity+json"}, {"signature", signature}],
      hackney: [pool: :default]
    )
  end
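
  # Returns a cached object if available; otherwise fetches it over ActivityPub,
  # wraps it in a "Create" activity for internal handling, and falls back to
  # OStatus fetching when that fails.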
  # TODO:
  # This will create a Create activity, which we need internally at the moment.
  def fetch_object_from_id(id) do
    if object = Object.get_cached_by_ap_id(id) do
      {:ok, object}
    else
      Logger.info("Fetching #{id} via AP")

      with true <- String.starts_with?(id, "http"),
           {:ok, %{body: body, status_code: code}} when code in 200..299 <-
             @httpoison.get(
               id,
               [Accept: "application/activity+json"],
               follow_redirect: true,
               timeout: 10000,
               recv_timeout: 20000
             ),
           {:ok, data} <- Jason.decode(body),
           nil <- Object.get_by_ap_id(data["id"]),
           params <- %{
             "type" => "Create",
             "to" => data["to"],
             "cc" => data["cc"],
             "actor" => data["attributedTo"],
             "object" => data
           },
           {:ok, activity} <- Transmogrifier.handle_incoming(params) do
        {:ok, Object.get_by_ap_id(activity.data["object"]["id"])}
      else
        object = %Object{} ->
          {:ok, object}

        _e ->
          Logger.info("Couldn't get object via AP, trying out OStatus fetching...")

          case OStatus.fetch_activity_from_url(id) do
            {:ok, [activity | _]} -> {:ok, Object.get_by_ap_id(activity.data["object"]["id"])}
            e -> e
          end
      end
    end
  end
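
  # An activity is public when the public collection appears in its "to" or "cc";
  # visible_for_user?/2 additionally allows activities addressed to the user or to
  # anyone they follow.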
  def is_public?(activity) do
    "https://www.w3.org/ns/activitystreams#Public" in (activity.data["to"] ++
                                                         (activity.data["cc"] || []))
  end

  def visible_for_user?(activity, nil) do
    is_public?(activity)
  end

  def visible_for_user?(activity, user) do
    x = [user.ap_id | user.following]
    y = activity.data["to"] ++ (activity.data["cc"] || [])
    visible_for_user?(activity, nil) || Enum.any?(x, &(&1 in y))
  end
end