Merge remote-tracking branch 'upstream/develop' into neckbeard

Your New SJW Waifu 2020-04-01 12:12:21 -05:00
commit 76fc927650
103 changed files with 4719 additions and 1652 deletions

View File

@@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
### Added
- NodeInfo: `pleroma:api/v1/notifications:include_types_filter` to the `features` list.
- Configuration: `:restrict_unauthenticated` setting to restrict access for unauthenticated users to timelines (public and federated), user profiles and statuses.
- New HTTP adapter [gun](https://github.com/ninenines/gun). The Gun adapter requires a minimum OTP version of 22.2, otherwise Pleroma won't start; no OTP update is required for hackney (see the configuration sketch below).
<details>
<summary>API Changes</summary>
- Mastodon API: Support for `include_types` in `/api/v1/notifications`.
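For reference, a minimal configuration sketch showing how the options above might be enabled in `config/config.exs` (values are illustrative; the `statuses` key shape is inferred from the `profiles` entry added later in this diff):

```elixir
# Switch the HTTP client to the new gun-based adapter (requires OTP 22.2 or newer).
config :tesla, adapter: Tesla.Adapter.Gun

# Hide timelines, user profiles and statuses from unauthenticated users.
config :pleroma, :restrict_unauthenticated,
  timelines: %{local: true, federated: true},
  profiles: %{local: true, remote: true},
  statuses: %{local: true, remote: true}
```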

View File

@@ -12,7 +12,7 @@ RUN apk add git gcc g++ musl-dev make &&\
mkdir release &&\
mix release --path release
FROM alpine:3.9
FROM alpine:3.11
ARG BUILD_DATE
ARG VCS_REF
@@ -33,7 +33,7 @@ ARG DATA=/var/lib/pleroma
RUN echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/repositories &&\
apk update &&\
apk add ncurses postgresql-client &&\
apk add imagemagick ncurses postgresql-client &&\
adduser --system --shell /bin/false --home ${HOME} pleroma &&\
mkdir -p ${DATA}/uploads &&\
mkdir -p ${DATA}/static &&\

View File

@@ -0,0 +1,557 @@
defmodule Pleroma.LoadTesting.Activities do
@moduledoc """
Module for generating different activities.
"""
import Ecto.Query
import Pleroma.LoadTesting.Helper, only: [to_sec: 1]
alias Ecto.UUID
alias Pleroma.Constants
alias Pleroma.LoadTesting.Users
alias Pleroma.Repo
alias Pleroma.Web.CommonAPI
require Constants
@defaults [
iterations: 170,
friends_used: 20,
non_friends_used: 20
]
@max_concurrency 10
@visibility ~w(public private direct unlisted)
@types ~w(simple emoji mentions hell_thread attachment tag like reblog simple_thread remote)
@groups ~w(user friends non_friends)
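# Each iteration crosses every visibility with every activity type and actor group:
# 4 visibilities x 10 types x 3 groups = 120 task tuples per iteration, which is
# where the "about 120+ activities" figure in the mix task docs comes from.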
@spec generate(User.t(), keyword()) :: :ok
def generate(user, opts \\ []) do
{:ok, _} =
Agent.start_link(fn -> %{} end,
name: :benchmark_state
)
opts = Keyword.merge(@defaults, opts)
friends =
user
|> Users.get_users(limit: opts[:friends_used], local: :local, friends?: true)
|> Enum.shuffle()
non_friends =
user
|> Users.get_users(limit: opts[:non_friends_used], local: :local, friends?: false)
|> Enum.shuffle()
task_data =
for visibility <- @visibility,
type <- @types,
group <- @groups,
do: {visibility, type, group}
IO.puts("Starting generating #{opts[:iterations]} iterations of activities...")
friends_thread = Enum.take(friends, 5)
non_friends_thread = Enum.take(non_friends, 5)
public_long_thread = fn ->
generate_long_thread("public", user, friends_thread, non_friends_thread, opts)
end
private_long_thread = fn ->
generate_long_thread("private", user, friends_thread, non_friends_thread, opts)
end
iterations = opts[:iterations]
{time, _} =
:timer.tc(fn ->
Enum.each(
1..iterations,
fn
i when i == iterations - 2 ->
spawn(public_long_thread)
spawn(private_long_thread)
generate_activities(user, friends, non_friends, Enum.shuffle(task_data), opts)
_ ->
generate_activities(user, friends, non_friends, Enum.shuffle(task_data), opts)
end
)
end)
IO.puts("Generating iterations of activities took #{to_sec(time)} sec.\n")
:ok
end
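# Builds cumulative {lower, upper} probability intervals for a power series:
# interval i has a width proportional to (i + 1)^power, so higher-numbered tags
# end up attached to proportionally more of the generated activities.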
def generate_power_intervals(opts \\ []) do
count = Keyword.get(opts, :count, 20)
power = Keyword.get(opts, :power, 2)
IO.puts("Generating #{count} intervals for a power #{power} series...")
counts = Enum.map(1..count, fn n -> :math.pow(n, power) end)
sum = Enum.sum(counts)
densities =
Enum.map(counts, fn c ->
c / sum
end)
densities
|> Enum.reduce(0, fn density, acc ->
if acc == 0 do
[{0, density}]
else
[{_, lower} | _] = acc
[{lower, lower + density} | acc]
end
end)
|> Enum.reverse()
end
def generate_tagged_activities(opts \\ []) do
tag_count = Keyword.get(opts, :tag_count, 20)
users = Keyword.get(opts, :users, Repo.all(Pleroma.User))
activity_count = Keyword.get(opts, :count, 200_000)
intervals = generate_power_intervals(count: tag_count)
IO.puts(
"Generating #{activity_count} activities using #{tag_count} different tags of format `tag_n`, starting at tag_0"
)
Enum.each(1..activity_count, fn _ ->
random = :rand.uniform()
i = Enum.find_index(intervals, fn {lower, upper} -> lower <= random && upper > random end)
CommonAPI.post(Enum.random(users), %{"status" => "a post with the tag #tag_#{i}"})
end)
end
defp generate_long_thread(visibility, user, friends, non_friends, _opts) do
group =
if visibility == "public",
do: "friends",
else: "user"
tasks = get_reply_tasks(visibility, group) |> Stream.cycle() |> Enum.take(50)
{:ok, activity} =
CommonAPI.post(user, %{
"status" => "Start of #{visibility} long thread",
"visibility" => visibility
})
Agent.update(:benchmark_state, fn state ->
key =
if visibility == "public",
do: :public_thread,
else: :private_thread
Map.put(state, key, activity)
end)
acc = {activity.id, ["@" <> user.nickname, "reply to long thread"]}
insert_replies_for_long_thread(tasks, visibility, user, friends, non_friends, acc)
IO.puts("Generating #{visibility} long thread ended\n")
end
defp insert_replies_for_long_thread(tasks, visibility, user, friends, non_friends, acc) do
Enum.reduce(tasks, acc, fn
"friend", {id, data} ->
friend = Enum.random(friends)
insert_reply(friend, List.delete(data, "@" <> friend.nickname), id, visibility)
"non_friend", {id, data} ->
non_friend = Enum.random(non_friends)
insert_reply(non_friend, List.delete(data, "@" <> non_friend.nickname), id, visibility)
"user", {id, data} ->
insert_reply(user, List.delete(data, "@" <> user.nickname), id, visibility)
end)
end
defp generate_activities(user, friends, non_friends, task_data, opts) do
Task.async_stream(
task_data,
fn {visibility, type, group} ->
insert_activity(type, visibility, group, user, friends, non_friends, opts)
end,
max_concurrency: @max_concurrency,
timeout: 30_000
)
|> Stream.run()
end
defp insert_activity("simple", visibility, group, user, friends, non_friends, _opts) do
{:ok, _activity} =
group
|> get_actor(user, friends, non_friends)
|> CommonAPI.post(%{"status" => "Simple status", "visibility" => visibility})
end
defp insert_activity("emoji", visibility, group, user, friends, non_friends, _opts) do
{:ok, _activity} =
group
|> get_actor(user, friends, non_friends)
|> CommonAPI.post(%{
"status" => "Simple status with emoji :firefox:",
"visibility" => visibility
})
end
defp insert_activity("mentions", visibility, group, user, friends, non_friends, _opts) do
user_mentions =
get_random_mentions(friends, Enum.random(0..3)) ++
get_random_mentions(non_friends, Enum.random(0..3))
user_mentions =
if Enum.random([true, false]),
do: ["@" <> user.nickname | user_mentions],
else: user_mentions
{:ok, _activity} =
group
|> get_actor(user, friends, non_friends)
|> CommonAPI.post(%{
"status" => Enum.join(user_mentions, ", ") <> " simple status with mentions",
"visibility" => visibility
})
end
defp insert_activity("hell_thread", visibility, group, user, friends, non_friends, _opts) do
mentions =
with {:ok, nil} <- Cachex.get(:user_cache, "hell_thread_mentions") do
cached =
([user | Enum.take(friends, 10)] ++ Enum.take(non_friends, 10))
|> Enum.map(&"@#{&1.nickname}")
|> Enum.join(", ")
Cachex.put(:user_cache, "hell_thread_mentions", cached)
cached
else
{:ok, cached} -> cached
end
{:ok, _activity} =
group
|> get_actor(user, friends, non_friends)
|> CommonAPI.post(%{
"status" => mentions <> " hell thread status",
"visibility" => visibility
})
end
defp insert_activity("attachment", visibility, group, user, friends, non_friends, _opts) do
actor = get_actor(group, user, friends, non_friends)
obj_data = %{
"actor" => actor.ap_id,
"name" => "4467-11.jpg",
"type" => "Document",
"url" => [
%{
"href" =>
"#{Pleroma.Web.base_url()}/media/b1b873552422a07bf53af01f3c231c841db4dfc42c35efde681abaf0f2a4eab7.jpg",
"mediaType" => "image/jpeg",
"type" => "Link"
}
]
}
object = Repo.insert!(%Pleroma.Object{data: obj_data})
{:ok, _activity} =
CommonAPI.post(actor, %{
"status" => "Post with attachment",
"visibility" => visibility,
"media_ids" => [object.id]
})
end
defp insert_activity("tag", visibility, group, user, friends, non_friends, _opts) do
{:ok, _activity} =
group
|> get_actor(user, friends, non_friends)
|> CommonAPI.post(%{"status" => "Status with #tag", "visibility" => visibility})
end
defp insert_activity("like", visibility, group, user, friends, non_friends, opts) do
actor = get_actor(group, user, friends, non_friends)
with activity_id when not is_nil(activity_id) <- get_random_create_activity_id(),
{:ok, _activity, _object} <- CommonAPI.favorite(activity_id, actor) do
:ok
else
{:error, _} ->
insert_activity("like", visibility, group, user, friends, non_friends, opts)
nil ->
Process.sleep(15)
insert_activity("like", visibility, group, user, friends, non_friends, opts)
end
end
defp insert_activity("reblog", visibility, group, user, friends, non_friends, opts) do
actor = get_actor(group, user, friends, non_friends)
with activity_id when not is_nil(activity_id) <- get_random_create_activity_id(),
{:ok, _activity, _object} <- CommonAPI.repeat(activity_id, actor) do
:ok
else
{:error, _} ->
insert_activity("reblog", visibility, group, user, friends, non_friends, opts)
nil ->
Process.sleep(15)
insert_activity("reblog", visibility, group, user, friends, non_friends, opts)
end
end
defp insert_activity("simple_thread", visibility, group, user, friends, non_friends, _opts)
when visibility in ["public", "unlisted", "private"] do
actor = get_actor(group, user, friends, non_friends)
tasks = get_reply_tasks(visibility, group)
{:ok, activity} =
CommonAPI.post(user, %{"status" => "Simple status", "visibility" => "unlisted"})
acc = {activity.id, ["@" <> actor.nickname, "reply to status"]}
insert_replies(tasks, visibility, user, friends, non_friends, acc)
end
defp insert_activity("simple_thread", "direct", group, user, friends, non_friends, _opts) do
actor = get_actor(group, user, friends, non_friends)
tasks = get_reply_tasks("direct", group)
list =
case group do
"non_friends" ->
Enum.take(non_friends, 3)
_ ->
Enum.take(friends, 3)
end
data = Enum.map(list, &("@" <> &1.nickname))
{:ok, activity} =
CommonAPI.post(actor, %{
"status" => Enum.join(data, ", ") <> "simple status",
"visibility" => "direct"
})
acc = {activity.id, ["@" <> user.nickname | data] ++ ["reply to status"]}
insert_direct_replies(tasks, user, list, acc)
end
defp insert_activity("remote", _, "user", _, _, _, _), do: :ok
defp insert_activity("remote", visibility, group, user, _friends, _non_friends, opts) do
remote_friends =
Users.get_users(user, limit: opts[:friends_used], local: :external, friends?: true)
remote_non_friends =
Users.get_users(user, limit: opts[:non_friends_used], local: :external, friends?: false)
actor = get_actor(group, user, remote_friends, remote_non_friends)
{act_data, obj_data} = prepare_activity_data(actor, visibility, user)
{activity_data, object_data} = other_data(actor)
activity_data
|> Map.merge(act_data)
|> Map.put("object", Map.merge(object_data, obj_data))
|> Pleroma.Web.ActivityPub.ActivityPub.insert(false)
end
defp get_actor("user", user, _friends, _non_friends), do: user
defp get_actor("friends", _user, friends, _non_friends), do: Enum.random(friends)
defp get_actor("non_friends", _user, _friends, non_friends), do: Enum.random(non_friends)
defp other_data(actor) do
%{host: host} = URI.parse(actor.ap_id)
datetime = DateTime.utc_now()
context_id = "http://#{host}:4000/contexts/#{UUID.generate()}"
activity_id = "http://#{host}:4000/activities/#{UUID.generate()}"
object_id = "http://#{host}:4000/objects/#{UUID.generate()}"
activity_data = %{
"actor" => actor.ap_id,
"context" => context_id,
"id" => activity_id,
"published" => datetime,
"type" => "Create",
"directMessage" => false
}
object_data = %{
"actor" => actor.ap_id,
"attachment" => [],
"attributedTo" => actor.ap_id,
"bcc" => [],
"bto" => [],
"content" => "Remote post",
"context" => context_id,
"conversation" => context_id,
"emoji" => %{},
"id" => object_id,
"published" => datetime,
"sensitive" => false,
"summary" => "",
"tag" => [],
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
"type" => "Note"
}
{activity_data, object_data}
end
defp prepare_activity_data(actor, "public", _mention) do
obj_data = %{
"cc" => [actor.follower_address],
"to" => [Constants.as_public()]
}
act_data = %{
"cc" => [actor.follower_address],
"to" => [Constants.as_public()]
}
{act_data, obj_data}
end
defp prepare_activity_data(actor, "private", _mention) do
obj_data = %{
"cc" => [],
"to" => [actor.follower_address]
}
act_data = %{
"cc" => [],
"to" => [actor.follower_address]
}
{act_data, obj_data}
end
defp prepare_activity_data(actor, "unlisted", _mention) do
obj_data = %{
"cc" => [Constants.as_public()],
"to" => [actor.follower_address]
}
act_data = %{
"cc" => [Constants.as_public()],
"to" => [actor.follower_address]
}
{act_data, obj_data}
end
defp prepare_activity_data(_actor, "direct", mention) do
%{host: mentioned_host} = URI.parse(mention.ap_id)
obj_data = %{
"cc" => [],
"content" =>
"<span class=\"h-card\"><a class=\"u-url mention\" href=\"#{mention.ap_id}\" rel=\"ugc\">@<span>#{
mention.nickname
}</span></a></span> direct message",
"tag" => [
%{
"href" => mention.ap_id,
"name" => "@#{mention.nickname}@#{mentioned_host}",
"type" => "Mention"
}
],
"to" => [mention.ap_id]
}
act_data = %{
"cc" => [],
"directMessage" => true,
"to" => [mention.ap_id]
}
{act_data, obj_data}
end
defp get_reply_tasks("public", "user"), do: ~w(friend non_friend user)
defp get_reply_tasks("public", "friends"), do: ~w(non_friend user friend)
defp get_reply_tasks("public", "non_friends"), do: ~w(user friend non_friend)
defp get_reply_tasks(visibility, "user") when visibility in ["unlisted", "private"],
do: ~w(friend user friend)
defp get_reply_tasks(visibility, "friends") when visibility in ["unlisted", "private"],
do: ~w(user friend user)
defp get_reply_tasks(visibility, "non_friends") when visibility in ["unlisted", "private"],
do: []
defp get_reply_tasks("direct", "user"), do: ~w(friend user friend)
defp get_reply_tasks("direct", "friends"), do: ~w(user friend user)
defp get_reply_tasks("direct", "non_friends"), do: ~w(user non_friend user)
defp insert_replies(tasks, visibility, user, friends, non_friends, acc) do
Enum.reduce(tasks, acc, fn
"friend", {id, data} ->
friend = Enum.random(friends)
insert_reply(friend, data, id, visibility)
"non_friend", {id, data} ->
non_friend = Enum.random(non_friends)
insert_reply(non_friend, data, id, visibility)
"user", {id, data} ->
insert_reply(user, data, id, visibility)
end)
end
defp insert_direct_replies(tasks, user, list, acc) do
Enum.reduce(tasks, acc, fn
group, {id, data} when group in ["friend", "non_friend"] ->
actor = Enum.random(list)
{reply_id, _} =
insert_reply(actor, List.delete(data, "@" <> actor.nickname), id, "direct")
{reply_id, data}
"user", {id, data} ->
{reply_id, _} = insert_reply(user, List.delete(data, "@" <> user.nickname), id, "direct")
{reply_id, data}
end)
end
defp insert_reply(actor, data, activity_id, visibility) do
{:ok, reply} =
CommonAPI.post(actor, %{
"status" => Enum.join(data, ", "),
"visibility" => visibility,
"in_reply_to_status_id" => activity_id
})
{reply.id, ["@" <> actor.nickname | data]}
end
defp get_random_mentions(_users, count) when count == 0, do: []
defp get_random_mentions(users, count) do
users
|> Enum.shuffle()
|> Enum.take(count)
|> Enum.map(&"@#{&1.nickname}")
end
defp get_random_create_activity_id do
Repo.one(
from(a in Pleroma.Activity,
where: fragment("(?)->>'type' = ?", a.data, ^"Create"),
order_by: fragment("RANDOM()"),
limit: 1,
select: a.id
)
)
end
end
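For context, a minimal sketch of how these load-testing modules are driven, mirroring the `mix pleroma.load_testing` task further down in this diff and using the option values from its usage example:

```elixir
# Benchmark environment only: seed users, pick a main user, then generate activities for them.
user = Pleroma.LoadTesting.Users.generate(users: 20_000, friends: 1_000)

:ok =
  Pleroma.LoadTesting.Activities.generate(user,
    iterations: 170,
    friends_used: 20,
    non_friends_used: 20
  )
```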

View File

@@ -1,260 +1,489 @@
defmodule Pleroma.LoadTesting.Fetcher do
use Pleroma.LoadTesting.Helper
alias Pleroma.Activity
alias Pleroma.Pagination
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.MastodonAPI.MastodonAPI
alias Pleroma.Web.MastodonAPI.StatusView
def fetch_user(user) do
Benchee.run(%{
"By id" => fn -> Repo.get_by(User, id: user.id) end,
"By ap_id" => fn -> Repo.get_by(User, ap_id: user.ap_id) end,
"By email" => fn -> Repo.get_by(User, email: user.email) end,
"By nickname" => fn -> Repo.get_by(User, nickname: user.nickname) end
})
@spec run_benchmarks(User.t()) :: any()
def run_benchmarks(user) do
fetch_user(user)
fetch_timelines(user)
render_views(user)
end
def query_timelines(user) do
home_timeline_params = %{
"count" => 20,
"with_muted" => true,
"type" => ["Create", "Announce"],
defp formatters do
[
Benchee.Formatters.Console
]
end
defp fetch_user(user) do
Benchee.run(
%{
"By id" => fn -> Repo.get_by(User, id: user.id) end,
"By ap_id" => fn -> Repo.get_by(User, ap_id: user.ap_id) end,
"By email" => fn -> Repo.get_by(User, email: user.email) end,
"By nickname" => fn -> Repo.get_by(User, nickname: user.nickname) end
},
formatters: formatters()
)
end
defp fetch_timelines(user) do
fetch_home_timeline(user)
fetch_direct_timeline(user)
fetch_public_timeline(user)
fetch_public_timeline(user, :local)
fetch_public_timeline(user, :tag)
fetch_notifications(user)
fetch_favourites(user)
fetch_long_thread(user)
end
defp render_views(user) do
render_timelines(user)
render_long_thread(user)
end
defp opts_for_home_timeline(user) do
%{
"blocking_user" => user,
"count" => "20",
"muting_user" => user,
"type" => ["Create", "Announce"],
"user" => user,
"with_muted" => "true"
}
end
defp fetch_home_timeline(user) do
opts = opts_for_home_timeline(user)
recipients = [user.ap_id | User.following(user)]
first_page_last =
ActivityPub.fetch_activities(recipients, opts) |> Enum.reverse() |> List.last()
second_page_last =
ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", first_page_last.id))
|> Enum.reverse()
|> List.last()
third_page_last =
ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", second_page_last.id))
|> Enum.reverse()
|> List.last()
forth_page_last =
ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", third_page_last.id))
|> Enum.reverse()
|> List.last()
Benchee.run(
%{
"home timeline" => fn opts -> ActivityPub.fetch_activities(recipients, opts) end
},
inputs: %{
"1 page" => opts,
"2 page" => Map.put(opts, "max_id", first_page_last.id),
"3 page" => Map.put(opts, "max_id", second_page_last.id),
"4 page" => Map.put(opts, "max_id", third_page_last.id),
"5 page" => Map.put(opts, "max_id", forth_page_last.id),
"1 page only media" => Map.put(opts, "only_media", "true"),
"2 page only media" =>
Map.put(opts, "max_id", first_page_last.id) |> Map.put("only_media", "true"),
"3 page only media" =>
Map.put(opts, "max_id", second_page_last.id) |> Map.put("only_media", "true"),
"4 page only media" =>
Map.put(opts, "max_id", third_page_last.id) |> Map.put("only_media", "true"),
"5 page only media" =>
Map.put(opts, "max_id", forth_page_last.id) |> Map.put("only_media", "true")
},
formatters: formatters()
)
end
defp opts_for_direct_timeline(user) do
%{
:visibility => "direct",
"blocking_user" => user,
"count" => "20",
"type" => "Create",
"user" => user,
"with_muted" => "true"
}
end
defp fetch_direct_timeline(user) do
recipients = [user.ap_id]
opts = opts_for_direct_timeline(user)
first_page_last =
recipients
|> ActivityPub.fetch_activities_query(opts)
|> Pagination.fetch_paginated(opts)
|> List.last()
opts2 = Map.put(opts, "max_id", first_page_last.id)
second_page_last =
recipients
|> ActivityPub.fetch_activities_query(opts2)
|> Pagination.fetch_paginated(opts2)
|> List.last()
opts3 = Map.put(opts, "max_id", second_page_last.id)
third_page_last =
recipients
|> ActivityPub.fetch_activities_query(opts3)
|> Pagination.fetch_paginated(opts3)
|> List.last()
opts4 = Map.put(opts, "max_id", third_page_last.id)
forth_page_last =
recipients
|> ActivityPub.fetch_activities_query(opts4)
|> Pagination.fetch_paginated(opts4)
|> List.last()
Benchee.run(
%{
"direct timeline" => fn opts ->
ActivityPub.fetch_activities_query(recipients, opts) |> Pagination.fetch_paginated(opts)
end
},
inputs: %{
"1 page" => opts,
"2 page" => opts2,
"3 page" => opts3,
"4 page" => opts4,
"5 page" => Map.put(opts4, "max_id", forth_page_last.id)
},
formatters: formatters()
)
end
defp opts_for_public_timeline(user) do
%{
"type" => ["Create", "Announce"],
"local_only" => false,
"blocking_user" => user,
"muting_user" => user
}
end
defp opts_for_public_timeline(user, :local) do
%{
"type" => ["Create", "Announce"],
"local_only" => true,
"blocking_user" => user,
"muting_user" => user
}
end
defp opts_for_public_timeline(user, :tag) do
%{
"blocking_user" => user,
"count" => "20",
"local_only" => nil,
"muting_user" => user,
"tag" => ["tag"],
"tag_all" => [],
"tag_reject" => [],
"type" => "Create",
"user" => user,
"with_muted" => "true"
}
end
defp fetch_public_timeline(user) do
opts = opts_for_public_timeline(user)
fetch_public_timeline(opts, "public timeline")
end
defp fetch_public_timeline(user, :local) do
opts = opts_for_public_timeline(user, :local)
fetch_public_timeline(opts, "public timeline only local")
end
defp fetch_public_timeline(user, :tag) do
opts = opts_for_public_timeline(user, :tag)
fetch_public_timeline(opts, "hashtag timeline")
end
defp fetch_public_timeline(user, :only_media) do
opts = opts_for_public_timeline(user) |> Map.put("only_media", "true")
fetch_public_timeline(opts, "public timeline only media")
end
defp fetch_public_timeline(opts, title) when is_binary(title) do
first_page_last = ActivityPub.fetch_public_activities(opts) |> List.last()
second_page_last =
ActivityPub.fetch_public_activities(Map.put(opts, "max_id", first_page_last.id))
|> List.last()
third_page_last =
ActivityPub.fetch_public_activities(Map.put(opts, "max_id", second_page_last.id))
|> List.last()
forth_page_last =
ActivityPub.fetch_public_activities(Map.put(opts, "max_id", third_page_last.id))
|> List.last()
Benchee.run(
%{
title => fn opts ->
ActivityPub.fetch_public_activities(opts)
end
},
inputs: %{
"1 page" => opts,
"2 page" => Map.put(opts, "max_id", first_page_last.id),
"3 page" => Map.put(opts, "max_id", second_page_last.id),
"4 page" => Map.put(opts, "max_id", third_page_last.id),
"5 page" => Map.put(opts, "max_id", forth_page_last.id)
},
formatters: formatters()
)
end
defp opts_for_notifications do
%{"count" => "20", "with_muted" => "true"}
end
defp fetch_notifications(user) do
opts = opts_for_notifications()
first_page_last = MastodonAPI.get_notifications(user, opts) |> List.last()
second_page_last =
MastodonAPI.get_notifications(user, Map.put(opts, "max_id", first_page_last.id))
|> List.last()
third_page_last =
MastodonAPI.get_notifications(user, Map.put(opts, "max_id", second_page_last.id))
|> List.last()
forth_page_last =
MastodonAPI.get_notifications(user, Map.put(opts, "max_id", third_page_last.id))
|> List.last()
Benchee.run(
%{
"Notifications" => fn opts ->
MastodonAPI.get_notifications(user, opts)
end
},
inputs: %{
"1 page" => opts,
"2 page" => Map.put(opts, "max_id", first_page_last.id),
"3 page" => Map.put(opts, "max_id", second_page_last.id),
"4 page" => Map.put(opts, "max_id", third_page_last.id),
"5 page" => Map.put(opts, "max_id", forth_page_last.id)
},
formatters: formatters()
)
end
defp fetch_favourites(user) do
first_page_last = ActivityPub.fetch_favourites(user) |> List.last()
second_page_last =
ActivityPub.fetch_favourites(user, %{"max_id" => first_page_last.id}) |> List.last()
third_page_last =
ActivityPub.fetch_favourites(user, %{"max_id" => second_page_last.id}) |> List.last()
forth_page_last =
ActivityPub.fetch_favourites(user, %{"max_id" => third_page_last.id}) |> List.last()
Benchee.run(
%{
"Favourites" => fn opts ->
ActivityPub.fetch_favourites(user, opts)
end
},
inputs: %{
"1 page" => %{},
"2 page" => %{"max_id" => first_page_last.id},
"3 page" => %{"max_id" => second_page_last.id},
"4 page" => %{"max_id" => third_page_last.id},
"5 page" => %{"max_id" => forth_page_last.id}
},
formatters: formatters()
)
end
defp opts_for_long_thread(user) do
%{
"blocking_user" => user,
"user" => user
}
end
mastodon_public_timeline_params = %{
"count" => 20,
"local_only" => true,
"only_media" => "false",
"type" => ["Create", "Announce"],
"with_muted" => "true",
"blocking_user" => user,
"muting_user" => user
}
defp fetch_long_thread(user) do
%{public_thread: public, private_thread: private} =
Agent.get(:benchmark_state, fn state -> state end)
mastodon_federated_timeline_params = %{
"count" => 20,
"only_media" => "false",
"type" => ["Create", "Announce"],
"with_muted" => "true",
"blocking_user" => user,
"muting_user" => user
}
opts = opts_for_long_thread(user)
following = User.following(user)
private_input = {private.data["context"], Map.put(opts, "exclude_id", private.id)}
Benchee.run(%{
"User home timeline" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities(
following,
home_timeline_params
)
end,
"User mastodon public timeline" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(
mastodon_public_timeline_params
)
end,
"User mastodon federated public timeline" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(
mastodon_federated_timeline_params
)
end
})
public_input = {public.data["context"], Map.put(opts, "exclude_id", public.id)}
home_activities =
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities(
following,
home_timeline_params
Benchee.run(
%{
"fetch context" => fn {context, opts} ->
ActivityPub.fetch_activities_for_context(context, opts)
end
},
inputs: %{
"Private long thread" => private_input,
"Public long thread" => public_input
},
formatters: formatters()
)
end
defp render_timelines(user) do
opts = opts_for_home_timeline(user)
recipients = [user.ap_id | User.following(user)]
home_activities = ActivityPub.fetch_activities(recipients, opts) |> Enum.reverse()
recipients = [user.ap_id]
opts = opts_for_direct_timeline(user)
direct_activities =
recipients
|> ActivityPub.fetch_activities_query(opts)
|> Pagination.fetch_paginated(opts)
opts = opts_for_public_timeline(user)
public_activities = ActivityPub.fetch_public_activities(opts)
opts = opts_for_public_timeline(user, :tag)
tag_activities = ActivityPub.fetch_public_activities(opts)
opts = opts_for_notifications()
notifications = MastodonAPI.get_notifications(user, opts)
favourites = ActivityPub.fetch_favourites(user)
Benchee.run(
%{
"Rendering home timeline" => fn ->
StatusView.render("index.json", %{
activities: home_activities,
for: user,
as: :activity
})
end,
"Rendering direct timeline" => fn ->
StatusView.render("index.json", %{
activities: direct_activities,
for: user,
as: :activity
})
end,
"Rendering public timeline" => fn ->
StatusView.render("index.json", %{
activities: public_activities,
for: user,
as: :activity
})
end,
"Rendering tag timeline" => fn ->
StatusView.render("index.json", %{
activities: tag_activities,
for: user,
as: :activity
})
end,
"Rendering notifications" => fn ->
Pleroma.Web.MastodonAPI.NotificationView.render("index.json", %{
notifications: notifications,
for: user
})
end,
"Rendering favourites timeline" => fn ->
StatusView.render("index.json", %{
activities: favourites,
for: user,
as: :activity
})
end
},
formatters: formatters()
)
end
defp render_long_thread(user) do
%{public_thread: public, private_thread: private} =
Agent.get(:benchmark_state, fn state -> state end)
opts = %{for: user}
public_activity = Activity.get_by_id_with_object(public.id)
private_activity = Activity.get_by_id_with_object(private.id)
Benchee.run(
%{
"render" => fn opts ->
StatusView.render("show.json", opts)
end
},
inputs: %{
"Public root" => Map.put(opts, :activity, public_activity),
"Private root" => Map.put(opts, :activity, private_activity)
},
formatters: formatters()
)
fetch_opts = opts_for_long_thread(user)
public_context =
ActivityPub.fetch_activities_for_context(
public.data["context"],
Map.put(fetch_opts, "exclude_id", public.id)
)
public_activities =
Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(mastodon_public_timeline_params)
public_federated_activities =
Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(
mastodon_federated_timeline_params
private_context =
ActivityPub.fetch_activities_for_context(
private.data["context"],
Map.put(fetch_opts, "exclude_id", private.id)
)
Benchee.run(%{
"Rendering home timeline" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
activities: home_activities,
for: user,
as: :activity
})
end,
"Rendering public timeline" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
activities: public_activities,
for: user,
as: :activity
})
end,
"Rendering public federated timeline" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
activities: public_federated_activities,
for: user,
as: :activity
})
end,
"Rendering favorites timeline" => fn ->
conn = Phoenix.ConnTest.build_conn(:get, "http://localhost:4001/api/v1/favourites", nil)
Pleroma.Web.MastodonAPI.StatusController.favourites(
%Plug.Conn{conn |
assigns: %{user: user},
query_params: %{"limit" => "0"},
body_params: %{},
cookies: %{},
params: %{},
path_params: %{},
private: %{
Pleroma.Web.Router => {[], %{}},
phoenix_router: Pleroma.Web.Router,
phoenix_action: :favourites,
phoenix_controller: Pleroma.Web.MastodonAPI.StatusController,
phoenix_endpoint: Pleroma.Web.Endpoint,
phoenix_format: "json",
phoenix_layout: {Pleroma.Web.LayoutView, "app.html"},
phoenix_recycled: true,
phoenix_view: Pleroma.Web.MastodonAPI.StatusView,
plug_session: %{"user_id" => user.id},
plug_session_fetch: :done,
plug_session_info: :write,
plug_skip_csrf_protection: true
}
},
%{})
end,
})
end
def query_notifications(user) do
without_muted_params = %{"count" => "20", "with_muted" => "false"}
with_muted_params = %{"count" => "20", "with_muted" => "true"}
Benchee.run(%{
"Notifications without muted" => fn ->
Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, without_muted_params)
end,
"Notifications with muted" => fn ->
Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, with_muted_params)
end
})
without_muted_notifications =
Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, without_muted_params)
with_muted_notifications =
Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, with_muted_params)
Benchee.run(%{
"Render notifications without muted" => fn ->
Pleroma.Web.MastodonAPI.NotificationView.render("index.json", %{
notifications: without_muted_notifications,
for: user
})
end,
"Render notifications with muted" => fn ->
Pleroma.Web.MastodonAPI.NotificationView.render("index.json", %{
notifications: with_muted_notifications,
for: user
})
end
})
end
def query_dms(user) do
params = %{
"count" => "20",
"with_muted" => "true",
"type" => "Create",
"blocking_user" => user,
"user" => user,
visibility: "direct"
}
Benchee.run(%{
"Direct messages with muted" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
|> Pleroma.Pagination.fetch_paginated(params)
end,
"Direct messages without muted" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
|> Pleroma.Pagination.fetch_paginated(Map.put(params, "with_muted", false))
end
})
dms_with_muted =
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
|> Pleroma.Pagination.fetch_paginated(params)
dms_without_muted =
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
|> Pleroma.Pagination.fetch_paginated(Map.put(params, "with_muted", false))
Benchee.run(%{
"Rendering dms with muted" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
activities: dms_with_muted,
for: user,
as: :activity
})
end,
"Rendering dms without muted" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
activities: dms_without_muted,
for: user,
as: :activity
})
end
})
end
def query_long_thread(user, activity) do
Benchee.run(%{
"Fetch main post" => fn ->
Pleroma.Activity.get_by_id_with_object(activity.id)
end,
"Fetch context of main post" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_for_context(
activity.data["context"],
%{
"blocking_user" => user,
"user" => user,
"exclude_id" => activity.id
}
)
end
})
activity = Pleroma.Activity.get_by_id_with_object(activity.id)
context =
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_for_context(
activity.data["context"],
%{
"blocking_user" => user,
"user" => user,
"exclude_id" => activity.id
Benchee.run(
%{
"render" => fn opts ->
StatusView.render("context.json", opts)
end
},
inputs: %{
"Public context" => %{user: user, activity: public_activity, activities: public_context},
"Private context" => %{
user: user,
activity: private_activity,
activities: private_context
}
)
Benchee.run(%{
"Render status" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("show.json", %{
activity: activity,
for: user
})
end,
"Render context" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render(
"index.json",
for: user,
activities: context,
as: :activity
)
|> Enum.reverse()
end
})
},
formatters: formatters()
)
end
end

View File

@@ -1,410 +0,0 @@
defmodule Pleroma.LoadTesting.Generator do
use Pleroma.LoadTesting.Helper
alias Pleroma.Web.CommonAPI
def generate_like_activities(user, posts) do
count_likes = Kernel.trunc(length(posts) / 4)
IO.puts("Starting generating #{count_likes} like activities...")
{time, _} =
:timer.tc(fn ->
Task.async_stream(
Enum.take_random(posts, count_likes),
fn post -> {:ok, _, _} = CommonAPI.favorite(post.id, user) end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end)
IO.puts("Inserting like activities take #{to_sec(time)} sec.\n")
end
def generate_users(opts) do
IO.puts("Starting generating #{opts[:users_max]} users...")
{time, users} = :timer.tc(fn -> do_generate_users(opts) end)
IO.puts("Inserting users took #{to_sec(time)} sec.\n")
users
end
defp do_generate_users(opts) do
max = Keyword.get(opts, :users_max)
Task.async_stream(
1..max,
&generate_user_data(&1),
max_concurrency: 10,
timeout: 30_000
)
|> Enum.to_list()
end
defp generate_user_data(i) do
remote = Enum.random([true, false])
user = %User{
name: "Test テスト User #{i}",
email: "user#{i}@example.com",
nickname: "nick#{i}",
password_hash:
"$pbkdf2-sha512$160000$bU.OSFI7H/yqWb5DPEqyjw$uKp/2rmXw12QqnRRTqTtuk2DTwZfF8VR4MYW2xMeIlqPR/UX1nT1CEKVUx2CowFMZ5JON8aDvURrZpJjSgqXrg",
bio: "Tester Number #{i}",
local: remote
}
user_urls =
if remote do
base_url =
Enum.random(["https://domain1.com", "https://domain2.com", "https://domain3.com"])
ap_id = "#{base_url}/users/#{user.nickname}"
%{
ap_id: ap_id,
follower_address: ap_id <> "/followers",
following_address: ap_id <> "/following"
}
else
%{
ap_id: User.ap_id(user),
follower_address: User.ap_followers(user),
following_address: User.ap_following(user)
}
end
user = Map.merge(user, user_urls)
Repo.insert!(user)
end
def generate_activities(user, users) do
do_generate_activities(user, users)
end
defp do_generate_activities(user, users) do
IO.puts("Starting generating 20000 common activities...")
{time, _} =
:timer.tc(fn ->
Task.async_stream(
1..20_000,
fn _ ->
do_generate_activity([user | users])
end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end)
IO.puts("Inserting common activities take #{to_sec(time)} sec.\n")
IO.puts("Starting generating 20000 activities with mentions...")
{time, _} =
:timer.tc(fn ->
Task.async_stream(
1..20_000,
fn _ ->
do_generate_activity_with_mention(user, users)
end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end)
IO.puts("Inserting activities with menthions take #{to_sec(time)} sec.\n")
IO.puts("Starting generating 10000 activities with threads...")
{time, _} =
:timer.tc(fn ->
Task.async_stream(
1..10_000,
fn _ ->
do_generate_threads([user | users])
end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end)
IO.puts("Inserting activities with threads take #{to_sec(time)} sec.\n")
end
defp do_generate_activity(users) do
post = %{
"status" => "Some status without mention with random user"
}
CommonAPI.post(Enum.random(users), post)
end
def generate_power_intervals(opts \\ []) do
count = Keyword.get(opts, :count, 20)
power = Keyword.get(opts, :power, 2)
IO.puts("Generating #{count} intervals for a power #{power} series...")
counts = Enum.map(1..count, fn n -> :math.pow(n, power) end)
sum = Enum.sum(counts)
densities =
Enum.map(counts, fn c ->
c / sum
end)
densities
|> Enum.reduce(0, fn density, acc ->
if acc == 0 do
[{0, density}]
else
[{_, lower} | _] = acc
[{lower, lower + density} | acc]
end
end)
|> Enum.reverse()
end
def generate_tagged_activities(opts \\ []) do
tag_count = Keyword.get(opts, :tag_count, 20)
users = Keyword.get(opts, :users, Repo.all(User))
activity_count = Keyword.get(opts, :count, 200_000)
intervals = generate_power_intervals(count: tag_count)
IO.puts(
"Generating #{activity_count} activities using #{tag_count} different tags of format `tag_n`, starting at tag_0"
)
Enum.each(1..activity_count, fn _ ->
random = :rand.uniform()
i = Enum.find_index(intervals, fn {lower, upper} -> lower <= random && upper > random end)
CommonAPI.post(Enum.random(users), %{"status" => "a post with the tag #tag_#{i}"})
end)
end
defp do_generate_activity_with_mention(user, users) do
mentions_cnt = Enum.random([2, 3, 4, 5])
with_user = Enum.random([true, false])
users = Enum.shuffle(users)
mentions_users = Enum.take(users, mentions_cnt)
mentions_users = if with_user, do: [user | mentions_users], else: mentions_users
mentions_str =
Enum.map(mentions_users, fn user -> "@" <> user.nickname end) |> Enum.join(", ")
post = %{
"status" => mentions_str <> "some status with mentions random users"
}
CommonAPI.post(Enum.random(users), post)
end
defp do_generate_threads(users) do
thread_length = Enum.random([2, 3, 4, 5])
actor = Enum.random(users)
post = %{
"status" => "Start of the thread"
}
{:ok, activity} = CommonAPI.post(actor, post)
Enum.each(1..thread_length, fn _ ->
user = Enum.random(users)
post = %{
"status" => "@#{actor.nickname} reply to thread",
"in_reply_to_status_id" => activity.id
}
CommonAPI.post(user, post)
end)
end
def generate_remote_activities(user, users) do
do_generate_remote_activities(user, users)
end
defp do_generate_remote_activities(user, users) do
IO.puts("Starting generating 10000 remote activities...")
{time, _} =
:timer.tc(fn ->
Task.async_stream(
1..10_000,
fn i ->
do_generate_remote_activity(i, user, users)
end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end)
IO.puts("Inserting remote activities take #{to_sec(time)} sec.\n")
end
defp do_generate_remote_activity(i, user, users) do
actor = Enum.random(users)
%{host: host} = URI.parse(actor.ap_id)
date = Date.utc_today()
datetime = DateTime.utc_now()
map = %{
"actor" => actor.ap_id,
"cc" => [actor.follower_address, user.ap_id],
"context" => "tag:mastodon.example.org,#{date}:objectId=#{i}:objectType=Conversation",
"id" => actor.ap_id <> "/statuses/#{i}/activity",
"object" => %{
"actor" => actor.ap_id,
"atomUri" => actor.ap_id <> "/statuses/#{i}",
"attachment" => [],
"attributedTo" => actor.ap_id,
"bcc" => [],
"bto" => [],
"cc" => [actor.follower_address, user.ap_id],
"content" =>
"<p><span class=\"h-card\"><a href=\"" <>
user.ap_id <>
"\" class=\"u-url mention\">@<span>" <> user.nickname <> "</span></a></span></p>",
"context" => "tag:mastodon.example.org,#{date}:objectId=#{i}:objectType=Conversation",
"conversation" =>
"tag:mastodon.example.org,#{date}:objectId=#{i}:objectType=Conversation",
"emoji" => %{},
"id" => actor.ap_id <> "/statuses/#{i}",
"inReplyTo" => nil,
"inReplyToAtomUri" => nil,
"published" => datetime,
"sensitive" => true,
"summary" => "cw",
"tag" => [
%{
"href" => user.ap_id,
"name" => "@#{user.nickname}@#{host}",
"type" => "Mention"
}
],
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
"type" => "Note",
"url" => "http://#{host}/@#{actor.nickname}/#{i}"
},
"published" => datetime,
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
"type" => "Create"
}
Pleroma.Web.ActivityPub.ActivityPub.insert(map, false)
end
def generate_dms(user, users, opts) do
IO.puts("Starting generating #{opts[:dms_max]} DMs")
{time, _} = :timer.tc(fn -> do_generate_dms(user, users, opts) end)
IO.puts("Inserting dms take #{to_sec(time)} sec.\n")
end
defp do_generate_dms(user, users, opts) do
Task.async_stream(
1..opts[:dms_max],
fn _ ->
do_generate_dm(user, users)
end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end
defp do_generate_dm(user, users) do
post = %{
"status" => "@#{user.nickname} some direct message",
"visibility" => "direct"
}
CommonAPI.post(Enum.random(users), post)
end
def generate_long_thread(user, users, opts) do
IO.puts("Starting generating long thread with #{opts[:thread_length]} replies")
{time, activity} = :timer.tc(fn -> do_generate_long_thread(user, users, opts) end)
IO.puts("Inserting long thread replies take #{to_sec(time)} sec.\n")
{:ok, activity}
end
defp do_generate_long_thread(user, users, opts) do
{:ok, %{id: id} = activity} = CommonAPI.post(user, %{"status" => "Start of long thread"})
Task.async_stream(
1..opts[:thread_length],
fn _ -> do_generate_thread(users, id) end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
activity
end
defp do_generate_thread(users, activity_id) do
CommonAPI.post(Enum.random(users), %{
"status" => "reply to main post",
"in_reply_to_status_id" => activity_id
})
end
def generate_non_visible_message(user, users) do
IO.puts("Starting generating 1000 non visible posts")
{time, _} =
:timer.tc(fn ->
do_generate_non_visible_posts(user, users)
end)
IO.puts("Inserting non visible posts take #{to_sec(time)} sec.\n")
end
defp do_generate_non_visible_posts(user, users) do
[not_friend | users] = users
make_friends(user, users)
Task.async_stream(1..1000, fn _ -> do_generate_non_visible_post(not_friend, users) end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end
defp make_friends(_user, []), do: nil
defp make_friends(user, [friend | users]) do
{:ok, _} = User.follow(user, friend)
{:ok, _} = User.follow(friend, user)
make_friends(user, users)
end
defp do_generate_non_visible_post(not_friend, users) do
post = %{
"status" => "some non visible post",
"visibility" => "private"
}
{:ok, activity} = CommonAPI.post(not_friend, post)
thread_length = Enum.random([2, 3, 4, 5])
Enum.each(1..thread_length, fn _ ->
user = Enum.random(users)
post = %{
"status" => "@#{not_friend.nickname} reply to non visible post",
"in_reply_to_status_id" => activity.id,
"visibility" => "private"
}
CommonAPI.post(user, post)
end)
end
end

View File

@@ -1,11 +1,14 @@
defmodule Pleroma.LoadTesting.Helper do
defmacro __using__(_) do
quote do
import Ecto.Query
alias Pleroma.Repo
alias Pleroma.User
alias Ecto.Adapters.SQL
alias Pleroma.Repo
defp to_sec(microseconds), do: microseconds / 1_000_000
end
def to_sec(microseconds), do: microseconds / 1_000_000
def clean_tables do
IO.puts("Deleting old data...\n")
SQL.query!(Repo, "TRUNCATE users CASCADE;")
SQL.query!(Repo, "TRUNCATE activities CASCADE;")
SQL.query!(Repo, "TRUNCATE objects CASCADE;")
SQL.query!(Repo, "TRUNCATE oban_jobs CASCADE;")
end
end

View File

@@ -0,0 +1,169 @@
defmodule Pleroma.LoadTesting.Users do
@moduledoc """
Module for generating users with friends.
"""
import Ecto.Query
import Pleroma.LoadTesting.Helper, only: [to_sec: 1]
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.User.Query
@defaults [
users: 20_000,
friends: 100
]
@max_concurrency 10
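# With the defaults this creates 20_000 users (each randomly local or remote),
# picks a random local user as the main user, and makes that user mutual friends
# with the configured number of them (`friends:`), split evenly between local and
# remote accounts.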
@spec generate(keyword()) :: User.t()
def generate(opts \\ []) do
opts = Keyword.merge(@defaults, opts)
generate_users(opts[:users])
main_user =
Repo.one(from(u in User, where: u.local == true, order_by: fragment("RANDOM()"), limit: 1))
make_friends(main_user, opts[:friends])
Repo.get(User, main_user.id)
end
def generate_users(max) do
IO.puts("Starting generating #{max} users...")
{time, users} =
:timer.tc(fn ->
Task.async_stream(
1..max,
&generate_user(&1),
max_concurrency: @max_concurrency,
timeout: 30_000
)
|> Enum.to_list()
end)
IO.puts("Generating users took #{to_sec(time)} sec.\n")
users
end
defp generate_user(i) do
remote = Enum.random([true, false])
%User{
name: "Test テスト User #{i}",
email: "user#{i}@example.com",
nickname: "nick#{i}",
password_hash: Comeonin.Pbkdf2.hashpwsalt("test"),
bio: "Tester Number #{i}",
local: !remote
}
|> user_urls()
|> Repo.insert!()
end
defp user_urls(%{local: true} = user) do
urls = %{
ap_id: User.ap_id(user),
follower_address: User.ap_followers(user),
following_address: User.ap_following(user)
}
Map.merge(user, urls)
end
defp user_urls(%{local: false} = user) do
base_domain = Enum.random(["domain1.com", "domain2.com", "domain3.com"])
ap_id = "https://#{base_domain}/users/#{user.nickname}"
urls = %{
ap_id: ap_id,
follower_address: ap_id <> "/followers",
following_address: ap_id <> "/following"
}
Map.merge(user, urls)
end
def make_friends(main_user, max) when is_integer(max) do
IO.puts("Starting making friends for #{max} users...")
{time, _} =
:timer.tc(fn ->
number_of_users =
(max / 2)
|> Kernel.trunc()
main_user
|> get_users(%{limit: number_of_users, local: :local})
|> run_stream(main_user)
main_user
|> get_users(%{limit: number_of_users, local: :external})
|> run_stream(main_user)
end)
IO.puts("Making friends took #{to_sec(time)} sec.\n")
end
def make_friends(%User{} = main_user, %User{} = user) do
{:ok, _} = User.follow(main_user, user)
{:ok, _} = User.follow(user, main_user)
end
@spec get_users(User.t(), keyword()) :: [User.t()]
def get_users(user, opts) do
criteria = %{limit: opts[:limit]}
criteria =
if opts[:local] do
Map.put(criteria, opts[:local], true)
else
criteria
end
criteria =
if opts[:friends?] do
Map.put(criteria, :friends, user)
else
criteria
end
query =
criteria
|> Query.build()
|> random_without_user(user)
query =
if opts[:friends?] == false do
friends_ids =
%{friends: user}
|> Query.build()
|> Repo.all()
|> Enum.map(& &1.id)
from(u in query, where: u.id not in ^friends_ids)
else
query
end
Repo.all(query)
end
defp random_without_user(query, user) do
from(u in query,
where: u.id != ^user.id,
order_by: fragment("RANDOM()")
)
end
defp run_stream(users, main_user) do
Task.async_stream(users, &make_friends(main_user, &1),
max_concurrency: @max_concurrency,
timeout: 30_000
)
|> Stream.run()
end
end

View File

@@ -1,9 +1,12 @@
defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
use Mix.Task
alias Pleroma.Repo
alias Pleroma.LoadTesting.Generator
import Pleroma.LoadTesting.Helper, only: [clean_tables: 0]
import Ecto.Query
alias Pleroma.Repo
alias Pleroma.Web.MastodonAPI.TimelineController
def run(_args) do
Mix.Pleroma.start_pleroma()
activities_count = Repo.aggregate(from(a in Pleroma.Activity), :count, :id)
@@ -11,8 +14,8 @@ defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
if activities_count == 0 do
IO.puts("Did not find any activities, cleaning and generating")
clean_tables()
Generator.generate_users(users_max: 10)
Generator.generate_tagged_activities()
Pleroma.LoadTesting.Users.generate_users(10)
Pleroma.LoadTesting.Activities.generate_tagged_activities()
else
IO.puts("Found #{activities_count} activities, won't generate new ones")
end
@@ -34,7 +37,7 @@ defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
Benchee.run(
%{
"Hashtag fetching, any" => fn tags ->
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
TimelineController.hashtag_fetching(
%{
"any" => tags
},
@@ -44,7 +47,7 @@ defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
end,
# Will always return zero results because no overlapping hashtags are generated.
"Hashtag fetching, all" => fn tags ->
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
TimelineController.hashtag_fetching(
%{
"all" => tags
},
@@ -64,7 +67,7 @@ defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
Benchee.run(
%{
"Hashtag fetching" => fn tag ->
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
TimelineController.hashtag_fetching(
%{
"tag" => tag
},
@@ -77,11 +80,4 @@ defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
time: 5
)
end
defp clean_tables do
IO.puts("Deleting old data...\n")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
end
end

View File

@@ -1,9 +1,10 @@
defmodule Mix.Tasks.Pleroma.Benchmarks.Timelines do
use Mix.Task
alias Pleroma.Repo
alias Pleroma.LoadTesting.Generator
import Pleroma.LoadTesting.Helper, only: [clean_tables: 0]
alias Pleroma.Web.CommonAPI
alias Plug.Conn
def run(_args) do
Mix.Pleroma.start_pleroma()
@@ -11,7 +12,7 @@ defmodule Mix.Tasks.Pleroma.Benchmarks.Timelines do
# Cleaning tables
clean_tables()
[{:ok, user} | users] = Generator.generate_users(users_max: 1000)
[{:ok, user} | users] = Pleroma.LoadTesting.Users.generate_users(1000)
# Let the user make 100 posts
@@ -38,8 +39,8 @@ defmodule Mix.Tasks.Pleroma.Benchmarks.Timelines do
"user timeline, no followers" => fn reading_user ->
conn =
Phoenix.ConnTest.build_conn()
|> Plug.Conn.assign(:user, reading_user)
|> Plug.Conn.assign(:skip_link_headers, true)
|> Conn.assign(:user, reading_user)
|> Conn.assign(:skip_link_headers, true)
Pleroma.Web.MastodonAPI.AccountController.statuses(conn, %{"id" => user.id})
end
@@ -56,8 +57,8 @@ defmodule Mix.Tasks.Pleroma.Benchmarks.Timelines do
"user timeline, all following" => fn reading_user ->
conn =
Phoenix.ConnTest.build_conn()
|> Plug.Conn.assign(:user, reading_user)
|> Plug.Conn.assign(:skip_link_headers, true)
|> Conn.assign(:user, reading_user)
|> Conn.assign(:skip_link_headers, true)
Pleroma.Web.MastodonAPI.AccountController.statuses(conn, %{"id" => user.id})
end
@@ -66,11 +67,4 @@ defmodule Mix.Tasks.Pleroma.Benchmarks.Timelines do
time: 60
)
end
defp clean_tables do
IO.puts("Deleting old data...\n")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
end
end

View File

@@ -1,114 +1,55 @@
defmodule Mix.Tasks.Pleroma.LoadTesting do
use Mix.Task
use Pleroma.LoadTesting.Helper
import Mix.Pleroma
import Pleroma.LoadTesting.Generator
import Pleroma.LoadTesting.Fetcher
import Ecto.Query
import Pleroma.LoadTesting.Helper, only: [clean_tables: 0]
alias Pleroma.Repo
alias Pleroma.User
@shortdoc "Factory for generation data"
@moduledoc """
Generates data like:
- local/remote users
- local/remote activities with notifications
- direct messages
- long thread
- non visible posts
- local/remote activities with different visibility:
- simple activities
- with emoji
- with mentions
- hellthreads
- with attachments
- with tags
- likes
- reblogs
- simple threads
- long threads
## Generate data
MIX_ENV=benchmark mix pleroma.load_testing --users 20000 --dms 20000 --thread_length 2000
MIX_ENV=benchmark mix pleroma.load_testing -u 20000 -d 20000 -t 2000
MIX_ENV=benchmark mix pleroma.load_testing --users 20000 --friends 1000 --iterations 170 --friends_used 20 --non_friends_used 20
MIX_ENV=benchmark mix pleroma.load_testing -u 20000 -f 1000 -i 170 -fu 20 -nfu 20
Options:
- `--users NUMBER` - number of users to generate. Defaults to: 20000. Alias: `-u`
- `--dms NUMBER` - number of direct messages to generate. Defaults to: 20000. Alias `-d`
- `--thread_length` - number of messages in thread. Defaults to: 2000. ALias `-t`
- `--friends NUMBER` - number of friends for main user. Defaults to: 1000. Alias: `-f`
- `--iterations NUMBER` - number of iterations to generate activities. Each iteration inserts about 120+ activities with different visibility, actors and types into the database. Defaults to: 170. Alias: `-i`
- `--friends_used NUMBER` - number of main user friends used in activity generation. Defaults to: 20. Alias: `-fu`
- `--non_friends_used NUMBER` - number of non friends used in activity generation. Defaults to: 20. Alias: `-nfu`
"""
@aliases [u: :users, d: :dms, t: :thread_length]
@aliases [u: :users, f: :friends, i: :iterations, fu: :friends_used, nfu: :non_friends_used]
@switches [
users: :integer,
dms: :integer,
thread_length: :integer
friends: :integer,
iterations: :integer,
friends_used: :integer,
non_friends_used: :integer
]
@users_default 20_000
@dms_default 1_000
@thread_length_default 2_000
def run(args) do
start_pleroma()
Pleroma.Config.put([:instance, :skip_thread_containment], true)
Mix.Pleroma.start_pleroma()
clean_tables()
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
users_max = Keyword.get(opts, :users, @users_default)
dms_max = Keyword.get(opts, :dms, @dms_default)
thread_length = Keyword.get(opts, :thread_length, @thread_length_default)
clean_tables()
opts =
Keyword.put(opts, :users_max, users_max)
|> Keyword.put(:dms_max, dms_max)
|> Keyword.put(:thread_length, thread_length)
generate_users(opts)
# main user for queries
IO.puts("Fetching local main user...")
{time, user} =
:timer.tc(fn ->
Repo.one(
from(u in User, where: u.local == true, order_by: fragment("RANDOM()"), limit: 1)
)
end)
IO.puts("Fetching main user take #{to_sec(time)} sec.\n")
IO.puts("Fetching local users...")
{time, users} =
:timer.tc(fn ->
Repo.all(
from(u in User,
where: u.id != ^user.id,
where: u.local == true,
order_by: fragment("RANDOM()"),
limit: 10
)
)
end)
IO.puts("Fetching local users take #{to_sec(time)} sec.\n")
IO.puts("Fetching remote users...")
{time, remote_users} =
:timer.tc(fn ->
Repo.all(
from(u in User,
where: u.id != ^user.id,
where: u.local == false,
order_by: fragment("RANDOM()"),
limit: 10
)
)
end)
IO.puts("Fetching remote users take #{to_sec(time)} sec.\n")
generate_activities(user, users)
generate_remote_activities(user, remote_users)
generate_like_activities(
user, Pleroma.Repo.all(Pleroma.Activity.Queries.by_type("Create"))
)
generate_dms(user, users, opts)
{:ok, activity} = generate_long_thread(user, users, opts)
generate_non_visible_message(user, users)
user = Pleroma.LoadTesting.Users.generate(opts)
Pleroma.LoadTesting.Activities.generate(user, opts)
IO.puts("Users in DB: #{Repo.aggregate(from(u in User), :count, :id)}")
@@ -120,19 +61,6 @@ defmodule Mix.Tasks.Pleroma.LoadTesting do
"Notifications in DB: #{Repo.aggregate(from(n in Pleroma.Notification), :count, :id)}"
)
fetch_user(user)
query_timelines(user)
query_notifications(user)
query_dms(user)
query_long_thread(user, activity)
Pleroma.Config.put([:instance, :skip_thread_containment], false)
query_timelines(user)
end
defp clean_tables do
IO.puts("Deleting old data...\n")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
Pleroma.LoadTesting.Fetcher.run_benchmarks(user)
end
end

View File

@@ -39,7 +39,7 @@ config :pleroma, Pleroma.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "pleroma_test",
database: "pleroma_benchmark",
hostname: System.get_env("DB_HOST") || "localhost",
pool_size: 10

View File

@@ -58,20 +58,6 @@ config :pleroma, Pleroma.Captcha,
config :pleroma, Pleroma.Captcha.Kocaptcha, endpoint: "https://captcha.kotobank.ch"
config :pleroma, :hackney_pools,
federation: [
max_connections: 50,
timeout: 150_000
],
media: [
max_connections: 50,
timeout: 150_000
],
upload: [
max_connections: 25,
timeout: 300_000
]
# Upload configuration
config :pleroma, Pleroma.Upload,
uploader: Pleroma.Uploaders.Local,
@@ -184,21 +170,13 @@ config :mime, :types, %{
"application/ld+json" => ["activity+json"]
}
config :tesla, adapter: Tesla.Adapter.Hackney
config :tesla, adapter: Tesla.Adapter.Gun
# Configures http settings, upstream proxy etc.
config :pleroma, :http,
proxy_url: nil,
send_user_agent: true,
user_agent: :default,
adapter: [
ssl_options: [
# Workaround for remote server certificate chain issues
partial_chain: &:hackney_connect.partial_chain/1,
# We don't support TLS v1.3 yet
versions: [:tlsv1, :"tlsv1.1", :"tlsv1.2"]
]
]
adapter: []
config :pleroma, :instance,
name: "Pleroma",
@@ -624,6 +602,49 @@ config :pleroma, Pleroma.Repo,
parameters: [gin_fuzzy_search_limit: "500"],
prepare: :unnamed
config :pleroma, :connections_pool,
checkin_timeout: 250,
max_connections: 250,
retry: 1,
retry_timeout: 1000,
await_up_timeout: 5_000
config :pleroma, :pools,
federation: [
size: 50,
max_overflow: 10,
timeout: 150_000
],
media: [
size: 50,
max_overflow: 10,
timeout: 150_000
],
upload: [
size: 25,
max_overflow: 5,
timeout: 300_000
],
default: [
size: 10,
max_overflow: 2,
timeout: 10_000
]
config :pleroma, :hackney_pools,
federation: [
max_connections: 50,
timeout: 150_000
],
media: [
max_connections: 50,
timeout: 150_000
],
upload: [
max_connections: 25,
timeout: 300_000
]
config :pleroma, :restrict_unauthenticated,
timelines: %{local: false, federated: false},
profiles: %{local: false, remote: false},

View File

@@ -2916,6 +2916,219 @@ config :pleroma, :config_description, [
}
]
},
%{
group: :pleroma,
key: :connections_pool,
type: :group,
description: "Advanced settings for `gun` connections pool",
children: [
%{
key: :checkin_timeout,
type: :integer,
description: "Timeout to checkin connection from pool. Default: 250ms.",
suggestions: [250]
},
%{
key: :max_connections,
type: :integer,
description: "Maximum number of connections in the pool. Default: 250 connections.",
suggestions: [250]
},
%{
key: :retry,
type: :integer,
description:
"Number of retries, while `gun` will try to reconnect if connection goes down. Default: 1.",
suggestions: [1]
},
%{
key: :retry_timeout,
type: :integer,
description:
"Time between retries when `gun` will try to reconnect in milliseconds. Default: 1000ms.",
suggestions: [1000]
},
%{
key: :await_up_timeout,
type: :integer,
description: "Timeout while `gun` will wait until connection is up. Default: 5000ms.",
suggestions: [5000]
}
]
},
%{
group: :pleroma,
key: :pools,
type: :group,
description: "Advanced settings for `gun` workers pools",
children: [
%{
key: :federation,
type: :keyword,
description: "Settings for federation pool.",
children: [
%{
key: :size,
type: :integer,
description: "Number workers in the pool.",
suggestions: [50]
},
%{
key: :max_overflow,
type: :integer,
description: "Number of additional workers if pool is under load.",
suggestions: [10]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `gun` will wait for response.",
suggestions: [150_000]
}
]
},
%{
key: :media,
type: :keyword,
description: "Settings for media pool.",
children: [
%{
key: :size,
type: :integer,
description: "Number workers in the pool.",
suggestions: [50]
},
%{
key: :max_overflow,
type: :integer,
description: "Number of additional workers if pool is under load.",
suggestions: [10]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `gun` will wait for response.",
suggestions: [150_000]
}
]
},
%{
key: :upload,
type: :keyword,
description: "Settings for upload pool.",
children: [
%{
key: :size,
type: :integer,
description: "Number workers in the pool.",
suggestions: [25]
},
%{
key: :max_overflow,
type: :integer,
description: "Number of additional workers if pool is under load.",
suggestions: [5]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `gun` will wait for response.",
suggestions: [300_000]
}
]
},
%{
key: :default,
type: :keyword,
description: "Settings for default pool.",
children: [
%{
key: :size,
type: :integer,
description: "Number workers in the pool.",
suggestions: [10]
},
%{
key: :max_overflow,
type: :integer,
description: "Number of additional workers if pool is under load.",
suggestions: [2]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `gun` will wait for response.",
suggestions: [10_000]
}
]
}
]
},
%{
group: :pleroma,
key: :hackney_pools,
type: :group,
description: "Advanced settings for `hackney` connections pools",
children: [
%{
key: :federation,
type: :keyword,
description: "Settings for federation pool.",
children: [
%{
key: :max_connections,
type: :integer,
description: "Number workers in the pool.",
suggestions: [50]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `hackney` will wait for response.",
suggestions: [150_000]
}
]
},
%{
key: :media,
type: :keyword,
description: "Settings for media pool.",
children: [
%{
key: :max_connections,
type: :integer,
description: "Number workers in the pool.",
suggestions: [50]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `hackney` will wait for response.",
suggestions: [150_000]
}
]
},
%{
key: :upload,
type: :keyword,
description: "Settings for upload pool.",
children: [
%{
key: :max_connections,
type: :integer,
description: "Number workers in the pool.",
suggestions: [25]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `hackney` will wait for response.",
suggestions: [300_000]
}
]
}
]
},
%{
group: :pleroma,
key: :restrict_unauthenticated,

View File

@ -90,6 +90,8 @@ config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
config :pleroma, :modules, runtime_dir: "test/fixtures/modules"
config :pleroma, Pleroma.Gun, Pleroma.GunMock
config :pleroma, Pleroma.Emails.NewUsersDigestEmail, enabled: true
config :pleroma, Pleroma.Plugs.RemoteIp, enabled: false

View File

@ -841,6 +841,8 @@ Some modifications are necessary to save the config settings correctly:
Most of the settings will be applied in `runtime`; this means that you don't need to restart the instance. But some settings are applied in `compile time` and require a reboot of the instance, such as:
- all settings inside these keys:
- `:hackney_pools`
- `:connections_pool`
- `:pools`
- `:chat`
- partially settings inside these keys:
- `:seconds_valid` in `Pleroma.Captcha`

View File

@ -369,8 +369,7 @@ Available caches:
* `proxy_url`: an upstream proxy to fetch posts and/or media with (default: `nil`)
* `send_user_agent`: should we include a user agent with HTTP requests? (default: `true`)
* `user_agent`: what user agent should we use? (default: `:default`), must be a string or `:default`
* `adapter`: array of hackney options
* `adapter`: a keyword list of adapter-specific options (see the sketch below)
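A minimal sketch of this section of the config, assuming the `gun` adapter; the `adapter` keys shown here are the defaults used by the gun adapter helper elsewhere in this changeset, and the remaining values mirror the shipped config:
```elixir
config :pleroma, :http,
  proxy_url: nil,
  send_user_agent: true,
  user_agent: :default,
  # adapter-specific options are merged on top of the adapter helper defaults
  adapter: [connect_timeout: 5_000, retry: 1]
```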
### :hackney_pools
@ -389,6 +388,42 @@ For each pool, the options are:
* `timeout` - retention duration for connections
### :connections_pool
*For `gun` adapter*
Advanced settings for the connection pool. The pool keeps connections open so they can be reused by the worker pools; see the example after this list.
For big instances it is recommended to raise `max_connections` (e.g. `config :pleroma, :connections_pool, max_connections: 500`) to 500-1000. This increases memory usage, but federation will be faster.
* `:checkin_timeout` - timeout to checkin a connection from the pool. Default: 250ms.
* `:max_connections` - maximum number of connections in the pool. Default: 250 connections.
* `:retry` - number of reconnection attempts `gun` will make if a connection goes down. Default: 1.
* `:retry_timeout` - time between reconnection attempts, in milliseconds. Default: 1000ms.
* `:await_up_timeout` - timeout while `gun` waits for the connection to come up. Default: 5000ms.
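A hedged example using the default values shipped in this changeset; raise `max_connections` for bigger instances as noted above:
```elixir
config :pleroma, :connections_pool,
  checkin_timeout: 250,
  max_connections: 250,
  retry: 1,
  retry_timeout: 1000,
  await_up_timeout: 5_000
```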
### :pools
*For `gun` adapter*
Advanced settings for the worker pools.
There are four pools used:
* `:federation` for federation jobs.
  You may want this pool's `size` to be at least equal to the number of federator jobs plus retry queue jobs.
* `:media` for rich media and media proxy requests
* `:upload` for uploaded media (if using a remote uploader with `proxy_remote: true`)
* `:default` for all other requests
For each pool, the options are (see the example after this list):
* `:size` - how many workers the pool can hold
* `:timeout` - timeout while `gun` waits for a response
* `:max_overflow` - additional workers allowed when the pool is under load
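A hedged example using the default pool sizes from this changeset; tune `size` and `max_overflow` to your instance's load:
```elixir
config :pleroma, :pools,
  federation: [size: 50, max_overflow: 10, timeout: 150_000],
  media: [size: 50, max_overflow: 10, timeout: 150_000],
  upload: [size: 25, max_overflow: 5, timeout: 300_000],
  default: [size: 10, max_overflow: 2, timeout: 10_000]
```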
## Captcha
### Pleroma.Captcha

View File

@ -5,6 +5,7 @@
defmodule Mix.Pleroma do
@doc "Common functions to be reused in mix tasks"
def start_pleroma do
Mix.Task.run("app.start")
Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)
if Pleroma.Config.get(:env) != :test do

View File

@ -74,4 +74,43 @@ defmodule Mix.Tasks.Pleroma.Benchmark do
inputs: inputs
)
end
def run(["adapters"]) do
start_pleroma()
:ok =
Pleroma.Gun.Conn.open(
"https://httpbin.org/stream-bytes/1500",
:gun_connections
)
Process.sleep(1_500)
Benchee.run(
%{
"Without conn and without pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
adapter: [pool: :no_pool, receive_conn: false]
)
end,
"Without conn and with pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
adapter: [receive_conn: false]
)
end,
"With reused conn and without pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
adapter: [pool: :no_pool]
)
end,
"With reused conn and with pool" => fn ->
{:ok, %Tesla.Env{}} = Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500")
end
},
parallel: 10
)
end
end

View File

@ -4,13 +4,13 @@
defmodule Mix.Tasks.Pleroma.Emoji do
use Mix.Task
import Mix.Pleroma
@shortdoc "Manages emoji packs"
@moduledoc File.read!("docs/administration/CLI_tasks/emoji.md")
def run(["ls-packs" | args]) do
Mix.Pleroma.start_pleroma()
Application.ensure_all_started(:hackney)
start_pleroma()
{options, [], []} = parse_global_opts(args)
@ -36,8 +36,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
end
def run(["get-packs" | args]) do
Mix.Pleroma.start_pleroma()
Application.ensure_all_started(:hackney)
start_pleroma()
{options, pack_names, []} = parse_global_opts(args)
@ -135,7 +134,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
end
def run(["gen-pack", src]) do
Application.ensure_all_started(:hackney)
start_pleroma()
proposed_name = Path.basename(src) |> Path.rootname()
name = String.trim(IO.gets("Pack name [#{proposed_name}]: "))

View File

@ -3,8 +3,12 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Application do
import Cachex.Spec
use Application
import Cachex.Spec
alias Pleroma.Config
require Logger
@name Mix.Project.config()[:name]
@ -18,9 +22,9 @@ defmodule Pleroma.Application do
def repository, do: @repository
def user_agent do
case Pleroma.Config.get([:http, :user_agent], :default) do
case Config.get([:http, :user_agent], :default) do
:default ->
info = "#{Pleroma.Web.base_url()} <#{Pleroma.Config.get([:instance, :email], "")}>"
info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
named_version() <> "; " <> info
custom ->
@ -33,27 +37,51 @@ defmodule Pleroma.Application do
def start(_type, _args) do
Pleroma.Config.Holder.save_default()
Pleroma.HTML.compile_scrubbers()
Pleroma.Config.DeprecationWarnings.warn()
Config.DeprecationWarnings.warn()
Pleroma.Plugs.HTTPSecurityPlug.warn_if_disabled()
Pleroma.Repo.check_migrations_applied!()
setup_instrumenters()
load_custom_modules()
adapter = Application.get_env(:tesla, :adapter)
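# The gun adapter requires OTP 22.2 or newer for correct handling of
# unordered certificate chains, so refuse to start on older releases
# (the hackney adapter has no such requirement).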
if adapter == Tesla.Adapter.Gun do
if version = Pleroma.OTPVersion.version() do
[major, minor] =
version
|> String.split(".")
|> Enum.map(&String.to_integer/1)
|> Enum.take(2)
if (major == 22 and minor < 2) or major < 22 do
raise "
!!!OTP VERSION WARNING!!!
You are using the gun adapter with OTP version #{version}, which doesn't support correct handling of unordered certificate chains.
"
end
else
raise "
!!!OTP VERSION WARNING!!!
To support correct handling of unordered certificate chains, the OTP version must be 22.2 or newer.
"
end
end
# Define workers and child supervisors to be supervised
children =
[
Pleroma.Repo,
Pleroma.Config.TransferTask,
Config.TransferTask,
Pleroma.Emoji,
Pleroma.Captcha,
Pleroma.Plugs.RateLimiter.Supervisor
] ++
cachex_children() ++
hackney_pool_children() ++
http_children(adapter, @env) ++
[
Pleroma.Stats,
Pleroma.JobQueueMonitor,
{Oban, Pleroma.Config.get(Oban)}
{Oban, Config.get(Oban)}
] ++
task_children(@env) ++
streamer_child(@env) ++
@ -70,7 +98,7 @@ defmodule Pleroma.Application do
end
def load_custom_modules do
dir = Pleroma.Config.get([:modules, :runtime_dir])
dir = Config.get([:modules, :runtime_dir])
if dir && File.exists?(dir) do
dir
@ -111,20 +139,6 @@ defmodule Pleroma.Application do
Pleroma.Web.Endpoint.Instrumenter.setup()
end
def enabled_hackney_pools do
[:media] ++
if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
[:federation]
else
[]
end ++
if Pleroma.Config.get([Pleroma.Upload, :proxy_remote]) do
[:upload]
else
[]
end
end
defp cachex_children do
[
build_cachex("used_captcha", ttl_interval: seconds_valid_interval()),
@ -146,7 +160,7 @@ defmodule Pleroma.Application do
do: expiration(default: :timer.seconds(6 * 60 * 60), interval: :timer.seconds(60))
defp seconds_valid_interval,
do: :timer.seconds(Pleroma.Config.get!([Pleroma.Captcha, :seconds_valid]))
do: :timer.seconds(Config.get!([Pleroma.Captcha, :seconds_valid]))
defp build_cachex(type, opts),
do: %{
@ -155,9 +169,9 @@ defmodule Pleroma.Application do
type: :worker
}
defp chat_enabled?, do: Pleroma.Config.get([:chat, :enabled])
defp chat_enabled?, do: Config.get([:chat, :enabled])
defp streamer_child(:test), do: []
defp streamer_child(env) when env in [:test, :benchmark], do: []
defp streamer_child(_) do
[Pleroma.Web.Streamer.supervisor()]
@ -169,13 +183,6 @@ defmodule Pleroma.Application do
defp chat_child(_, _), do: []
defp hackney_pool_children do
for pool <- enabled_hackney_pools() do
options = Pleroma.Config.get([:hackney_pools, pool])
:hackney_pool.child_spec(pool, options)
end
end
defp task_children(:test) do
[
%{
@ -200,4 +207,31 @@ defmodule Pleroma.Application do
}
]
end
# start hackney and gun pools in tests
defp http_children(_, :test) do
hackney_options = Config.get([:hackney_pools, :federation])
hackney_pool = :hackney_pool.child_spec(:federation, hackney_options)
[hackney_pool, Pleroma.Pool.Supervisor]
end
defp http_children(Tesla.Adapter.Hackney, _) do
pools = [:federation, :media]
pools =
if Config.get([Pleroma.Upload, :proxy_remote]) do
[:upload | pools]
else
pools
end
for pool <- pools do
options = Config.get([:hackney_pools, pool])
:hackney_pool.child_spec(pool, options)
end
end
defp http_children(Tesla.Adapter.Gun, _), do: [Pleroma.Pool.Supervisor]
defp http_children(_, _), do: []
end

View File

@ -278,8 +278,6 @@ defmodule Pleroma.ConfigDB do
}
end
defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}
defp do_convert(entity) when is_tuple(entity) do
value =
entity
@ -323,15 +321,6 @@ defmodule Pleroma.ConfigDB do
{:proxy_url, {do_transform_string(type), parse_host(host), port}}
end
defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
{partial_chain, []} =
entity
|> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
|> Code.eval_string()
{:partial_chain, partial_chain}
end
defp do_transform(%{"tuple" => entity}) do
Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
end

View File

@ -5,6 +5,7 @@
defmodule Pleroma.Config.TransferTask do
use Task
alias Pleroma.Config
alias Pleroma.ConfigDB
alias Pleroma.Repo
@ -18,7 +19,9 @@ defmodule Pleroma.Config.TransferTask do
{:pleroma, Oban},
{:pleroma, :rate_limit},
{:pleroma, :markup},
{:plerome, :streamer}
{:pleroma, :streamer},
{:pleroma, :pools},
{:pleroma, :connections_pool}
]
@reboot_time_subkeys [
@ -32,45 +35,33 @@ defmodule Pleroma.Config.TransferTask do
{:pleroma, :gopher, [:enabled]}
]
@reject [nil, :prometheus]
def start_link(_) do
load_and_update_env()
if Pleroma.Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Repo)
if Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Repo)
:ignore
end
@spec load_and_update_env([ConfigDB.t()]) :: :ok | false
def load_and_update_env(deleted \\ [], restart_pleroma? \\ true) do
with {:configurable, true} <-
{:configurable, Pleroma.Config.get(:configurable_from_database)},
true <- Ecto.Adapters.SQL.table_exists?(Repo, "config"),
started_applications <- Application.started_applications() do
@spec load_and_update_env([ConfigDB.t()], boolean()) :: :ok
def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
with {_, true} <- {:configurable, Config.get(:configurable_from_database)} do
# We need to restart applications for loaded settings take effect
in_db = Repo.all(ConfigDB)
with_deleted = in_db ++ deleted
reject_for_restart = if restart_pleroma?, do: @reject, else: [:pleroma | @reject]
applications =
with_deleted
|> Enum.map(&merge_and_update(&1))
|> Enum.uniq()
# TODO: some problem with prometheus after restart!
|> Enum.reject(&(&1 in reject_for_restart))
# to be ensured that pleroma will be restarted last
applications =
if :pleroma in applications do
List.delete(applications, :pleroma) ++ [:pleroma]
# TODO: some problem with prometheus after restart!
reject_restart =
if restart_pleroma? do
[nil, :prometheus]
else
Restarter.Pleroma.rebooted()
applications
[:pleroma, nil, :prometheus]
end
Enum.each(applications, &restart(started_applications, &1, Pleroma.Config.get(:env)))
started_applications = Application.started_applications()
(Repo.all(ConfigDB) ++ deleted_settings)
|> Enum.map(&merge_and_update/1)
|> Enum.uniq()
|> Enum.reject(&(&1 in reject_restart))
|> maybe_set_pleroma_last()
|> Enum.each(&restart(started_applications, &1, Config.get(:env)))
:ok
else
@ -78,42 +69,54 @@ defmodule Pleroma.Config.TransferTask do
end
end
defp maybe_set_pleroma_last(apps) do
# to be ensured that pleroma will be restarted last
if :pleroma in apps do
apps
|> List.delete(:pleroma)
|> List.insert_at(-1, :pleroma)
else
Restarter.Pleroma.rebooted()
apps
end
end
defp group_for_restart(:logger, key, _, merged_value) do
# change logger configuration in runtime, without restart
if Keyword.keyword?(merged_value) and
key not in [:compile_time_application, :backends, :compile_time_purge_matching] do
Logger.configure_backend(key, merged_value)
else
Logger.configure([{key, merged_value}])
end
nil
end
defp group_for_restart(group, _, _, _) when group != :pleroma, do: group
defp group_for_restart(group, key, value, _) do
if pleroma_need_restart?(group, key, value), do: group
end
defp merge_and_update(setting) do
try do
key = ConfigDB.from_string(setting.key)
group = ConfigDB.from_string(setting.group)
default = Pleroma.Config.Holder.default_config(group, key)
default = Config.Holder.default_config(group, key)
value = ConfigDB.from_binary(setting.value)
merged_value =
if Ecto.get_meta(setting, :state) == :deleted do
default
else
if can_be_merged?(default, value) do
ConfigDB.merge_group(group, key, default, value)
else
value
end
cond do
Ecto.get_meta(setting, :state) == :deleted -> default
can_be_merged?(default, value) -> ConfigDB.merge_group(group, key, default, value)
true -> value
end
:ok = update_env(group, key, merged_value)
if group != :logger do
if group != :pleroma or pleroma_need_restart?(group, key, value) do
group
end
else
# change logger configuration in runtime, without restart
if Keyword.keyword?(merged_value) and
key not in [:compile_time_application, :backends, :compile_time_purge_matching] do
Logger.configure_backend(key, merged_value)
else
Logger.configure([{key, merged_value}])
end
nil
end
group_for_restart(group, key, value, merged_value)
rescue
error ->
error_msg =

45
lib/pleroma/gun/api.ex Normal file
View File

@ -0,0 +1,45 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun.API do
@behaviour Pleroma.Gun
alias Pleroma.Gun
@gun_keys [
:connect_timeout,
:http_opts,
:http2_opts,
:protocols,
:retry,
:retry_timeout,
:trace,
:transport,
:tls_opts,
:tcp_opts,
:socks_opts,
:ws_opts
]
@impl Gun
def open(host, port, opts \\ %{}), do: :gun.open(host, port, Map.take(opts, @gun_keys))
@impl Gun
defdelegate info(pid), to: :gun
@impl Gun
defdelegate close(pid), to: :gun
@impl Gun
defdelegate await_up(pid, timeout \\ 5_000), to: :gun
@impl Gun
defdelegate connect(pid, opts), to: :gun
@impl Gun
defdelegate await(pid, ref), to: :gun
@impl Gun
defdelegate set_owner(pid, owner), to: :gun
end

196
lib/pleroma/gun/conn.ex Normal file
View File

@ -0,0 +1,196 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun.Conn do
@moduledoc """
Struct for gun connection data
"""
alias Pleroma.Gun
alias Pleroma.Pool.Connections
require Logger
@type gun_state :: :up | :down
@type conn_state :: :active | :idle
@type t :: %__MODULE__{
conn: pid(),
gun_state: gun_state(),
conn_state: conn_state(),
used_by: [pid()],
last_reference: pos_integer(),
crf: float(),
retries: pos_integer()
}
defstruct conn: nil,
gun_state: :open,
conn_state: :init,
used_by: [],
last_reference: 0,
crf: 1,
retries: 0
@spec open(String.t() | URI.t(), atom(), keyword()) :: :ok | nil
def open(url, name, opts \\ [])
def open(url, name, opts) when is_binary(url), do: open(URI.parse(url), name, opts)
def open(%URI{} = uri, name, opts) do
pool_opts = Pleroma.Config.get([:connections_pool], [])
opts =
opts
|> Enum.into(%{})
|> Map.put_new(:retry, pool_opts[:retry] || 1)
|> Map.put_new(:retry_timeout, pool_opts[:retry_timeout] || 1000)
|> Map.put_new(:await_up_timeout, pool_opts[:await_up_timeout] || 5_000)
|> maybe_add_tls_opts(uri)
key = "#{uri.scheme}:#{uri.host}:#{uri.port}"
conn_pid =
if Connections.count(name) < opts[:max_connection] do
do_open(uri, opts)
else
close_least_used_and_do_open(name, uri, opts)
end
if is_pid(conn_pid) do
conn = %Pleroma.Gun.Conn{
conn: conn_pid,
gun_state: :up,
conn_state: :active,
last_reference: :os.system_time(:second)
}
:ok = Gun.set_owner(conn_pid, Process.whereis(name))
Connections.add_conn(name, key, conn)
end
end
defp maybe_add_tls_opts(opts, %URI{scheme: "http"}), do: opts
defp maybe_add_tls_opts(opts, %URI{scheme: "https", host: host}) do
tls_opts = [
verify: :verify_peer,
cacertfile: CAStore.file_path(),
depth: 20,
reuse_sessions: false,
verify_fun:
{&:ssl_verify_hostname.verify_fun/3,
[check_hostname: Pleroma.HTTP.Connection.format_host(host)]}
]
tls_opts =
if Keyword.keyword?(opts[:tls_opts]) do
Keyword.merge(tls_opts, opts[:tls_opts])
else
tls_opts
end
Map.put(opts, :tls_opts, tls_opts)
end
defp do_open(uri, %{proxy: {proxy_host, proxy_port}} = opts) do
connect_opts =
uri
|> destination_opts()
|> add_http2_opts(uri.scheme, Map.get(opts, :tls_opts, []))
with open_opts <- Map.delete(opts, :tls_opts),
{:ok, conn} <- Gun.open(proxy_host, proxy_port, open_opts),
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]),
stream <- Gun.connect(conn, connect_opts),
{:response, :fin, 200, _} <- Gun.await(conn, stream) do
conn
else
error ->
Logger.warn(
"Opening proxied connection to #{compose_uri_log(uri)} failed with error #{
inspect(error)
}"
)
error
end
end
defp do_open(uri, %{proxy: {proxy_type, proxy_host, proxy_port}} = opts) do
version =
proxy_type
|> to_string()
|> String.last()
|> case do
"4" -> 4
_ -> 5
end
socks_opts =
uri
|> destination_opts()
|> add_http2_opts(uri.scheme, Map.get(opts, :tls_opts, []))
|> Map.put(:version, version)
opts =
opts
|> Map.put(:protocols, [:socks])
|> Map.put(:socks_opts, socks_opts)
with {:ok, conn} <- Gun.open(proxy_host, proxy_port, opts),
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
conn
else
error ->
Logger.warn(
"Opening socks proxied connection to #{compose_uri_log(uri)} failed with error #{
inspect(error)
}"
)
error
end
end
defp do_open(%URI{host: host, port: port} = uri, opts) do
host = Pleroma.HTTP.Connection.parse_host(host)
with {:ok, conn} <- Gun.open(host, port, opts),
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
conn
else
error ->
Logger.warn(
"Opening connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
)
error
end
end
defp destination_opts(%URI{host: host, port: port}) do
host = Pleroma.HTTP.Connection.parse_host(host)
%{host: host, port: port}
end
defp add_http2_opts(opts, "https", tls_opts) do
Map.merge(opts, %{protocols: [:http2], transport: :tls, tls_opts: tls_opts})
end
defp add_http2_opts(opts, _, _), do: opts
defp close_least_used_and_do_open(name, uri, opts) do
with [{key, conn} | _conns] <- Connections.get_unused_conns(name),
:ok <- Gun.close(conn.conn) do
Connections.remove_conn(name, key)
do_open(uri, opts)
else
[] -> {:error, :pool_overflowed}
end
end
def compose_uri_log(%URI{scheme: scheme, host: host, path: path}) do
"#{scheme}://#{host}#{path}"
end
end

31
lib/pleroma/gun/gun.ex Normal file
View File

@ -0,0 +1,31 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun do
@callback open(charlist(), pos_integer(), map()) :: {:ok, pid()}
@callback info(pid()) :: map()
@callback close(pid()) :: :ok
@callback await_up(pid, pos_integer()) :: {:ok, atom()} | {:error, atom()}
@callback connect(pid(), map()) :: reference()
@callback await(pid(), reference()) :: {:response, :fin, 200, []}
@callback set_owner(pid(), pid()) :: :ok
@api Pleroma.Config.get([Pleroma.Gun], Pleroma.Gun.API)
defp api, do: @api
def open(host, port, opts), do: api().open(host, port, opts)
def info(pid), do: api().info(pid)
def close(pid), do: api().close(pid)
def await_up(pid, timeout \\ 5_000), do: api().await_up(pid, timeout)
def connect(pid, opts), do: api().connect(pid, opts)
def await(pid, ref), do: api().await(pid, ref)
def set_owner(pid, owner), do: api().set_owner(pid, owner)
end

View File

@ -0,0 +1,41 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.AdapterHelper do
alias Pleroma.HTTP.Connection
@type proxy ::
{Connection.host(), pos_integer()}
| {Connection.proxy_type(), Connection.host(), pos_integer()}
@callback options(keyword(), URI.t()) :: keyword()
@callback after_request(keyword()) :: :ok
@spec options(keyword(), URI.t()) :: keyword()
def options(opts, _uri) do
proxy = Pleroma.Config.get([:http, :proxy_url], nil)
maybe_add_proxy(opts, format_proxy(proxy))
end
@spec maybe_get_conn(URI.t(), keyword()) :: keyword()
def maybe_get_conn(_uri, opts), do: opts
@spec after_request(keyword()) :: :ok
def after_request(_opts), do: :ok
@spec format_proxy(String.t() | tuple() | nil) :: proxy() | nil
def format_proxy(nil), do: nil
def format_proxy(proxy_url) do
case Connection.parse_proxy(proxy_url) do
{:ok, host, port} -> {host, port}
{:ok, type, host, port} -> {type, host, port}
_ -> nil
end
end
@spec maybe_add_proxy(keyword(), proxy() | nil) :: keyword()
def maybe_add_proxy(opts, nil), do: opts
def maybe_add_proxy(opts, proxy), do: Keyword.put_new(opts, :proxy, proxy)
end

View File

@ -0,0 +1,77 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.AdapterHelper.Gun do
@behaviour Pleroma.HTTP.AdapterHelper
alias Pleroma.HTTP.AdapterHelper
alias Pleroma.Pool.Connections
require Logger
@defaults [
connect_timeout: 5_000,
domain_lookup_timeout: 5_000,
tls_handshake_timeout: 5_000,
retry: 1,
retry_timeout: 1000,
await_up_timeout: 5_000
]
@spec options(keyword(), URI.t()) :: keyword()
def options(incoming_opts \\ [], %URI{} = uri) do
proxy =
Pleroma.Config.get([:http, :proxy_url])
|> AdapterHelper.format_proxy()
config_opts = Pleroma.Config.get([:http, :adapter], [])
@defaults
|> Keyword.merge(config_opts)
|> add_scheme_opts(uri)
|> AdapterHelper.maybe_add_proxy(proxy)
|> maybe_get_conn(uri, incoming_opts)
end
@spec after_request(keyword()) :: :ok
def after_request(opts) do
if opts[:conn] && opts[:body_as] != :chunks do
Connections.checkout(opts[:conn], self(), :gun_connections)
end
:ok
end
defp add_scheme_opts(opts, %{scheme: "http"}), do: opts
defp add_scheme_opts(opts, %{scheme: "https"}) do
opts
|> Keyword.put(:certificates_verification, true)
|> Keyword.put(:tls_opts, log_level: :warning)
end
defp maybe_get_conn(adapter_opts, uri, incoming_opts) do
{receive_conn?, opts} =
adapter_opts
|> Keyword.merge(incoming_opts)
|> Keyword.pop(:receive_conn, true)
if Connections.alive?(:gun_connections) and receive_conn? do
checkin_conn(uri, opts)
else
opts
end
end
defp checkin_conn(uri, opts) do
case Connections.checkin(uri, :gun_connections) do
nil ->
Task.start(Pleroma.Gun.Conn, :open, [uri, :gun_connections, opts])
opts
conn when is_pid(conn) ->
Keyword.merge(opts, conn: conn, close_conn: false)
end
end
end

View File

@ -0,0 +1,43 @@
defmodule Pleroma.HTTP.AdapterHelper.Hackney do
@behaviour Pleroma.HTTP.AdapterHelper
@defaults [
connect_timeout: 10_000,
recv_timeout: 20_000,
follow_redirect: true,
force_redirect: true,
pool: :federation
]
@spec options(keyword(), URI.t()) :: keyword()
def options(connection_opts \\ [], %URI{} = uri) do
proxy = Pleroma.Config.get([:http, :proxy_url])
config_opts = Pleroma.Config.get([:http, :adapter], [])
@defaults
|> Keyword.merge(config_opts)
|> Keyword.merge(connection_opts)
|> add_scheme_opts(uri)
|> Pleroma.HTTP.AdapterHelper.maybe_add_proxy(proxy)
end
defp add_scheme_opts(opts, %URI{scheme: "http"}), do: opts
defp add_scheme_opts(opts, %URI{scheme: "https", host: host}) do
ssl_opts = [
ssl_options: [
# Workaround for remote server certificate chain issues
partial_chain: &:hackney_connect.partial_chain/1,
# We don't support TLS v1.3 yet
versions: [:tlsv1, :"tlsv1.1", :"tlsv1.2"],
server_name_indication: to_charlist(host)
]
]
Keyword.merge(opts, ssl_opts)
end
def after_request(_), do: :ok
end

View File

@ -4,40 +4,121 @@
defmodule Pleroma.HTTP.Connection do
@moduledoc """
Connection for http-requests.
Configure Tesla.Client with default and customized adapter options.
"""
@hackney_options [
connect_timeout: 10_000,
recv_timeout: 20_000,
follow_redirect: true,
force_redirect: true,
pool: :federation
]
@adapter Application.get_env(:tesla, :adapter)
alias Pleroma.Config
alias Pleroma.HTTP.AdapterHelper
require Logger
@defaults [pool: :federation]
@type ip_address :: ipv4_address() | ipv6_address()
@type ipv4_address :: {0..255, 0..255, 0..255, 0..255}
@type ipv6_address ::
{0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535}
@type proxy_type() :: :socks4 | :socks5
@type host() :: charlist() | ip_address()
@doc """
Configure a client connection
# Returns
Tesla.Env.client
Merge default connection & adapter options with received ones.
"""
@spec new(Keyword.t()) :: Tesla.Env.client()
def new(opts \\ []) do
Tesla.client([], {@adapter, hackney_options(opts)})
@spec options(URI.t(), keyword()) :: keyword()
def options(%URI{} = uri, opts \\ []) do
@defaults
|> pool_timeout()
|> Keyword.merge(opts)
|> adapter_helper().options(uri)
end
# fetch Hackney options
#
def hackney_options(opts) do
options = Keyword.get(opts, :adapter, [])
adapter_options = Pleroma.Config.get([:http, :adapter], [])
proxy_url = Pleroma.Config.get([:http, :proxy_url], nil)
defp pool_timeout(opts) do
{config_key, default} =
if adapter() == Tesla.Adapter.Gun do
{:pools, Config.get([:pools, :default, :timeout])}
else
{:hackney_pools, 10_000}
end
@hackney_options
|> Keyword.merge(adapter_options)
|> Keyword.merge(options)
|> Keyword.merge(proxy: proxy_url)
timeout = Config.get([config_key, opts[:pool], :timeout], default)
Keyword.merge(opts, timeout: timeout)
end
@spec after_request(keyword()) :: :ok
def after_request(opts), do: adapter_helper().after_request(opts)
defp adapter, do: Application.get_env(:tesla, :adapter)
defp adapter_helper do
case adapter() do
Tesla.Adapter.Gun -> AdapterHelper.Gun
Tesla.Adapter.Hackney -> AdapterHelper.Hackney
_ -> AdapterHelper
end
end
@spec parse_proxy(String.t() | tuple() | nil) ::
{:ok, host(), pos_integer()}
| {:ok, proxy_type(), host(), pos_integer()}
| {:error, atom()}
| nil
def parse_proxy(nil), do: nil
def parse_proxy(proxy) when is_binary(proxy) do
with [host, port] <- String.split(proxy, ":"),
{port, ""} <- Integer.parse(port) do
{:ok, parse_host(host), port}
else
{_, _} ->
Logger.warn("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
:error ->
Logger.warn("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
_ ->
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end
def parse_proxy(proxy) when is_tuple(proxy) do
with {type, host, port} <- proxy do
{:ok, type, parse_host(host), port}
else
_ ->
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end
@spec parse_host(String.t() | atom() | charlist()) :: charlist() | ip_address()
def parse_host(host) when is_list(host), do: host
def parse_host(host) when is_atom(host), do: to_charlist(host)
def parse_host(host) when is_binary(host) do
host = to_charlist(host)
case :inet.parse_address(host) do
{:error, :einval} -> host
{:ok, ip} -> ip
end
end
@spec format_host(String.t()) :: charlist()
def format_host(host) do
host_charlist = to_charlist(host)
case :inet.parse_address(host_charlist) do
{:error, :einval} ->
:idna.encode(host_charlist)
{:ok, _ip} ->
host_charlist
end
end
end

View File

@ -4,21 +4,47 @@
defmodule Pleroma.HTTP do
@moduledoc """
Wrapper for `Tesla.request/2`.
"""
alias Pleroma.HTTP.Connection
alias Pleroma.HTTP.Request
alias Pleroma.HTTP.RequestBuilder, as: Builder
alias Tesla.Client
alias Tesla.Env
require Logger
@type t :: __MODULE__
@doc """
Builds and perform http request.
Performs GET request.
See `Pleroma.HTTP.request/5`
"""
@spec get(Request.url() | nil, Request.headers(), keyword()) ::
nil | {:ok, Env.t()} | {:error, any()}
def get(url, headers \\ [], options \\ [])
def get(nil, _, _), do: nil
def get(url, headers, options), do: request(:get, url, "", headers, options)
@doc """
Performs POST request.
See `Pleroma.HTTP.request/5`
"""
@spec post(Request.url(), String.t(), Request.headers(), keyword()) ::
{:ok, Env.t()} | {:error, any()}
def post(url, body, headers \\ [], options \\ []),
do: request(:post, url, body, headers, options)
@doc """
Builds and performs http request.
# Arguments:
`method` - :get, :post, :put, :delete
`url`
`body`
`url` - full url
`body` - request body
`headers` - a list of header tuples, e.g. `[{"content-type", "text/plain"}]`
`options` - custom, per-request middleware or adapter options
@ -26,61 +52,66 @@ defmodule Pleroma.HTTP do
`{:ok, %Tesla.Env{}}` or `{:error, error}`
"""
def request(method, url, body \\ "", headers \\ [], options \\ []) do
try do
options =
process_request_options(options)
|> process_sni_options(url)
params = Keyword.get(options, :params, [])
%{}
|> Builder.method(method)
|> Builder.headers(headers)
|> Builder.opts(options)
|> Builder.url(url)
|> Builder.add_param(:body, :body, body)
|> Builder.add_param(:query, :query, params)
|> Enum.into([])
|> (&Tesla.request(Connection.new(options), &1)).()
rescue
e ->
{:error, e}
catch
:exit, e ->
{:error, e}
end
end
defp process_sni_options(options, nil), do: options
defp process_sni_options(options, url) do
@spec request(atom(), Request.url(), String.t(), Request.headers(), keyword()) ::
{:ok, Env.t()} | {:error, any()}
def request(method, url, body, headers, options) when is_binary(url) do
uri = URI.parse(url)
host = uri.host |> to_charlist()
adapter_opts = Connection.options(uri, options[:adapter] || [])
options = put_in(options[:adapter], adapter_opts)
params = options[:params] || []
request = build_request(method, headers, options, url, body, params)
case uri.scheme do
"https" -> options ++ [ssl: [server_name_indication: host]]
_ -> options
end
adapter = Application.get_env(:tesla, :adapter)
client = Tesla.client([Tesla.Middleware.FollowRedirects], adapter)
pid = Process.whereis(adapter_opts[:pool])
pool_alive? =
if adapter == Tesla.Adapter.Gun && pid do
Process.alive?(pid)
else
false
end
request_opts =
adapter_opts
|> Enum.into(%{})
|> Map.put(:env, Pleroma.Config.get([:env]))
|> Map.put(:pool_alive?, pool_alive?)
response = request(client, request, request_opts)
Connection.after_request(adapter_opts)
response
end
def process_request_options(options) do
Keyword.merge(Pleroma.HTTP.Connection.hackney_options([]), options)
@spec request(Client.t(), keyword(), map()) :: {:ok, Env.t()} | {:error, any()}
def request(%Client{} = client, request, %{env: :test}), do: request(client, request)
def request(%Client{} = client, request, %{body_as: :chunks}), do: request(client, request)
def request(%Client{} = client, request, %{pool_alive?: false}), do: request(client, request)
def request(%Client{} = client, request, %{pool: pool, timeout: timeout}) do
:poolboy.transaction(
pool,
&Pleroma.Pool.Request.execute(&1, client, request, timeout),
timeout
)
end
@doc """
Performs GET request.
@spec request(Client.t(), keyword()) :: {:ok, Env.t()} | {:error, any()}
def request(client, request), do: Tesla.request(client, request)
See `Pleroma.HTTP.request/5`
"""
def get(url, headers \\ [], options \\ []),
do: request(:get, url, "", headers, options)
@doc """
Performs POST request.
See `Pleroma.HTTP.request/5`
"""
def post(url, body, headers \\ [], options \\ []),
do: request(:post, url, body, headers, options)
defp build_request(method, headers, options, url, body, params) do
Builder.new()
|> Builder.method(method)
|> Builder.headers(headers)
|> Builder.opts(options)
|> Builder.url(url)
|> Builder.add_param(:body, :body, body)
|> Builder.add_param(:query, :query, params)
|> Builder.convert_to_keyword()
end
end

View File

@ -0,0 +1,23 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.Request do
@moduledoc """
Request struct.
"""
defstruct method: :get, url: "", query: [], headers: [], body: "", opts: []
@type method :: :head | :get | :delete | :trace | :options | :post | :put | :patch
@type url :: String.t()
@type headers :: [{String.t(), String.t()}]
@type t :: %__MODULE__{
method: method(),
url: url(),
query: keyword(),
headers: headers(),
body: String.t(),
opts: keyword()
}
end

View File

@ -7,136 +7,87 @@ defmodule Pleroma.HTTP.RequestBuilder do
Helper functions for building Tesla requests
"""
alias Pleroma.HTTP.Request
alias Tesla.Multipart
@doc """
Specify the request method when building a request
## Parameters
- request (Map) - Collected request options
- m (atom) - Request method
## Returns
Map
Creates new request
"""
@spec method(map(), atom) :: map()
def method(request, m) do
Map.put_new(request, :method, m)
end
@spec new(Request.t()) :: Request.t()
def new(%Request{} = request \\ %Request{}), do: request
@doc """
Specify the request method when building a request
## Parameters
- request (Map) - Collected request options
- u (String) - Request URL
## Returns
Map
"""
@spec url(map(), String.t()) :: map()
def url(request, u) do
Map.put_new(request, :url, u)
end
@spec method(Request.t(), Request.method()) :: Request.t()
def method(request, m), do: %{request | method: m}
@doc """
Specify the request method when building a request
"""
@spec url(Request.t(), Request.url()) :: Request.t()
def url(request, u), do: %{request | url: u}
@doc """
Add headers to the request
"""
@spec headers(map(), list(tuple)) :: map()
def headers(request, header_list) do
header_list =
@spec headers(Request.t(), Request.headers()) :: Request.t()
def headers(request, headers) do
headers_list =
if Pleroma.Config.get([:http, :send_user_agent]) do
header_list ++ [{"User-Agent", Pleroma.Application.user_agent()}]
[{"user-agent", Pleroma.Application.user_agent()} | headers]
else
header_list
headers
end
Map.put_new(request, :headers, header_list)
%{request | headers: headers_list}
end
@doc """
Add custom, per-request middleware or adapter options to the request
"""
@spec opts(map(), Keyword.t()) :: map()
def opts(request, options) do
Map.put_new(request, :opts, options)
end
@spec opts(Request.t(), keyword()) :: Request.t()
def opts(request, options), do: %{request | opts: options}
@doc """
Add optional parameters to the request
## Parameters
- request (Map) - Collected request options
- definitions (Map) - Map of parameter name to parameter location.
- options (KeywordList) - The provided optional parameters
## Returns
Map
"""
@spec add_optional_params(map(), %{optional(atom) => atom}, keyword()) :: map()
def add_optional_params(request, _, []), do: request
@spec add_param(Request.t(), atom(), atom(), any()) :: Request.t()
def add_param(request, :query, :query, values), do: %{request | query: values}
def add_optional_params(request, definitions, [{key, value} | tail]) do
case definitions do
%{^key => location} ->
request
|> add_param(location, key, value)
|> add_optional_params(definitions, tail)
_ ->
add_optional_params(request, definitions, tail)
end
end
@doc """
Add optional parameters to the request
## Parameters
- request (Map) - Collected request options
- location (atom) - Where to put the parameter
- key (atom) - The name of the parameter
- value (any) - The value of the parameter
## Returns
Map
"""
@spec add_param(map(), atom, atom, any()) :: map()
def add_param(request, :query, :query, values), do: Map.put(request, :query, values)
def add_param(request, :body, :body, value), do: Map.put(request, :body, value)
def add_param(request, :body, :body, value), do: %{request | body: value}
def add_param(request, :body, key, value) do
request
|> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
|> Map.put(:body, Multipart.new())
|> Map.update!(
:body,
&Tesla.Multipart.add_field(
&Multipart.add_field(
&1,
key,
Jason.encode!(value),
headers: [{:"Content-Type", "application/json"}]
headers: [{"content-type", "application/json"}]
)
)
end
def add_param(request, :file, name, path) do
request
|> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
|> Map.update!(:body, &Tesla.Multipart.add_file(&1, path, name: name))
|> Map.put(:body, Multipart.new())
|> Map.update!(:body, &Multipart.add_file(&1, path, name: name))
end
def add_param(request, :form, name, value) do
request
|> Map.update(:body, %{name => value}, &Map.put(&1, name, value))
Map.update(request, :body, %{name => value}, &Map.put(&1, name, value))
end
def add_param(request, location, key, value) do
Map.update(request, location, [{key, value}], &(&1 ++ [{key, value}]))
end
def convert_to_keyword(request) do
request
|> Map.from_struct()
|> Enum.into([])
end
end

View File

@ -141,7 +141,7 @@ defmodule Pleroma.Object.Fetcher do
date: date
})
[{:Signature, signature}]
[{"signature", signature}]
end
defp sign_fetch(headers, id, date) do
@ -154,7 +154,7 @@ defmodule Pleroma.Object.Fetcher do
defp maybe_date_fetch(headers, date) do
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
headers ++ [{:Date, date}]
headers ++ [{"date", date}]
else
headers
end
@ -166,7 +166,7 @@ defmodule Pleroma.Object.Fetcher do
date = Pleroma.Signature.signed_date()
headers =
[{:Accept, "application/activity+json"}]
[{"accept", "application/activity+json"}]
|> maybe_date_fetch(date)
|> sign_fetch(id, date)

View File

@ -0,0 +1,28 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.OTPVersion do
@spec version() :: String.t() | nil
def version do
# OTP Version https://erlang.org/doc/system_principles/versions.html#otp-version
[
Path.join(:code.root_dir(), "OTP_VERSION"),
Path.join([:code.root_dir(), "releases", :erlang.system_info(:otp_release), "OTP_VERSION"])
]
|> get_version_from_files()
end
@spec get_version_from_files([Path.t()]) :: String.t() | nil
def get_version_from_files([]), do: nil
def get_version_from_files([path | paths]) do
if File.exists?(path) do
path
|> File.read!()
|> String.replace(~r/\r|\n|\s/, "")
else
get_version_from_files(paths)
end
end
end

View File

@ -0,0 +1,283 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool.Connections do
use GenServer
alias Pleroma.Config
alias Pleroma.Gun
require Logger
@type domain :: String.t()
@type conn :: Pleroma.Gun.Conn.t()
@type t :: %__MODULE__{
conns: %{domain() => conn()},
opts: keyword()
}
defstruct conns: %{}, opts: []
@spec start_link({atom(), keyword()}) :: {:ok, pid()}
def start_link({name, opts}) do
GenServer.start_link(__MODULE__, opts, name: name)
end
@impl true
def init(opts), do: {:ok, %__MODULE__{conns: %{}, opts: opts}}
@spec checkin(String.t() | URI.t(), atom()) :: pid() | nil
def checkin(url, name)
def checkin(url, name) when is_binary(url), do: checkin(URI.parse(url), name)
def checkin(%URI{} = uri, name) do
timeout = Config.get([:connections_pool, :checkin_timeout], 250)
GenServer.call(name, {:checkin, uri}, timeout)
end
@spec alive?(atom()) :: boolean()
def alive?(name) do
if pid = Process.whereis(name) do
Process.alive?(pid)
else
false
end
end
@spec get_state(atom()) :: t()
def get_state(name) do
GenServer.call(name, :state)
end
@spec count(atom()) :: pos_integer()
def count(name) do
GenServer.call(name, :count)
end
@spec get_unused_conns(atom()) :: [{domain(), conn()}]
def get_unused_conns(name) do
GenServer.call(name, :unused_conns)
end
@spec checkout(pid(), pid(), atom()) :: :ok
def checkout(conn, pid, name) do
GenServer.cast(name, {:checkout, conn, pid})
end
@spec add_conn(atom(), String.t(), Pleroma.Gun.Conn.t()) :: :ok
def add_conn(name, key, conn) do
GenServer.cast(name, {:add_conn, key, conn})
end
@spec remove_conn(atom(), String.t()) :: :ok
def remove_conn(name, key) do
GenServer.cast(name, {:remove_conn, key})
end
@impl true
def handle_cast({:add_conn, key, conn}, state) do
state = put_in(state.conns[key], conn)
Process.monitor(conn.conn)
{:noreply, state}
end
@impl true
def handle_cast({:checkout, conn_pid, pid}, state) do
state =
with true <- Process.alive?(conn_pid),
{key, conn} <- find_conn(state.conns, conn_pid),
used_by <- List.keydelete(conn.used_by, pid, 0) do
conn_state = if used_by == [], do: :idle, else: conn.conn_state
put_in(state.conns[key], %{conn | conn_state: conn_state, used_by: used_by})
else
false ->
Logger.debug("checkout for closed conn #{inspect(conn_pid)}")
state
nil ->
Logger.debug("checkout for alive conn #{inspect(conn_pid)}, but is not in state")
state
end
{:noreply, state}
end
@impl true
def handle_cast({:remove_conn, key}, state) do
state = put_in(state.conns, Map.delete(state.conns, key))
{:noreply, state}
end
@impl true
def handle_call({:checkin, uri}, from, state) do
key = "#{uri.scheme}:#{uri.host}:#{uri.port}"
case state.conns[key] do
%{conn: pid, gun_state: :up} = conn ->
time = :os.system_time(:second)
last_reference = time - conn.last_reference
crf = crf(last_reference, 100, conn.crf)
state =
put_in(state.conns[key], %{
conn
| last_reference: time,
crf: crf,
conn_state: :active,
used_by: [from | conn.used_by]
})
{:reply, pid, state}
%{gun_state: :down} ->
{:reply, nil, state}
nil ->
{:reply, nil, state}
end
end
@impl true
def handle_call(:state, _from, state), do: {:reply, state, state}
@impl true
def handle_call(:count, _from, state) do
{:reply, Enum.count(state.conns), state}
end
@impl true
def handle_call(:unused_conns, _from, state) do
unused_conns =
state.conns
|> Enum.filter(&filter_conns/1)
|> Enum.sort(&sort_conns/2)
{:reply, unused_conns, state}
end
defp filter_conns({_, %{conn_state: :idle, used_by: []}}), do: true
defp filter_conns(_), do: false
defp sort_conns({_, c1}, {_, c2}) do
c1.crf <= c2.crf and c1.last_reference <= c2.last_reference
end
@impl true
def handle_info({:gun_up, conn_pid, _protocol}, state) do
%{origin_host: host, origin_scheme: scheme, origin_port: port} = Gun.info(conn_pid)
host =
case :inet.ntoa(host) do
{:error, :einval} -> host
ip -> ip
end
key = "#{scheme}:#{host}:#{port}"
state =
with {key, conn} <- find_conn(state.conns, conn_pid, key),
{true, key} <- {Process.alive?(conn_pid), key} do
put_in(state.conns[key], %{
conn
| gun_state: :up,
conn_state: :active,
retries: 0
})
else
{false, key} ->
put_in(
state.conns,
Map.delete(state.conns, key)
)
nil ->
:ok = Gun.close(conn_pid)
state
end
{:noreply, state}
end
@impl true
def handle_info({:gun_down, conn_pid, _protocol, _reason, _killed}, state) do
retries = Config.get([:connections_pool, :retry], 1)
# we can't get info on this pid, because pid is dead
state =
with {key, conn} <- find_conn(state.conns, conn_pid),
{true, key} <- {Process.alive?(conn_pid), key} do
if conn.retries == retries do
:ok = Gun.close(conn.conn)
put_in(
state.conns,
Map.delete(state.conns, key)
)
else
put_in(state.conns[key], %{
conn
| gun_state: :down,
retries: conn.retries + 1
})
end
else
{false, key} ->
put_in(
state.conns,
Map.delete(state.conns, key)
)
nil ->
Logger.debug(":gun_down for conn which isn't found in state")
state
end
{:noreply, state}
end
@impl true
def handle_info({:DOWN, _ref, :process, conn_pid, reason}, state) do
Logger.debug("received DOWM message for #{inspect(conn_pid)} reason -> #{inspect(reason)}")
state =
with {key, conn} <- find_conn(state.conns, conn_pid) do
Enum.each(conn.used_by, fn {pid, _ref} ->
Process.exit(pid, reason)
end)
put_in(
state.conns,
Map.delete(state.conns, key)
)
else
nil ->
Logger.debug(":DOWN for conn which isn't found in state")
state
end
{:noreply, state}
end
defp find_conn(conns, conn_pid) do
Enum.find(conns, fn {_key, conn} ->
conn.conn == conn_pid
end)
end
defp find_conn(conns, conn_pid, conn_key) do
Enum.find(conns, fn {key, conn} ->
key == conn_key and conn.conn == conn_pid
end)
end
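# Rolling usage score used when picking connections to evict: on every checkin
# the previous score is decayed by 0.5^(seconds_since_last_use / steps) and 1
# is added, so recently and frequently reused connections keep a higher crf,
# while long-idle ones sort first in get_unused_conns/1 and are closed first.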
def crf(current, steps, crf) do
1 + :math.pow(0.5, current / steps) * crf
end
end

22
lib/pleroma/pool/pool.ex Normal file
View File

@ -0,0 +1,22 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool do
def child_spec(opts) do
poolboy_opts =
opts
|> Keyword.put(:worker_module, Pleroma.Pool.Request)
|> Keyword.put(:name, {:local, opts[:name]})
|> Keyword.put(:size, opts[:size])
|> Keyword.put(:max_overflow, opts[:max_overflow])
%{
id: opts[:id] || {__MODULE__, make_ref()},
start: {:poolboy, :start_link, [poolboy_opts, [name: opts[:name]]]},
restart: :permanent,
shutdown: 5000,
type: :worker
}
end
end

View File

@ -0,0 +1,65 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool.Request do
use GenServer
require Logger
def start_link(args) do
GenServer.start_link(__MODULE__, args)
end
@impl true
def init(_), do: {:ok, []}
@spec execute(pid() | atom(), Tesla.Client.t(), keyword(), pos_integer()) ::
{:ok, Tesla.Env.t()} | {:error, any()}
def execute(pid, client, request, timeout) do
GenServer.call(pid, {:execute, client, request}, timeout)
end
@impl true
def handle_call({:execute, client, request}, _from, state) do
response = Pleroma.HTTP.request(client, request)
{:reply, response, state}
end
@impl true
def handle_info({:gun_data, _conn, _stream, _, _}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_up, _conn, _protocol}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_down, _conn, _protocol, _reason, _killed}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_error, _conn, _stream, _error}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_push, _conn, _stream, _new_stream, _method, _uri, _headers}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_response, _conn, _stream, _, _status, _headers}, state) do
{:noreply, state}
end
@impl true
def handle_info(msg, state) do
Logger.warn("Received unexpected message #{inspect(__MODULE__)} #{inspect(msg)}")
{:noreply, state}
end
end

View File

@ -0,0 +1,42 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool.Supervisor do
use Supervisor
alias Pleroma.Config
alias Pleroma.Pool
def start_link(args) do
Supervisor.start_link(__MODULE__, args, name: __MODULE__)
end
def init(_) do
conns_child = %{
id: Pool.Connections,
start:
{Pool.Connections, :start_link, [{:gun_connections, Config.get([:connections_pool])}]}
}
Supervisor.init([conns_child | pools()], strategy: :one_for_one)
end
defp pools do
pools = Config.get(:pools)
pools =
if Config.get([Pleroma.Upload, :proxy_remote]) == false do
Keyword.delete(pools, :upload)
else
pools
end
for {pool_name, pool_opts} <- pools do
pool_opts
|> Keyword.put(:id, {Pool, pool_name})
|> Keyword.put(:name, pool_name)
|> Pool.child_spec()
end
end
end

View File

@ -3,19 +3,23 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy.Client do
@callback request(atom(), String.t(), [tuple()], String.t(), list()) ::
{:ok, pos_integer(), [tuple()], reference() | map()}
| {:ok, pos_integer(), [tuple()]}
@type status :: pos_integer()
@type header_name :: String.t()
@type header_value :: String.t()
@type headers :: [{header_name(), header_value()}]
@callback request(atom(), String.t(), headers(), String.t(), list()) ::
{:ok, status(), headers(), reference() | map()}
| {:ok, status(), headers()}
| {:ok, reference()}
| {:error, term()}
@callback stream_body(reference() | pid() | map()) ::
{:ok, binary()} | :done | {:error, String.t()}
@callback stream_body(map()) :: {:ok, binary(), map()} | :done | {:error, atom() | String.t()}
@callback close(reference() | pid() | map()) :: :ok
def request(method, url, headers, "", opts \\ []) do
client().request(method, url, headers, "", opts)
def request(method, url, headers, body \\ "", opts \\ []) do
client().request(method, url, headers, body, opts)
end
def stream_body(ref), do: client().stream_body(ref)
@ -23,6 +27,12 @@ defmodule Pleroma.ReverseProxy.Client do
def close(ref), do: client().close(ref)
defp client do
Pleroma.Config.get([Pleroma.ReverseProxy.Client], :hackney)
:tesla
|> Application.get_env(:adapter)
|> client()
end
defp client(Tesla.Adapter.Hackney), do: Pleroma.ReverseProxy.Client.Hackney
defp client(Tesla.Adapter.Gun), do: Pleroma.ReverseProxy.Client.Tesla
defp client(_), do: Pleroma.Config.get!(Pleroma.ReverseProxy.Client)
end

View File

@ -0,0 +1,24 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy.Client.Hackney do
@behaviour Pleroma.ReverseProxy.Client
@impl true
def request(method, url, headers, body, opts \\ []) do
:hackney.request(method, url, headers, body, opts)
end
@impl true
def stream_body(ref) do
case :hackney.stream_body(ref) do
:done -> :done
{:ok, data} -> {:ok, data, ref}
{:error, error} -> {:error, error}
end
end
@impl true
def close(ref), do: :hackney.close(ref)
end

View File

@ -0,0 +1,90 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy.Client.Tesla do
@behaviour Pleroma.ReverseProxy.Client
@type headers() :: [{String.t(), String.t()}]
@type status() :: pos_integer()
@spec request(atom(), String.t(), headers(), String.t(), keyword()) ::
{:ok, status(), headers}
| {:ok, status(), headers, map()}
| {:error, atom() | String.t()}
| no_return()
@impl true
def request(method, url, headers, body, opts \\ []) do
check_adapter()
opts = Keyword.put(opts, :body_as, :chunks)
with {:ok, response} <-
Pleroma.HTTP.request(
method,
url,
body,
headers,
Keyword.put(opts, :adapter, opts)
) do
if is_map(response.body) and method != :head do
{:ok, response.status, response.headers, response.body}
else
{:ok, response.status, response.headers}
end
else
{:error, error} -> {:error, error}
end
end
@impl true
@spec stream_body(map()) ::
{:ok, binary(), map()} | {:error, atom() | String.t()} | :done | no_return()
def stream_body(%{pid: pid, opts: opts, fin: true}) do
# if connection was reused, but in tesla were redirects,
# tesla returns new opened connection, which must be closed manually
if opts[:old_conn], do: Tesla.Adapter.Gun.close(pid)
# if there were redirects we need to checkout old conn
conn = opts[:old_conn] || opts[:conn]
if conn, do: :ok = Pleroma.Pool.Connections.checkout(conn, self(), :gun_connections)
:done
end
def stream_body(client) do
case read_chunk!(client) do
{:fin, body} ->
{:ok, body, Map.put(client, :fin, true)}
{:nofin, part} ->
{:ok, part, client}
{:error, error} ->
{:error, error}
end
end
defp read_chunk!(%{pid: pid, stream: stream, opts: opts}) do
adapter = check_adapter()
adapter.read_chunk(pid, stream, opts)
end
@impl true
@spec close(map) :: :ok | no_return()
def close(%{pid: pid}) do
adapter = check_adapter()
adapter.close(pid)
end
defp check_adapter do
adapter = Application.get_env(:tesla, :adapter)
unless adapter == Tesla.Adapter.Gun do
raise "#{adapter} doesn't support reading body in chunks"
end
adapter
end
end

View File

@ -3,8 +3,6 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy do
alias Pleroma.HTTP
@keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++
~w(if-unmodified-since if-none-match if-range range)
@resp_cache_headers ~w(etag date last-modified)
@ -58,10 +56,10 @@ defmodule Pleroma.ReverseProxy do
* `req_headers`, `resp_headers` additional headers.
* `http`: options for [hackney](https://github.com/benoitc/hackney).
* `http`: options for [hackney](https://github.com/benoitc/hackney) or [gun](https://github.com/ninenines/gun).
"""
@default_hackney_options [pool: :media]
@default_options [pool: :media]
@inline_content_types [
"image/gif",
@ -94,11 +92,7 @@ defmodule Pleroma.ReverseProxy do
def call(_conn, _url, _opts \\ [])
def call(conn = %{method: method}, url, opts) when method in @methods do
hackney_opts =
Pleroma.HTTP.Connection.hackney_options([])
|> Keyword.merge(@default_hackney_options)
|> Keyword.merge(Keyword.get(opts, :http, []))
|> HTTP.process_request_options()
client_opts = Keyword.merge(@default_options, Keyword.get(opts, :http, []))
req_headers = build_req_headers(conn.req_headers, opts)
@ -110,7 +104,7 @@ defmodule Pleroma.ReverseProxy do
end
with {:ok, nil} <- Cachex.get(:failed_proxy_url_cache, url),
{:ok, code, headers, client} <- request(method, url, req_headers, hackney_opts),
{:ok, code, headers, client} <- request(method, url, req_headers, client_opts),
:ok <-
header_length_constraint(
headers,
@ -156,11 +150,11 @@ defmodule Pleroma.ReverseProxy do
|> halt()
end
defp request(method, url, headers, hackney_opts) do
defp request(method, url, headers, opts) do
Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
method = method |> String.downcase() |> String.to_existing_atom()
case client().request(method, url, headers, "", hackney_opts) do
case client().request(method, url, headers, "", opts) do
{:ok, code, headers, client} when code in @valid_resp_codes ->
{:ok, code, downcase_headers(headers), client}
@ -210,7 +204,7 @@ defmodule Pleroma.ReverseProxy do
duration,
Keyword.get(opts, :max_read_duration, @max_read_duration)
),
{:ok, data} <- client().stream_body(client),
{:ok, data, client} <- client().stream_body(client),
{:ok, duration} <- increase_read_duration(duration),
sent_so_far = sent_so_far + byte_size(data),
:ok <-

View File

@ -305,16 +305,12 @@ defmodule Pleroma.User do
end
end
def profile_url(%User{source_data: %{"url" => url}}), do: url
def profile_url(%User{ap_id: ap_id}), do: ap_id
def profile_url(_), do: nil
def ap_id(%User{nickname: nickname}), do: "#{Web.base_url()}/users/#{nickname}"
def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
@spec ap_following(User.t()) :: Sring.t()
@spec ap_following(User.t()) :: String.t()
def ap_following(%User{following_address: fa}) when is_binary(fa), do: fa
def ap_following(%User{} = user), do: "#{ap_id(user)}/following"

View File

@ -1379,6 +1379,18 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
end
end
@spec get_actor_url(any()) :: binary() | nil
defp get_actor_url(url) when is_binary(url), do: url
defp get_actor_url(%{"href" => href}) when is_binary(href), do: href
defp get_actor_url(url) when is_list(url) do
url
|> List.first()
|> get_actor_url()
end
defp get_actor_url(_url), do: nil
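
The clauses above normalize the ActivityPub "url" property, which remote software may serialize as a plain string, a Link map with an "href", or a list of either. For illustration (the function is private and the URLs are made up):

get_actor_url("https://example.com/@alice")
#=> "https://example.com/@alice"

get_actor_url(%{"href" => "https://example.com/@alice"})
#=> "https://example.com/@alice"

get_actor_url([%{"href" => "https://example.com/@alice"}, "https://mirror.example/@alice"])
#=> "https://example.com/@alice"

get_actor_url(42)
#=> nil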
defp object_to_user_data(data) do
avatar =
data["icon"]["url"] &&
@ -1408,6 +1420,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
user_data = %{
ap_id: data["id"],
uri: get_actor_url(data["url"]),
ap_enabled: true,
source_data: data,
banner: banner,

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiFollowbotPolicy do

View File

@ -12,17 +12,23 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
require Logger
@hackney_options [
pool: :media,
recv_timeout: 10_000
@options [
pool: :media
]
def perform(:prefetch, url) do
Logger.debug("Prefetching #{inspect(url)}")
opts =
if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
Keyword.put(@options, :recv_timeout, 10_000)
else
@options
end
url
|> MediaProxy.url()
|> HTTP.get([], adapter: @hackney_options)
|> HTTP.get([], adapter: opts)
end
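
The conditional above only merges the hackney-specific option (:recv_timeout) when the hackney adapter is configured; the same pattern, with :with_body as well, reappears in Pleroma.Web.RelMe and Pleroma.Web.RichMedia.Parser further down. A shared helper could factor it out — a hypothetical refactor, not part of this changeset:

defp adapter_opts(base, hackney_only) do
  # hackney-only keys are merged in only when hackney is the configured Tesla adapter
  if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
    Keyword.merge(base, hackney_only)
  else
    base
  end
end

# usage: HTTP.get(url, [], adapter: adapter_opts(@options, recv_timeout: 10_000))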
def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message) do

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.NoPlaceholderTextPolicy do

View File

@ -795,102 +795,6 @@ defmodule Pleroma.Web.ActivityPub.Utils do
ActivityPub.fetch_activities([], params, :offset)
end
def parse_report_group(activity) do
reports = get_reports_by_status_id(activity["id"])
max_date = Enum.max_by(reports, &NaiveDateTime.from_iso8601!(&1.data["published"]))
actors = Enum.map(reports, & &1.user_actor)
[%{data: %{"object" => [account_id | _]}} | _] = reports
account =
AccountView.render("show.json", %{
user: User.get_by_ap_id(account_id)
})
status = get_status_data(activity)
%{
date: max_date.data["published"],
account: account,
status: status,
actors: Enum.uniq(actors),
reports: reports
}
end
defp get_status_data(status) do
case status["deleted"] do
true ->
%{
"id" => status["id"],
"deleted" => true
}
_ ->
Activity.get_by_ap_id(status["id"])
end
end
def get_reports_by_status_id(ap_id) do
from(a in Activity,
where: fragment("(?)->>'type' = 'Flag'", a.data),
where: fragment("(?)->'object' @> ?", a.data, ^[%{id: ap_id}]),
or_where: fragment("(?)->'object' @> ?", a.data, ^[ap_id])
)
|> Activity.with_preloaded_user_actor()
|> Repo.all()
end
@spec get_reports_grouped_by_status([String.t()]) :: %{
required(:groups) => [
%{
required(:date) => String.t(),
required(:account) => %{},
required(:status) => %{},
required(:actors) => [%User{}],
required(:reports) => [%Activity{}]
}
]
}
def get_reports_grouped_by_status(activity_ids) do
parsed_groups =
activity_ids
|> Enum.map(fn id ->
id
|> build_flag_object()
|> parse_report_group()
end)
%{
groups: parsed_groups
}
end
@spec get_reported_activities() :: [
%{
required(:activity) => String.t(),
required(:date) => String.t()
}
]
def get_reported_activities do
reported_activities_query =
from(a in Activity,
where: fragment("(?)->>'type' = 'Flag'", a.data),
select: %{
activity: fragment("jsonb_array_elements((? #- '{object,0}')->'object')", a.data)
},
group_by: fragment("activity")
)
from(a in subquery(reported_activities_query),
distinct: true,
select: %{
id: fragment("COALESCE(?->>'id'::text, ? #>> '{}')", a.activity, a.activity)
}
)
|> Repo.all()
|> Enum.map(& &1.id)
end
def update_report_state(%Activity{} = activity, state)
when state in @strip_status_report_states do
{:ok, stripped_activity} = strip_report_status_data(activity)

View File

@ -715,14 +715,6 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
|> render("index.json", %{reports: reports})
end
def list_grouped_reports(conn, _params) do
statuses = Utils.get_reported_activities()
conn
|> put_view(ReportView)
|> render("index_grouped.json", Utils.get_reports_grouped_by_status(statuses))
end
def report_show(conn, %{"id" => id}) do
with %Activity{} = report <- Activity.get_by_id(id) do
conn

View File

@ -4,7 +4,7 @@
defmodule Pleroma.Web.AdminAPI.ReportView do
use Pleroma.Web, :view
alias Pleroma.Activity
alias Pleroma.HTML
alias Pleroma.User
alias Pleroma.Web.AdminAPI.Report
@ -44,32 +44,6 @@ defmodule Pleroma.Web.AdminAPI.ReportView do
}
end
def render("index_grouped.json", %{groups: groups}) do
reports =
Enum.map(groups, fn group ->
status =
case group.status do
%Activity{} = activity -> StatusView.render("show.json", %{activity: activity})
_ -> group.status
end
%{
date: group[:date],
account: group[:account],
status: Map.put_new(status, "deleted", false),
actors: Enum.map(group[:actors], &merge_account_views/1),
reports:
group[:reports]
|> Enum.map(&Report.extract_report_info(&1))
|> Enum.map(&render(__MODULE__, "show.json", &1))
}
end)
%{
reports: reports
}
end
def render("index_notes.json", %{notes: notes}) when is_list(notes) do
Enum.map(notes, &render(__MODULE__, "show_note.json", &1))
end

View File

@ -43,7 +43,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
id: to_string(user.id),
acct: user.nickname,
username: username_from_nickname(user.nickname),
url: User.profile_url(user)
url: user.uri || user.ap_id
}
end
@ -207,7 +207,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
following_count: following_count,
statuses_count: user.note_count,
note: user.bio || "",
url: User.profile_url(user),
url: user.uri || user.ap_id,
avatar: image,
avatar_static: image,
header: header,

View File

@ -6,7 +6,12 @@ defmodule Pleroma.Web.Metadata do
alias Phoenix.HTML
def build_tags(params) do
Enum.reduce(Pleroma.Config.get([__MODULE__, :providers], []), "", fn parser, acc ->
providers = [
Pleroma.Web.Metadata.Providers.RestrictIndexing
| Pleroma.Config.get([__MODULE__, :providers], [])
]
Enum.reduce(providers, "", fn parser, acc ->
rendered_html =
params
|> parser.build_tags()

View File

@ -68,7 +68,7 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraph do
property: "og:title",
content: Utils.user_name_string(user)
], []},
{:meta, [property: "og:url", content: User.profile_url(user)], []},
{:meta, [property: "og:url", content: user.uri || user.ap_id], []},
{:meta, [property: "og:description", content: truncated_bio], []},
{:meta, [property: "og:type", content: "website"], []},
{:meta, [property: "og:image", content: Utils.attachment_url(User.avatar_url(user))], []},

View File

@ -0,0 +1,25 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Metadata.Providers.RestrictIndexing do
@behaviour Pleroma.Web.Metadata.Providers.Provider
@moduledoc """
Restricts indexing of remote users.
"""
@impl true
def build_tags(%{user: %{local: false}}) do
[
{:meta,
[
name: "robots",
content: "noindex, noarchive"
], []}
]
end
@impl true
def build_tags(%{user: %{local: true}}), do: []
end
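
For a remote account the provider returns a single tag tuple, which Pleroma.Web.Metadata renders into the page head; the rendered output below is only approximate:

iex> Pleroma.Web.Metadata.Providers.RestrictIndexing.build_tags(%{user: %{local: false}})
[{:meta, [name: "robots", content: "noindex, noarchive"], []}]
# rendered roughly as: <meta name="robots" content="noindex, noarchive">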

View File

@ -3,11 +3,9 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RelMe do
@hackney_options [
@options [
pool: :media,
recv_timeout: 2_000,
max_body: 2_000_000,
with_body: true
max_body: 2_000_000
]
if Pleroma.Config.get(:env) == :test do
@ -25,8 +23,18 @@ defmodule Pleroma.Web.RelMe do
def parse(_), do: {:error, "No URL provided"}
defp parse_url(url) do
opts =
if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
Keyword.merge(@options,
recv_timeout: 2_000,
with_body: true
)
else
@options
end
with {:ok, %Tesla.Env{body: html, status: status}} when status in 200..299 <-
Pleroma.HTTP.get(url, [], adapter: @hackney_options),
Pleroma.HTTP.get(url, [], adapter: opts),
{:ok, html_tree} <- Floki.parse_document(html),
data <-
Floki.attribute(html_tree, "link[rel~=me]", "href") ++

View File

@ -3,11 +3,9 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Parser do
@hackney_options [
@options [
pool: :media,
recv_timeout: 2_000,
max_body: 2_000_000,
with_body: true
max_body: 2_000_000
]
defp parsers do
@ -77,8 +75,18 @@ defmodule Pleroma.Web.RichMedia.Parser do
end
defp parse_url(url) do
opts =
if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
Keyword.merge(@options,
recv_timeout: 2_000,
with_body: true
)
else
@options
end
try do
{:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: @hackney_options)
{:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: opts)
html
|> parse_html()

View File

@ -186,7 +186,6 @@ defmodule Pleroma.Web.Router do
patch("/users/resend_confirmation_email", AdminAPIController, :resend_confirmation_email)
get("/reports", AdminAPIController, :list_reports)
get("/grouped_reports", AdminAPIController, :list_grouped_reports)
get("/reports/:id", AdminAPIController, :report_show)
patch("/reports", AdminAPIController, :reports_update)
post("/reports/:id/notes", AdminAPIController, :report_notes_create)

View File

@ -1,5 +1,5 @@
<div class="p-author h-card">
<a class="u-url" rel="author noopener" href="<%= User.profile_url(@user) %>">
<a class="u-url" rel="author noopener" href="<%= (@user.uri || @user.ap_id) %>">
<div class="avatar">
<img src="<%= User.avatar_url(@user) |> MediaProxy.url %>" width="48" height="48" alt="">
</div>

View File

@ -8,7 +8,7 @@
<button type="submit" class="collapse">Remote follow</button>
</form>
<%= raw Formatter.emojify(@user.name, emoji_for_user(@user)) %> |
<%= link "@#{@user.nickname}@#{Endpoint.host()}", to: User.profile_url(@user) %>
<%= link "@#{@user.nickname}@#{Endpoint.host()}", to: (@user.uri || @user.ap_id) %>
</h3>
<p><%= raw @user.bio %></p>
</header>

View File

@ -173,7 +173,8 @@ defmodule Pleroma.Web.WebFinger do
get_template_from_xml(body)
else
_ ->
with {:ok, %{body: body}} <- HTTP.get("https://#{domain}/.well-known/host-meta", []) do
with {:ok, %{body: body, status: status}} when status in 200..299 <-
HTTP.get("https://#{domain}/.well-known/host-meta", []) do
get_template_from_xml(body)
else
e -> {:error, "Can't find LRDD template: #{inspect(e)}"}
@ -205,7 +206,7 @@ defmodule Pleroma.Web.WebFinger do
with response <-
HTTP.get(
address,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
),
{:ok, %{status: status, body: body}} when status in 200..299 <- response do
doc = XML.parse_document(body)

10
mix.exs
View File

@ -119,7 +119,15 @@ defmodule Pleroma.Mixfile do
{:calendar, "~> 0.17.4"},
{:cachex, "~> 3.2"},
{:poison, "~> 3.0", override: true},
{:tesla, "~> 1.3", override: true},
# {:tesla, "~> 1.3", override: true},
{:tesla,
git: "https://git.pleroma.social/pleroma/elixir-libraries/tesla.git",
ref: "61b7503cef33f00834f78ddfafe0d5d9dec2270b",
override: true},
{:castore, "~> 0.1"},
{:cowlib, "~> 2.8", override: true},
{:gun,
github: "ninenines/gun", ref: "e1a69b36b180a574c0ac314ced9613fdd52312cc", override: true},
{:jason, "~> 1.0"},
{:mogrify, "~> 0.6.1"},
{:ex_aws, "~> 2.1"},

View File

@ -10,6 +10,7 @@
"cachex": {:hex, :cachex, "3.2.0", "a596476c781b0646e6cb5cd9751af2e2974c3e0d5498a8cab71807618b74fe2f", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "aef93694067a43697ae0531727e097754a9e992a1e7946296f5969d6dd9ac986"},
"calendar": {:hex, :calendar, "0.17.6", "ec291cb2e4ba499c2e8c0ef5f4ace974e2f9d02ae9e807e711a9b0c7850b9aee", [:mix], [{:tzdata, "~> 0.5.20 or ~> 0.1.201603 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "738d0e17a93c2ccfe4ddc707bdc8e672e9074c8569498483feb1c4530fb91b2b"},
"captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "e0f16822d578866e186a0974d65ad58cddc1e2ab", [ref: "e0f16822d578866e186a0974d65ad58cddc1e2ab"]},
"castore": {:hex, :castore, "0.1.5", "591c763a637af2cc468a72f006878584bc6c306f8d111ef8ba1d4c10e0684010", [:mix], [], "hexpm", "6db356b2bc6cc22561e051ff545c20ad064af57647e436650aa24d7d06cd941a"},
"certifi": {:hex, :certifi, "2.5.1", "867ce347f7c7d78563450a18a6a28a8090331e77fa02380b4a21962a65d36ee5", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm", "805abd97539caf89ec6d4732c91e62ba9da0cda51ac462380bbd28ee697a8c42"},
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
"comeonin": {:hex, :comeonin, "4.1.2", "3eb5620fd8e35508991664b4c2b04dd41e52f1620b36957be837c1d7784b7592", [:mix], [{:argon2_elixir, "~> 1.2", [hex: :argon2_elixir, repo: "hexpm", optional: true]}, {:bcrypt_elixir, "~> 0.12.1 or ~> 1.0", [hex: :bcrypt_elixir, repo: "hexpm", optional: true]}, {:pbkdf2_elixir, "~> 0.12", [hex: :pbkdf2_elixir, repo: "hexpm", optional: true]}], "hexpm", "d8700a0ca4dbb616c22c9b3f6dd539d88deaafec3efe66869d6370c9a559b3e9"},
@ -46,6 +47,7 @@
"gen_stage": {:hex, :gen_stage, "0.14.3", "d0c66f1c87faa301c1a85a809a3ee9097a4264b2edf7644bf5c123237ef732bf", [:mix], [], "hexpm"},
"gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm"},
"gettext": {:hex, :gettext, "0.17.4", "f13088e1ec10ce01665cf25f5ff779e7df3f2dc71b37084976cf89d1aa124d5c", [:mix], [], "hexpm", "3c75b5ea8288e2ee7ea503ff9e30dfe4d07ad3c054576a6e60040e79a801e14d"},
"gun": {:git, "https://github.com/ninenines/gun.git", "e1a69b36b180a574c0ac314ced9613fdd52312cc", [ref: "e1a69b36b180a574c0ac314ced9613fdd52312cc"]},
"hackney": {:hex, :hackney, "1.15.2", "07e33c794f8f8964ee86cebec1a8ed88db5070e52e904b8f12209773c1036085", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.5", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "e0100f8ef7d1124222c11ad362c857d3df7cb5f4204054f9f0f4a728666591fc"},
"html_entities": {:hex, :html_entities, "0.5.1", "1c9715058b42c35a2ab65edc5b36d0ea66dd083767bef6e3edb57870ef556549", [:mix], [], "hexpm", "30efab070904eb897ff05cd52fa61c1025d7f8ef3a9ca250bc4e6513d16c32de"},
"html_sanitize_ex": {:hex, :html_sanitize_ex, "1.3.0", "f005ad692b717691203f940c686208aa3d8ffd9dd4bb3699240096a51fa9564e", [:mix], [{:mochiweb, "~> 2.15", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
@ -101,7 +103,7 @@
"swoosh": {:hex, :swoosh, "0.23.5", "bfd9404bbf5069b1be2ffd317923ce57e58b332e25dbca2a35dedd7820dfee5a", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm", "e3928e1d2889a308aaf3e42755809ac21cffd77cb58eef01cbfdab4ce2fd1e21"},
"syslog": {:hex, :syslog, "1.0.6", "995970c9aa7feb380ac493302138e308d6e04fd57da95b439a6df5bb3bf75076", [:rebar3], [], "hexpm", "769ddfabd0d2a16f3f9c17eb7509951e0ca4f68363fb26f2ee51a8ec4a49881a"},
"telemetry": {:hex, :telemetry, "0.4.1", "ae2718484892448a24470e6aa341bc847c3277bfb8d4e9289f7474d752c09c7f", [:rebar3], [], "hexpm", "4738382e36a0a9a2b6e25d67c960e40e1a2c95560b9f936d8e29de8cd858480f"},
"tesla": {:hex, :tesla, "1.3.2", "deb92c5c9ce35e747a395ba413ca78593a4f75bf0e1545630ee2e3d34264021e", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "~> 4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.3", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "7567704c4790e21bd9a961b56d0b6a988ff68cc4dacfe6b2106e258da1d5cdda"},
"tesla": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/tesla.git", "61b7503cef33f00834f78ddfafe0d5d9dec2270b", [ref: "61b7503cef33f00834f78ddfafe0d5d9dec2270b"]},
"timex": {:hex, :timex, "3.6.1", "efdf56d0e67a6b956cc57774353b0329c8ab7726766a11547e529357ffdc1d56", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5 or ~> 1.0.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "f354efb2400dd7a80fd9eb6c8419068c4f632da4ac47f3d8822d6e33f08bc852"},
"trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bd4fde4c15f3e993a999e019d64347489b91b7a9096af68b2bdadd192afa693f"},
"tzdata": {:hex, :tzdata, "0.5.22", "f2ba9105117ee0360eae2eca389783ef7db36d533899b2e84559404dbc77ebb8", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "cd66c8a1e6a9e121d1f538b01bef459334bb4029a1ffb4eeeb5e4eae0337e7b6"},

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Repo.Migrations.CreateConversations do

View File

@ -62,7 +62,7 @@ defmodule Restarter.Pleroma do
end
def handle_cast({:restart, :test, _}, state) do
Logger.warn("pleroma restarted")
Logger.debug("pleroma manually restarted")
{:noreply, Map.put(state, :need_reboot, false)}
end
@ -75,7 +75,7 @@ defmodule Restarter.Pleroma do
def handle_cast({:after_boot, _}, %{after_boot: true} = state), do: {:noreply, state}
def handle_cast({:after_boot, :test}, state) do
Logger.warn("pleroma restarted")
Logger.debug("pleroma restarted after boot")
state = %{state | after_boot: true, rebooted: true}
{:noreply, state}
end

View File

@ -83,7 +83,7 @@ defmodule Pleroma.Activity.Ir.TopicsTest do
assert Enum.member?(topics, "hashtag:bar")
end
test "only converts strinngs to hash tags", %{
test "only converts strings to hash tags", %{
activity: %{object: %{data: data} = object} = activity
} do
tagged_data = Map.put(data, "tag", [2])

View File

@ -478,14 +478,6 @@ defmodule Pleroma.ConfigDBTest do
assert ConfigDB.from_binary(binary) == [key: "value"]
end
test "keyword with partial_chain key" do
binary =
ConfigDB.transform([%{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]}])
assert binary == :erlang.term_to_binary(partial_chain: &:hackney_connect.partial_chain/1)
assert ConfigDB.from_binary(binary) == [partial_chain: &:hackney_connect.partial_chain/1]
end
test "keyword" do
binary =
ConfigDB.transform([

41
test/fixtures/users_mock/localhost.json vendored Normal file
View File

@ -0,0 +1,41 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"http://localhost:4001/schemas/litepub-0.1.jsonld",
{
"@language": "und"
}
],
"attachment": [],
"endpoints": {
"oauthAuthorizationEndpoint": "http://localhost:4001/oauth/authorize",
"oauthRegistrationEndpoint": "http://localhost:4001/api/v1/apps",
"oauthTokenEndpoint": "http://localhost:4001/oauth/token",
"sharedInbox": "http://localhost:4001/inbox"
},
"followers": "http://localhost:4001/users/{{nickname}}/followers",
"following": "http://localhost:4001/users/{{nickname}}/following",
"icon": {
"type": "Image",
"url": "http://localhost:4001/media/4e914f5b84e4a259a3f6c2d2edc9ab642f2ab05f3e3d9c52c81fc2d984b3d51e.jpg"
},
"id": "http://localhost:4001/users/{{nickname}}",
"image": {
"type": "Image",
"url": "http://localhost:4001/media/f739efddefeee49c6e67e947c4811fdc911785c16ae43da4c3684051fbf8da6a.jpg?name=f739efddefeee49c6e67e947c4811fdc911785c16ae43da4c3684051fbf8da6a.jpg"
},
"inbox": "http://localhost:4001/users/{{nickname}}/inbox",
"manuallyApprovesFollowers": false,
"name": "{{nickname}}",
"outbox": "http://localhost:4001/users/{{nickname}}/outbox",
"preferredUsername": "{{nickname}}",
"publicKey": {
"id": "http://localhost:4001/users/{{nickname}}#main-key",
"owner": "http://localhost:4001/users/{{nickname}}",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5DLtwGXNZElJyxFGfcVc\nXANhaMadj/iYYQwZjOJTV9QsbtiNBeIK54PJrYuU0/0YIdrvS1iqheX5IwXRhcwa\nhm3ZyLz7XeN9st7FBni4BmZMBtMpxAuYuu5p/jbWy13qAiYOhPreCx0wrWgm/lBD\n9mkgaxIxPooBE0S4ZWEJIDIV1Vft3AWcRUyWW1vIBK0uZzs6GYshbQZB952S0yo4\nFzI1hABGHncH8UvuFauh4EZ8tY7/X5I0pGRnDOcRN1dAht5w5yTA+6r5kebiFQjP\nIzN/eCO/a9Flrj9YGW7HDNtjSOH0A31PLRGlJtJO3yK57dnf5ppyCZGfL4emShQo\ncQIDAQAB\n-----END PUBLIC KEY-----\n\n"
},
"summary": "your friendly neighborhood pleroma developer<br>I like cute things and distributed systems, and really hate delete and redrafts",
"tag": [],
"type": "Person",
"url": "http://localhost:4001/users/{{nickname}}"
}

View File

@ -0,0 +1 @@
21.1

View File

@ -0,0 +1 @@
22.1

View File

@ -0,0 +1 @@
22.4

View File

@ -0,0 +1 @@
23.0

View File

@ -0,0 +1,258 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.AdapterHelper.GunTest do
use ExUnit.Case, async: true
use Pleroma.Tests.Helpers
import Mox
alias Pleroma.Config
alias Pleroma.Gun.Conn
alias Pleroma.HTTP.AdapterHelper.Gun
alias Pleroma.Pool.Connections
setup :verify_on_exit!
defp gun_mock(_) do
gun_mock()
:ok
end
defp gun_mock do
Pleroma.GunMock
|> stub(:open, fn _, _, _ -> Task.start_link(fn -> Process.sleep(1000) end) end)
|> stub(:await_up, fn _, _ -> {:ok, :http} end)
|> stub(:set_owner, fn _, _ -> :ok end)
end
describe "options/1" do
setup do: clear_config([:http, :adapter], a: 1, b: 2)
test "https url with default port" do
uri = URI.parse("https://example.com")
opts = Gun.options([receive_conn: false], uri)
assert opts[:certificates_verification]
assert opts[:tls_opts][:log_level] == :warning
end
test "https ipv4 with default port" do
uri = URI.parse("https://127.0.0.1")
opts = Gun.options([receive_conn: false], uri)
assert opts[:certificates_verification]
assert opts[:tls_opts][:log_level] == :warning
end
test "https ipv6 with default port" do
uri = URI.parse("https://[2a03:2880:f10c:83:face:b00c:0:25de]")
opts = Gun.options([receive_conn: false], uri)
assert opts[:certificates_verification]
assert opts[:tls_opts][:log_level] == :warning
end
test "https url with non standart port" do
uri = URI.parse("https://example.com:115")
opts = Gun.options([receive_conn: false], uri)
assert opts[:certificates_verification]
end
test "get conn on next request" do
gun_mock()
level = Application.get_env(:logger, :level)
Logger.configure(level: :debug)
on_exit(fn -> Logger.configure(level: level) end)
uri = URI.parse("http://some-domain2.com")
opts = Gun.options(uri)
assert opts[:conn] == nil
assert opts[:close_conn] == nil
Process.sleep(50)
opts = Gun.options(uri)
assert is_pid(opts[:conn])
assert opts[:close_conn] == false
end
test "merges with defaul http adapter config" do
defaults = Gun.options([receive_conn: false], URI.parse("https://example.com"))
assert Keyword.has_key?(defaults, :a)
assert Keyword.has_key?(defaults, :b)
end
test "default ssl adapter opts with connection" do
gun_mock()
uri = URI.parse("https://some-domain.com")
:ok = Conn.open(uri, :gun_connections)
opts = Gun.options(uri)
assert opts[:certificates_verification]
refute opts[:tls_opts] == []
assert opts[:close_conn] == false
assert is_pid(opts[:conn])
end
test "parses string proxy host & port" do
proxy = Config.get([:http, :proxy_url])
Config.put([:http, :proxy_url], "localhost:8123")
on_exit(fn -> Config.put([:http, :proxy_url], proxy) end)
uri = URI.parse("https://some-domain.com")
opts = Gun.options([receive_conn: false], uri)
assert opts[:proxy] == {'localhost', 8123}
end
test "parses tuple proxy scheme host and port" do
proxy = Config.get([:http, :proxy_url])
Config.put([:http, :proxy_url], {:socks, 'localhost', 1234})
on_exit(fn -> Config.put([:http, :proxy_url], proxy) end)
uri = URI.parse("https://some-domain.com")
opts = Gun.options([receive_conn: false], uri)
assert opts[:proxy] == {:socks, 'localhost', 1234}
end
test "passed opts have more weight than defaults" do
proxy = Config.get([:http, :proxy_url])
Config.put([:http, :proxy_url], {:socks5, 'localhost', 1234})
on_exit(fn -> Config.put([:http, :proxy_url], proxy) end)
uri = URI.parse("https://some-domain.com")
opts = Gun.options([receive_conn: false, proxy: {'example.com', 4321}], uri)
assert opts[:proxy] == {'example.com', 4321}
end
end
describe "options/1 with receive_conn parameter" do
setup :gun_mock
test "receive conn by default" do
uri = URI.parse("http://another-domain.com")
:ok = Conn.open(uri, :gun_connections)
received_opts = Gun.options(uri)
assert received_opts[:close_conn] == false
assert is_pid(received_opts[:conn])
end
test "don't receive conn if receive_conn is false" do
uri = URI.parse("http://another-domain.com")
:ok = Conn.open(uri, :gun_connections)
opts = [receive_conn: false]
received_opts = Gun.options(opts, uri)
assert received_opts[:close_conn] == nil
assert received_opts[:conn] == nil
end
end
describe "after_request/1" do
setup :gun_mock
test "body_as not chunks" do
uri = URI.parse("http://some-domain.com")
:ok = Conn.open(uri, :gun_connections)
opts = Gun.options(uri)
:ok = Gun.after_request(opts)
conn = opts[:conn]
assert %Connections{
conns: %{
"http:some-domain.com:80" => %Pleroma.Gun.Conn{
conn: ^conn,
conn_state: :idle,
used_by: []
}
}
} = Connections.get_state(:gun_connections)
end
test "body_as chunks" do
uri = URI.parse("http://some-domain.com")
:ok = Conn.open(uri, :gun_connections)
opts = Gun.options([body_as: :chunks], uri)
:ok = Gun.after_request(opts)
conn = opts[:conn]
self = self()
assert %Connections{
conns: %{
"http:some-domain.com:80" => %Pleroma.Gun.Conn{
conn: ^conn,
conn_state: :active,
used_by: [{^self, _}]
}
}
} = Connections.get_state(:gun_connections)
end
test "with no connection" do
uri = URI.parse("http://uniq-domain.com")
:ok = Conn.open(uri, :gun_connections)
opts = Gun.options([body_as: :chunks], uri)
conn = opts[:conn]
opts = Keyword.delete(opts, :conn)
self = self()
:ok = Gun.after_request(opts)
assert %Connections{
conns: %{
"http:uniq-domain.com:80" => %Pleroma.Gun.Conn{
conn: ^conn,
conn_state: :active,
used_by: [{^self, _}]
}
}
} = Connections.get_state(:gun_connections)
end
test "with ipv4" do
uri = URI.parse("http://127.0.0.1")
:ok = Conn.open(uri, :gun_connections)
opts = Gun.options(uri)
:ok = Gun.after_request(opts)
conn = opts[:conn]
assert %Connections{
conns: %{
"http:127.0.0.1:80" => %Pleroma.Gun.Conn{
conn: ^conn,
conn_state: :idle,
used_by: []
}
}
} = Connections.get_state(:gun_connections)
end
test "with ipv6" do
uri = URI.parse("http://[2a03:2880:f10c:83:face:b00c:0:25de]")
:ok = Conn.open(uri, :gun_connections)
opts = Gun.options(uri)
:ok = Gun.after_request(opts)
conn = opts[:conn]
assert %Connections{
conns: %{
"http:2a03:2880:f10c:83:face:b00c:0:25de:80" => %Pleroma.Gun.Conn{
conn: ^conn,
conn_state: :idle,
used_by: []
}
}
} = Connections.get_state(:gun_connections)
end
end
end
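
These tests exercise the option handling for the new gun adapter. The adapter itself is selected through the standard Tesla application setting, which the code above reads via Application.get_env(:tesla, :adapter) — a minimal config sketch (the file path is an example):

# config/prod.secret.exs
config :tesla, adapter: Tesla.Adapter.Gun

# or keep using hackney:
# config :tesla, adapter: Tesla.Adapter.Hackney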

View File

@ -0,0 +1,47 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.AdapterHelper.HackneyTest do
use ExUnit.Case, async: true
use Pleroma.Tests.Helpers
alias Pleroma.HTTP.AdapterHelper.Hackney
setup_all do
uri = URI.parse("http://domain.com")
{:ok, uri: uri}
end
describe "options/2" do
setup do: clear_config([:http, :adapter], a: 1, b: 2)
test "add proxy and opts from config", %{uri: uri} do
opts = Hackney.options([proxy: "localhost:8123"], uri)
assert opts[:a] == 1
assert opts[:b] == 2
assert opts[:proxy] == "localhost:8123"
end
test "respect connection opts and no proxy", %{uri: uri} do
opts = Hackney.options([a: 2, b: 1], uri)
assert opts[:a] == 2
assert opts[:b] == 1
refute Keyword.has_key?(opts, :proxy)
end
test "add opts for https" do
uri = URI.parse("https://domain.com")
opts = Hackney.options(uri)
assert opts[:ssl_options] == [
partial_chain: &:hackney_connect.partial_chain/1,
versions: [:tlsv1, :"tlsv1.1", :"tlsv1.2"],
server_name_indication: 'domain.com'
]
end
end
end

View File

@ -0,0 +1,28 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.AdapterHelperTest do
use ExUnit.Case, async: true
alias Pleroma.HTTP.AdapterHelper
describe "format_proxy/1" do
test "with nil" do
assert AdapterHelper.format_proxy(nil) == nil
end
test "with string" do
assert AdapterHelper.format_proxy("127.0.0.1:8123") == {{127, 0, 0, 1}, 8123}
end
test "localhost with port" do
assert AdapterHelper.format_proxy("localhost:8123") == {'localhost', 8123}
end
test "tuple" do
assert AdapterHelper.format_proxy({:socks4, :localhost, 9050}) ==
{:socks4, 'localhost', 9050}
end
end
end
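
The format_proxy/1 contract pinned down here — nil passes through, "host:port" strings become a {host, port} pair with IP literals parsed into tuples and hostnames turned into charlists, and {type, host, port} tuples keep their scheme — could be satisfied along these lines (an illustrative sketch, not necessarily the code under test):

def format_proxy(nil), do: nil

def format_proxy(proxy) when is_binary(proxy) do
  [host, port] = String.split(proxy, ":")
  {format_host(host), String.to_integer(port)}
end

def format_proxy({type, host, port}), do: {type, format_host(host), port}

defp format_host(host) when is_atom(host), do: to_charlist(host)

defp format_host(host) when is_binary(host) do
  # IP literals become tuples, anything else stays a charlist hostname
  case :inet.parse_address(to_charlist(host)) do
    {:ok, ip} -> ip
    {:error, _} -> to_charlist(host)
  end
end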

View File

@ -0,0 +1,135 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.ConnectionTest do
use ExUnit.Case, async: true
use Pleroma.Tests.Helpers
import ExUnit.CaptureLog
alias Pleroma.Config
alias Pleroma.HTTP.Connection
describe "parse_host/1" do
test "as atom to charlist" do
assert Connection.parse_host(:localhost) == 'localhost'
end
test "as string to charlist" do
assert Connection.parse_host("localhost.com") == 'localhost.com'
end
test "as string ip to tuple" do
assert Connection.parse_host("127.0.0.1") == {127, 0, 0, 1}
end
end
describe "parse_proxy/1" do
test "ip with port" do
assert Connection.parse_proxy("127.0.0.1:8123") == {:ok, {127, 0, 0, 1}, 8123}
end
test "host with port" do
assert Connection.parse_proxy("localhost:8123") == {:ok, 'localhost', 8123}
end
test "as tuple" do
assert Connection.parse_proxy({:socks4, :localhost, 9050}) ==
{:ok, :socks4, 'localhost', 9050}
end
test "as tuple with string host" do
assert Connection.parse_proxy({:socks5, "localhost", 9050}) ==
{:ok, :socks5, 'localhost', 9050}
end
end
describe "parse_proxy/1 errors" do
test "ip without port" do
capture_log(fn ->
assert Connection.parse_proxy("127.0.0.1") == {:error, :invalid_proxy}
end) =~ "parsing proxy fail \"127.0.0.1\""
end
test "host without port" do
capture_log(fn ->
assert Connection.parse_proxy("localhost") == {:error, :invalid_proxy}
end) =~ "parsing proxy fail \"localhost\""
end
test "host with bad port" do
capture_log(fn ->
assert Connection.parse_proxy("localhost:port") == {:error, :invalid_proxy_port}
end) =~ "parsing port in proxy fail \"localhost:port\""
end
test "ip with bad port" do
capture_log(fn ->
assert Connection.parse_proxy("127.0.0.1:15.9") == {:error, :invalid_proxy_port}
end) =~ "parsing port in proxy fail \"127.0.0.1:15.9\""
end
test "as tuple without port" do
capture_log(fn ->
assert Connection.parse_proxy({:socks5, :localhost}) == {:error, :invalid_proxy}
end) =~ "parsing proxy fail {:socks5, :localhost}"
end
test "with nil" do
assert Connection.parse_proxy(nil) == nil
end
end
describe "options/3" do
setup do: clear_config([:http, :proxy_url])
test "without proxy_url in config" do
Config.delete([:http, :proxy_url])
opts = Connection.options(%URI{})
refute Keyword.has_key?(opts, :proxy)
end
test "parses string proxy host & port" do
Config.put([:http, :proxy_url], "localhost:8123")
opts = Connection.options(%URI{})
assert opts[:proxy] == {'localhost', 8123}
end
test "parses tuple proxy scheme host and port" do
Config.put([:http, :proxy_url], {:socks, 'localhost', 1234})
opts = Connection.options(%URI{})
assert opts[:proxy] == {:socks, 'localhost', 1234}
end
test "passed opts have more weight than defaults" do
Config.put([:http, :proxy_url], {:socks5, 'localhost', 1234})
opts = Connection.options(%URI{}, proxy: {'example.com', 4321})
assert opts[:proxy] == {'example.com', 4321}
end
end
describe "format_host/1" do
test "with domain" do
assert Connection.format_host("example.com") == 'example.com'
end
test "with idna domain" do
assert Connection.format_host("ですexample.com") == 'xn--example-183fne.com'
end
test "with ipv4" do
assert Connection.format_host("127.0.0.1") == '127.0.0.1'
end
test "with ipv6" do
assert Connection.format_host("2a03:2880:f10c:83:face:b00c:0:25de") ==
'2a03:2880:f10c:83:face:b00c:0:25de'
end
end
end

View File

@ -5,6 +5,8 @@
defmodule Pleroma.HTTP.RequestBuilderTest do
use ExUnit.Case, async: true
use Pleroma.Tests.Helpers
alias Pleroma.Config
alias Pleroma.HTTP.Request
alias Pleroma.HTTP.RequestBuilder
describe "headers/2" do
@ -12,48 +14,31 @@ defmodule Pleroma.HTTP.RequestBuilderTest do
setup do: clear_config([:http, :user_agent])
test "don't send pleroma user agent" do
assert RequestBuilder.headers(%{}, []) == %{headers: []}
assert RequestBuilder.headers(%Request{}, []) == %Request{headers: []}
end
test "send pleroma user agent" do
Pleroma.Config.put([:http, :send_user_agent], true)
Pleroma.Config.put([:http, :user_agent], :default)
Config.put([:http, :send_user_agent], true)
Config.put([:http, :user_agent], :default)
assert RequestBuilder.headers(%{}, []) == %{
headers: [{"User-Agent", Pleroma.Application.user_agent()}]
assert RequestBuilder.headers(%Request{}, []) == %Request{
headers: [{"user-agent", Pleroma.Application.user_agent()}]
}
end
test "send custom user agent" do
Pleroma.Config.put([:http, :send_user_agent], true)
Pleroma.Config.put([:http, :user_agent], "totally-not-pleroma")
Config.put([:http, :send_user_agent], true)
Config.put([:http, :user_agent], "totally-not-pleroma")
assert RequestBuilder.headers(%{}, []) == %{
headers: [{"User-Agent", "totally-not-pleroma"}]
assert RequestBuilder.headers(%Request{}, []) == %Request{
headers: [{"user-agent", "totally-not-pleroma"}]
}
end
end
describe "add_optional_params/3" do
test "don't add if keyword is empty" do
assert RequestBuilder.add_optional_params(%{}, %{}, []) == %{}
end
test "add query parameter" do
assert RequestBuilder.add_optional_params(
%{},
%{query: :query, body: :body, another: :val},
[
{:query, "param1=val1&param2=val2"},
{:body, "some body"}
]
) == %{query: "param1=val1&param2=val2", body: "some body"}
end
end
describe "add_param/4" do
test "add file parameter" do
%{
%Request{
body: %Tesla.Multipart{
boundary: _,
content_type_params: [],
@ -70,7 +55,7 @@ defmodule Pleroma.HTTP.RequestBuilderTest do
}
]
}
} = RequestBuilder.add_param(%{}, :file, "filename.png", "some-path/filename.png")
} = RequestBuilder.add_param(%Request{}, :file, "filename.png", "some-path/filename.png")
end
test "add key to body" do
@ -82,7 +67,7 @@ defmodule Pleroma.HTTP.RequestBuilderTest do
%Tesla.Multipart.Part{
body: "\"someval\"",
dispositions: [name: "somekey"],
headers: ["Content-Type": "application/json"]
headers: [{"content-type", "application/json"}]
}
]
}

View File

@ -3,8 +3,10 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTPTest do
use Pleroma.DataCase
use ExUnit.Case, async: true
use Pleroma.Tests.Helpers
import Tesla.Mock
alias Pleroma.HTTP
setup do
mock(fn
@ -27,7 +29,7 @@ defmodule Pleroma.HTTPTest do
describe "get/1" do
test "returns successfully result" do
assert Pleroma.HTTP.get("http://example.com/hello") == {
assert HTTP.get("http://example.com/hello") == {
:ok,
%Tesla.Env{status: 200, body: "hello"}
}
@ -36,7 +38,7 @@ defmodule Pleroma.HTTPTest do
describe "get/2 (with headers)" do
test "returns successfully result for json content-type" do
assert Pleroma.HTTP.get("http://example.com/hello", [{"content-type", "application/json"}]) ==
assert HTTP.get("http://example.com/hello", [{"content-type", "application/json"}]) ==
{
:ok,
%Tesla.Env{
@ -50,7 +52,7 @@ defmodule Pleroma.HTTPTest do
describe "post/2" do
test "returns successfully result" do
assert Pleroma.HTTP.post("http://example.com/world", "") == {
assert HTTP.post("http://example.com/world", "") == {
:ok,
%Tesla.Env{status: 200, body: "world"}
}

View File

@ -784,12 +784,20 @@ defmodule Pleroma.NotificationTest do
"object" => remote_user.ap_id
}
remote_user_url = remote_user.ap_id
Tesla.Mock.mock(fn
%{method: :get, url: ^remote_user_url} ->
%Tesla.Env{status: 404, body: ""}
end)
{:ok, _delete_activity} = Transmogrifier.handle_incoming(delete_user_message)
ObanHelpers.perform_all()
assert Enum.empty?(Notification.for_user(local_user))
end
@tag capture_log: true
test "move activity generates a notification" do
%{ap_id: old_ap_id} = old_user = insert(:user)
%{ap_id: new_ap_id} = new_user = insert(:user, also_known_as: [old_ap_id])
@ -799,6 +807,18 @@ defmodule Pleroma.NotificationTest do
User.follow(follower, old_user)
User.follow(other_follower, old_user)
old_user_url = old_user.ap_id
body =
File.read!("test/fixtures/users_mock/localhost.json")
|> String.replace("{{nickname}}", old_user.nickname)
|> Jason.encode!()
Tesla.Mock.mock(fn
%{method: :get, url: ^old_user_url} ->
%Tesla.Env{status: 200, body: body}
end)
Pleroma.Web.ActivityPub.ActivityPub.move(old_user, new_user)
ObanHelpers.perform_all()

42
test/otp_version_test.exs Normal file
View File

@ -0,0 +1,42 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.OTPVersionTest do
use ExUnit.Case, async: true
alias Pleroma.OTPVersion
describe "check/1" do
test "22.4" do
assert OTPVersion.get_version_from_files(["test/fixtures/warnings/otp_version/22.4"]) ==
"22.4"
end
test "22.1" do
assert OTPVersion.get_version_from_files(["test/fixtures/warnings/otp_version/22.1"]) ==
"22.1"
end
test "21.1" do
assert OTPVersion.get_version_from_files(["test/fixtures/warnings/otp_version/21.1"]) ==
"21.1"
end
test "23.0" do
assert OTPVersion.get_version_from_files(["test/fixtures/warnings/otp_version/23.0"]) ==
"23.0"
end
test "with non existance file" do
assert OTPVersion.get_version_from_files([
"test/fixtures/warnings/otp_version/non-exising",
"test/fixtures/warnings/otp_version/22.4"
]) == "22.4"
end
test "empty paths" do
assert OTPVersion.get_version_from_files([]) == nil
end
end
end
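
The behaviour pinned down by these tests — try each candidate file in order, return the trimmed contents of the first one that exists, or nil when the list is exhausted — could be satisfied by something like this (a sketch consistent with the tests, not necessarily the shipped implementation):

def get_version_from_files(paths) do
  Enum.find_value(paths, fn path ->
    # skip missing files; the first readable one wins
    if File.exists?(path) do
      path |> File.read!() |> String.trim()
    end
  end)
end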

View File

@ -0,0 +1,760 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool.ConnectionsTest do
use ExUnit.Case, async: true
use Pleroma.Tests.Helpers
import ExUnit.CaptureLog
import Mox
alias Pleroma.Gun.Conn
alias Pleroma.GunMock
alias Pleroma.Pool.Connections
setup :verify_on_exit!
setup_all do
name = :test_connections
{:ok, pid} = Connections.start_link({name, [checkin_timeout: 150]})
{:ok, _} = Registry.start_link(keys: :unique, name: Pleroma.GunMock)
on_exit(fn ->
if Process.alive?(pid), do: GenServer.stop(name)
end)
{:ok, name: name}
end
defp open_mock(num \\ 1) do
GunMock
|> expect(:open, num, &start_and_register(&1, &2, &3))
|> expect(:await_up, num, fn _, _ -> {:ok, :http} end)
|> expect(:set_owner, num, fn _, _ -> :ok end)
end
defp connect_mock(mock) do
mock
|> expect(:connect, &connect(&1, &2))
|> expect(:await, &await(&1, &2))
end
defp info_mock(mock), do: expect(mock, :info, &info(&1))
defp start_and_register('gun-not-up.com', _, _), do: {:error, :timeout}
defp start_and_register(host, port, _) do
{:ok, pid} = Task.start_link(fn -> Process.sleep(1000) end)
scheme =
case port do
443 -> "https"
_ -> "http"
end
Registry.register(GunMock, pid, %{
origin_scheme: scheme,
origin_host: host,
origin_port: port
})
{:ok, pid}
end
defp info(pid) do
[{_, info}] = Registry.lookup(GunMock, pid)
info
end
defp connect(pid, _) do
ref = make_ref()
Registry.register(GunMock, ref, pid)
ref
end
defp await(pid, ref) do
[{_, ^pid}] = Registry.lookup(GunMock, ref)
{:response, :fin, 200, []}
end
defp now, do: :os.system_time(:second)
describe "alive?/2" do
test "is alive", %{name: name} do
assert Connections.alive?(name)
end
test "returns false if not started" do
refute Connections.alive?(:some_random_name)
end
end
test "opens connection and reuse it on next request", %{name: name} do
open_mock()
url = "http://some-domain.com"
key = "http:some-domain.com:80"
refute Connections.checkin(url, name)
:ok = Conn.open(url, name)
conn = Connections.checkin(url, name)
assert is_pid(conn)
assert Process.alive?(conn)
self = self()
%Connections{
conns: %{
^key => %Conn{
conn: ^conn,
gun_state: :up,
used_by: [{^self, _}],
conn_state: :active
}
}
} = Connections.get_state(name)
reused_conn = Connections.checkin(url, name)
assert conn == reused_conn
%Connections{
conns: %{
^key => %Conn{
conn: ^conn,
gun_state: :up,
used_by: [{^self, _}, {^self, _}],
conn_state: :active
}
}
} = Connections.get_state(name)
:ok = Connections.checkout(conn, self, name)
%Connections{
conns: %{
^key => %Conn{
conn: ^conn,
gun_state: :up,
used_by: [{^self, _}],
conn_state: :active
}
}
} = Connections.get_state(name)
:ok = Connections.checkout(conn, self, name)
%Connections{
conns: %{
^key => %Conn{
conn: ^conn,
gun_state: :up,
used_by: [],
conn_state: :idle
}
}
} = Connections.get_state(name)
end
test "reuse connection for idna domains", %{name: name} do
open_mock()
url = "http://ですsome-domain.com"
refute Connections.checkin(url, name)
:ok = Conn.open(url, name)
conn = Connections.checkin(url, name)
assert is_pid(conn)
assert Process.alive?(conn)
self = self()
%Connections{
conns: %{
"http:ですsome-domain.com:80" => %Conn{
conn: ^conn,
gun_state: :up,
used_by: [{^self, _}],
conn_state: :active
}
}
} = Connections.get_state(name)
reused_conn = Connections.checkin(url, name)
assert conn == reused_conn
end
test "reuse for ipv4", %{name: name} do
open_mock()
url = "http://127.0.0.1"
refute Connections.checkin(url, name)
:ok = Conn.open(url, name)
conn = Connections.checkin(url, name)
assert is_pid(conn)
assert Process.alive?(conn)
self = self()
%Connections{
conns: %{
"http:127.0.0.1:80" => %Conn{
conn: ^conn,
gun_state: :up,
used_by: [{^self, _}],
conn_state: :active
}
}
} = Connections.get_state(name)
reused_conn = Connections.checkin(url, name)
assert conn == reused_conn
:ok = Connections.checkout(conn, self, name)
:ok = Connections.checkout(reused_conn, self, name)
%Connections{
conns: %{
"http:127.0.0.1:80" => %Conn{
conn: ^conn,
gun_state: :up,
used_by: [],
conn_state: :idle
}
}
} = Connections.get_state(name)
end
test "reuse for ipv6", %{name: name} do
open_mock()
url = "http://[2a03:2880:f10c:83:face:b00c:0:25de]"
refute Connections.checkin(url, name)
:ok = Conn.open(url, name)
conn = Connections.checkin(url, name)
assert is_pid(conn)
assert Process.alive?(conn)
self = self()
%Connections{
conns: %{
"http:2a03:2880:f10c:83:face:b00c:0:25de:80" => %Conn{
conn: ^conn,
gun_state: :up,
used_by: [{^self, _}],
conn_state: :active
}
}
} = Connections.get_state(name)
reused_conn = Connections.checkin(url, name)
assert conn == reused_conn
end
test "up and down ipv4", %{name: name} do
open_mock()
|> info_mock()
|> allow(self(), name)
self = self()
url = "http://127.0.0.1"
:ok = Conn.open(url, name)
conn = Connections.checkin(url, name)
send(name, {:gun_down, conn, nil, nil, nil})
send(name, {:gun_up, conn, nil})
%Connections{
conns: %{
"http:127.0.0.1:80" => %Conn{
conn: ^conn,
gun_state: :up,
used_by: [{^self, _}],
conn_state: :active
}
}
} = Connections.get_state(name)
end
test "up and down ipv6", %{name: name} do
self = self()
open_mock()
|> info_mock()
|> allow(self, name)
url = "http://[2a03:2880:f10c:83:face:b00c:0:25de]"
:ok = Conn.open(url, name)
conn = Connections.checkin(url, name)
send(name, {:gun_down, conn, nil, nil, nil})
send(name, {:gun_up, conn, nil})
%Connections{
conns: %{
"http:2a03:2880:f10c:83:face:b00c:0:25de:80" => %Conn{
conn: ^conn,
gun_state: :up,
used_by: [{^self, _}],
conn_state: :active
}
}
} = Connections.get_state(name)
end
test "reuses connection based on protocol", %{name: name} do
open_mock(2)
http_url = "http://some-domain.com"
http_key = "http:some-domain.com:80"
https_url = "https://some-domain.com"
https_key = "https:some-domain.com:443"
refute Connections.checkin(http_url, name)
:ok = Conn.open(http_url, name)
conn = Connections.checkin(http_url, name)
assert is_pid(conn)
assert Process.alive?(conn)
refute Connections.checkin(https_url, name)
:ok = Conn.open(https_url, name)
https_conn = Connections.checkin(https_url, name)
refute conn == https_conn
reused_https = Connections.checkin(https_url, name)
refute conn == reused_https
assert reused_https == https_conn
%Connections{
conns: %{
^http_key => %Conn{
conn: ^conn,
gun_state: :up
},
^https_key => %Conn{
conn: ^https_conn,
gun_state: :up
}
}
} = Connections.get_state(name)
end
test "connection can't get up", %{name: name} do
expect(GunMock, :open, &start_and_register(&1, &2, &3))
url = "http://gun-not-up.com"
assert capture_log(fn ->
refute Conn.open(url, name)
refute Connections.checkin(url, name)
end) =~
"Opening connection to http://gun-not-up.com failed with error {:error, :timeout}"
end
test "process gun_down message and then gun_up", %{name: name} do
self = self()
open_mock()
|> info_mock()
|> allow(self, name)
url = "http://gun-down-and-up.com"
key = "http:gun-down-and-up.com:80"
:ok = Conn.open(url, name)
conn = Connections.checkin(url, name)
assert is_pid(conn)
assert Process.alive?(conn)
%Connections{
conns: %{
^key => %Conn{
conn: ^conn,
gun_state: :up,
used_by: [{^self, _}]
}
}
} = Connections.get_state(name)
send(name, {:gun_down, conn, :http, nil, nil})
%Connections{
conns: %{
^key => %Conn{
conn: ^conn,
gun_state: :down,
used_by: [{^self, _}]
}
}
} = Connections.get_state(name)
send(name, {:gun_up, conn, :http})
conn2 = Connections.checkin(url, name)
assert conn == conn2
assert is_pid(conn2)
assert Process.alive?(conn2)
%Connections{
conns: %{
^key => %Conn{
conn: _,
gun_state: :up,
used_by: [{^self, _}, {^self, _}]
}
}
} = Connections.get_state(name)
end
test "async processes get same conn for same domain", %{name: name} do
open_mock()
url = "http://some-domain.com"
:ok = Conn.open(url, name)
tasks =
for _ <- 1..5 do
Task.async(fn ->
Connections.checkin(url, name)
end)
end
tasks_with_results = Task.yield_many(tasks)
results =
Enum.map(tasks_with_results, fn {task, res} ->
res || Task.shutdown(task, :brutal_kill)
end)
conns = for {:ok, value} <- results, do: value
%Connections{
conns: %{
"http:some-domain.com:80" => %Conn{
conn: conn,
gun_state: :up
}
}
} = Connections.get_state(name)
assert Enum.all?(conns, fn res -> res == conn end)
end
test "remove frequently used and idle", %{name: name} do
open_mock(3)
self = self()
http_url = "http://some-domain.com"
https_url = "https://some-domain.com"
:ok = Conn.open(https_url, name)
:ok = Conn.open(http_url, name)
conn1 = Connections.checkin(https_url, name)
[conn2 | _conns] =
for _ <- 1..4 do
Connections.checkin(http_url, name)
end
http_key = "http:some-domain.com:80"
%Connections{
conns: %{
^http_key => %Conn{
conn: ^conn2,
gun_state: :up,
conn_state: :active,
used_by: [{^self, _}, {^self, _}, {^self, _}, {^self, _}]
},
"https:some-domain.com:443" => %Conn{
conn: ^conn1,
gun_state: :up,
conn_state: :active,
used_by: [{^self, _}]
}
}
} = Connections.get_state(name)
:ok = Connections.checkout(conn1, self, name)
another_url = "http://another-domain.com"
:ok = Conn.open(another_url, name)
conn = Connections.checkin(another_url, name)
%Connections{
conns: %{
"http:another-domain.com:80" => %Conn{
conn: ^conn,
gun_state: :up
},
^http_key => %Conn{
conn: _,
gun_state: :up
}
}
} = Connections.get_state(name)
end
describe "with proxy" do
test "as ip", %{name: name} do
open_mock()
|> connect_mock()
url = "http://proxy-string.com"
key = "http:proxy-string.com:80"
:ok = Conn.open(url, name, proxy: {{127, 0, 0, 1}, 8123})
conn = Connections.checkin(url, name)
%Connections{
conns: %{
^key => %Conn{
conn: ^conn,
gun_state: :up
}
}
} = Connections.get_state(name)
reused_conn = Connections.checkin(url, name)
assert reused_conn == conn
end
test "as host", %{name: name} do
open_mock()
|> connect_mock()
url = "http://proxy-tuple-atom.com"
:ok = Conn.open(url, name, proxy: {'localhost', 9050})
conn = Connections.checkin(url, name)
%Connections{
conns: %{
"http:proxy-tuple-atom.com:80" => %Conn{
conn: ^conn,
gun_state: :up
}
}
} = Connections.get_state(name)
reused_conn = Connections.checkin(url, name)
assert reused_conn == conn
end
test "as ip and ssl", %{name: name} do
open_mock()
|> connect_mock()
url = "https://proxy-string.com"
:ok = Conn.open(url, name, proxy: {{127, 0, 0, 1}, 8123})
conn = Connections.checkin(url, name)
%Connections{
conns: %{
"https:proxy-string.com:443" => %Conn{
conn: ^conn,
gun_state: :up
}
}
} = Connections.get_state(name)
reused_conn = Connections.checkin(url, name)
assert reused_conn == conn
end
test "as host and ssl", %{name: name} do
open_mock()
|> connect_mock()
url = "https://proxy-tuple-atom.com"
:ok = Conn.open(url, name, proxy: {'localhost', 9050})
conn = Connections.checkin(url, name)
%Connections{
conns: %{
"https:proxy-tuple-atom.com:443" => %Conn{
conn: ^conn,
gun_state: :up
}
}
} = Connections.get_state(name)
reused_conn = Connections.checkin(url, name)
assert reused_conn == conn
end
test "with socks type", %{name: name} do
open_mock()
url = "http://proxy-socks.com"
:ok = Conn.open(url, name, proxy: {:socks5, 'localhost', 1234})
conn = Connections.checkin(url, name)
%Connections{
conns: %{
"http:proxy-socks.com:80" => %Conn{
conn: ^conn,
gun_state: :up
}
}
} = Connections.get_state(name)
reused_conn = Connections.checkin(url, name)
assert reused_conn == conn
end
test "with socks4 type and ssl", %{name: name} do
open_mock()
url = "https://proxy-socks.com"
:ok = Conn.open(url, name, proxy: {:socks4, 'localhost', 1234})
conn = Connections.checkin(url, name)
%Connections{
conns: %{
"https:proxy-socks.com:443" => %Conn{
conn: ^conn,
gun_state: :up
}
}
} = Connections.get_state(name)
reused_conn = Connections.checkin(url, name)
assert reused_conn == conn
end
end
describe "crf/3" do
setup do
crf = Connections.crf(1, 10, 1)
{:ok, crf: crf}
end
test "more used will have crf higher", %{crf: crf} do
# used 3 times
crf1 = Connections.crf(1, 10, crf)
crf1 = Connections.crf(1, 10, crf1)
# used 2 times
crf2 = Connections.crf(1, 10, crf)
assert crf1 > crf2
end
test "recently used will have crf higher on equal references", %{crf: crf} do
# used 3 sec ago
crf1 = Connections.crf(3, 10, crf)
# used 4 sec ago
crf2 = Connections.crf(4, 10, crf)
assert crf1 > crf2
end
test "equal crf on equal reference and time", %{crf: crf} do
# used 2 times
crf1 = Connections.crf(1, 10, crf)
# used 2 times
crf2 = Connections.crf(1, 10, crf)
assert crf1 == crf2
end
test "recently used will have higher crf", %{crf: crf} do
crf1 = Connections.crf(2, 10, crf)
crf1 = Connections.crf(1, 10, crf1)
crf2 = Connections.crf(3, 10, crf)
crf2 = Connections.crf(4, 10, crf2)
assert crf1 > crf2
end
end
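
These tests only constrain the shape of crf/3 (time since last use, a weight, the previous value): repeated use raises the score, and more recent use raises it more. A toy function with exactly those monotonicity properties — purely illustrative, not Pleroma's actual formula — would be:

def crf(time_delta, weight, prev_crf) do
  # more recent (smaller time_delta) and more frequent use both push the score up
  1 + prev_crf / (weight * time_delta)
end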
describe "get_unused_conns/1" do
test "crf is equalent, sorting by reference", %{name: name} do
Connections.add_conn(name, "1", %Conn{
conn_state: :idle,
last_reference: now() - 1
})
Connections.add_conn(name, "2", %Conn{
conn_state: :idle,
last_reference: now()
})
assert [{"1", _unused_conn} | _others] = Connections.get_unused_conns(name)
end
test "reference is equalent, sorting by crf", %{name: name} do
Connections.add_conn(name, "1", %Conn{
conn_state: :idle,
crf: 1.999
})
Connections.add_conn(name, "2", %Conn{
conn_state: :idle,
crf: 2
})
assert [{"1", _unused_conn} | _others] = Connections.get_unused_conns(name)
end
test "higher crf and lower reference", %{name: name} do
Connections.add_conn(name, "1", %Conn{
conn_state: :idle,
crf: 3,
last_reference: now() - 1
})
Connections.add_conn(name, "2", %Conn{
conn_state: :idle,
crf: 2,
last_reference: now()
})
assert [{"2", _unused_conn} | _others] = Connections.get_unused_conns(name)
end
test "lower crf and lower reference", %{name: name} do
Connections.add_conn(name, "1", %Conn{
conn_state: :idle,
crf: 1.99,
last_reference: now() - 1
})
Connections.add_conn(name, "2", %Conn{
conn_state: :idle,
crf: 2,
last_reference: now()
})
assert [{"1", _unused_conn} | _others] = Connections.get_unused_conns(name)
end
end
test "count/1" do
name = :test_count
{:ok, _} = Connections.start_link({name, [checkin_timeout: 150]})
assert Connections.count(name) == 0
Connections.add_conn(name, "1", %Conn{conn: self()})
assert Connections.count(name) == 1
Connections.remove_conn(name, "1")
assert Connections.count(name) == 0
end
end

View File

@ -4,13 +4,16 @@
defmodule Pleroma.ReverseProxyTest do
use Pleroma.Web.ConnCase, async: true
import ExUnit.CaptureLog
import Mox
alias Pleroma.ReverseProxy
alias Pleroma.ReverseProxy.ClientMock
alias Plug.Conn
setup_all do
{:ok, _} = Registry.start_link(keys: :unique, name: Pleroma.ReverseProxy.ClientMock)
{:ok, _} = Registry.start_link(keys: :unique, name: ClientMock)
:ok
end
@ -21,7 +24,7 @@ defmodule Pleroma.ReverseProxyTest do
ClientMock
|> expect(:request, fn :get, url, _, _, _ ->
Registry.register(Pleroma.ReverseProxy.ClientMock, url, 0)
Registry.register(ClientMock, url, 0)
{:ok, 200,
[
@ -29,14 +32,14 @@ defmodule Pleroma.ReverseProxyTest do
{"content-length", byte_size(json) |> to_string()}
], %{url: url}}
end)
|> expect(:stream_body, invokes, fn %{url: url} ->
case Registry.lookup(Pleroma.ReverseProxy.ClientMock, url) do
|> expect(:stream_body, invokes, fn %{url: url} = client ->
case Registry.lookup(ClientMock, url) do
[{_, 0}] ->
Registry.update_value(Pleroma.ReverseProxy.ClientMock, url, &(&1 + 1))
{:ok, json}
Registry.update_value(ClientMock, url, &(&1 + 1))
{:ok, json, client}
[{_, 1}] ->
Registry.unregister(Pleroma.ReverseProxy.ClientMock, url)
Registry.unregister(ClientMock, url)
:done
end
end)
@ -78,7 +81,39 @@ defmodule Pleroma.ReverseProxyTest do
assert conn.halted
end
describe "max_body " do
defp stream_mock(invokes, with_close? \\ false) do
ClientMock
|> expect(:request, fn :get, "/stream-bytes/" <> length, _, _, _ ->
Registry.register(ClientMock, "/stream-bytes/" <> length, 0)
{:ok, 200, [{"content-type", "application/octet-stream"}],
%{url: "/stream-bytes/" <> length}}
end)
|> expect(:stream_body, invokes, fn %{url: "/stream-bytes/" <> length} = client ->
max = String.to_integer(length)
case Registry.lookup(ClientMock, "/stream-bytes/" <> length) do
[{_, current}] when current < max ->
Registry.update_value(
ClientMock,
"/stream-bytes/" <> length,
&(&1 + 10)
)
{:ok, "0123456789", client}
[{_, ^max}] ->
Registry.unregister(ClientMock, "/stream-bytes/" <> length)
:done
end
end)
if with_close? do
expect(ClientMock, :close, fn _ -> :ok end)
end
end
describe "max_body" do
test "length returns error if content-length more than option", %{conn: conn} do
user_agent_mock("hackney/1.15.1", 0)
@ -94,38 +129,6 @@ defmodule Pleroma.ReverseProxyTest do
end) == ""
end
defp stream_mock(invokes, with_close? \\ false) do
ClientMock
|> expect(:request, fn :get, "/stream-bytes/" <> length, _, _, _ ->
Registry.register(Pleroma.ReverseProxy.ClientMock, "/stream-bytes/" <> length, 0)
{:ok, 200, [{"content-type", "application/octet-stream"}],
%{url: "/stream-bytes/" <> length}}
end)
|> expect(:stream_body, invokes, fn %{url: "/stream-bytes/" <> length} ->
max = String.to_integer(length)
case Registry.lookup(Pleroma.ReverseProxy.ClientMock, "/stream-bytes/" <> length) do
[{_, current}] when current < max ->
Registry.update_value(
Pleroma.ReverseProxy.ClientMock,
"/stream-bytes/" <> length,
&(&1 + 10)
)
{:ok, "0123456789"}
[{_, ^max}] ->
Registry.unregister(Pleroma.ReverseProxy.ClientMock, "/stream-bytes/" <> length)
:done
end
end)
if with_close? do
expect(ClientMock, :close, fn _ -> :ok end)
end
end
test "max_body_length returns error if streaming body more than that option", %{conn: conn} do
stream_mock(3, true)
@ -214,24 +217,24 @@ defmodule Pleroma.ReverseProxyTest do
conn = ReverseProxy.call(conn, "/stream-bytes/200")
assert conn.state == :chunked
assert byte_size(conn.resp_body) == 200
assert Plug.Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"]
assert Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"]
end
defp headers_mock(_) do
ClientMock
|> expect(:request, fn :get, "/headers", headers, _, _ ->
Registry.register(Pleroma.ReverseProxy.ClientMock, "/headers", 0)
Registry.register(ClientMock, "/headers", 0)
{:ok, 200, [{"content-type", "application/json"}], %{url: "/headers", headers: headers}}
end)
|> expect(:stream_body, 2, fn %{url: url, headers: headers} ->
case Registry.lookup(Pleroma.ReverseProxy.ClientMock, url) do
|> expect(:stream_body, 2, fn %{url: url, headers: headers} = client ->
case Registry.lookup(ClientMock, url) do
[{_, 0}] ->
Registry.update_value(Pleroma.ReverseProxy.ClientMock, url, &(&1 + 1))
Registry.update_value(ClientMock, url, &(&1 + 1))
headers = for {k, v} <- headers, into: %{}, do: {String.capitalize(k), v}
{:ok, Jason.encode!(%{headers: headers})}
{:ok, Jason.encode!(%{headers: headers}), client}
[{_, 1}] ->
Registry.unregister(Pleroma.ReverseProxy.ClientMock, url)
Registry.unregister(ClientMock, url)
:done
end
end)
@ -244,7 +247,7 @@ defmodule Pleroma.ReverseProxyTest do
test "header passes", %{conn: conn} do
conn =
Plug.Conn.put_req_header(
Conn.put_req_header(
conn,
"accept",
"text/html"
@ -257,7 +260,7 @@ defmodule Pleroma.ReverseProxyTest do
test "header is filtered", %{conn: conn} do
conn =
Plug.Conn.put_req_header(
Conn.put_req_header(
conn,
"accept-language",
"en-US"
@ -290,18 +293,18 @@ defmodule Pleroma.ReverseProxyTest do
defp disposition_headers_mock(headers) do
ClientMock
|> expect(:request, fn :get, "/disposition", _, _, _ ->
Registry.register(Pleroma.ReverseProxy.ClientMock, "/disposition", 0)
Registry.register(ClientMock, "/disposition", 0)
{:ok, 200, headers, %{url: "/disposition"}}
end)
|> expect(:stream_body, 2, fn %{url: "/disposition"} ->
case Registry.lookup(Pleroma.ReverseProxy.ClientMock, "/disposition") do
|> expect(:stream_body, 2, fn %{url: "/disposition"} = client ->
case Registry.lookup(ClientMock, "/disposition") do
[{_, 0}] ->
Registry.update_value(Pleroma.ReverseProxy.ClientMock, "/disposition", &(&1 + 1))
{:ok, ""}
Registry.update_value(ClientMock, "/disposition", &(&1 + 1))
{:ok, "", client}
[{_, 1}] ->
Registry.unregister(Pleroma.ReverseProxy.ClientMock, "/disposition")
Registry.unregister(ClientMock, "/disposition")
:done
end
end)
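
Throughout these ReverseProxy mocks the stream_body expectation now receives the client state (%{url: ...} = client) and returns {:ok, chunk, client} rather than {:ok, chunk}, so any updated client state is threaded into the next read. A minimal consumer-loop sketch for that shape, assuming only the return values visible in the mocks above plus a conventional error tuple:

defmodule StreamBodySketch do
  # client_mod is any module exposing stream_body/1 in the shape mocked above.
  def read_all(client_mod, client, acc \\ <<>>) do
    case client_mod.stream_body(client) do
      {:ok, chunk, client} ->
        # Keep reading with the (possibly updated) client state.
        read_all(client_mod, client, acc <> chunk)

      :done ->
        {:ok, acc}

      {:error, _} = error ->
        # Error shape is an assumption; the mocks above never return one.
        error
    end
  end
end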

View File

@ -107,7 +107,7 @@ defmodule HttpRequestMock do
"https://osada.macgirvin.com/.well-known/webfinger?resource=acct:mike@osada.macgirvin.com",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -120,7 +120,7 @@ defmodule HttpRequestMock do
"https://social.heldscal.la/.well-known/webfinger?resource=https://social.heldscal.la/user/29191",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -141,7 +141,7 @@ defmodule HttpRequestMock do
"https://pawoo.net/.well-known/webfinger?resource=acct:https://pawoo.net/users/pekorino",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -167,7 +167,7 @@ defmodule HttpRequestMock do
"https://social.stopwatchingus-heidelberg.de/.well-known/webfinger?resource=acct:https://social.stopwatchingus-heidelberg.de/user/18330",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -188,7 +188,7 @@ defmodule HttpRequestMock do
"https://mamot.fr/.well-known/webfinger?resource=acct:https://mamot.fr/users/Skruyb",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -201,7 +201,7 @@ defmodule HttpRequestMock do
"https://social.heldscal.la/.well-known/webfinger?resource=nonexistant@social.heldscal.la",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -214,7 +214,7 @@ defmodule HttpRequestMock do
"https://squeet.me/xrd/?uri=lain@squeet.me",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -227,7 +227,7 @@ defmodule HttpRequestMock do
"https://mst3k.interlinked.me/users/luciferMysticus",
_,
_,
Accept: "application/activity+json"
[{"accept", "application/activity+json"}]
) do
{:ok,
%Tesla.Env{
@ -248,7 +248,7 @@ defmodule HttpRequestMock do
"https://hubzilla.example.org/channel/kaniini",
_,
_,
Accept: "application/activity+json"
[{"accept", "application/activity+json"}]
) do
{:ok,
%Tesla.Env{
@ -257,7 +257,7 @@ defmodule HttpRequestMock do
}}
end
def get("https://niu.moe/users/rye", _, _, Accept: "application/activity+json") do
def get("https://niu.moe/users/rye", _, _, [{"accept", "application/activity+json"}]) do
{:ok,
%Tesla.Env{
status: 200,
@ -265,7 +265,7 @@ defmodule HttpRequestMock do
}}
end
def get("https://n1u.moe/users/rye", _, _, Accept: "application/activity+json") do
def get("https://n1u.moe/users/rye", _, _, [{"accept", "application/activity+json"}]) do
{:ok,
%Tesla.Env{
status: 200,
@ -284,7 +284,7 @@ defmodule HttpRequestMock do
}}
end
def get("https://puckipedia.com/", _, _, Accept: "application/activity+json") do
def get("https://puckipedia.com/", _, _, [{"accept", "application/activity+json"}]) do
{:ok,
%Tesla.Env{
status: 200,
@ -308,9 +308,9 @@ defmodule HttpRequestMock do
}}
end
def get("https://mobilizon.org/events/252d5816-00a3-4a89-a66f-15bf65c33e39", _, _,
Accept: "application/activity+json"
) do
def get("https://mobilizon.org/events/252d5816-00a3-4a89-a66f-15bf65c33e39", _, _, [
{"accept", "application/activity+json"}
]) do
{:ok,
%Tesla.Env{
status: 200,
@ -318,7 +318,7 @@ defmodule HttpRequestMock do
}}
end
def get("https://mobilizon.org/@tcit", _, _, Accept: "application/activity+json") do
def get("https://mobilizon.org/@tcit", _, _, [{"accept", "application/activity+json"}]) do
{:ok,
%Tesla.Env{
status: 200,
@ -358,7 +358,7 @@ defmodule HttpRequestMock do
}}
end
def get("http://mastodon.example.org/users/admin", _, _, Accept: "application/activity+json") do
def get("http://mastodon.example.org/users/admin", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
@ -366,7 +366,9 @@ defmodule HttpRequestMock do
}}
end
def get("http://mastodon.example.org/users/relay", _, _, Accept: "application/activity+json") do
def get("http://mastodon.example.org/users/relay", _, _, [
{"accept", "application/activity+json"}
]) do
{:ok,
%Tesla.Env{
status: 200,
@ -374,7 +376,9 @@ defmodule HttpRequestMock do
}}
end
def get("http://mastodon.example.org/users/gargron", _, _, Accept: "application/activity+json") do
def get("http://mastodon.example.org/users/gargron", _, _, [
{"accept", "application/activity+json"}
]) do
{:error, :nxdomain}
end
@ -557,7 +561,7 @@ defmodule HttpRequestMock do
"http://mastodon.example.org/@admin/99541947525187367",
_,
_,
Accept: "application/activity+json"
_
) do
{:ok,
%Tesla.Env{
@ -582,7 +586,7 @@ defmodule HttpRequestMock do
}}
end
def get("https://mstdn.io/users/mayuutann", _, _, Accept: "application/activity+json") do
def get("https://mstdn.io/users/mayuutann", _, _, [{"accept", "application/activity+json"}]) do
{:ok,
%Tesla.Env{
status: 200,
@ -594,7 +598,7 @@ defmodule HttpRequestMock do
"https://mstdn.io/users/mayuutann/statuses/99568293732299394",
_,
_,
Accept: "application/activity+json"
[{"accept", "application/activity+json"}]
) do
{:ok,
%Tesla.Env{
@ -614,7 +618,7 @@ defmodule HttpRequestMock do
}}
end
def get(url, _, _, Accept: "application/xrd+xml,application/jrd+json")
def get(url, _, _, [{"accept", "application/xrd+xml,application/jrd+json"}])
when url in [
"https://pleroma.soykaf.com/.well-known/webfinger?resource=acct:https://pleroma.soykaf.com/users/lain",
"https://pleroma.soykaf.com/.well-known/webfinger?resource=https://pleroma.soykaf.com/users/lain"
@ -641,7 +645,7 @@ defmodule HttpRequestMock do
"https://shitposter.club/.well-known/webfinger?resource=https://shitposter.club/user/1",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -685,7 +689,7 @@ defmodule HttpRequestMock do
"https://shitposter.club/.well-known/webfinger?resource=https://shitposter.club/user/5381",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -738,7 +742,7 @@ defmodule HttpRequestMock do
"https://social.sakamoto.gq/.well-known/webfinger?resource=https://social.sakamoto.gq/users/eal",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -751,7 +755,7 @@ defmodule HttpRequestMock do
"https://social.sakamoto.gq/objects/0ccc1a2c-66b0-4305-b23a-7f7f2b040056",
_,
_,
Accept: "application/atom+xml"
[{"accept", "application/atom+xml"}]
) do
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/sakamoto.atom")}}
end
@ -768,7 +772,7 @@ defmodule HttpRequestMock do
"https://mastodon.social/.well-known/webfinger?resource=https://mastodon.social/users/lambadalambda",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -790,7 +794,7 @@ defmodule HttpRequestMock do
"http://gs.example.org/.well-known/webfinger?resource=http://gs.example.org:4040/index.php/user/1",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -804,7 +808,7 @@ defmodule HttpRequestMock do
"http://gs.example.org:4040/index.php/user/1",
_,
_,
Accept: "application/activity+json"
[{"accept", "application/activity+json"}]
) do
{:ok, %Tesla.Env{status: 406, body: ""}}
end
@ -840,7 +844,7 @@ defmodule HttpRequestMock do
"https://squeet.me/xrd?uri=lain@squeet.me",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -853,7 +857,7 @@ defmodule HttpRequestMock do
"https://social.heldscal.la/.well-known/webfinger?resource=shp@social.heldscal.la",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -866,7 +870,7 @@ defmodule HttpRequestMock do
"https://social.heldscal.la/.well-known/webfinger?resource=invalid_content@social.heldscal.la",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok, %Tesla.Env{status: 200, body: ""}}
end
@ -883,7 +887,7 @@ defmodule HttpRequestMock do
"http://framatube.org/main/xrd?uri=framasoft@framatube.org",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -905,7 +909,7 @@ defmodule HttpRequestMock do
"http://gnusocial.de/main/xrd?uri=winterdienst@gnusocial.de",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -942,7 +946,7 @@ defmodule HttpRequestMock do
"https://gerzilla.de/xrd/?uri=kaniini@gerzilla.de",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -1005,7 +1009,7 @@ defmodule HttpRequestMock do
%Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/osada-user-indio.json")}}
end
def get("https://social.heldscal.la/user/23211", _, _, Accept: "application/activity+json") do
def get("https://social.heldscal.la/user/23211", _, _, [{"accept", "application/activity+json"}]) do
{:ok, Tesla.Mock.json(%{"id" => "https://social.heldscal.la/user/23211"}, status: 200)}
end
@ -1138,7 +1142,7 @@ defmodule HttpRequestMock do
"https://zetsubou.xn--q9jyb4c/.well-known/webfinger?resource=lain@zetsubou.xn--q9jyb4c",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -1151,7 +1155,7 @@ defmodule HttpRequestMock do
"https://zetsubou.xn--q9jyb4c/.well-known/webfinger?resource=https://zetsubou.xn--q9jyb4c/users/lain",
_,
_,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
{:ok,
%Tesla.Env{
@ -1173,7 +1177,9 @@ defmodule HttpRequestMock do
}}
end
def get("https://info.pleroma.site/activity.json", _, _, Accept: "application/activity+json") do
def get("https://info.pleroma.site/activity.json", _, _, [
{"accept", "application/activity+json"}
]) do
{:ok,
%Tesla.Env{
status: 200,
@ -1185,7 +1191,9 @@ defmodule HttpRequestMock do
{:ok, %Tesla.Env{status: 404, body: ""}}
end
def get("https://info.pleroma.site/activity2.json", _, _, Accept: "application/activity+json") do
def get("https://info.pleroma.site/activity2.json", _, _, [
{"accept", "application/activity+json"}
]) do
{:ok,
%Tesla.Env{
status: 200,
@ -1197,7 +1205,9 @@ defmodule HttpRequestMock do
{:ok, %Tesla.Env{status: 404, body: ""}}
end
def get("https://info.pleroma.site/activity3.json", _, _, Accept: "application/activity+json") do
def get("https://info.pleroma.site/activity3.json", _, _, [
{"accept", "application/activity+json"}
]) do
{:ok,
%Tesla.Env{
status: 200,
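
The change running through this whole mock is the header format: request headers are now matched as a list of lowercase {name, value} string tuples, for example [{"accept", "application/activity+json"}], instead of the old Accept: "..." keyword list. A hedged sketch of the clause shape, with an illustrative URL and body (assumes Tesla is available):

defmodule HeaderMatchSketch do
  # Matches only when the caller asked for ActivityPub JSON using the
  # lowercase tuple form, mirroring the updated clauses above.
  def get(url, _query, _body, [{"accept", "application/activity+json"}]) do
    {:ok, %Tesla.Env{status: 200, body: ~s({"id": "#{url}"})}}
  end

  # Anything else falls through to 406 Not Acceptable.
  def get(_url, _query, _body, _headers) do
    {:ok, %Tesla.Env{status: 406, body: ""}}
  end
end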

View File

@ -6,7 +6,10 @@ os_exclude = if :os.type() == {:unix, :darwin}, do: [skip_on_mac: true], else: [
ExUnit.start(exclude: [:federated | os_exclude])
Ecto.Adapters.SQL.Sandbox.mode(Pleroma.Repo, :manual)
Mox.defmock(Pleroma.ReverseProxy.ClientMock, for: Pleroma.ReverseProxy.Client)
Mox.defmock(Pleroma.GunMock, for: Pleroma.Gun)
{:ok, _} = Application.ensure_all_started(:ex_machina)
ExUnit.after_suite(fn _results ->
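
test_helper.exs now defines a second Mox mock, Pleroma.GunMock, alongside the reverse-proxy client mock. For reference, a generic, self-contained sketch of how such a mock is consumed in a test; MyBehaviour, MyMock and fetch/1 are hypothetical stand-ins, not Pleroma's Gun API:

defmodule MyBehaviour do
  @callback fetch(String.t()) :: {:ok, binary()} | {:error, term()}
end

Mox.defmock(MyMock, for: MyBehaviour)

defmodule MyMockUsageTest do
  use ExUnit.Case, async: true
  import Mox

  # Fail the test if a declared expectation is never called.
  setup :verify_on_exit!

  test "a stubbed call is routed through the mock" do
    expect(MyMock, :fetch, fn "https://example.test" -> {:ok, "body"} end)
    assert MyMock.fetch("https://example.test") == {:ok, "body"}
  end
end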

View File

@ -4,7 +4,6 @@
defmodule Pleroma.UserInviteTokenTest do
use ExUnit.Case, async: true
use Pleroma.DataCase
alias Pleroma.UserInviteToken
describe "valid_invite?/1 one time invites" do
@ -64,7 +63,6 @@ defmodule Pleroma.UserInviteTokenTest do
test "expires yesterday returns false", %{invite: invite} do
invite = %{invite | expires_at: Date.add(Date.utc_today(), -1)}
invite = Repo.insert!(invite)
refute UserInviteToken.valid_invite?(invite)
end
end
@ -82,7 +80,6 @@ defmodule Pleroma.UserInviteTokenTest do
test "overdue date and less uses returns false", %{invite: invite} do
invite = %{invite | expires_at: Date.add(Date.utc_today(), -1)}
invite = Repo.insert!(invite)
refute UserInviteToken.valid_invite?(invite)
end
@ -93,7 +90,6 @@ defmodule Pleroma.UserInviteTokenTest do
test "overdue date with more uses returns false", %{invite: invite} do
invite = %{invite | expires_at: Date.add(Date.utc_today(), -1), uses: 5}
invite = Repo.insert!(invite)
refute UserInviteToken.valid_invite?(invite)
end
end

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiFollowbotPolicyTest do

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicyTest do
@ -110,6 +110,15 @@ defmodule Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicyTest do
end
describe "with unknown actors" do
setup do
Tesla.Mock.mock(fn
%{method: :get, url: "http://invalid.actor"} ->
%Tesla.Env{status: 500, body: ""}
end)
:ok
end
test "it rejects posts without links" do
message =
@linkless_message

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.EnsureRePrependedTest do

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.NoPlaceholderTextPolicyTest do

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.NormalizeMarkupTest do

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.ObjectAgePolicyTest do

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.RejectNonPublicTest do

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do

View File

@ -89,6 +89,11 @@ defmodule Pleroma.Web.ActivityPub.RelayTest do
}
)
Tesla.Mock.mock(fn
%{method: :get, url: "http://mastodon.example.org/eee/99541947525187367"} ->
%Tesla.Env{status: 500, body: ""}
end)
assert capture_log(fn ->
assert Relay.publish(activity) == {:error, nil}
end) =~ "[error] error: nil"

View File

@ -21,7 +21,6 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do
alias Pleroma.UserInviteToken
alias Pleroma.Web.ActivityPub.Relay
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.MastodonAPI.StatusView
alias Pleroma.Web.MediaProxy
setup_all do
@ -1586,208 +1585,6 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do
end
end
describe "GET /api/pleroma/admin/grouped_reports" do
setup do
[reporter, target_user] = insert_pair(:user)
date1 = (DateTime.to_unix(DateTime.utc_now()) + 1000) |> DateTime.from_unix!()
date2 = (DateTime.to_unix(DateTime.utc_now()) + 2000) |> DateTime.from_unix!()
date3 = (DateTime.to_unix(DateTime.utc_now()) + 3000) |> DateTime.from_unix!()
first_status =
insert(:note_activity, user: target_user, data_attrs: %{"published" => date1})
second_status =
insert(:note_activity, user: target_user, data_attrs: %{"published" => date2})
third_status =
insert(:note_activity, user: target_user, data_attrs: %{"published" => date3})
{:ok, first_report} =
CommonAPI.report(reporter, %{
"account_id" => target_user.id,
"status_ids" => [first_status.id, second_status.id, third_status.id]
})
{:ok, second_report} =
CommonAPI.report(reporter, %{
"account_id" => target_user.id,
"status_ids" => [first_status.id, second_status.id]
})
{:ok, third_report} =
CommonAPI.report(reporter, %{
"account_id" => target_user.id,
"status_ids" => [first_status.id]
})
%{
first_status: Activity.get_by_ap_id_with_object(first_status.data["id"]),
second_status: Activity.get_by_ap_id_with_object(second_status.data["id"]),
third_status: Activity.get_by_ap_id_with_object(third_status.data["id"]),
first_report: first_report,
first_status_reports: [first_report, second_report, third_report],
second_status_reports: [first_report, second_report],
third_status_reports: [first_report],
target_user: target_user,
reporter: reporter
}
end
test "returns reports grouped by status", %{
conn: conn,
first_status: first_status,
second_status: second_status,
third_status: third_status,
first_status_reports: first_status_reports,
second_status_reports: second_status_reports,
third_status_reports: third_status_reports,
target_user: target_user,
reporter: reporter
} do
response =
conn
|> get("/api/pleroma/admin/grouped_reports")
|> json_response(:ok)
assert length(response["reports"]) == 3
first_group = Enum.find(response["reports"], &(&1["status"]["id"] == first_status.id))
second_group = Enum.find(response["reports"], &(&1["status"]["id"] == second_status.id))
third_group = Enum.find(response["reports"], &(&1["status"]["id"] == third_status.id))
assert length(first_group["reports"]) == 3
assert length(second_group["reports"]) == 2
assert length(third_group["reports"]) == 1
assert first_group["date"] ==
Enum.max_by(first_status_reports, fn act ->
NaiveDateTime.from_iso8601!(act.data["published"])
end).data["published"]
assert first_group["status"] ==
Map.put(
stringify_keys(StatusView.render("show.json", %{activity: first_status})),
"deleted",
false
)
assert(first_group["account"]["id"] == target_user.id)
assert length(first_group["actors"]) == 1
assert hd(first_group["actors"])["id"] == reporter.id
assert Enum.map(first_group["reports"], & &1["id"]) --
Enum.map(first_status_reports, & &1.id) == []
assert second_group["date"] ==
Enum.max_by(second_status_reports, fn act ->
NaiveDateTime.from_iso8601!(act.data["published"])
end).data["published"]
assert second_group["status"] ==
Map.put(
stringify_keys(StatusView.render("show.json", %{activity: second_status})),
"deleted",
false
)
assert second_group["account"]["id"] == target_user.id
assert length(second_group["actors"]) == 1
assert hd(second_group["actors"])["id"] == reporter.id
assert Enum.map(second_group["reports"], & &1["id"]) --
Enum.map(second_status_reports, & &1.id) == []
assert third_group["date"] ==
Enum.max_by(third_status_reports, fn act ->
NaiveDateTime.from_iso8601!(act.data["published"])
end).data["published"]
assert third_group["status"] ==
Map.put(
stringify_keys(StatusView.render("show.json", %{activity: third_status})),
"deleted",
false
)
assert third_group["account"]["id"] == target_user.id
assert length(third_group["actors"]) == 1
assert hd(third_group["actors"])["id"] == reporter.id
assert Enum.map(third_group["reports"], & &1["id"]) --
Enum.map(third_status_reports, & &1.id) == []
end
test "reopened report renders status data", %{
conn: conn,
first_report: first_report,
first_status: first_status
} do
{:ok, _} = CommonAPI.update_report_state(first_report.id, "resolved")
response =
conn
|> get("/api/pleroma/admin/grouped_reports")
|> json_response(:ok)
first_group = Enum.find(response["reports"], &(&1["status"]["id"] == first_status.id))
assert first_group["status"] ==
Map.put(
stringify_keys(StatusView.render("show.json", %{activity: first_status})),
"deleted",
false
)
end
test "reopened report does not render status data if status has been deleted", %{
conn: conn,
first_report: first_report,
first_status: first_status,
target_user: target_user
} do
{:ok, _} = CommonAPI.update_report_state(first_report.id, "resolved")
{:ok, _} = CommonAPI.delete(first_status.id, target_user)
refute Activity.get_by_ap_id(first_status.id)
response =
conn
|> get("/api/pleroma/admin/grouped_reports")
|> json_response(:ok)
assert Enum.find(response["reports"], &(&1["status"]["deleted"] == true))["status"][
"deleted"
] == true
assert length(Enum.filter(response["reports"], &(&1["status"]["deleted"] == false))) == 2
end
test "account not empty if status was deleted", %{
conn: conn,
first_report: first_report,
first_status: first_status,
target_user: target_user
} do
{:ok, _} = CommonAPI.update_report_state(first_report.id, "resolved")
{:ok, _} = CommonAPI.delete(first_status.id, target_user)
refute Activity.get_by_ap_id(first_status.id)
response =
conn
|> get("/api/pleroma/admin/grouped_reports")
|> json_response(:ok)
assert Enum.find(response["reports"], &(&1["status"]["deleted"] == true))["account"]
end
end
describe "PUT /api/pleroma/admin/statuses/:id" do
setup do
activity = insert(:note_activity)
@ -2572,9 +2369,6 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do
end
test "common config example", %{conn: conn} do
adapter = Application.get_env(:tesla, :adapter)
on_exit(fn -> Application.put_env(:tesla, :adapter, adapter) end)
conn =
post(conn, "/api/pleroma/admin/config", %{
configs: [
@ -2587,23 +2381,16 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do
%{"tuple" => [":seconds_valid", 60]},
%{"tuple" => [":path", ""]},
%{"tuple" => [":key1", nil]},
%{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]},
%{"tuple" => [":regex1", "~r/https:\/\/example.com/"]},
%{"tuple" => [":regex2", "~r/https:\/\/example.com/u"]},
%{"tuple" => [":regex3", "~r/https:\/\/example.com/i"]},
%{"tuple" => [":regex4", "~r/https:\/\/example.com/s"]},
%{"tuple" => [":name", "Pleroma"]}
]
},
%{
"group" => ":tesla",
"key" => ":adapter",
"value" => "Tesla.Adapter.Httpc"
}
]
})
assert Application.get_env(:tesla, :adapter) == Tesla.Adapter.Httpc
assert Config.get([Pleroma.Captcha.NotReal, :name]) == "Pleroma"
assert json_response(conn, 200) == %{
@ -2617,7 +2404,6 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do
%{"tuple" => [":seconds_valid", 60]},
%{"tuple" => [":path", ""]},
%{"tuple" => [":key1", nil]},
%{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]},
%{"tuple" => [":regex1", "~r/https:\\/\\/example.com/"]},
%{"tuple" => [":regex2", "~r/https:\\/\\/example.com/u"]},
%{"tuple" => [":regex3", "~r/https:\\/\\/example.com/i"]},
@ -2630,19 +2416,12 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do
":seconds_valid",
":path",
":key1",
":partial_chain",
":regex1",
":regex2",
":regex3",
":regex4",
":name"
]
},
%{
"group" => ":tesla",
"key" => ":adapter",
"value" => "Tesla.Adapter.Httpc",
"db" => [":adapter"]
}
]
}

View File

@ -472,6 +472,13 @@ defmodule Pleroma.Web.CommonAPI.UtilsTest do
activity = insert(:note_activity, user: user, note: object)
Pleroma.Repo.delete(object)
obj_url = activity.data["object"]
Tesla.Mock.mock(fn
%{method: :get, url: ^obj_url} ->
%Tesla.Env{status: 404, body: ""}
end)
assert Utils.maybe_notify_mentioned_recipients(["test-test"], activity) == [
"test-test"
]

View File

@ -452,11 +452,24 @@ defmodule Pleroma.Web.MastodonAPI.NotificationControllerTest do
assert length(json_response(conn, 200)) == 1
end
@tag capture_log: true
test "see move notifications" do
old_user = insert(:user)
new_user = insert(:user, also_known_as: [old_user.ap_id])
%{user: follower, conn: conn} = oauth_access(["read:notifications"])
old_user_url = old_user.ap_id
body =
File.read!("test/fixtures/users_mock/localhost.json")
|> String.replace("{{nickname}}", old_user.nickname)
|> Jason.encode!()
Tesla.Mock.mock(fn
%{method: :get, url: ^old_user_url} ->
%Tesla.Env{status: 200, body: body}
end)
User.follow(follower, old_user)
Pleroma.Web.ActivityPub.ActivityPub.move(old_user, new_user)
Pleroma.Tests.ObanHelpers.perform_all()
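
The move-notification tests answer the old account's ActivityPub document by pinning a runtime value inside the mock clause (url: ^old_user_url), so only that exact URL is matched. A small, self-contained sketch of the same pin-matching pattern, with an illustrative module name and URL:

defmodule TeslaPinMockSketch do
  use ExUnit.Case, async: true

  test "mock clause pinned to a runtime URL" do
    url = "https://remote.example/users/alice"

    Tesla.Mock.mock(fn
      # ^url only matches the exact value computed above.
      %{method: :get, url: ^url} -> %Tesla.Env{status: 200, body: ~s({"id": "#{url}"})}
      %{method: :get} -> %Tesla.Env{status: 404, body: ""}
    end)

    client = Tesla.client([], Tesla.Mock)
    assert {:ok, %Tesla.Env{status: 200}} = Tesla.get(client, url)
    assert {:ok, %Tesla.Env{status: 404}} = Tesla.get(client, "https://remote.example/other")
  end
end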

View File

@ -5,13 +5,19 @@
defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
use Pleroma.DataCase
import Pleroma.Factory
alias Pleroma.User
alias Pleroma.UserRelationship
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.MastodonAPI.AccountView
import Pleroma.Factory
import Tesla.Mock
setup do
mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
:ok
end
test "Represent a user account" do
source_data = %{
"tag" => [
@ -164,6 +170,17 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
assert expected == AccountView.render("show.json", %{user: user})
end
test "Represent a Funkwhale channel" do
{:ok, user} =
User.get_or_fetch_by_ap_id(
"https://channels.tests.funkwhale.audio/federation/actors/compositions"
)
assert represented = AccountView.render("show.json", %{user: user})
assert represented.acct == "compositions@channels.tests.funkwhale.audio"
assert represented.url == "https://channels.tests.funkwhale.audio/channels/compositions"
end
test "Represent a deactivated user for an admin" do
admin = insert(:user, is_admin: true)
deactivated_user = insert(:user, deactivated: true)

View File

@ -112,11 +112,24 @@ defmodule Pleroma.Web.MastodonAPI.NotificationViewTest do
test_notifications_rendering([notification], followed, [])
end
@tag capture_log: true
test "Move notification" do
old_user = insert(:user)
new_user = insert(:user, also_known_as: [old_user.ap_id])
follower = insert(:user)
old_user_url = old_user.ap_id
body =
File.read!("test/fixtures/users_mock/localhost.json")
|> String.replace("{{nickname}}", old_user.nickname)
|> Jason.encode!()
Tesla.Mock.mock(fn
%{method: :get, url: ^old_user_url} ->
%Tesla.Env{status: 200, body: body}
end)
User.follow(follower, old_user)
Pleroma.Web.ActivityPub.ActivityPub.move(old_user, new_user)
Pleroma.Tests.ObanHelpers.perform_all()

View File

@ -94,6 +94,23 @@ defmodule Pleroma.Web.MastodonAPI.StatusViewTest do
Repo.delete(user)
Cachex.clear(:user_cache)
finger_url =
"https://localhost/.well-known/webfinger?resource=acct:#{user.nickname}@localhost"
Tesla.Mock.mock_global(fn
%{method: :get, url: "http://localhost/.well-known/host-meta"} ->
%Tesla.Env{status: 404, body: ""}
%{method: :get, url: "https://localhost/.well-known/host-meta"} ->
%Tesla.Env{status: 404, body: ""}
%{
method: :get,
url: ^finger_url
} ->
%Tesla.Env{status: 404, body: ""}
end)
%{account: ms_user} = StatusView.render("show.json", activity: activity)
assert ms_user.acct == "erroruser@example.com"
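
This hunk uses Tesla.Mock.mock_global/1 rather than mock/1: the global variant also intercepts requests made from processes other than the test process, at the cost of requiring async: false. A minimal sketch of that behaviour, with an illustrative URL:

defmodule GlobalMockSketch do
  use ExUnit.Case, async: false

  test "a request from a spawned task still hits the global mock" do
    Tesla.Mock.mock_global(fn
      %{method: :get, url: "https://host.example/.well-known/host-meta"} ->
        %Tesla.Env{status: 404, body: ""}
    end)

    client = Tesla.client([], Tesla.Mock)

    # The request runs in a separate Task process, which a per-process
    # Tesla.Mock.mock/1 would not cover.
    task = Task.async(fn -> Tesla.get(client, "https://host.example/.well-known/host-meta") end)
    assert {:ok, %Tesla.Env{status: 404}} = Task.await(task)
  end
end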

View File

@ -0,0 +1,25 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.MetadataTest do
use Pleroma.DataCase, async: true
import Pleroma.Factory
describe "restrict indexing remote users" do
test "for remote user" do
user = insert(:user, local: false)
assert Pleroma.Web.Metadata.build_tags(%{user: user}) =~
"<meta content=\"noindex, noarchive\" name=\"robots\">"
end
test "for local user" do
user = insert(:user)
refute Pleroma.Web.Metadata.build_tags(%{user: user}) =~
"<meta content=\"noindex, noarchive\" name=\"robots\">"
end
end
end

Some files were not shown because too many files have changed in this diff.