fix: invalidate media URLs

Maksim Pechnikov 2020-06-14 21:02:57 +03:00
parent f9dcf15ecb
commit 2e8a236cef
16 changed files with 346 additions and 114 deletions

View File

@@ -406,6 +406,13 @@ config :pleroma, :media_proxy,
],
whitelist: []
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
method: :purge,
headers: [],
options: []
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script, script_path: nil
config :pleroma, :chat, enabled: true
config :phoenix, :format_encoders, json: Jason
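For reference, a minimal sketch of how an instance might opt into the new invalidation on top of these defaults (hostnames and header values below are illustrative, not part of the commit):

```elixir
# prod.secret.exs (illustrative): enable invalidation and point it at the
# HTTP provider; the defaults added above are merged with these overrides.
config :pleroma, :media_proxy,
  enabled: true,
  invalidation: [
    enabled: true,
    provider: Pleroma.Web.MediaProxy.Invalidation.Http
  ]

config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
  method: :purge,
  headers: [{"x-refresh", 1}],
  options: []
```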

View File

@@ -1637,6 +1637,31 @@ config :pleroma, :config_description, [
"The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.",
suggestions: ["https://example.com"]
},
%{
key: :invalidation,
type: :keyword,
description: "",
suggestions: [
enabled: true,
provider: Pleroma.Web.MediaProxy.Invalidation.Script
],
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables invalidate media cache"
},
%{
key: :provider,
type: :module,
description: "Module which will be used to cache purge.",
suggestions: [
Pleroma.Web.MediaProxy.Invalidation.Script,
Pleroma.Web.MediaProxy.Invalidation.Http
]
}
]
},
%{
key: :proxy_opts,
type: :keyword,
@@ -1709,6 +1734,45 @@
}
]
},
%{
group: :pleroma,
key: Pleroma.Web.MediaProxy.Invalidation.Http,
type: :group,
description: "HTTP invalidate settings",
children: [
%{
key: :method,
type: :atom,
description: "HTTP method of request. Default: :purge"
},
%{
key: :headers,
type: {:list, :tuple},
description: "HTTP headers of request.",
suggestions: [{"x-refresh", 1}]
},
%{
key: :options,
type: :keyword,
description: "Request options.",
suggestions: [params: %{ts: "xxx"}]
}
]
},
%{
group: :pleroma,
key: Pleroma.Web.MediaProxy.Invalidation.Script,
type: :group,
description: "Script invalidate settings",
children: [
%{
key: :script_path,
type: :string,
description: "Path to shell script. Which will run purge cache.",
suggestions: ["./installation/nginx-cache-purge.sh.example"]
}
]
},
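These description entries correspond to runtime settings of roughly the following shape; a hedged example using the Script provider (values taken from the suggestions above):

```elixir
# Illustrative only: what the settings documented above look like in config.
config :pleroma, :media_proxy,
  invalidation: [
    enabled: true,
    provider: Pleroma.Web.MediaProxy.Invalidation.Script
  ]

config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
  script_path: "./installation/nginx-cache-purge.sh.example"
```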
%{
group: :pleroma,
key: :gopher,

View File

@@ -262,7 +262,7 @@ This section describe PWA manifest instance-specific values. Currently this opti
#### Pleroma.Web.MediaProxy.Invalidation.Script
This strategy allows performing an external shell script to purge the cache.
URLs of attachments are passed to the script as arguments.
* `script_path`: path to the external script.
@@ -278,8 +278,8 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
This strategy allows performing a custom HTTP request to purge the cache.
* `method`: HTTP method. Default is `purge`.
* `headers`: HTTP headers.
* `options`: request options.
Example:
```elixir

View File

@@ -13,7 +13,7 @@ CACHE_DIRECTORY="/tmp/pleroma-media-cache"
## $3 - (optional) the number of parallel processes to run for grep.
get_cache_files() {
local max_parallel=${3-16}
find $2 -maxdepth 2 -type d | xargs -P $max_parallel -n 1 grep -E -Rl "^KEY:.*$1" | sort -u
}
## Removes an item from the given cache zone.
@@ -37,4 +37,4 @@ purge() {
}
purge $@

View File

@@ -148,7 +148,8 @@ defmodule Pleroma.Application do
build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
build_cachex("web_resp", limit: 2500),
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
build_cachex("failed_proxy_url", limit: 2500),
build_cachex("deleted_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000)
]
end
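The new cache is plain Cachex underneath; a quick sketch of how it behaves, assuming `build_cachex/2` registers it under the `:deleted_urls_cache` name that `Pleroma.Web.MediaProxy` uses below:

```elixir
# Illustrative IEx session: entries expire after the 30-day default TTL.
{:ok, true} = Cachex.put(:deleted_urls_cache, "https://example.com/media/a.jpg", true)
{:ok, true} = Cachex.exists?(:deleted_urls_cache, "https://example.com/media/a.jpg")
{:ok, true} = Cachex.del(:deleted_urls_cache, "https://example.com/media/a.jpg")
```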

View File

@@ -10,6 +10,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
import Pleroma.Web.Gettext
require Logger
alias Pleroma.Web.MediaProxy
@behaviour Plug
# no slashes
@path "media"
@@ -35,8 +37,7 @@ defmodule Pleroma.Plugs.UploadedMedia do
%{query_params: %{"name" => name}} = conn ->
name = String.replace(name, "\"", "\\\"")
put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")
conn ->
conn
@@ -47,7 +48,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
with uploader <- Keyword.fetch!(config, :uploader),
proxy_remote = Keyword.get(config, :proxy_remote, false),
{:ok, get_method} <- uploader.get_file(file),
false <- media_is_deleted(conn, get_method) do
get_media(conn, get_method, proxy_remote, opts)
else
_ ->
@@ -59,6 +61,14 @@
def call(conn, _opts), do: conn
defp media_is_deleted(%{request_path: path} = _conn, {:static_dir, _}) do
MediaProxy.in_deleted_urls(Pleroma.Web.base_url() <> path)
end
defp media_is_deleted(_, {:url, url}), do: MediaProxy.in_deleted_urls(url)
defp media_is_deleted(_, _), do: false
defp get_media(conn, {:static_dir, directory}, _, opts) do
static_opts =
Map.get(opts, :static_plug_opts)

View File

@@ -5,22 +5,33 @@
defmodule Pleroma.Web.MediaProxy.Invalidation do
@moduledoc false
@callback purge(list(String.t()), Keyword.t()) :: {:ok, list(String.t())} | {:error, String.t()}
alias Pleroma.Config
alias Pleroma.Web.MediaProxy
@spec enabled?() :: boolean()
def enabled?, do: Config.get([:media_proxy, :invalidation, :enabled])
@spec purge(list(String.t()) | String.t()) :: {:ok, list(String.t())} | {:error, String.t()}
def purge(urls) do
prepared_urls = prepare_urls(urls)
if enabled?() do
do_purge(prepared_urls)
else
{:ok, prepared_urls}
end
end
defp do_purge(urls) do
provider = Config.get([:media_proxy, :invalidation, :provider])
provider.purge(urls, Config.get(provider))
end
def prepare_urls(urls) do
urls
|> List.wrap()
|> Enum.map(&MediaProxy.url(&1))
end
end
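A short sketch of the reworked entry point (the return value assumes invalidation is enabled and the configured provider succeeds):

```elixir
alias Pleroma.Web.MediaProxy.Invalidation

# purge/1 now accepts a single URL or a list; URLs are first rewritten with
# MediaProxy.url/1 and then handed to the configured provider. When
# invalidation is disabled, the prepared URLs are returned without any call.
{:ok, purged_urls} = Invalidation.purge("https://remote.example/media/pic.png")
```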

View File

@@ -10,9 +10,9 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
@impl Pleroma.Web.MediaProxy.Invalidation
def purge(urls, opts) do
method = Keyword.get(opts, :method, :purge)
headers = Keyword.get(opts, :headers, [])
options = Keyword.get(opts, :options, [])
Logger.debug("Running cache purge: #{inspect(urls)}")
@@ -22,7 +22,7 @@
end
end)
{:ok, urls}
end
defp do_purge(method, url, headers, options) do

View File

@@ -10,32 +10,34 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Script do
require Logger
@impl Pleroma.Web.MediaProxy.Invalidation
def purge(urls, opts) do
args =
urls
|> List.wrap()
|> Enum.uniq()
|> Enum.join(" ")
opts
|> Keyword.get(:script_path, nil)
|> do_purge([args])
|> handle_result(urls)
end
defp do_purge(script_path, args) when is_binary(script_path) do
path = Path.expand(script_path)
Logger.debug("Running cache purge: #{inspect(args)}, #{inspect(path)}")
System.cmd(path, args)
rescue
error -> error
end
defp do_purge(_, _), do: {:error, "not found script path"}
defp handle_result({_result, 0}, urls), do: {:ok, urls}
defp handle_result({:error, error}, urls), do: handle_result(error, urls)
defp handle_result(error, _) do
Logger.error("Error while cache purge: #{inspect(error)}")
{:error, inspect(error)}
end end
end
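With the new contract the options arrive as a keyword list and the URL list is echoed back on success. A hedged usage example, assuming the purge script shipped in `installation/` is used as-is:

```elixir
alias Pleroma.Web.MediaProxy.Invalidation

# {:ok, urls} when the script exits with status 0; a missing :script_path
# falls through to do_purge(_, _) and ends up as {:error, _}.
Invalidation.Script.purge(
  ["https://example.com/media/foo.jpg"],
  script_path: "./installation/nginx-cache-purge.sh.example"
)
```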

View File

@@ -6,20 +6,53 @@ defmodule Pleroma.Web.MediaProxy do
alias Pleroma.Config
alias Pleroma.Upload
alias Pleroma.Web
alias Pleroma.Web.MediaProxy.Invalidation
@base64_opts [padding: false]
@spec in_deleted_urls(String.t()) :: boolean()
def in_deleted_urls(url), do: elem(Cachex.exists?(:deleted_urls_cache, url(url)), 1)
def remove_from_deleted_urls(urls) when is_list(urls) do
Cachex.execute!(:deleted_urls_cache, fn cache ->
Enum.each(Invalidation.prepare_urls(urls), &Cachex.del(cache, &1))
end)
end
def remove_from_deleted_urls(url) when is_binary(url) do
Cachex.del(:deleted_urls_cache, url(url))
end
def put_in_deleted_urls(urls) when is_list(urls) do
Cachex.execute!(:deleted_urls_cache, fn cache ->
Enum.each(Invalidation.prepare_urls(urls), &Cachex.put(cache, &1, true))
end)
end
def put_in_deleted_urls(url) when is_binary(url) do
Cachex.put(:deleted_urls_cache, url(url), true)
end
def url(url) when is_nil(url) or url == "", do: nil
def url("/" <> _ = url), do: url
def url(url) do
if disabled?() or not is_url_proxiable?(url) do
url
else
encode_url(url)
end
end
@spec is_url_proxiable?(String.t()) :: boolean()
def is_url_proxiable?(url) do
if local?(url) or whitelisted?(url) do
false
else
true
end
end
defp disabled?, do: !Config.get([:media_proxy, :enabled], false)
defp local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())
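The new helpers act as a tombstone list for purged media; a brief sketch of the intended usage (URLs are normalized through `url/1` before being stored or looked up):

```elixir
alias Pleroma.Web.MediaProxy

# Mark an upload as deleted so the proxy and the media plug answer 404 for it.
MediaProxy.put_in_deleted_urls("https://example.com/media/deleted.jpg")
MediaProxy.in_deleted_urls("https://example.com/media/deleted.jpg")
# => true

# Clear the tombstone again if the file reappears.
MediaProxy.remove_from_deleted_urls("https://example.com/media/deleted.jpg")
```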

View File

@@ -14,10 +14,11 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do
with config <- Pleroma.Config.get([:media_proxy], []),
true <- Keyword.get(config, :enabled, false),
{:ok, url} <- MediaProxy.decode_url(sig64, url64),
{_, false} <- {:in_deleted_urls, MediaProxy.in_deleted_urls(url)},
:ok <- filename_matches(params, conn.request_path, url) do
ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_opts))
else
error when error in [false, {:in_deleted_urls, true}] ->
send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404))
{:error, :invalid_signature} ->

View File

@@ -23,8 +23,25 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
Enum.map(attachment["url"], & &1["href"])
end)
# find all objects for copies of the attachments, name and actor doesn't matter here
hrefs
|> fetch_objects
|> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
|> Enum.reduce({[], []}, fn {href, %{id: id, count: count}}, {ids, hrefs} ->
with 1 <- count do
{ids ++ [id], hrefs ++ [href]}
else
_ -> {ids ++ [id], hrefs}
end
end)
|> do_clean
{:ok, :success}
end
def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
defp do_clean({object_ids, attachment_urls}) do
uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
prefix =
@@ -39,22 +56,25 @@
"/"
)
Enum.each(attachment_urls, fn href ->
href
|> String.trim_leading("#{base_url}/#{prefix}")
|> uploader.delete_file()
end)
delete_objects(object_ids)
end
defp delete_objects([_ | _] = object_ids) do
Repo.delete_all(from(o in Object, where: o.id in ^object_ids))
end
defp delete_objects(_), do: :ok
# we should delete 1 object for any given attachment, but don't delete
# files if there are more than 1 object for it
def prepare_objects(objects, actor, names) do
objects
|> Enum.reduce(%{}, fn %{
id: id,
data: %{
@@ -76,31 +96,20 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
end
end)
end)
end
def fetch_objects(hrefs) do
from(o in Object,
where:
fragment(
"to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
o.data,
o.data,
^hrefs
)
)
# The query above can be time consumptive on large instances until we
# refactor how uploads are stored
|> Repo.all(timeout: :infinity)
end
end
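The reduce in `perform/2` above only collects an href for file deletion when exactly one object references it; a small illustration with hypothetical data:

```elixir
# Hypothetical prepare_objects/3 result: two attachments, one shared by two objects.
prepared = %{
  "https://example.com/media/a.jpg" => %{id: 1, count: 1},
  "https://example.com/media/b.jpg" => %{id: 2, count: 2}
}

Enum.reduce(prepared, {[], []}, fn {href, %{id: id, count: count}}, {ids, hrefs} ->
  with 1 <- count do
    {ids ++ [id], hrefs ++ [href]}
  else
    _ -> {ids ++ [id], hrefs}
  end
end)
# => {[1, 2], ["https://example.com/media/a.jpg"]} (ordering may vary)
# Both objects are dropped, but only the single-use file is removed from storage.
```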

View File

@@ -0,0 +1,65 @@
defmodule Pleroma.Web.MediaProxy.InvalidationTest do
use ExUnit.Case
use Pleroma.Tests.Helpers
alias Pleroma.Config
alias Pleroma.Web.MediaProxy.Invalidation
import ExUnit.CaptureLog
import Mock
import Tesla.Mock
setup do: clear_config([:media_proxy])
setup do
on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
describe "Invalidation.Http" do
test "perform request to clear cache" do
Config.put([:media_proxy, :enabled], false)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], Invalidation.Http)
Config.put([Invalidation.Http], method: :purge, headers: [{"x-refresh", 1}])
image_url = "http://example.com/media/example.jpg"
Pleroma.Web.MediaProxy.put_in_deleted_urls(image_url)
mock(fn
%{
method: :purge,
url: "http://example.com/media/example.jpg",
headers: [{"x-refresh", 1}]
} ->
%Tesla.Env{status: 200}
end)
assert capture_log(fn ->
assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
assert Invalidation.purge([image_url]) == {:ok, [image_url]}
assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
end) =~ "Running cache purge: [\"#{image_url}\"]"
end
end
describe "Invalidation.Script" do
test "run script to clear cache" do
Config.put([:media_proxy, :enabled], false)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], Invalidation.Script)
Config.put([Invalidation.Script], script_path: "purge-nginx")
image_url = "http://example.com/media/example.jpg"
Pleroma.Web.MediaProxy.put_in_deleted_urls(image_url)
with_mocks [{System, [], [cmd: fn _, _ -> {"ok", 0} end]}] do
assert capture_log(fn ->
assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
assert Invalidation.purge([image_url]) == {:ok, [image_url]}
assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
end) =~ "Running cache purge: [\"#{image_url}\"]"
end
end
end
end

View File

@@ -5,6 +5,11 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.HttpTest do
import ExUnit.CaptureLog
import Tesla.Mock
setup do
on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
test "logs hasn't error message when request is valid" do test "logs hasn't error message when request is valid" do
mock(fn mock(fn
%{method: :purge, url: "http://example.com/media/example.jpg"} -> %{method: :purge, url: "http://example.com/media/example.jpg"} ->
@ -14,8 +19,8 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.HttpTest do
refute capture_log(fn -> refute capture_log(fn ->
assert Invalidation.Http.purge( assert Invalidation.Http.purge(
["http://example.com/media/example.jpg"], ["http://example.com/media/example.jpg"],
%{} []
) == {:ok, "success"} ) == {:ok, ["http://example.com/media/example.jpg"]}
end) =~ "Error while cache purge" end) =~ "Error while cache purge"
end end
@ -28,8 +33,8 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.HttpTest do
assert capture_log(fn -> assert capture_log(fn ->
assert Invalidation.Http.purge( assert Invalidation.Http.purge(
["http://example.com/media/example1.jpg"], ["http://example.com/media/example1.jpg"],
%{} []
) == {:ok, "success"} ) == {:ok, ["http://example.com/media/example1.jpg"]}
end) =~ "Error while cache purge: url - http://example.com/media/example1.jpg" end) =~ "Error while cache purge: url - http://example.com/media/example1.jpg"
end end
end end

View File

@@ -4,17 +4,24 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.ScriptTest do
import ExUnit.CaptureLog
setup do
on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
test "it logger error when script not found" do test "it logger error when script not found" do
assert capture_log(fn -> assert capture_log(fn ->
assert Invalidation.Script.purge( assert Invalidation.Script.purge(
["http://example.com/media/example.jpg"], ["http://example.com/media/example.jpg"],
%{script_path: "./example"} script_path: "./example"
) == {:error, "\"%ErlangError{original: :enoent}\""} ) == {:error, "%ErlangError{original: :enoent}"}
end) =~ "Error while cache purge: \"%ErlangError{original: :enoent}\"" end) =~ "Error while cache purge: %ErlangError{original: :enoent}"
capture_log(fn ->
assert Invalidation.Script.purge( assert Invalidation.Script.purge(
["http://example.com/media/example.jpg"], ["http://example.com/media/example.jpg"],
%{} []
) == {:error, "not found script path"} ) == {:error, "\"not found script path\""}
end)
end end
end end

View File

@@ -10,6 +10,11 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
setup do: clear_config(:media_proxy)
setup do: clear_config([Pleroma.Web.Endpoint, :secret_key_base])
setup do
on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
:ok
end
test "it returns 404 when MediaProxy disabled", %{conn: conn} do test "it returns 404 when MediaProxy disabled", %{conn: conn} do
Config.put([:media_proxy, :enabled], false) Config.put([:media_proxy, :enabled], false)
@ -66,4 +71,16 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
assert %Plug.Conn{status: :success} = get(conn, url) assert %Plug.Conn{status: :success} = get(conn, url)
end end
end end
test "it returns 404 when url contains in deleted_urls cache", %{conn: conn} do
Config.put([:media_proxy, :enabled], true)
Config.put([Pleroma.Web.Endpoint, :secret_key_base], "00000000000")
url = Pleroma.Web.MediaProxy.encode_url("https://google.fn/test.png")
Pleroma.Web.MediaProxy.put_in_deleted_urls("https://google.fn/test.png")
with_mock Pleroma.ReverseProxy,
call: fn _conn, _url, _opts -> %Plug.Conn{status: :success} end do
assert %Plug.Conn{status: 404, resp_body: "Not Found"} = get(conn, url)
end
end
end