pleroma/lib/pleroma/user/backup.ex

# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User.Backup do
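@moduledoc """
Schema and export logic for user account backups.

A backup row tracks the generated archive (file name, size, content type)
and its processing state; the ZIP itself bundles the user's actor.json,
outbox.json, likes.json and bookmarks.json.
"""
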
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query
import Pleroma.Web.Gettext
require Logger
require Pleroma.Constants
alias Pleroma.Activity
alias Pleroma.Bookmark
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.User.Backup.State
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.ActivityPub.UserView
alias Pleroma.Workers.BackupWorker
schema "backups" do
field(:content_type, :string)
field(:file_name, :string)
field(:file_size, :integer, default: 0)
field(:processed, :boolean, default: false)
field(:state, State, default: :invalid)
field(:processed_number, :integer, default: 0)
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
timestamps()
end
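
# Runtime configuration read from `config :pleroma, Pleroma.User.Backup`:
# :limit_days, :dir, :process_wait_time and :process_chunk_size.
# A sketch with illustrative values (assumed, not necessarily the shipped
# defaults):
#
#   config :pleroma, Pleroma.User.Backup,
#     limit_days: 7,
#     dir: nil,
#     process_wait_time: :timer.minutes(30),
#     process_chunk_size: 100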
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
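
@doc """
Creates a backup record for `user` and hands it to `BackupWorker` for
processing. The rate limit is skipped when `admin_id` is given, i.e. when
an admin requested the backup on the user's behalf.
"""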
def create(user, admin_id \\ nil) do
with :ok <- validate_limit(user, admin_id),
{:ok, backup} <- user |> new() |> Repo.insert() do
BackupWorker.process(backup, admin_id)
end
end
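
@doc """
Builds an unsaved backup struct in the `:pending` state with a random,
timestamped `archive-<nickname>-<datetime>-<random>.zip` file name.
"""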
def new(user) do
rand_str = :crypto.strong_rand_bytes(32) |> Base.url_encode64(padding: false)
datetime = Calendar.NaiveDateTime.Format.iso8601_basic(NaiveDateTime.utc_now())
name = "archive-#{user.nickname}-#{datetime}-#{rand_str}.zip"
%__MODULE__{
user_id: user.id,
content_type: "application/zip",
file_name: name,
state: :pending
}
end
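
@doc """
Removes the uploaded archive through the configured uploader, then deletes
the database record.
"""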
def delete(backup) do
uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
with :ok <- uploader.delete_file(Path.join("backups", backup.file_name)) do
Repo.delete(backup)
end
end
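
# Admin-initiated backups bypass the limit; otherwise a new backup is only
# allowed once `limit_days` days have passed since the previous one.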
defp validate_limit(_user, admin_id) when is_binary(admin_id), do: :ok
defp validate_limit(user, nil) do
case get_last(user.id) do
%__MODULE__{inserted_at: inserted_at} ->
days = Pleroma.Config.get([__MODULE__, :limit_days])
diff = Timex.diff(NaiveDateTime.utc_now(), inserted_at, :days)
if diff > days do
:ok
else
{:error,
dngettext(
"errors",
"Last export was less than a day ago",
"Last export was less than %{days} days ago",
days,
days: days
)}
end
nil ->
:ok
end
end
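
@doc "Returns the most recent backup for the given user id, or `nil`."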
def get_last(user_id) do
__MODULE__
|> where(user_id: ^user_id)
|> order_by(desc: :id)
|> limit(1)
|> Repo.one()
end
def list(%User{id: user_id}) do
__MODULE__
|> where(user_id: ^user_id)
|> order_by(desc: :id)
|> Repo.all()
end
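
@doc """
Schedules deletion of every other backup belonging to the same user,
keeping only the given (latest) one.
"""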
def remove_outdated(%__MODULE__{id: latest_id, user_id: user_id}) do
__MODULE__
|> where(user_id: ^user_id)
|> where([b], b.id != ^latest_id)
|> Repo.all()
|> Enum.each(&BackupWorker.delete/1)
end
def get(id), do: Repo.get(__MODULE__, id)
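
# Persists a state transition, optionally updating the number of items
# processed so far.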
defp set_state(backup, state, processed_number \\ nil) do
struct =
%{state: state}
|> Pleroma.Maps.put_if_present(:processed_number, processed_number)
backup
|> cast(struct, [:state, :processed_number])
|> Repo.update()
end
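
@doc """
Runs a backup: marks it as `:running`, starts `processor_module` (by
default the `Processor` module defined below) under
`Pleroma.TaskSupervisor`, and waits for progress messages until the task
finishes, fails or times out.
"""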
def process(
%__MODULE__{} = backup,
processor_module \\ __MODULE__.Processor
) do
set_state(backup, :running, 0)
current_pid = self()
task =
Task.Supervisor.async_nolink(
Pleroma.TaskSupervisor,
processor_module,
:do_process,
[backup, current_pid]
)
wait_backup(backup, backup.processed_number, task)
end
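
# Receive loop for the processing task: `{:progress, n}` messages advance
# `processed_number`, a `:DOWN` message ends the loop, and going
# `process_wait_time` ms without any message kills the task and marks the
# backup as failed.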
defp wait_backup(backup, current_processed, task) do
wait_time = @config_impl.get([__MODULE__, :process_wait_time])
receive do
{:progress, new_processed} ->
total_processed = current_processed + new_processed
set_state(backup, :running, total_processed)
wait_backup(backup, total_processed, task)
{:DOWN, _ref, _proc, _pid, reason} ->
backup = get(backup.id)
if reason != :normal do
Logger.error("Backup #{backup.id} process ended abnormally: #{inspect(reason)}")
{:ok, backup} = set_state(backup, :failed)
cleanup(backup)
{:error,
%{
backup: backup,
reason: :exit,
details: reason
}}
else
{:ok, backup}
end
after
wait_time ->
Logger.error(
"Backup #{backup.id} timed out after no response for #{wait_time}ms, terminating"
)
Task.Supervisor.terminate_child(Pleroma.TaskSupervisor, task.pid)
{:ok, backup} = set_state(backup, :failed)
cleanup(backup)
{:error,
%{
backup: backup,
reason: :timeout
}}
end
end
@files ['actor.json', 'outbox.json', 'likes.json', 'bookmarks.json']
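
@doc """
Writes the actor, outbox, likes and bookmarks JSON files into a temporary
directory, zips them, removes the directory and returns the path to the
ZIP archive.
"""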
def export(%__MODULE__{} = backup, caller_pid) do
backup = Repo.preload(backup, :user)
dir = backup_tempdir(backup)
with :ok <- File.mkdir(dir),
:ok <- actor(dir, backup.user, caller_pid),
:ok <- statuses(dir, backup.user, caller_pid),
:ok <- likes(dir, backup.user, caller_pid),
:ok <- bookmarks(dir, backup.user, caller_pid),
{:ok, zip_path} <- :zip.create(String.to_charlist(dir <> ".zip"), @files, cwd: dir),
{:ok, _} <- File.rm_rf(dir) do
{:ok, to_string(zip_path)}
end
end
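
@doc """
Returns the working path for `name` under the configured backup directory,
falling back to the system temporary directory.
"""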
def dir(name) do
dir = Pleroma.Config.get([__MODULE__, :dir]) || System.tmp_dir!()
Path.join(dir, name)
end
def upload(%__MODULE__{} = backup, zip_path) do
uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
upload = %Pleroma.Upload{
name: backup.file_name,
tempfile: zip_path,
content_type: backup.content_type,
path: Path.join("backups", backup.file_name)
}
with {:ok, _} <- Pleroma.Uploaders.Uploader.put_file(uploader, upload),
:ok <- File.rm(zip_path) do
{:ok, upload}
end
end
defp actor(dir, user, caller_pid) do
with {:ok, json} <-
UserView.render("user.json", %{user: user})
|> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"})
|> Jason.encode() do
send(caller_pid, {:progress, 1})
File.write(Path.join(dir, "actor.json"), json)
end
end
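
# Writes the opening of an ActivityStreams OrderedCollection; write/5
# appends the items and finally patches in the closing bracket and the
# "totalItems" count.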
defp write_header(file, name) do
IO.write(
file,
"""
{
"@context": "https://www.w3.org/ns/activitystreams",
"id": "#{name}.json",
"type": "OrderedCollection",
"orderedItems": [

"""
)
end
defp should_report?(num, chunk_size), do: rem(num, chunk_size) == 0
defp backup_tempdir(backup) do
name = String.trim_trailing(backup.file_name, ".zip")
dir(name)
end
defp cleanup(backup) do
dir = backup_tempdir(backup)
File.rm_rf(dir)
end
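
# Streams the query in chunks, encodes each item with `fun` and appends it
# to `<name>.json`. Progress is reported to `caller_pid` once per full
# chunk; items that fail to encode are logged and skipped.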
defp write(query, dir, name, fun, caller_pid) do
path = Path.join(dir, "#{name}.json")
chunk_size = Pleroma.Config.get([__MODULE__, :process_chunk_size])
with {:ok, file} <- File.open(path, [:write, :utf8]),
:ok <- write_header(file, name) do
total =
query
|> Pleroma.Repo.chunk_stream(chunk_size, _returns_as = :one, timeout: :infinity)
|> Enum.reduce(0, fn i, acc ->
with {:ok, data} <-
(try do
fun.(i)
rescue
e -> {:error, e}
end),
{:ok, str} <- Jason.encode(data),
:ok <- IO.write(file, str <> ",\n") do
if should_report?(acc + 1, chunk_size) do
send(caller_pid, {:progress, chunk_size})
end
acc + 1
else
{:error, e} ->
Logger.warning(
"Error processing backup item: #{inspect(e)}\n The item is: #{inspect(i)}"
)
acc
_ ->
acc
end
end)
send(caller_pid, {:progress, rem(total, chunk_size)})
with :ok <- :file.pwrite(file, {:eof, -2}, "\n],\n \"totalItems\": #{total}}") do
File.close(file)
end
end
end
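
# Bookmarks and likes are exported as the bare object IRIs taken from the
# activity JSON.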
defp bookmarks(dir, %{id: user_id} = _user, caller_pid) do
Bookmark
|> where(user_id: ^user_id)
|> join(:inner, [b], activity in assoc(b, :activity))
|> select([b, a], %{id: b.id, object: fragment("(?)->>'object'", a.data)})
|> write(dir, "bookmarks", fn a -> {:ok, a.object} end, caller_pid)
end
defp likes(dir, user, caller_pid) do
user.ap_id
|> Activity.Queries.by_actor()
|> Activity.Queries.by_type("Like")
|> select([like], %{id: like.id, object: fragment("(?)->>'object'", like.data)})
|> write(dir, "likes", fn a -> {:ok, a.object} end, caller_pid)
end
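
# The outbox export covers the user's own Create and Announce activities
# addressed to the public collection, the user, their followers or their
# lists, run through `Transmogrifier.prepare_outgoing/1` so the entries
# are valid AS2.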
defp statuses(dir, user, caller_pid) do
opts =
%{}
|> Map.put(:type, ["Create", "Announce"])
|> Map.put(:actor_id, user.ap_id)
[
[Pleroma.Constants.as_public(), user.ap_id],
User.following(user),
Pleroma.List.memberships(user)
]
|> Enum.concat()
|> ActivityPub.fetch_activities_query(opts)
|> write(
dir,
"outbox",
fn a ->
with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do
{:ok, Map.delete(activity, "@context")}
end
end,
caller_pid
)
end
end
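
# Behaviour for the backup processor; `Backup.process/2` accepts any module
# implementing it, which allows substituting an alternative implementation
# (for example in tests).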
defmodule Pleroma.User.Backup.ProcessorAPI do
@callback do_process(%Pleroma.User.Backup{}, pid()) ::
{:ok, %Pleroma.User.Backup{}} | {:error, any()}
end
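
# Default processor: exports the archive, uploads it, and marks the backup
# as processed and complete.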
defmodule Pleroma.User.Backup.Processor do
@behaviour Pleroma.User.Backup.ProcessorAPI
alias Pleroma.Repo
alias Pleroma.User.Backup
import Ecto.Changeset
@impl true
def do_process(backup, current_pid) do
with {:ok, zip_file} <- Backup.export(backup, current_pid),
{:ok, %{size: size}} <- File.stat(zip_file),
{:ok, _upload} <- Backup.upload(backup, zip_file) do
backup
|> cast(
%{
file_size: size,
processed: true,
state: :complete
},
[:file_size, :processed, :state]
)
|> Repo.update()
end
end
end