# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.AttachmentsCleanupWorker do
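  @moduledoc """
  Cleans up the attachments of a deleted post: removes the matching attachment
  `Object` rows, deletes a file only when a single object references it, and
  purges deleted files from the media proxy cache.
  """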
import Ecto.Query
alias Pleroma.Object
alias Pleroma.Repo
use Pleroma.Workers.WorkerHelper, queue: "attachments_cleanup"
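
  # Jobs typically reach this worker through the enqueue/2 helper generated by
  # WorkerHelper (a sketch; the exact call site may differ):
  #
  #   AttachmentsCleanupWorker.enqueue("cleanup_attachments", %{"object" => object})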
@impl Oban.Worker
def perform(
%{
"op" => "cleanup_attachments",
"object" => %{"data" => %{"attachment" => [_ | _] = attachments, "actor" => actor}}
},
_job
) do
hrefs =
Enum.flat_map(attachments, fn attachment ->
Enum.map(attachment["url"], & &1["href"])
end)
names = Enum.map(attachments, & &1["name"])
uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
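
    # with no custom base_url configured, uploads are served under "/media",
    # so that prefix must be stripped from hrefs before they reach the uploader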
prefix =
case Pleroma.Config.get([Pleroma.Upload, :base_url]) do
nil -> "media"
_ -> ""
end
base_url =
String.trim_trailing(
Pleroma.Config.get([Pleroma.Upload, :base_url], Pleroma.Web.base_url()),
"/"
)
    # find all objects holding copies of the attachments; name and actor don't matter here
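    # the fragment builds, for each object, the JSONB array of its url hrefs and
    # uses the ?| operator to match objects sharing any href with this one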
object_ids_and_hrefs =
from(o in Object,
where:
fragment(
"to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
o.data,
o.data,
^hrefs
)
)
      # The query above can be time-consuming on large instances until we
      # refactor how uploads are stored
|> Repo.all(timeout: :infinity)
      # we should delete one object for any given attachment, but not the
      # file itself while more than one object still references it
|> Enum.reduce(%{}, fn %{
id: id,
data: %{
"url" => [%{"href" => href}],
"actor" => obj_actor,
"name" => name
}
},
acc ->
Map.update(acc, href, %{id: id, count: 1}, fn val ->
case obj_actor == actor and name in names do
true ->
              # set the id of the actor's object that will be deleted
%{val | id: id, count: val.count + 1}
false ->
              # another actor's object; just increase the count so the file is kept
%{val | count: val.count + 1}
end
end)
end)
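      # at this point the accumulator maps each href to the id of the object to
      # delete and the number of objects still pointing at that file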
|> Enum.map(fn {href, %{id: id, count: count}} ->
        # only delete files that have a single instance
with 1 <- count do
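          # hrefs are full URLs, so strip the base URL (and "media" prefix) to
          # recover the path the uploader originally stored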
href
|> String.trim_leading("#{base_url}/#{prefix}")
|> uploader.delete_file()
{id, href}
else
_ -> {id, nil}
end
end)
object_ids = Enum.map(object_ids_and_hrefs, fn {id, _} -> id end)
from(o in Object, where: o.id in ^object_ids)
|> Repo.delete_all()
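
    # purge the media proxy cache, but only for hrefs whose files were deleted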
object_ids_and_hrefs
|> Enum.filter(fn {_, href} -> not is_nil(href) end)
|> Enum.map(&elem(&1, 1))
|> Pleroma.Web.MediaProxy.Invalidation.purge()
{:ok, :success}
end
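
  # objects whose attachment list is missing or empty need no cleanup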
def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
end