fix invalidates media url's

Maksim Pechnikov 2020-06-14 21:02:57 +03:00
commit 2e8a236cef
16 changed files with 346 additions and 114 deletions

View file

@@ -148,7 +148,8 @@ defmodule Pleroma.Application do
       build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
       build_cachex("web_resp", limit: 2500),
       build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
-      build_cachex("failed_proxy_url", limit: 2500)
+      build_cachex("failed_proxy_url", limit: 2500),
+      build_cachex("deleted_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000)
     ]
   end
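
The `build_cachex("deleted_urls", ...)` entry registers a cache named `:deleted_urls_cache` (the helper suffixes `_cache`), which the media-proxy code below consults. A rough sketch of its behaviour via plain Cachex calls (the URL is made up):

    # Keys live for the default TTL of 30 days; the cache holds at most 5_000 keys.
    {:ok, true} = Cachex.put(:deleted_urls_cache, "https://example.com/media/gone.png", true)
    {:ok, true} = Cachex.exists?(:deleted_urls_cache, "https://example.com/media/gone.png")

    # Unknown (or expired/evicted) URLs simply miss:
    {:ok, false} = Cachex.exists?(:deleted_urls_cache, "https://example.com/media/other.png")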

View file

@@ -10,6 +10,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
   import Pleroma.Web.Gettext
   require Logger
 
+  alias Pleroma.Web.MediaProxy
+
   @behaviour Plug
 
   # no slashes
   @path "media"
@@ -35,8 +37,7 @@ defmodule Pleroma.Plugs.UploadedMedia do
       %{query_params: %{"name" => name}} = conn ->
         name = String.replace(name, "\"", "\\\"")
 
-        conn
-        |> put_resp_header("content-disposition", "filename=\"#{name}\"")
+        put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")
 
       conn ->
         conn
@@ -47,7 +48,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
     with uploader <- Keyword.fetch!(config, :uploader),
          proxy_remote = Keyword.get(config, :proxy_remote, false),
-         {:ok, get_method} <- uploader.get_file(file) do
+         {:ok, get_method} <- uploader.get_file(file),
+         false <- media_is_deleted(conn, get_method) do
       get_media(conn, get_method, proxy_remote, opts)
     else
       _ ->
@@ -59,6 +61,14 @@ defmodule Pleroma.Plugs.UploadedMedia do
   def call(conn, _opts), do: conn
 
+  defp media_is_deleted(%{request_path: path} = _conn, {:static_dir, _}) do
+    MediaProxy.in_deleted_urls(Pleroma.Web.base_url() <> path)
+  end
+
+  defp media_is_deleted(_, {:url, url}), do: MediaProxy.in_deleted_urls(url)
+
+  defp media_is_deleted(_, _), do: false
+
   defp get_media(conn, {:static_dir, directory}, _, opts) do
     static_opts =
       Map.get(opts, :static_plug_opts)
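
With these clauses the plug stops serving local uploads whose URLs were marked deleted. A rough sketch of the interaction, using the public `MediaProxy` helpers added later in this commit (the path is made up):

    # Mark an uploaded file's URL as deleted...
    Pleroma.Web.MediaProxy.put_in_deleted_urls(Pleroma.Web.base_url() <> "/media/old.png")

    # ...after which a GET /media/old.png no longer satisfies
    # `false <- media_is_deleted(conn, get_method)` in the `with` above,
    # so the plug falls through to its 404 error branch.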

View file

@@ -5,22 +5,33 @@
 defmodule Pleroma.Web.MediaProxy.Invalidation do
   @moduledoc false
 
-  @callback purge(list(String.t()), map()) :: {:ok, String.t()} | {:error, String.t()}
+  @callback purge(list(String.t()), Keyword.t()) :: {:ok, list(String.t())} | {:error, String.t()}
 
   alias Pleroma.Config
+  alias Pleroma.Web.MediaProxy
 
-  @spec purge(list(String.t())) :: {:ok, String.t()} | {:error, String.t()}
+  @spec enabled?() :: boolean()
+  def enabled?, do: Config.get([:media_proxy, :invalidation, :enabled])
+
+  @spec purge(list(String.t()) | String.t()) :: {:ok, list(String.t())} | {:error, String.t()}
   def purge(urls) do
-    [:media_proxy, :invalidation, :enabled]
-    |> Config.get()
-    |> do_purge(urls)
+    prepared_urls = prepare_urls(urls)
+
+    if enabled?() do
+      do_purge(prepared_urls)
+    else
+      {:ok, prepared_urls}
+    end
   end
 
-  defp do_purge(true, urls) do
+  defp do_purge(urls) do
     provider = Config.get([:media_proxy, :invalidation, :provider])
-    options = Config.get(provider)
-    provider.purge(urls, options)
+    provider.purge(urls, Config.get(provider))
   end
 
-  defp do_purge(_, _), do: :ok
+  def prepare_urls(urls) do
+    urls
+    |> List.wrap()
+    |> Enum.map(&MediaProxy.url(&1))
+  end
 end
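
`purge/1` now accepts a single URL or a list and returns the prepared URLs either way. A sketch, assuming both media proxy and invalidation are disabled so URLs pass through unchanged (example.com is made up):

    alias Pleroma.Web.MediaProxy.Invalidation

    # prepare_urls/1 wraps a bare binary and maps it through MediaProxy.url/1:
    ["https://example.com/img.png"] = Invalidation.prepare_urls("https://example.com/img.png")

    # With invalidation disabled, purge/1 skips the provider but still
    # returns the prepared URL list:
    {:ok, ["https://example.com/img.png"]} = Invalidation.purge("https://example.com/img.png")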

View file

@@ -10,9 +10,9 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
   @impl Pleroma.Web.MediaProxy.Invalidation
   def purge(urls, opts) do
-    method = Map.get(opts, :method, :purge)
-    headers = Map.get(opts, :headers, [])
-    options = Map.get(opts, :options, [])
+    method = Keyword.get(opts, :method, :purge)
+    headers = Keyword.get(opts, :headers, [])
+    options = Keyword.get(opts, :options, [])
 
     Logger.debug("Running cache purge: #{inspect(urls)}")
@@ -22,7 +22,7 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
       end
     end)
 
-    {:ok, "success"}
+    {:ok, urls}
   end
 
   defp do_purge(method, url, headers, options) do
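
Because provider options now come straight from `Config.get(provider)` as a keyword list, the HTTP provider is configured along these lines in config.exs (values shown are the defaults; treat this as a sketch):

    config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
      method: :purge,
      headers: [],
      options: []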

View file

@@ -10,32 +10,34 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Script do
   require Logger
 
   @impl Pleroma.Web.MediaProxy.Invalidation
-  def purge(urls, %{script_path: script_path} = _options) do
+  def purge(urls, opts) do
     args =
       urls
       |> List.wrap()
       |> Enum.uniq()
       |> Enum.join(" ")
 
-    path = Path.expand(script_path)
-
-    Logger.debug("Running cache purge: #{inspect(urls)}, #{path}")
-
-    case do_purge(path, [args]) do
-      {result, exit_status} when exit_status > 0 ->
-        Logger.error("Error while cache purge: #{inspect(result)}")
-        {:error, inspect(result)}
-
-      _ ->
-        {:ok, "success"}
-    end
+    opts
+    |> Keyword.get(:script_path, nil)
+    |> do_purge([args])
+    |> handle_result(urls)
   end
 
-  def purge(_, _), do: {:error, "not found script path"}
-
-  defp do_purge(path, args) do
+  defp do_purge(script_path, args) when is_binary(script_path) do
+    path = Path.expand(script_path)
+    Logger.debug("Running cache purge: #{inspect(args)}, #{inspect(path)}")
     System.cmd(path, args)
   rescue
-    error -> {inspect(error), 1}
+    error -> error
   end
 
+  defp do_purge(_, _), do: {:error, "not found script path"}
+
+  defp handle_result({_result, 0}, urls), do: {:ok, urls}
+  defp handle_result({:error, error}, urls), do: handle_result(error, urls)
+
+  defp handle_result(error, _) do
+    Logger.error("Error while cache purge: #{inspect(error)}")
+    {:error, inspect(error)}
+  end
 end
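
The Script provider reads its options the same way; a missing `:script_path` now surfaces as `{:error, "not found script path"}` from the `do_purge/2` fallback instead of failing the old map pattern match. An illustrative configuration (the path is made up):

    config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
      script_path: "/opt/pleroma/purge-nginx-cache.sh"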

View file

@@ -6,20 +6,53 @@ defmodule Pleroma.Web.MediaProxy do
   alias Pleroma.Config
   alias Pleroma.Upload
   alias Pleroma.Web
+  alias Pleroma.Web.MediaProxy.Invalidation
 
   @base64_opts [padding: false]
 
+  @spec in_deleted_urls(String.t()) :: boolean()
+  def in_deleted_urls(url), do: elem(Cachex.exists?(:deleted_urls_cache, url(url)), 1)
+
+  def remove_from_deleted_urls(urls) when is_list(urls) do
+    Cachex.execute!(:deleted_urls_cache, fn cache ->
+      Enum.each(Invalidation.prepare_urls(urls), &Cachex.del(cache, &1))
+    end)
+  end
+
+  def remove_from_deleted_urls(url) when is_binary(url) do
+    Cachex.del(:deleted_urls_cache, url(url))
+  end
+
+  def put_in_deleted_urls(urls) when is_list(urls) do
+    Cachex.execute!(:deleted_urls_cache, fn cache ->
+      Enum.each(Invalidation.prepare_urls(urls), &Cachex.put(cache, &1, true))
+    end)
+  end
+
+  def put_in_deleted_urls(url) when is_binary(url) do
+    Cachex.put(:deleted_urls_cache, url(url), true)
+  end
+
   def url(url) when is_nil(url) or url == "", do: nil
   def url("/" <> _ = url), do: url
 
   def url(url) do
-    if disabled?() or local?(url) or whitelisted?(url) do
+    if disabled?() or not is_url_proxiable?(url) do
       url
     else
       encode_url(url)
     end
   end
 
+  @spec is_url_proxiable?(String.t()) :: boolean()
+  def is_url_proxiable?(url) do
+    if local?(url) or whitelisted?(url) do
+      false
+    else
+      true
+    end
+  end
+
   defp disabled?, do: !Config.get([:media_proxy, :enabled], false)
 
   defp local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())
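
The new bookkeeping helpers store entries under their media-proxy form (`url/1` is applied both when writing and when looking up), so a round trip looks like this (URL made up):

    alias Pleroma.Web.MediaProxy

    url = "https://example.com/media/file.png"

    MediaProxy.put_in_deleted_urls(url)
    true = MediaProxy.in_deleted_urls(url)

    MediaProxy.remove_from_deleted_urls(url)
    false = MediaProxy.in_deleted_urls(url)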

View file

@@ -14,10 +14,11 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do
     with config <- Pleroma.Config.get([:media_proxy], []),
          true <- Keyword.get(config, :enabled, false),
          {:ok, url} <- MediaProxy.decode_url(sig64, url64),
+         {_, false} <- {:in_deleted_urls, MediaProxy.in_deleted_urls(url)},
          :ok <- filename_matches(params, conn.request_path, url) do
       ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_opts))
     else
-      false ->
+      error when error in [false, {:in_deleted_urls, true}] ->
         send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404))
 
       {:error, :invalid_signature} ->
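
Tagging the check as `{:in_deleted_urls, ...}` keeps it distinguishable from the plain `false` produced by the `:enabled` check, so both can share the 404 branch. The pattern in isolation:

    # A deleted URL makes the `with` clause fail with a tagged tuple:
    with {_, false} <- {:in_deleted_urls, true} do
      :serve
    else
      {:in_deleted_urls, true} -> :not_found
    end
    #=> :not_found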

View file

@@ -23,8 +23,25 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
         Enum.map(attachment["url"], & &1["href"])
       end)
 
-    names = Enum.map(attachments, & &1["name"])
+    # find all objects for copies of the attachments, name and actor doesn't matter here
+    hrefs
+    |> fetch_objects
+    |> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
+    |> Enum.reduce({[], []}, fn {href, %{id: id, count: count}}, {ids, hrefs} ->
+      with 1 <- count do
+        {ids ++ [id], hrefs ++ [href]}
+      else
+        _ -> {ids ++ [id], hrefs}
+      end
+    end)
+    |> do_clean
+
+    {:ok, :success}
+  end
+
+  def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
+
+  defp do_clean({object_ids, attachment_urls}) do
     uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
 
     prefix =
@@ -39,68 +56,60 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
         "/"
       )
 
-    # find all objects for copies of the attachments, name and actor doesn't matter here
-    object_ids_and_hrefs =
-      from(o in Object,
-        where:
-          fragment(
-            "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
-            o.data,
-            o.data,
-            ^hrefs
-          )
-      )
-      # The query above can be time consumptive on large instances until we
-      # refactor how uploads are stored
-      |> Repo.all(timeout: :infinity)
-      # we should delete 1 object for any given attachment, but don't delete
-      # files if there are more than 1 object for it
-      |> Enum.reduce(%{}, fn %{
-                               id: id,
-                               data: %{
-                                 "url" => [%{"href" => href}],
-                                 "actor" => obj_actor,
-                                 "name" => name
-                               }
-                             },
-                             acc ->
-        Map.update(acc, href, %{id: id, count: 1}, fn val ->
-          case obj_actor == actor and name in names do
-            true ->
-              # set id of the actor's object that will be deleted
-              %{val | id: id, count: val.count + 1}
+    Enum.each(attachment_urls, fn href ->
+      href
+      |> String.trim_leading("#{base_url}/#{prefix}")
+      |> uploader.delete_file()
+    end)
 
-            false ->
-              # another actor's object, just increase count to not delete file
-              %{val | count: val.count + 1}
-          end
-        end)
-      end)
-      |> Enum.map(fn {href, %{id: id, count: count}} ->
-        # only delete files that have single instance
-        with 1 <- count do
-          href
-          |> String.trim_leading("#{base_url}/#{prefix}")
-          |> uploader.delete_file()
-
-          {id, href}
-        else
-          _ -> {id, nil}
-        end
-      end)
-
-    object_ids = Enum.map(object_ids_and_hrefs, fn {id, _} -> id end)
-
-    from(o in Object, where: o.id in ^object_ids)
-    |> Repo.delete_all()
-
-    object_ids_and_hrefs
-    |> Enum.filter(fn {_, href} -> not is_nil(href) end)
-    |> Enum.map(&elem(&1, 1))
-    |> Pleroma.Web.MediaProxy.Invalidation.purge()
-
-    {:ok, :success}
+    delete_objects(object_ids)
   end
 
-  def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
+  defp delete_objects([_ | _] = object_ids) do
+    Repo.delete_all(from(o in Object, where: o.id in ^object_ids))
+  end
+
+  defp delete_objects(_), do: :ok
+
+  # we should delete 1 object for any given attachment, but don't delete
+  # files if there are more than 1 object for it
+  def prepare_objects(objects, actor, names) do
+    objects
+    |> Enum.reduce(%{}, fn %{
+                             id: id,
+                             data: %{
+                               "url" => [%{"href" => href}],
+                               "actor" => obj_actor,
+                               "name" => name
+                             }
+                           },
+                           acc ->
+      Map.update(acc, href, %{id: id, count: 1}, fn val ->
+        case obj_actor == actor and name in names do
+          true ->
+            # set id of the actor's object that will be deleted
+            %{val | id: id, count: val.count + 1}
+
+          false ->
+            # another actor's object, just increase count to not delete file
+            %{val | count: val.count + 1}
+        end
+      end)
+    end)
+  end
+
+  def fetch_objects(hrefs) do
+    from(o in Object,
+      where:
+        fragment(
+          "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
+          o.data,
+          o.data,
+          ^hrefs
+        )
+    )
+    # The query above can be time consumptive on large instances until we
+    # refactor how uploads are stored
+    |> Repo.all(timeout: :infinity)
+  end
 end
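
`prepare_objects/3` counts how many objects reference each href, remembering the actor's own object id; the caller only deletes files whose count is 1. A sketch with fabricated object maps (ids, hrefs, and names are made up):

    objects = [
      %{id: 1, data: %{"url" => [%{"href" => "h1"}], "actor" => "alice", "name" => "pic"}},
      %{id: 2, data: %{"url" => [%{"href" => "h1"}], "actor" => "bob", "name" => "other"}}
    ]

    # Two objects share "h1", so count is 2 and the file is kept;
    # only the matching database rows are deleted.
    %{"h1" => %{id: 1, count: 2}} =
      Pleroma.Workers.AttachmentsCleanupWorker.prepare_objects(objects, "alice", ["pic"])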