Merge branch 'issue/1855' into 'develop'
#1855 MediaProxy cache invalidation via Admin API

See merge request pleroma/pleroma!2648
commit f928267773
22 changed files with 740 additions and 118 deletions
@@ -148,7 +148,8 @@ defmodule Pleroma.Application do
       build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
       build_cachex("web_resp", limit: 2500),
       build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
-      build_cachex("failed_proxy_url", limit: 2500)
+      build_cachex("failed_proxy_url", limit: 2500),
+      build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000)
     ]
   end
@@ -10,6 +10,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
   import Pleroma.Web.Gettext
   require Logger

+  alias Pleroma.Web.MediaProxy
+
   @behaviour Plug
   # no slashes
   @path "media"
@@ -35,8 +37,7 @@ defmodule Pleroma.Plugs.UploadedMedia do
       %{query_params: %{"name" => name}} = conn ->
         name = String.replace(name, "\"", "\\\"")

-        conn
-        |> put_resp_header("content-disposition", "filename=\"#{name}\"")
+        put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")

       conn ->
         conn
@@ -47,7 +48,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
     with uploader <- Keyword.fetch!(config, :uploader),
          proxy_remote = Keyword.get(config, :proxy_remote, false),
-         {:ok, get_method} <- uploader.get_file(file) do
+         {:ok, get_method} <- uploader.get_file(file),
+         false <- media_is_banned(conn, get_method) do
       get_media(conn, get_method, proxy_remote, opts)
     else
       _ ->
@@ -59,6 +61,14 @@ defmodule Pleroma.Plugs.UploadedMedia do
   def call(conn, _opts), do: conn

+  defp media_is_banned(%{request_path: path} = _conn, {:static_dir, _}) do
+    MediaProxy.in_banned_urls(Pleroma.Web.base_url() <> path)
+  end
+
+  defp media_is_banned(_, {:url, url}), do: MediaProxy.in_banned_urls(url)
+
+  defp media_is_banned(_, _), do: false
+
   defp get_media(conn, {:static_dir, directory}, _, opts) do
     static_opts =
       Map.get(opts, :static_plug_opts)
@@ -0,0 +1,63 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.AdminAPI.MediaProxyCacheController do
+  use Pleroma.Web, :controller
+
+  alias Pleroma.Plugs.OAuthScopesPlug
+  alias Pleroma.Web.ApiSpec.Admin, as: Spec
+  alias Pleroma.Web.MediaProxy
+
+  plug(Pleroma.Web.ApiSpec.CastAndValidate)
+
+  plug(
+    OAuthScopesPlug,
+    %{scopes: ["read:media_proxy_caches"], admin: true} when action in [:index]
+  )
+
+  plug(
+    OAuthScopesPlug,
+    %{scopes: ["write:media_proxy_caches"], admin: true} when action in [:purge, :delete]
+  )
+
+  action_fallback(Pleroma.Web.AdminAPI.FallbackController)
+
+  defdelegate open_api_operation(action), to: Spec.MediaProxyCacheOperation
+
+  def index(%{assigns: %{user: _}} = conn, params) do
+    cursor =
+      :banned_urls_cache
+      |> :ets.table([{:traverse, {:select, Cachex.Query.create(true, :key)}}])
+      |> :qlc.cursor()
+
+    urls =
+      case params.page do
+        1 ->
+          :qlc.next_answers(cursor, params.page_size)
+
+        _ ->
+          :qlc.next_answers(cursor, (params.page - 1) * params.page_size)
+          :qlc.next_answers(cursor, params.page_size)
+      end
+
+    :qlc.delete_cursor(cursor)
+
+    render(conn, "index.json", urls: urls)
+  end
+
+  def delete(%{assigns: %{user: _}, body_params: %{urls: urls}} = conn, _) do
+    MediaProxy.remove_from_banned_urls(urls)
+    render(conn, "index.json", urls: urls)
+  end
+
+  def purge(%{assigns: %{user: _}, body_params: %{urls: urls, ban: ban}} = conn, _) do
+    MediaProxy.Invalidation.purge(urls)
+
+    if ban do
+      MediaProxy.put_in_banned_urls(urls)
+    end
+
+    render(conn, "index.json", urls: urls)
+  end
+end
lib/pleroma/web/admin_api/views/media_proxy_cache_view.ex (new file, 11 lines)
@@ -0,0 +1,11 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.AdminAPI.MediaProxyCacheView do
+  use Pleroma.Web, :view
+
+  def render("index.json", %{urls: urls}) do
+    %{urls: urls}
+  end
+end
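For reference, the view above is the whole response contract: every endpoint in the new controller renders "index.json", so responses are a bare JSON object with a urls array. A minimal illustration (URL values invented):

    urls = ["https://example.com/media/a.jpg", "https://example.com/media/b.png"]

    Pleroma.Web.AdminAPI.MediaProxyCacheView.render("index.json", %{urls: urls})
    # => %{urls: ["https://example.com/media/a.jpg", "https://example.com/media/b.png"]}
    # which Phoenix serializes as {"urls": [...]}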
@@ -0,0 +1,109 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ApiSpec.Admin.MediaProxyCacheOperation do
+  alias OpenApiSpex.Operation
+  alias OpenApiSpex.Schema
+  alias Pleroma.Web.ApiSpec.Schemas.ApiError
+
+  import Pleroma.Web.ApiSpec.Helpers
+
+  def open_api_operation(action) do
+    operation = String.to_existing_atom("#{action}_operation")
+    apply(__MODULE__, operation, [])
+  end
+
+  def index_operation do
+    %Operation{
+      tags: ["Admin", "MediaProxyCache"],
+      summary: "Fetch a paginated list of all banned MediaProxy URLs in Cachex",
+      operationId: "AdminAPI.MediaProxyCacheController.index",
+      security: [%{"oAuth" => ["read:media_proxy_caches"]}],
+      parameters: [
+        Operation.parameter(
+          :page,
+          :query,
+          %Schema{type: :integer, default: 1},
+          "Page"
+        ),
+        Operation.parameter(
+          :page_size,
+          :query,
+          %Schema{type: :integer, default: 50},
+          "Number of statuses to return"
+        )
+      ],
+      responses: %{
+        200 => success_response()
+      }
+    }
+  end
+
+  def delete_operation do
+    %Operation{
+      tags: ["Admin", "MediaProxyCache"],
+      summary: "Remove a banned MediaProxy URL from Cachex",
+      operationId: "AdminAPI.MediaProxyCacheController.delete",
+      security: [%{"oAuth" => ["write:media_proxy_caches"]}],
+      requestBody:
+        request_body(
+          "Parameters",
+          %Schema{
+            type: :object,
+            required: [:urls],
+            properties: %{
+              urls: %Schema{type: :array, items: %Schema{type: :string, format: :uri}}
+            }
+          },
+          required: true
+        ),
+      responses: %{
+        200 => success_response(),
+        400 => Operation.response("Error", "application/json", ApiError)
+      }
+    }
+  end
+
+  def purge_operation do
+    %Operation{
+      tags: ["Admin", "MediaProxyCache"],
+      summary: "Purge and optionally ban a MediaProxy URL",
+      operationId: "AdminAPI.MediaProxyCacheController.purge",
+      security: [%{"oAuth" => ["write:media_proxy_caches"]}],
+      requestBody:
+        request_body(
+          "Parameters",
+          %Schema{
+            type: :object,
+            required: [:urls],
+            properties: %{
+              urls: %Schema{type: :array, items: %Schema{type: :string, format: :uri}},
+              ban: %Schema{type: :boolean, default: true}
+            }
+          },
+          required: true
+        ),
+      responses: %{
+        200 => success_response(),
+        400 => Operation.response("Error", "application/json", ApiError)
+      }
+    }
+  end
+
+  defp success_response do
+    Operation.response("Array of banned MediaProxy URLs in Cachex", "application/json", %Schema{
+      type: :object,
+      properties: %{
+        urls: %Schema{
+          type: :array,
+          items: %Schema{
+            type: :string,
+            format: :uri,
+            description: "MediaProxy URLs"
+          }
+        }
+      }
+    })
+  end
+end
@@ -5,22 +5,34 @@
 defmodule Pleroma.Web.MediaProxy.Invalidation do
   @moduledoc false

-  @callback purge(list(String.t()), map()) :: {:ok, String.t()} | {:error, String.t()}
+  @callback purge(list(String.t()), Keyword.t()) :: {:ok, list(String.t())} | {:error, String.t()}

   alias Pleroma.Config
+  alias Pleroma.Web.MediaProxy

-  @spec purge(list(String.t())) :: {:ok, String.t()} | {:error, String.t()}
+  @spec enabled?() :: boolean()
+  def enabled?, do: Config.get([:media_proxy, :invalidation, :enabled])
+
+  @spec purge(list(String.t()) | String.t()) :: {:ok, list(String.t())} | {:error, String.t()}
   def purge(urls) do
-    [:media_proxy, :invalidation, :enabled]
-    |> Config.get()
-    |> do_purge(urls)
+    prepared_urls = prepare_urls(urls)
+
+    if enabled?() do
+      do_purge(prepared_urls)
+    else
+      {:ok, prepared_urls}
+    end
   end

-  defp do_purge(true, urls) do
+  defp do_purge(urls) do
     provider = Config.get([:media_proxy, :invalidation, :provider])
     options = Config.get(provider)
     provider.purge(urls, options)
   end

-  defp do_purge(_, _), do: :ok
+  def prepare_urls(urls) do
+    urls
+    |> List.wrap()
+    |> Enum.map(&MediaProxy.url/1)
+  end
 end
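Invalidation.purge/1 now checks [:media_proxy, :invalidation, :enabled] through enabled?/0 and, when enabled, dispatches to the configured provider, passing that provider's own config as its options keyword list. A config sketch consistent with those lookups; the provider choice and script path are placeholders, not part of this MR:

    import Config

    config :pleroma, :media_proxy,
      invalidation: [
        enabled: true,
        provider: Pleroma.Web.MediaProxy.Invalidation.Script
      ]

    # Options for the chosen provider, read back via Config.get(provider) in do_purge/1.
    config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
      script_path: "/usr/local/bin/purge-media-cache.sh"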
@@ -9,10 +9,10 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
   require Logger

   @impl Pleroma.Web.MediaProxy.Invalidation
-  def purge(urls, opts) do
-    method = Map.get(opts, :method, :purge)
-    headers = Map.get(opts, :headers, [])
-    options = Map.get(opts, :options, [])
+  def purge(urls, opts \\ []) do
+    method = Keyword.get(opts, :method, :purge)
+    headers = Keyword.get(opts, :headers, [])
+    options = Keyword.get(opts, :options, [])

     Logger.debug("Running cache purge: #{inspect(urls)}")

@@ -22,7 +22,7 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
       end
     end)

-    {:ok, "success"}
+    {:ok, urls}
   end

   defp do_purge(method, url, headers, options) do
@@ -10,32 +10,34 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Script do
   require Logger

   @impl Pleroma.Web.MediaProxy.Invalidation
-  def purge(urls, %{script_path: script_path} = _options) do
+  def purge(urls, opts \\ []) do
     args =
       urls
       |> List.wrap()
       |> Enum.uniq()
       |> Enum.join(" ")

-    path = Path.expand(script_path)
-
-    Logger.debug("Running cache purge: #{inspect(urls)}, #{path}")
-
-    case do_purge(path, [args]) do
-      {result, exit_status} when exit_status > 0 ->
-        Logger.error("Error while cache purge: #{inspect(result)}")
-        {:error, inspect(result)}
-
-      _ ->
-        {:ok, "success"}
-    end
+    opts
+    |> Keyword.get(:script_path)
+    |> do_purge([args])
+    |> handle_result(urls)
   end

-  def purge(_, _), do: {:error, "not found script path"}
-
-  defp do_purge(path, args) do
+  defp do_purge(script_path, args) when is_binary(script_path) do
+    path = Path.expand(script_path)
+    Logger.debug("Running cache purge: #{inspect(args)}, #{inspect(path)}")
     System.cmd(path, args)
   rescue
-    error -> {inspect(error), 1}
+    error -> error
   end

+  defp do_purge(_, _), do: {:error, "not found script path"}
+
+  defp handle_result({_result, 0}, urls), do: {:ok, urls}
+  defp handle_result({:error, error}, urls), do: handle_result(error, urls)
+
+  defp handle_result(error, _) do
+    Logger.error("Error while cache purge: #{inspect(error)}")
+    {:error, inspect(error)}
+  end
 end
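Both providers now accept a plain keyword list and echo the URL list back on success. A hypothetical iex session; the URLs and script path are assumptions, and the successful Script call presumes the script exists and exits 0:

    iex> alias Pleroma.Web.MediaProxy.Invalidation
    iex> Invalidation.Http.purge(["https://media.example/img/cat.png"], method: :purge)
    {:ok, ["https://media.example/img/cat.png"]}
    iex> Invalidation.Script.purge(["https://media.example/img/cat.png"], script_path: "/opt/purge.sh")
    {:ok, ["https://media.example/img/cat.png"]}
    iex> Invalidation.Script.purge(["https://media.example/img/cat.png"], [])
    {:error, "\"not found script path\""}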
@@ -6,20 +6,53 @@ defmodule Pleroma.Web.MediaProxy do
   alias Pleroma.Config
   alias Pleroma.Upload
   alias Pleroma.Web
+  alias Pleroma.Web.MediaProxy.Invalidation

   @base64_opts [padding: false]

+  @spec in_banned_urls(String.t()) :: boolean()
+  def in_banned_urls(url), do: elem(Cachex.exists?(:banned_urls_cache, url(url)), 1)
+
+  def remove_from_banned_urls(urls) when is_list(urls) do
+    Cachex.execute!(:banned_urls_cache, fn cache ->
+      Enum.each(Invalidation.prepare_urls(urls), &Cachex.del(cache, &1))
+    end)
+  end
+
+  def remove_from_banned_urls(url) when is_binary(url) do
+    Cachex.del(:banned_urls_cache, url(url))
+  end
+
+  def put_in_banned_urls(urls) when is_list(urls) do
+    Cachex.execute!(:banned_urls_cache, fn cache ->
+      Enum.each(Invalidation.prepare_urls(urls), &Cachex.put(cache, &1, true))
+    end)
+  end
+
+  def put_in_banned_urls(url) when is_binary(url) do
+    Cachex.put(:banned_urls_cache, url(url), true)
+  end
+
   def url(url) when is_nil(url) or url == "", do: nil
   def url("/" <> _ = url), do: url

   def url(url) do
-    if disabled?() or local?(url) or whitelisted?(url) do
+    if disabled?() or not url_proxiable?(url) do
       url
     else
       encode_url(url)
     end
   end

+  @spec url_proxiable?(String.t()) :: boolean()
+  def url_proxiable?(url) do
+    if local?(url) or whitelisted?(url) do
+      false
+    else
+      true
+    end
+  end
+
   defp disabled?, do: !Config.get([:media_proxy, :enabled], false)

   defp local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())
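The ban bookkeeping stores the proxied form of each URL (prepare_urls/1 runs them through url/1), and in_banned_urls/1 applies the same transformation before the Cachex lookup, so callers can pass the original remote URL on both sides. A rough iex sketch, assuming the media proxy is enabled and the URL is remote (values invented):

    iex> alias Pleroma.Web.MediaProxy
    iex> MediaProxy.put_in_banned_urls(["https://remote.example/img/cat.png"])
    :ok
    iex> MediaProxy.in_banned_urls("https://remote.example/img/cat.png")
    true
    iex> MediaProxy.remove_from_banned_urls(["https://remote.example/img/cat.png"])
    :ok
    iex> MediaProxy.in_banned_urls("https://remote.example/img/cat.png")
    false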
@@ -14,10 +14,11 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do
     with config <- Pleroma.Config.get([:media_proxy], []),
          true <- Keyword.get(config, :enabled, false),
          {:ok, url} <- MediaProxy.decode_url(sig64, url64),
+         {_, false} <- {:in_banned_urls, MediaProxy.in_banned_urls(url)},
          :ok <- filename_matches(params, conn.request_path, url) do
       ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_opts))
     else
-      false ->
+      error when error in [false, {:in_banned_urls, true}] ->
         send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404))

       {:error, :invalid_signature} ->
@@ -209,6 +209,10 @@ defmodule Pleroma.Web.Router do
       post("/oauth_app", OAuthAppController, :create)
       patch("/oauth_app/:id", OAuthAppController, :update)
       delete("/oauth_app/:id", OAuthAppController, :delete)
+
+      get("/media_proxy_caches", MediaProxyCacheController, :index)
+      post("/media_proxy_caches/delete", MediaProxyCacheController, :delete)
+      post("/media_proxy_caches/purge", MediaProxyCacheController, :purge)
     end

     scope "/api/pleroma/emoji", Pleroma.Web.PleromaAPI do
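These routes sit inside the admin scope, served under /api/pleroma/admin on a stock install. A hypothetical ConnTest-style request against the purge endpoint; admin_conn stands in for a connection authenticated with the write:media_proxy_caches scope and is an illustration, not taken from this MR's tests:

    conn =
      admin_conn
      |> put_req_header("content-type", "application/json")
      |> post("/api/pleroma/admin/media_proxy_caches/purge", %{
        urls: ["https://remote.example/img/cat.png"],
        ban: true
      })

    assert json_response(conn, 200) == %{"urls" => ["https://remote.example/img/cat.png"]}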
@@ -18,13 +18,19 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do
         },
         _job
       ) do
-    hrefs =
-      Enum.flat_map(attachments, fn attachment ->
-        Enum.map(attachment["url"], & &1["href"])
-      end)
+    attachments
+    |> Enum.flat_map(fn item -> Enum.map(item["url"], & &1["href"]) end)
+    |> fetch_objects
+    |> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
+    |> filter_objects
+    |> do_clean

-    names = Enum.map(attachments, & &1["name"])
+    {:ok, :success}
+  end
+
+  def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
+
+  defp do_clean({object_ids, attachment_urls}) do
     uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])

     prefix =
@@ -39,68 +45,70 @@
         "/"
       )

-    # find all objects for copies of the attachments, name and actor doesn't matter here
-    object_ids_and_hrefs =
-      from(o in Object,
-        where:
-          fragment(
-            "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
-            o.data,
-            o.data,
-            ^hrefs
-          )
-      )
-      # The query above can be time consumptive on large instances until we
-      # refactor how uploads are stored
-      |> Repo.all(timeout: :infinity)
-      # we should delete 1 object for any given attachment, but don't delete
-      # files if there are more than 1 object for it
-      |> Enum.reduce(%{}, fn %{
-                               id: id,
-                               data: %{
-                                 "url" => [%{"href" => href}],
-                                 "actor" => obj_actor,
-                                 "name" => name
-                               }
-                             },
-                             acc ->
-        Map.update(acc, href, %{id: id, count: 1}, fn val ->
-          case obj_actor == actor and name in names do
-            true ->
-              # set id of the actor's object that will be deleted
-              %{val | id: id, count: val.count + 1}
+    Enum.each(attachment_urls, fn href ->
+      href
+      |> String.trim_leading("#{base_url}/#{prefix}")
+      |> uploader.delete_file()
+    end)

-            false ->
-              # another actor's object, just increase count to not delete file
-              %{val | count: val.count + 1}
-          end
-        end)
-      end)
-      |> Enum.map(fn {href, %{id: id, count: count}} ->
-        # only delete files that have single instance
-        with 1 <- count do
-          href
-          |> String.trim_leading("#{base_url}/#{prefix}")
-          |> uploader.delete_file()
-
-          {id, href}
-        else
-          _ -> {id, nil}
-        end
-      end)
-
-    object_ids = Enum.map(object_ids_and_hrefs, fn {id, _} -> id end)
-
-    from(o in Object, where: o.id in ^object_ids)
-    |> Repo.delete_all()
-
-    object_ids_and_hrefs
-    |> Enum.filter(fn {_, href} -> not is_nil(href) end)
-    |> Enum.map(&elem(&1, 1))
-    |> Pleroma.Web.MediaProxy.Invalidation.purge()
-
-    {:ok, :success}
+    delete_objects(object_ids)
   end

-  def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
+  defp delete_objects([_ | _] = object_ids) do
+    Repo.delete_all(from(o in Object, where: o.id in ^object_ids))
+  end
+
+  defp delete_objects(_), do: :ok
+
+  # we should delete 1 object for any given attachment, but don't delete
+  # files if there are more than 1 object for it
+  defp filter_objects(objects) do
+    Enum.reduce(objects, {[], []}, fn {href, %{id: id, count: count}}, {ids, hrefs} ->
+      with 1 <- count do
+        {ids ++ [id], hrefs ++ [href]}
+      else
+        _ -> {ids ++ [id], hrefs}
+      end
+    end)
+  end
+
+  defp prepare_objects(objects, actor, names) do
+    objects
+    |> Enum.reduce(%{}, fn %{
+                             id: id,
+                             data: %{
+                               "url" => [%{"href" => href}],
+                               "actor" => obj_actor,
+                               "name" => name
+                             }
+                           },
+                           acc ->
+      Map.update(acc, href, %{id: id, count: 1}, fn val ->
+        case obj_actor == actor and name in names do
+          true ->
+            # set id of the actor's object that will be deleted
+            %{val | id: id, count: val.count + 1}
+
+          false ->
+            # another actor's object, just increase count to not delete file
+            %{val | count: val.count + 1}
+        end
+      end)
+    end)
+  end
+
+  defp fetch_objects(hrefs) do
+    from(o in Object,
+      where:
+        fragment(
+          "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
+          o.data,
+          o.data,
+          ^hrefs
+        )
+    )
+    # The query above can be time consumptive on large instances until we
+    # refactor how uploads are stored
+    |> Repo.all(timeout: :infinity)
+  end
 end
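For orientation, a rough data-flow sketch of the refactored worker pipeline above; shapes only, values invented:

    # fetch_objects(hrefs)          => [%Object{id: 1, data: %{"url" => [...], "actor" => ..., "name" => ...}}, ...]
    # prepare_objects(objects, ...) => %{"https://.../a.jpg" => %{id: 1, count: 1}, ...}
    # filter_objects(map)           => {[1, 2, ...], ["https://.../a.jpg", ...]}  # {object_ids, attachment_urls}
    # do_clean({ids, urls})         => deletes each file via the uploader, then delete_objects(ids)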