Merge remote-tracking branch 'origin/develop' into shigusegubu

Author: Henry Jameson, 2024-08-06 16:46:58 +03:00
Commit: bf5b097cd2
853 changed files with 8277 additions and 3096 deletions

View file

@ -14,7 +14,8 @@ defmodule Mix.Pleroma do
:swoosh,
:timex,
:fast_html,
:oban
:oban,
:logger_backends
]
@cachex_children ["object", "user", "scrubber", "web_resp"]
@doc "Common functions to be reused in mix tasks"

View file

@ -205,6 +205,35 @@ defmodule Mix.Tasks.Pleroma.Config do
end
end
# Removes any policies that are not a real module
# as they will prevent the server from starting
def run(["fix_mrf_policies"]) do
check_configdb(fn ->
start_pleroma()
group = :pleroma
key = :mrf
%{value: value} =
group
|> ConfigDB.get_by_group_and_key(key)
policies =
Keyword.get(value, :policies, [])
|> Enum.filter(&is_atom(&1))
|> Enum.filter(fn mrf ->
case Code.ensure_compiled(mrf) do
{:module, _} -> true
{:error, _} -> false
end
end)
value = Keyword.put(value, :policies, policies)
ConfigDB.update_or_create(%{group: group, key: key, value: value})
end)
end
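
A hedged usage sketch for the task above; the command-line form is an assumption based on Mix's usual task naming, and the effect is only what the code shows: policies whose modules fail Code.ensure_compiled/1 are dropped from ConfigDB.

# mix pleroma.config fix_mrf_policies
#
# e.g. a leftover :mrf policies entry pointing at a deleted custom module
# would be filtered out so the server can boot again.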
@spec migrate_to_db(Path.t() | nil) :: any()
def migrate_to_db(file_path \\ nil) do
with :ok <- Pleroma.Config.DeprecationWarnings.warn() do

View file

@ -67,43 +67,168 @@ defmodule Mix.Tasks.Pleroma.Database do
OptionParser.parse(
args,
strict: [
vacuum: :boolean
vacuum: :boolean,
keep_threads: :boolean,
keep_non_public: :boolean,
prune_orphaned_activities: :boolean
]
)
start_pleroma()
deadline = Pleroma.Config.get([:instance, :remote_post_retention_days])
time_deadline = NaiveDateTime.utc_now() |> NaiveDateTime.add(-(deadline * 86_400))
Logger.info("Pruning objects older than #{deadline} days")
log_message = "Pruning objects older than #{deadline} days"
time_deadline =
NaiveDateTime.utc_now()
|> NaiveDateTime.add(-(deadline * 86_400))
log_message =
if Keyword.get(options, :keep_non_public) do
log_message <> ", keeping non public posts"
else
log_message
end
from(o in Object,
where:
fragment(
"?->'to' \\? ? OR ?->'cc' \\? ?",
o.data,
^Pleroma.Constants.as_public(),
o.data,
^Pleroma.Constants.as_public()
),
where: o.inserted_at < ^time_deadline,
where:
log_message =
if Keyword.get(options, :keep_threads) do
log_message <> ", keeping threads intact"
else
log_message
end
log_message =
if Keyword.get(options, :prune_orphaned_activities) do
log_message <> ", pruning orphaned activities"
else
log_message
end
log_message =
if Keyword.get(options, :vacuum) do
log_message <>
", doing a full vacuum (you shouldn't do this as a recurring maintanance task)"
else
log_message
end
Logger.info(log_message)
if Keyword.get(options, :keep_threads) do
# We want to delete objects from threads where
# 1. the newest post is still old
# 2. none of the activities is local
# 3. none of the activities is bookmarked
# 4. optionally none of the posts is non-public
deletable_context =
if Keyword.get(options, :keep_non_public) do
Pleroma.Activity
|> join(:left, [a], b in Pleroma.Bookmark, on: a.id == b.activity_id)
|> group_by([a], fragment("? ->> 'context'::text", a.data))
|> having(
[a],
not fragment(
# Post (checked on the Create activity) is non-public
"bool_or((not(?->'to' \\? ? OR ?->'cc' \\? ?)) and ? ->> 'type' = 'Create')",
a.data,
^Pleroma.Constants.as_public(),
a.data,
^Pleroma.Constants.as_public(),
a.data
)
)
else
Pleroma.Activity
|> join(:left, [a], b in Pleroma.Bookmark, on: a.id == b.activity_id)
|> group_by([a], fragment("? ->> 'context'::text", a.data))
end
|> having([a], max(a.updated_at) < ^time_deadline)
|> having([a], not fragment("bool_or(?)", a.local))
|> having([_, b], fragment("max(?::text) is null", b.id))
|> select([a], fragment("? ->> 'context'::text", a.data))
Pleroma.Object
|> where([o], fragment("? ->> 'context'::text", o.data) in subquery(deletable_context))
else
if Keyword.get(options, :keep_non_public) do
Pleroma.Object
|> where(
[o],
fragment(
"?->'to' \\? ? OR ?->'cc' \\? ?",
o.data,
^Pleroma.Constants.as_public(),
o.data,
^Pleroma.Constants.as_public()
)
)
else
Pleroma.Object
end
|> where([o], o.updated_at < ^time_deadline)
|> where(
[o],
fragment("split_part(?->>'actor', '/', 3) != ?", o.data, ^Pleroma.Web.Endpoint.host())
)
)
end
|> Repo.delete_all(timeout: :infinity)
prune_hashtags_query = """
if !Keyword.get(options, :keep_threads) do
# Without the --keep-threads option, it's possible that bookmarked
# objects have been deleted. We remove the corresponding bookmarks.
"""
delete from public.bookmarks
where id in (
select b.id from public.bookmarks b
left join public.activities a on b.activity_id = a.id
left join public.objects o on a."data" ->> 'object' = o.data ->> 'id'
where o.id is null
)
"""
|> Repo.query([], timeout: :infinity)
end
if Keyword.get(options, :prune_orphaned_activities) do
# Prune activities that link to a single object
"""
delete from public.activities
where id in (
select a.id from public.activities a
left join public.objects o on a.data ->> 'object' = o.data ->> 'id'
left join public.activities a2 on a.data ->> 'object' = a2.data ->> 'id'
left join public.users u on a.data ->> 'object' = u.ap_id
where not a.local
and jsonb_typeof(a."data" -> 'object') = 'string'
and o.id is null
and a2.id is null
and u.id is null
)
"""
|> Repo.query([], timeout: :infinity)
# Prune activities that link to an array of objects
"""
delete from public.activities
where id in (
select a.id from public.activities a
join json_array_elements_text((a."data" -> 'object')::json) as j on jsonb_typeof(a."data" -> 'object') = 'array'
left join public.objects o on j.value = o.data ->> 'id'
left join public.activities a2 on j.value = a2.data ->> 'id'
left join public.users u on j.value = u.ap_id
group by a.id
having max(o.data ->> 'id') is null
and max(a2.data ->> 'id') is null
and max(u.ap_id) is null
)
"""
|> Repo.query([], timeout: :infinity)
end
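
Before running the destructive statements above, the impact can be previewed with a read-only variant; this sketch reuses the same joins with a count instead of a delete.

"""
select count(*) from public.activities a
left join public.objects o on a.data ->> 'object' = o.data ->> 'id'
left join public.activities a2 on a.data ->> 'object' = a2.data ->> 'id'
left join public.users u on a.data ->> 'object' = u.ap_id
where not a.local
and jsonb_typeof(a."data" -> 'object') = 'string'
and o.id is null and a2.id is null and u.id is null
"""
|> Pleroma.Repo.query([], timeout: :infinity)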
"""
DELETE FROM hashtags AS ht
WHERE NOT EXISTS (
SELECT 1 FROM hashtags_objects hto
WHERE ht.id = hto.hashtag_id)
"""
Repo.query(prune_hashtags_query)
|> Repo.query()
if Keyword.get(options, :vacuum) do
Maintenance.vacuum("full")
@ -226,7 +351,7 @@ defmodule Mix.Tasks.Pleroma.Database do
)
end
shell_info('Done.')
shell_info(~c"Done.")
end
end
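
Putting the new switches together, a hedged invocation sketch; the flag names follow directly from the strict: option list at the top of this task.

# Keep whole threads and non-public posts, and also drop orphaned activities:
# mix pleroma.database prune_objects --keep-threads --keep-non-public --prune-orphaned-activities
#
# One-off full vacuum afterwards (not recommended as a recurring task):
# mix pleroma.database prune_objects --vacuum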

View file

@ -0,0 +1,83 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Search.Indexer do
import Mix.Pleroma
import Ecto.Query
alias Pleroma.Workers.SearchIndexingWorker
def run(["create_index"]) do
start_pleroma()
with :ok <- Pleroma.Config.get([Pleroma.Search, :module]).create_index() do
IO.puts("Index created")
else
e -> IO.puts("Could not create index: #{inspect(e)}")
end
end
def run(["drop_index"]) do
start_pleroma()
with :ok <- Pleroma.Config.get([Pleroma.Search, :module]).drop_index() do
IO.puts("Index dropped")
else
e -> IO.puts("Could not drop index: #{inspect(e)}")
end
end
def run(["index" | options]) do
{options, [], []} =
OptionParser.parse(
options,
strict: [
chunk: :integer,
limit: :integer,
step: :integer
]
)
start_pleroma()
chunk_size = Keyword.get(options, :chunk, 100)
limit = Keyword.get(options, :limit, 100_000)
per_step = Keyword.get(options, :step, 1000)
chunks = max(div(limit, per_step), 1)
1..chunks
|> Enum.each(fn step ->
q =
from(a in Pleroma.Activity,
limit: ^per_step,
offset: ^per_step * (^step - 1),
select: [:id],
order_by: [desc: :id]
)
{:ok, ids} =
Pleroma.Repo.transaction(fn ->
Pleroma.Repo.stream(q, timeout: :infinity)
|> Enum.map(fn a ->
a.id
end)
end)
IO.puts("Got #{length(ids)} activities, adding to indexer")
ids
|> Enum.chunk_every(chunk_size)
|> Enum.each(fn chunk ->
IO.puts("Adding #{length(chunk)} activities to indexing queue")
chunk
|> Enum.map(fn id ->
SearchIndexingWorker.new(%{"op" => "add_to_index", "activity" => id})
end)
|> Oban.insert_all()
end)
end)
end
end
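
A hedged usage sketch for the new indexer task; the numbers shown are the defaults read via Keyword.get above.

# mix pleroma.search.indexer create_index
# mix pleroma.search.indexer index --limit 100000 --step 1000 --chunk 100
# mix pleroma.search.indexer drop_index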

View file

@ -0,0 +1,25 @@
defmodule Mix.Tasks.Pleroma.TestRunner do
@shortdoc "Retries tests once if they fail"
use Mix.Task
def run(args \\ []) do
case System.cmd("mix", ["test"] ++ args, into: IO.stream(:stdio, :line)) do
{_, 0} ->
:ok
_ ->
retry(args)
end
end
def retry(args) do
case System.cmd("mix", ["test", "--failed"] ++ args, into: IO.stream(:stdio, :line)) do
{_, 0} ->
:ok
_ ->
exit(1)
end
end
end
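
A short usage sketch; the task name is an assumption derived from the module name by Mix convention, and any extra arguments are forwarded to mix test.

# mix pleroma.test_runner
# mix pleroma.test_runner test/pleroma/user_test.exs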

View file

@ -51,7 +51,11 @@ defmodule Pleroma.Application do
Pleroma.HTML.compile_scrubbers()
Pleroma.Config.Oban.warn()
Config.DeprecationWarnings.warn()
Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
if Config.get([Pleroma.Web.Plugs.HTTPSecurityPlug, :enable], true) do
Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
end
Pleroma.ApplicationRequirements.verify!()
load_custom_modules()
Pleroma.Docs.JSON.compile()
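
The startup warning is now gated on a config flag; a hedged sketch of turning the check off, assuming the standard Pleroma.Config key mapping for the get call above.

config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, enable: false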
@ -109,7 +113,8 @@ defmodule Pleroma.Application do
streamer_registry() ++
background_migrators() ++
shout_child(shout_enabled?()) ++
[Pleroma.Gopher.Server]
[Pleroma.Gopher.Server] ++
[Pleroma.Search.Healthcheck]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
@ -286,8 +291,6 @@ defmodule Pleroma.Application do
config = Config.get(ConcurrentLimiter, [])
[
Pleroma.Web.RichMedia.Helpers,
Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy,
Pleroma.Search
]
|> Enum.each(fn module ->

View file

@ -241,10 +241,9 @@ defmodule Pleroma.ApplicationRequirements do
missing_mrfs =
Enum.reduce(mrfs, [], fn x, acc ->
if Code.ensure_compiled(x) do
acc
else
acc ++ [x]
case Code.ensure_compiled(x) do
{:module, _} -> acc
{:error, _} -> acc ++ [x]
end
end)
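
The previous if was always truthy because Code.ensure_compiled/1 returns a tuple rather than a boolean; a quick iex sketch of the two shapes matched above (NoSuchPolicy is a deliberately nonexistent module).

iex> Code.ensure_compiled(Enum)
{:module, Enum}
iex> Code.ensure_compiled(Pleroma.Web.ActivityPub.MRF.NoSuchPolicy)
{:error, :nofile}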

View file

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Config.TransferTask do
@ -44,14 +44,9 @@ defmodule Pleroma.Config.TransferTask do
with {_, true} <- {:configurable, Config.get(:configurable_from_database)} do
# We need to restart applications for loaded settings take effect
{logger, other} =
settings =
(Repo.all(ConfigDB) ++ deleted_settings)
|> Enum.map(&merge_with_default/1)
|> Enum.split_with(fn {group, _, _, _} -> group in [:logger] end)
logger
|> Enum.sort()
|> Enum.each(&configure/1)
started_applications = Application.started_applications()
@ -64,7 +59,7 @@ defmodule Pleroma.Config.TransferTask do
[:pleroma | reject]
end
other
settings
|> Enum.map(&update/1)
|> Enum.uniq()
|> Enum.reject(&(&1 in reject))
@ -102,38 +97,6 @@ defmodule Pleroma.Config.TransferTask do
{group, key, value, merged}
end
# change logger configuration in runtime, without restart
defp configure({_, :backends, _, merged}) do
# removing current backends
Enum.each(Application.get_env(:logger, :backends), &Logger.remove_backend/1)
Enum.each(merged, &Logger.add_backend/1)
:ok = update_env(:logger, :backends, merged)
end
defp configure({_, key, _, merged}) when key in [:console, :ex_syslogger] do
merged =
if key == :console do
put_in(merged[:format], merged[:format] <> "\n")
else
merged
end
backend =
if key == :ex_syslogger,
do: {ExSyslogger, :ex_syslogger},
else: key
Logger.configure_backend(backend, merged)
:ok = update_env(:logger, key, merged)
end
defp configure({_, key, _, merged}) do
Logger.configure([{key, merged}])
:ok = update_env(:logger, key, merged)
end
defp update({group, key, value, merged}) do
try do
:ok = update_env(group, key, merged)

View file

@ -165,8 +165,7 @@ defmodule Pleroma.ConfigDB do
{:pleroma, :ecto_repos},
{:mime, :types},
{:cors_plug, [:max_age, :methods, :expose, :headers]},
{:swarm, :node_blacklist},
{:logger, :backends}
{:swarm, :node_blacklist}
]
Enum.any?(full_key_update, fn
@ -385,7 +384,12 @@ defmodule Pleroma.ConfigDB do
@spec module_name?(String.t()) :: boolean()
def module_name?(string) do
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Ueberauth|Swoosh)\./, string) or
string in ["Oban", "Ueberauth", "ExSyslogger", "ConcurrentLimiter"]
if String.contains?(string, ".") do
[name | _] = String.split(string, ".", parts: 2)
name in ~w[Pleroma Phoenix Tesla Ueberauth Swoosh Logger LoggerBackends]
else
string in ~w[Oban Ueberauth ExSyslogger ConcurrentLimiter]
end
end
end
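
Roughly what the rewritten check accepts, as an iex sketch; the results follow from the two word lists above.

iex> Pleroma.ConfigDB.module_name?("Pleroma.Upload")
true
iex> Pleroma.ConfigDB.module_name?("LoggerBackends.Console")
true
iex> Pleroma.ConfigDB.module_name?("Oban")
true
iex> Pleroma.ConfigDB.module_name?("some_random_string")
false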

View file

@ -12,6 +12,8 @@ defmodule Pleroma.Conversation.Participation do
import Ecto.Changeset
import Ecto.Query
@type t :: %__MODULE__{}
schema "conversation_participations" do
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
belongs_to(:conversation, Conversation)

View file

@ -27,11 +27,3 @@ defenum(Pleroma.DataMigration.State,
failed: 4,
manual: 5
)
defenum(Pleroma.User.Backup.State,
pending: 1,
running: 2,
complete: 3,
failed: 4,
invalid: 5
)

View file

@ -345,37 +345,22 @@ defmodule Pleroma.Emails.UserEmail do
Router.Helpers.subscription_url(Endpoint, :unsubscribe, token)
end
def backup_is_ready_email(backup, admin_user_id \\ nil) do
def backup_is_ready_email(backup) do
%{user: user} = Pleroma.Repo.preload(backup, :user)
Gettext.with_locale_or_default user.language do
download_url = Pleroma.Web.PleromaAPI.BackupView.download_url(backup)
html_body =
if is_nil(admin_user_id) do
Gettext.dpgettext(
"static_pages",
"account archive email body - self-requested",
"""
<p>You requested a full backup of your Pleroma account. It's ready for download:</p>
<p><a href="%{download_url}">%{download_url}</a></p>
""",
download_url: download_url
)
else
admin = Pleroma.Repo.get(User, admin_user_id)
Gettext.dpgettext(
"static_pages",
"account archive email body - admin requested",
"""
<p>Admin @%{admin_nickname} requested a full backup of your Pleroma account. It's ready for download:</p>
<p><a href="%{download_url}">%{download_url}</a></p>
""",
admin_nickname: admin.nickname,
download_url: download_url
)
end
Gettext.dpgettext(
"static_pages",
"account archive email body",
"""
<p>A full backup of your Pleroma account was requested. It's ready for download:</p>
<p><a href="%{download_url}">%{download_url}</a></p>
""",
download_url: download_url
)
new()
|> to(recipient(user))

View file

@ -416,10 +416,10 @@ defmodule Pleroma.Emoji.Pack do
end
defp create_archive_and_cache(pack, hash) do
files = ['pack.json' | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]
files = [~c"pack.json" | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]
{:ok, {_, result}} =
:zip.zip('#{pack.name}.zip', files, [:memory, cwd: to_charlist(pack.path)])
:zip.zip(~c"#{pack.name}.zip", files, [:memory, cwd: to_charlist(pack.path)])
ttl_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file])
overall_ttl = :timer.seconds(ttl_per_file * Enum.count(files))
@ -586,7 +586,7 @@ defmodule Pleroma.Emoji.Pack do
with :ok <- File.mkdir_p!(local_pack.path) do
files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end)
# Fallback cannot contain a pack.json file
files = if pack_info[:fallback], do: files, else: ['pack.json' | files]
files = if pack_info[:fallback], do: files, else: [~c"pack.json" | files]
:zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files)
end
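
These are purely syntactic updates: the ~c sigil is the modern spelling of a charlist literal, so the values are unchanged.

iex> ~c"pack.json" == 'pack.json'
true
iex> is_list(~c"pack.json")
true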

View file

@ -199,8 +199,8 @@ defmodule Pleroma.FollowingRelationship do
|> preload([:follower])
|> Repo.all()
|> Enum.map(fn following_relationship ->
Pleroma.Web.CommonAPI.follow(following_relationship.follower, target)
Pleroma.Web.CommonAPI.unfollow(following_relationship.follower, origin)
Pleroma.Web.CommonAPI.follow(target, following_relationship.follower)
Pleroma.Web.CommonAPI.unfollow(origin, following_relationship.follower)
end)
|> case do
[] ->

View file

@ -43,10 +43,6 @@ defmodule Pleroma.Frontend do
{:download_or_unzip, _} ->
Logger.info("Could not download or unzip the frontend")
{:error, "Could not download or unzip the frontend"}
_e ->
Logger.info("Could not install the frontend")
{:error, "Could not install the frontend"}
end
end

View file

@ -9,7 +9,7 @@ defmodule Pleroma.Gun.ConnectionPool.Reclaimer do
def start_monitor do
pid =
case GenServer.start_link(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
case GenServer.start(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
{:ok, pid} ->
pid

View file

@ -5,6 +5,9 @@
defmodule Pleroma.Gun.ConnectionPool.WorkerSupervisor do
@moduledoc "Supervisor for pool workers. Does not do anything except enforce max connection limit"
alias Pleroma.Config
alias Pleroma.Gun.ConnectionPool.Worker
use DynamicSupervisor
def start_link(opts) do
@ -14,21 +17,28 @@ defmodule Pleroma.Gun.ConnectionPool.WorkerSupervisor do
def init(_opts) do
DynamicSupervisor.init(
strategy: :one_for_one,
max_children: Pleroma.Config.get([:connections_pool, :max_connections])
max_children: Config.get([:connections_pool, :max_connections])
)
end
def start_worker(opts, last_attempt \\ false) do
case DynamicSupervisor.start_child(__MODULE__, {Pleroma.Gun.ConnectionPool.Worker, opts}) do
{:error, :max_children} ->
funs = [fn -> last_attempt end, fn -> match?(:error, free_pool()) end]
def start_worker(opts, last_attempt \\ false)
if Enum.any?(funs, fn fun -> fun.() end) do
:telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
{:error, :pool_full}
else
start_worker(opts, true)
end
def start_worker(opts, true) do
case DynamicSupervisor.start_child(__MODULE__, {Worker, opts}) do
{:error, :max_children} ->
:telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
{:error, :pool_full}
res ->
res
end
end
def start_worker(opts, false) do
case DynamicSupervisor.start_child(__MODULE__, {Worker, opts}) do
{:error, :max_children} ->
free_pool()
start_worker(opts, true)
res ->
res

View file

@ -16,4 +16,15 @@ defmodule Pleroma.Helpers.InetHelper do
def parse_address(ip) do
:inet.parse_address(ip)
end
def parse_cidr(proxy) when is_binary(proxy) do
proxy =
cond do
"/" in String.codepoints(proxy) -> proxy
InetCidr.v4?(InetCidr.parse_address!(proxy)) -> proxy <> "/32"
InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128"
end
InetCidr.parse_cidr!(proxy, true)
end
end
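
A hedged sketch of the new helper: strings without a mask get a host-sized prefix appended before being handed to InetCidr, so single addresses and CIDR ranges can be configured interchangeably.

iex> Pleroma.Helpers.InetHelper.parse_cidr("10.0.0.0/8")
# parsed as-is, same as InetCidr.parse_cidr!("10.0.0.0/8", true)
iex> Pleroma.Helpers.InetHelper.parse_cidr("127.0.0.1")
# treated as "127.0.0.1/32"; an IPv6 literal like ::1 would become ::1/128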

View file

@ -25,7 +25,7 @@ defmodule Pleroma.Helpers.MediaHelper do
end
def image_resize(url, options) do
with {:ok, env} <- HTTP.get(url, [], pool: :media),
with {:ok, env} <- HTTP.get(url, [], http_client_opts()),
{:ok, resized} <-
Operation.thumbnail_buffer(env.body, options.max_width,
height: options.max_height,
@ -45,8 +45,8 @@ defmodule Pleroma.Helpers.MediaHelper do
@spec video_framegrab(String.t()) :: {:ok, binary()} | {:error, any()}
def video_framegrab(url) do
with executable when is_binary(executable) <- System.find_executable("ffmpeg"),
false <- @cachex.exists?(:failed_media_helper_cache, url),
{:ok, env} <- HTTP.get(url, [], pool: :media),
{:ok, false} <- @cachex.exists?(:failed_media_helper_cache, url),
{:ok, env} <- HTTP.get(url, [], http_client_opts()),
{:ok, pid} <- StringIO.open(env.body) do
body_stream = IO.binstream(pid, 1)
@ -71,17 +71,19 @@ defmodule Pleroma.Helpers.MediaHelper do
end)
case Task.yield(task, 5_000) do
nil ->
{:ok, result} ->
{:ok, result}
_ ->
Task.shutdown(task)
@cachex.put(:failed_media_helper_cache, url, nil)
{:error, {:ffmpeg, :timeout}}
result ->
{:ok, result}
end
else
nil -> {:error, {:ffmpeg, :command_not_found}}
{:error, _} = error -> error
end
end
defp http_client_opts, do: Pleroma.Config.get([:media_proxy, :proxy_opts, :http], pool: :media)
end
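
http_client_opts/0 falls back to pool: :media when nothing is configured; a hedged sketch of overriding it (the key path mirrors the Config.get call above, the timeout value is a placeholder).

config :pleroma, :media_proxy,
  proxy_opts: [
    http: [pool: :media, recv_timeout: 30_000]
  ]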

View file

@ -37,7 +37,7 @@ defmodule Pleroma.HTTP do
See `Pleroma.HTTP.request/5`
"""
@spec post(Request.url(), String.t(), Request.headers(), keyword()) ::
@spec post(Request.url(), Tesla.Env.body(), Request.headers(), keyword()) ::
{:ok, Env.t()} | {:error, any()}
def post(url, body, headers \\ [], options \\ []),
do: request(:post, url, body, headers, options)
@ -56,7 +56,7 @@ defmodule Pleroma.HTTP do
`{:ok, %Tesla.Env{}}` or `{:error, error}`
"""
@spec request(method(), Request.url(), String.t(), Request.headers(), keyword()) ::
@spec request(method(), Request.url(), Tesla.Env.body(), Request.headers(), keyword()) ::
{:ok, Env.t()} | {:error, any()}
def request(method, url, body, headers, options) when is_binary(url) do
uri = URI.parse(url)
@ -68,7 +68,9 @@ defmodule Pleroma.HTTP do
adapter = Application.get_env(:tesla, :adapter)
client = Tesla.client(adapter_middlewares(adapter), adapter)
extra_middleware = options[:tesla_middleware] || []
client = Tesla.client(adapter_middlewares(adapter, extra_middleware), adapter)
maybe_limit(
fn ->
@ -102,20 +104,21 @@ defmodule Pleroma.HTTP do
fun.()
end
defp adapter_middlewares(Tesla.Adapter.Gun) do
[Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool]
defp adapter_middlewares(Tesla.Adapter.Gun, extra_middleware) do
[Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool] ++
extra_middleware
end
defp adapter_middlewares({Tesla.Adapter.Finch, _}) do
[Tesla.Middleware.FollowRedirects]
defp adapter_middlewares({Tesla.Adapter.Finch, _}, extra_middleware) do
[Tesla.Middleware.FollowRedirects] ++ extra_middleware
end
defp adapter_middlewares(_) do
defp adapter_middlewares(_, extra_middleware) do
if Pleroma.Config.get(:env) == :test do
# Emulate redirects in test env, which are handled by adapters in other environments
[Tesla.Middleware.FollowRedirects]
else
[]
extra_middleware
end
end
end
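
Callers can now inject extra Tesla middleware per request; a hedged sketch in which only the :tesla_middleware key comes from the code above, while the chosen middleware, URL, and header are placeholders.

Pleroma.HTTP.get(
  "https://example.com/.well-known/nodeinfo",
  [{"accept", "application/json"}],
  tesla_middleware: [{Tesla.Middleware.Timeout, timeout: 5_000}]
)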

View file

@ -15,7 +15,7 @@ defmodule Pleroma.HTTP.AdapterHelper.Gun do
retry_timeout: 1_000
]
@type pool() :: :federation | :upload | :media | :default
@type pool() :: :federation | :upload | :media | :rich_media | :default
@spec options(keyword(), URI.t()) :: keyword()
def options(incoming_opts \\ [], %URI{} = uri) do

View file

@ -0,0 +1,4 @@
defmodule Pleroma.HTTPSignaturesAPI do
@callback validate_conn(conn :: Plug.Conn.t()) :: boolean
@callback signature_for_conn(conn :: Plug.Conn.t()) :: map
end

View file

@ -10,7 +10,7 @@ defmodule Pleroma.Instances.Instance do
alias Pleroma.Maps
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Workers.BackgroundWorker
alias Pleroma.Workers.DeleteWorker
use Ecto.Schema
@ -297,7 +297,7 @@ defmodule Pleroma.Instances.Instance do
all of those users' activities and notifications.
"""
def delete_users_and_activities(host) when is_binary(host) do
BackgroundWorker.enqueue("delete_instance", %{"host" => host})
DeleteWorker.enqueue("delete_instance", %{"host" => host})
end
def perform(:delete_instance, host) when is_binary(host) do

View file

@ -489,7 +489,7 @@ defmodule Pleroma.Notification do
NOTE: might be called for FAKE Activities, see ActivityPub.Utils.get_notified_from_object/1
"""
@spec get_notified_from_activity(Activity.t(), boolean()) :: {list(User.t()), list(User.t())}
@spec get_notified_from_activity(Activity.t(), boolean()) :: list(User.t())
def get_notified_from_activity(activity, local_only \\ true)
def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, local_only)
@ -734,7 +734,7 @@ defmodule Pleroma.Notification do
def mark_as_read?(activity, target_user) do
user = Activity.user_actor(activity)
User.mutes_user?(target_user, user) || CommonAPI.thread_muted?(target_user, activity)
User.mutes_user?(target_user, user) || CommonAPI.thread_muted?(activity, target_user)
end
def for_user_and_activity(user, activity) do
@ -757,8 +757,9 @@ defmodule Pleroma.Notification do
|> Repo.update_all(set: [seen: true])
end
@spec send(list(Notification.t())) :: :ok
def send(notifications) do
@doc "Streams a list of notifications over websockets and web push"
@spec stream(list(Notification.t())) :: :ok
def stream(notifications) do
Enum.each(notifications, fn notification ->
Streamer.stream(["user", "user:notification"], notification)
Push.send(notification)

View file

@ -99,21 +99,24 @@ defmodule Pleroma.Object do
def get_by_id(nil), do: nil
def get_by_id(id), do: Repo.get(Object, id)
@spec get_by_id_and_maybe_refetch(integer(), list()) :: Object.t() | nil
def get_by_id_and_maybe_refetch(id, opts \\ []) do
%{updated_at: updated_at} = object = get_by_id(id)
with %Object{updated_at: updated_at} = object <- get_by_id(id) do
if opts[:interval] &&
NaiveDateTime.diff(NaiveDateTime.utc_now(), updated_at) > opts[:interval] do
case Fetcher.refetch_object(object) do
{:ok, %Object{} = object} ->
object
if opts[:interval] &&
NaiveDateTime.diff(NaiveDateTime.utc_now(), updated_at) > opts[:interval] do
case Fetcher.refetch_object(object) do
{:ok, %Object{} = object} ->
object
e ->
Logger.error("Couldn't refresh #{object.data["id"]}:\n#{inspect(e)}")
object
e ->
Logger.error("Couldn't refresh #{object.data["id"]}:\n#{inspect(e)}")
object
end
else
object
end
else
object
nil -> nil
end
end

View file

@ -59,6 +59,7 @@ defmodule Pleroma.Object.Fetcher do
end
# Note: will create a Create activity, which we need internally at the moment.
@spec fetch_object_from_id(String.t(), list()) :: {:ok, Object.t()} | {:error | :reject, any()}
def fetch_object_from_id(id, options \\ []) do
with {_, nil} <- {:fetch_object, Object.get_cached_by_ap_id(id)},
{_, true} <- {:allowed_depth, Federator.allowed_thread_distance?(options[:depth])},

View file

@ -134,7 +134,10 @@ defmodule Pleroma.Object.Updater do
else
%{updated_object: updated_data} =
updated_data
|> maybe_update_history(original_data, updated: updated, use_history_in_new_object?: false)
|> maybe_update_history(original_data,
updated: updated,
use_history_in_new_object?: false
)
updated_data
|> Map.put("updated", date)

View file

@ -8,7 +8,7 @@ defmodule Pleroma.ReverseProxy do
~w(if-unmodified-since if-none-match) ++ @range_headers
@resp_cache_headers ~w(etag date last-modified)
@keep_resp_headers @resp_cache_headers ++
~w(content-type content-disposition content-encoding) ++
~w(content-length content-type content-disposition content-encoding) ++
~w(content-range accept-ranges vary)
@default_cache_control_header "public, max-age=1209600"
@valid_resp_codes [200, 206, 304]
@ -180,6 +180,7 @@ defmodule Pleroma.ReverseProxy do
result =
conn
|> put_resp_headers(build_resp_headers(headers, opts))
|> streaming_compat
|> send_chunked(status)
|> chunk_reply(client, opts)
@ -417,4 +418,17 @@ defmodule Pleroma.ReverseProxy do
@cachex.put(:failed_proxy_url_cache, url, true, ttl: ttl)
end
# When Cowboy handles a chunked response with a content-length header it streams
# over HTTP 1.1 instead of chunking. Bandit cannot stream over HTTP 1.1 so the header
# must be stripped or it breaks RFC compliance for Transfer Encoding: Chunked. RFC9112§6.2
#
# HTTP2 is always streamed for all adapters.
defp streaming_compat(conn) do
with Phoenix.Endpoint.Cowboy2Adapter <- Pleroma.Web.Endpoint.config(:adapter) do
conn
else
_ -> delete_resp_header(conn, "content-length")
end
end
end

View file

@ -204,7 +204,7 @@ defmodule Pleroma.ScheduledActivity do
def job_query(scheduled_activity_id) do
from(j in Oban.Job,
where: j.queue == "scheduled_activities",
where: j.queue == "federator_outgoing",
where: fragment("args ->> 'activity_id' = ?::text", ^to_string(scheduled_activity_id))
)
end

View file

@ -10,8 +10,12 @@ defmodule Pleroma.Search do
end
def search(query, options) do
search_module = Pleroma.Config.get([Pleroma.Search, :module], Pleroma.Activity)
search_module = Pleroma.Config.get([Pleroma.Search, :module])
search_module.search(options[:for_user], query, options)
end
def healthcheck_endpoints do
search_module = Pleroma.Config.get([Pleroma.Search, :module])
search_module.healthcheck_endpoints()
end
end
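
Both functions dispatch to whichever backend is configured; a sketch of the selection, using backends that exist in this changeset.

config :pleroma, Pleroma.Search, module: Pleroma.Search.DatabaseSearch
# or, for the new vector backend:
config :pleroma, Pleroma.Search, module: Pleroma.Search.QdrantSearch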

View file

@ -28,7 +28,7 @@ defmodule Pleroma.Search.DatabaseSearch do
|> Activity.with_preloaded_object()
|> Activity.restrict_deactivated_users()
|> restrict_public(user)
|> query_with(index_type, search_query, :websearch)
|> query_with(index_type, search_query)
|> maybe_restrict_local(user)
|> maybe_restrict_author(author)
|> maybe_restrict_blocked(user)
@ -48,6 +48,15 @@ defmodule Pleroma.Search.DatabaseSearch do
@impl true
def remove_from_index(_object), do: :ok
@impl true
def create_index, do: :ok
@impl true
def drop_index, do: :ok
@impl true
def healthcheck_endpoints, do: nil
def maybe_restrict_author(query, %User{} = author) do
Activity.Queries.by_author(query, author)
end
@ -79,25 +88,7 @@ defmodule Pleroma.Search.DatabaseSearch do
)
end
defp query_with(q, :gin, search_query, :plain) do
%{rows: [[tsc]]} =
Ecto.Adapters.SQL.query!(
Pleroma.Repo,
"select current_setting('default_text_search_config')::regconfig::oid;"
)
from([a, o] in q,
where:
fragment(
"to_tsvector(?::oid::regconfig, ?->>'content') @@ plainto_tsquery(?)",
^tsc,
o.data,
^search_query
)
)
end
defp query_with(q, :gin, search_query, :websearch) do
defp query_with(q, :gin, search_query) do
%{rows: [[tsc]]} =
Ecto.Adapters.SQL.query!(
Pleroma.Repo,
@ -115,19 +106,7 @@ defmodule Pleroma.Search.DatabaseSearch do
)
end
defp query_with(q, :rum, search_query, :plain) do
from([a, o] in q,
where:
fragment(
"? @@ plainto_tsquery(?)",
o.fts_content,
^search_query
),
order_by: [fragment("? <=> now()::date", o.inserted_at)]
)
end
defp query_with(q, :rum, search_query, :websearch) do
defp query_with(q, :rum, search_query) do
from([a, o] in q,
where:
fragment(

View file

@ -0,0 +1,86 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Search.Healthcheck do
@doc """
Monitors the health of the search backend and pauses or resumes processing of indexing events based on its availability.
"""
use GenServer
require Logger
@queue :search_indexing
@tick :timer.seconds(5)
@timeout :timer.seconds(2)
def start_link(_) do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
@impl true
def init(_) do
state = %{healthy: false}
{:ok, state, {:continue, :start}}
end
@impl true
def handle_continue(:start, state) do
tick()
{:noreply, state}
end
@impl true
def handle_info(:check, state) do
urls = Pleroma.Search.healthcheck_endpoints()
new_state =
if check(urls) do
Oban.resume_queue(queue: @queue)
Map.put(state, :healthy, true)
else
Oban.pause_queue(queue: @queue)
Map.put(state, :healthy, false)
end
maybe_log_state_change(state, new_state)
tick()
{:noreply, new_state}
end
@impl true
def handle_call(:state, _from, state) do
{:reply, state, state, :hibernate}
end
def state, do: GenServer.call(__MODULE__, :state)
def check([]), do: true
def check(urls) when is_list(urls) do
Enum.all?(
urls,
fn url ->
case Pleroma.HTTP.get(url, [], recv_timeout: @timeout) do
{:ok, %{status: 200}} -> true
_ -> false
end
end
)
end
def check(_), do: true
defp tick do
Process.send_after(self(), :check, @tick)
end
defp maybe_log_state_change(%{healthy: true}, %{healthy: false}) do
Logger.error("Pausing Oban queue #{@queue} due to search backend healthcheck failure")
end
defp maybe_log_state_change(%{healthy: false}, %{healthy: true}) do
Logger.info("Resuming Oban queue #{@queue} due to search backend healthcheck pass")
end
defp maybe_log_state_change(_, _), do: :ok
end
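
The process can be inspected from iex; a sketch based on the handle_call/3 clause above.

iex> Pleroma.Search.Healthcheck.state()
%{healthy: true}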

View file

@ -10,6 +10,12 @@ defmodule Pleroma.Search.Meilisearch do
@behaviour Pleroma.Search.SearchBackend
@impl true
def create_index, do: :ok
@impl true
def drop_index, do: :ok
defp meili_headers do
private_key = Config.get([Pleroma.Search.Meilisearch, :private_key])
@ -178,4 +184,15 @@ defmodule Pleroma.Search.Meilisearch do
def remove_from_index(object) do
meili_delete("/indexes/objects/documents/#{object.id}")
end
@impl true
def healthcheck_endpoints do
endpoint =
Config.get([Pleroma.Search.Meilisearch, :url])
|> URI.parse()
|> Map.put(:path, "/health")
|> URI.to_string()
[endpoint]
end
end

View file

@ -0,0 +1,182 @@
defmodule Pleroma.Search.QdrantSearch do
@behaviour Pleroma.Search.SearchBackend
import Ecto.Query
alias Pleroma.Activity
alias Pleroma.Config.Getting, as: Config
alias __MODULE__.OpenAIClient
alias __MODULE__.QdrantClient
import Pleroma.Search.Meilisearch, only: [object_to_search_data: 1]
import Pleroma.Search.DatabaseSearch, only: [maybe_fetch: 3]
@impl true
def create_index do
payload = Config.get([Pleroma.Search.QdrantSearch, :qdrant_index_configuration])
with {:ok, %{status: 200}} <- QdrantClient.put("/collections/posts", payload) do
:ok
else
e -> {:error, e}
end
end
@impl true
def drop_index do
with {:ok, %{status: 200}} <- QdrantClient.delete("/collections/posts") do
:ok
else
e -> {:error, e}
end
end
def get_embedding(text) do
with {:ok, %{body: %{"data" => [%{"embedding" => embedding}]}}} <-
OpenAIClient.post("/v1/embeddings", %{
input: text,
model: Config.get([Pleroma.Search.QdrantSearch, :openai_model])
}) do
{:ok, embedding}
else
_ ->
{:error, "Failed to get embedding"}
end
end
defp actor_from_activity(%{data: %{"actor" => actor}}) do
actor
end
defp actor_from_activity(_), do: nil
defp build_index_payload(activity, embedding) do
actor = actor_from_activity(activity)
published_at = activity.data["published"]
%{
points: [
%{
id: activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!(),
vector: embedding,
payload: %{actor: actor, published_at: published_at}
}
]
}
end
defp build_search_payload(embedding, options) do
base = %{
vector: embedding,
limit: options[:limit] || 20,
offset: options[:offset] || 0
}
if author = options[:author] do
Map.put(base, :filter, %{
must: [%{key: "actor", match: %{value: author.ap_id}}]
})
else
base
end
end
@impl true
def add_to_index(activity) do
# This will only index public or unlisted notes
maybe_search_data = object_to_search_data(activity.object)
if activity.data["type"] == "Create" and maybe_search_data do
with {:ok, embedding} <- get_embedding(maybe_search_data.content),
{:ok, %{status: 200}} <-
QdrantClient.put(
"/collections/posts/points",
build_index_payload(activity, embedding)
) do
:ok
else
e -> {:error, e}
end
else
:ok
end
end
@impl true
def remove_from_index(object) do
activity = Activity.get_by_object_ap_id_with_object(object.data["id"])
id = activity.id |> FlakeId.from_string() |> Ecto.UUID.cast!()
with {:ok, %{status: 200}} <-
QdrantClient.post("/collections/posts/points/delete", %{"points" => [id]}) do
:ok
else
e -> {:error, e}
end
end
@impl true
def search(user, original_query, options) do
query = "Represent this sentence for searching relevant passages: #{original_query}"
with {:ok, embedding} <- get_embedding(query),
{:ok, %{body: %{"result" => result}}} <-
QdrantClient.post(
"/collections/posts/points/search",
build_search_payload(embedding, options)
) do
ids =
Enum.map(result, fn %{"id" => id} ->
Ecto.UUID.dump!(id)
end)
from(a in Activity, where: a.id in ^ids)
|> Activity.with_preloaded_object()
|> Activity.restrict_deactivated_users()
|> Ecto.Query.order_by([a], fragment("array_position(?, ?)", ^ids, a.id))
|> Pleroma.Repo.all()
|> maybe_fetch(user, original_query)
else
_ ->
[]
end
end
@impl true
def healthcheck_endpoints do
qdrant_health =
Config.get([Pleroma.Search.QdrantSearch, :qdrant_url])
|> URI.parse()
|> Map.put(:path, "/healthz")
|> URI.to_string()
openai_health = Config.get([Pleroma.Search.QdrantSearch, :openai_healthcheck_url])
[qdrant_health, openai_health] |> Enum.filter(& &1)
end
end
defmodule Pleroma.Search.QdrantSearch.OpenAIClient do
use Tesla
alias Pleroma.Config.Getting, as: Config
plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :openai_url]))
plug(Tesla.Middleware.JSON)
plug(Tesla.Middleware.Headers, [
{"Authorization",
"Bearer #{Pleroma.Config.get([Pleroma.Search.QdrantSearch, :openai_api_key])}"}
])
end
defmodule Pleroma.Search.QdrantSearch.QdrantClient do
use Tesla
alias Pleroma.Config.Getting, as: Config
plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :qdrant_url]))
plug(Tesla.Middleware.JSON)
plug(Tesla.Middleware.Headers, [
{"api-key", Pleroma.Config.get([Pleroma.Search.QdrantSearch, :qdrant_api_key])}
])
end
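
A hedged sketch of the configuration this backend reads; the key names come from the Config.get calls above, while every value is a placeholder.

config :pleroma, Pleroma.Search.QdrantSearch,
  qdrant_url: "http://127.0.0.1:6333",
  qdrant_api_key: "",
  openai_url: "http://127.0.0.1:11345",
  openai_healthcheck_url: "http://127.0.0.1:11345/health",
  openai_api_key: "",
  openai_model: "your-embedding-model",
  qdrant_index_configuration: %{
    # placeholder: the vector size must match the embedding model's output
    vectors: %{size: 384, distance: "Cosine"}
  }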

View file

@ -21,4 +21,22 @@ defmodule Pleroma.Search.SearchBackend do
from index.
"""
@callback remove_from_index(object :: Pleroma.Object.t()) :: :ok | {:error, any()}
@doc """
Create the index
"""
@callback create_index() :: :ok | {:error, any()}
@doc """
Drop the index
"""
@callback drop_index() :: :ok | {:error, any()}
@doc """
Healthcheck endpoints of the search backend infrastructure, monitored to control
processing of jobs in the Oban queue.
A 200 response is considered healthy; any other response is considered unhealthy.
"""
@callback healthcheck_endpoints :: list() | nil
end

View file

@ -10,6 +10,14 @@ defmodule Pleroma.Signature do
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
import Plug.Conn, only: [put_req_header: 3]
@http_signatures_impl Application.compile_env(
:pleroma,
[__MODULE__, :http_signatures_impl],
HTTPSignatures
)
@known_suffixes ["/publickey", "/main-key"]
def key_id_to_actor_id(key_id) do
@ -44,8 +52,7 @@ defmodule Pleroma.Signature do
defp remove_suffix(uri, []), do: uri
def fetch_public_key(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid),
with {:ok, actor_id} <- get_actor_id(conn),
{:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do
{:ok, public_key}
else
@ -55,8 +62,7 @@ defmodule Pleroma.Signature do
end
def refetch_public_key(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid),
with {:ok, actor_id} <- get_actor_id(conn),
{:ok, _user} <- ActivityPub.make_user_from_ap_id(actor_id),
{:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do
{:ok, public_key}
@ -66,6 +72,16 @@ defmodule Pleroma.Signature do
end
end
def get_actor_id(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid) do
{:ok, actor_id}
else
e ->
{:error, e}
end
end
def sign(%User{keys: keys} = user, headers) do
with {:ok, private_key, _} <- Keys.keys_from_pem(keys) do
HTTPSignatures.sign(private_key, user.ap_id <> "#main-key", headers)
@ -77,4 +93,48 @@ defmodule Pleroma.Signature do
def signed_date(%NaiveDateTime{} = date) do
Timex.format!(date, "{WDshort}, {0D} {Mshort} {YYYY} {h24}:{m}:{s} GMT")
end
@spec validate_signature(Plug.Conn.t(), String.t()) :: boolean()
def validate_signature(%Plug.Conn{} = conn, request_target) do
# Newer drafts for HTTP signatures now use @request-target instead of the
# old (request-target). We'll now support both for incoming signatures.
conn =
conn
|> put_req_header("(request-target)", request_target)
|> put_req_header("@request-target", request_target)
@http_signatures_impl.validate_conn(conn)
end
@spec validate_signature(Plug.Conn.t()) :: boolean()
def validate_signature(%Plug.Conn{} = conn) do
# This (request-target) is non-standard, but many implementations do it
# this way due to a misinterpretation of
# https://datatracker.ietf.org/doc/html/draft-cavage-http-signatures-06
# "path" was interpreted as not having the query, though later examples
# show that it must be the absolute path + query. This behavior is kept to
# make sure most software (Pleroma itself, Mastodon, and probably others)
# do not break.
request_target = Enum.join([String.downcase(conn.method), conn.request_path], " ")
# This is the proper way to build the @request-target, as expected by
# many HTTP signature libraries, clarified in the following draft:
# https://www.ietf.org/archive/id/draft-ietf-httpbis-message-signatures-11.html#section-2.2.6
# It is the same as before, but containing the query part as well.
proper_target = Enum.join([request_target, "?", conn.query_string], "")
cond do
# Normal, non-standard behavior but expected by Pleroma and more.
validate_signature(conn, request_target) ->
true
# Has query string and the previous one failed: let's try the standard.
conn.query_string != "" ->
validate_signature(conn, proper_target)
# If there's no query string and signature fails, it's rotten.
true ->
false
end
end
end
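
A worked example of the two targets built above, for a signed GET to /users/lain?page=1 (the path is illustrative; the strings follow directly from the construction in validate_signature/1).

# request_target (non-standard, tried first):  "get /users/lain"
# proper_target  (standards-based fallback):   "get /users/lain?page=1"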

View file

@ -39,7 +39,7 @@ defmodule Pleroma.Telemetry.Logger do
_,
_
) do
Logger.error(fn ->
Logger.debug(fn ->
"Connection pool failed to reclaim any connections due to all of them being in use. It will have to drop requests for opening connections to new hosts"
end)
end
@ -70,7 +70,7 @@ defmodule Pleroma.Telemetry.Logger do
%{key: key},
_
) do
Logger.warning(fn ->
Logger.debug(fn ->
"Pool worker for #{key}: Client #{inspect(client_pid)} died before releasing the connection with #{inspect(reason)}"
end)
end

View file

@ -239,20 +239,26 @@ defmodule Pleroma.Upload do
""
end
[base_url, path]
|> Path.join()
if String.contains?(base_url, Pleroma.Uploaders.IPFS.placeholder()) do
String.replace(base_url, Pleroma.Uploaders.IPFS.placeholder(), path)
else
[base_url, path]
|> Path.join()
end
end
defp url_from_spec(_upload, _base_url, {:url, url}), do: url
@spec base_url() :: binary
def base_url do
uploader = @config_impl.get([Pleroma.Upload, :uploader])
upload_base_url = @config_impl.get([Pleroma.Upload, :base_url])
upload_fallback_url = Pleroma.Web.Endpoint.url() <> "/media/"
upload_base_url = @config_impl.get([Pleroma.Upload, :base_url]) || upload_fallback_url
public_endpoint = @config_impl.get([uploader, :public_endpoint])
case uploader do
Pleroma.Uploaders.Local ->
upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
upload_base_url
Pleroma.Uploaders.S3 ->
bucket = @config_impl.get([Pleroma.Uploaders.S3, :bucket])
@ -264,11 +270,14 @@ defmodule Pleroma.Upload do
!is_nil(truncated_namespace) ->
truncated_namespace
!is_nil(namespace) ->
!is_nil(namespace) and !is_nil(bucket) ->
namespace <> ":" <> bucket
true ->
!is_nil(bucket) ->
bucket
true ->
""
end
if public_endpoint do
@ -277,8 +286,11 @@ defmodule Pleroma.Upload do
Path.join([upload_base_url, bucket_with_namespace])
end
Pleroma.Uploaders.IPFS ->
@config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url])
_ ->
public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
public_endpoint || upload_base_url
end
end
end

View file

@ -9,8 +9,6 @@ defmodule Pleroma.Upload.Filter.Exiftool.StripLocation do
"""
@behaviour Pleroma.Upload.Filter
@spec filter(Pleroma.Upload.t()) :: {:ok, any()} | {:error, String.t()}
# Formats not compatible with exiftool at this time
def filter(%Pleroma.Upload{content_type: "image/heic"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop}
@ -18,7 +16,9 @@ defmodule Pleroma.Upload.Filter.Exiftool.StripLocation do
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
try do
case System.cmd("exiftool", ["-overwrite_original", "-gps:all=", file], parallelism: true) do
case System.cmd("exiftool", ["-overwrite_original", "-gps:all=", "-png:all=", file],
parallelism: true
) do
{_response, 0} -> {:ok, :filtered}
{error, 1} -> {:error, error}
end

View file

@ -38,7 +38,6 @@ defmodule Pleroma.Upload.Filter.Mogrifun do
[{"fill", "yellow"}, {"tint", "40"}]
]
@spec filter(Pleroma.Upload.t()) :: {:ok, atom()} | {:error, String.t()}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
try do
Filter.Mogrify.do_filter(file, [Enum.random(@filters)])

View file

@ -8,7 +8,6 @@ defmodule Pleroma.Upload.Filter.Mogrify do
@type conversion :: action :: String.t() | {action :: String.t(), opts :: String.t()}
@type conversions :: conversion() | [conversion()]
@spec filter(Pleroma.Upload.t()) :: {:ok, :atom} | {:error, String.t()}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
try do
do_filter(file, Pleroma.Config.get!([__MODULE__, :args]))

View file

@ -0,0 +1,72 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Uploaders.IPFS do
@behaviour Pleroma.Uploaders.Uploader
require Logger
alias Tesla.Multipart
@api_add "/api/v0/add"
@api_delete "/api/v0/files/rm"
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
@placeholder "{CID}"
def placeholder, do: @placeholder
@impl true
def get_file(file) do
b_url = Pleroma.Upload.base_url()
if String.contains?(b_url, @placeholder) do
{:ok, {:url, String.replace(b_url, @placeholder, URI.decode(file))}}
else
{:error, "IPFS Get URL doesn't contain 'cid' placeholder"}
end
end
@impl true
def put_file(%Pleroma.Upload{tempfile: tempfile}) do
mp =
Multipart.new()
|> Multipart.add_content_type_param("charset=utf-8")
|> Multipart.add_file(tempfile)
endpoint = ipfs_endpoint(@api_add)
with {:ok, %{body: body}} when is_binary(body) <-
Pleroma.HTTP.post(endpoint, mp, [], params: ["cid-version": "1"], pool: :upload),
{_, {:ok, decoded}} <- {:json, Jason.decode(body)},
{_, true} <- {:hash, Map.has_key?(decoded, "Hash")} do
{:ok, {:file, decoded["Hash"]}}
else
{:hash, false} ->
{:error, "JSON doesn't contain Hash key"}
{:json, error} ->
Logger.error("#{__MODULE__}: #{inspect(error)}")
{:error, "JSON decode failed"}
error ->
Logger.error("#{__MODULE__}: #{inspect(error)}")
{:error, "IPFS Gateway upload failed"}
end
end
@impl true
def delete_file(file) do
endpoint = ipfs_endpoint(@api_delete)
case Pleroma.HTTP.post(endpoint, "", [], params: [arg: file]) do
{:ok, %{status: 204}} -> :ok
error -> {:error, inspect(error)}
end
end
defp ipfs_endpoint(path) do
URI.parse(@config_impl.get([__MODULE__, :post_gateway_url]))
|> Map.put(:path, path)
|> URI.to_string()
end
end
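
A hedged configuration sketch for the new uploader: post_gateway_url is the key read by ipfs_endpoint/1 above, get_gateway_url is the key Pleroma.Upload.base_url/0 reads for this uploader, and {CID} is the placeholder defined by placeholder/0 (host names are examples).

config :pleroma, Pleroma.Upload, uploader: Pleroma.Uploaders.IPFS

config :pleroma, Pleroma.Uploaders.IPFS,
  post_gateway_url: "http://127.0.0.1:5001",
  get_gateway_url: "https://ipfs.example.com/ipfs/{CID}"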

View file

@ -38,6 +38,8 @@ defmodule Pleroma.User do
alias Pleroma.Web.OAuth
alias Pleroma.Web.RelMe
alias Pleroma.Workers.BackgroundWorker
alias Pleroma.Workers.DeleteWorker
alias Pleroma.Workers.UserRefreshWorker
require Logger
require Pleroma.Constants
@ -1981,7 +1983,7 @@ defmodule Pleroma.User do
def delete(%User{} = user) do
# Purge the user immediately
purge(user)
BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
DeleteWorker.enqueue("delete_user", %{"user_id" => user.id})
end
# *Actually* delete the user from the DB
@ -2053,7 +2055,8 @@ defmodule Pleroma.User do
%{scheme: scheme, userinfo: nil, host: host}
when not_empty_string(host) and scheme in ["http", "https"] <-
URI.parse(value),
{:not_idn, true} <- {:not_idn, to_string(:idna.encode(host)) == host},
{:not_idn, true} <-
{:not_idn, match?(^host, to_string(:idna.encode(to_charlist(host))))},
"me" <- Pleroma.Web.RelMe.maybe_put_rel_me(value, profile_urls) do
CommonUtils.to_masto_date(NaiveDateTime.utc_now())
else
@ -2153,20 +2156,20 @@ defmodule Pleroma.User do
def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)
@spec get_or_fetch_by_ap_id(String.t()) :: {:ok, User.t()} | {:error, any()}
def get_or_fetch_by_ap_id(ap_id) do
cached_user = get_cached_by_ap_id(ap_id)
with cached_user = %User{} <- get_cached_by_ap_id(ap_id),
_ <- maybe_refresh(cached_user) do
{:ok, cached_user}
else
_ -> fetch_by_ap_id(ap_id)
end
end
maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id)
case {cached_user, maybe_fetched_user} do
{_, {:ok, %User{} = user}} ->
{:ok, user}
{%User{} = user, _} ->
{:ok, user}
_ ->
{:error, :not_found}
defp maybe_refresh(user) do
if needs_update?(user) do
UserRefreshWorker.new(%{"ap_id" => user.ap_id})
|> Oban.insert()
end
end
@ -2727,7 +2730,7 @@ defmodule Pleroma.User do
end
end
@spec add_to_block(User.t(), User.t()) ::
@spec remove_from_block(User.t(), User.t()) ::
{:ok, UserRelationship.t()} | {:ok, nil} | {:error, Ecto.Changeset.t()}
defp remove_from_block(%User{} = user, %User{} = blocked) do
with {:ok, relationship} <- UserRelationship.delete_block(user, blocked) do

View file

@ -14,9 +14,10 @@ defmodule Pleroma.User.Backup do
alias Pleroma.Activity
alias Pleroma.Bookmark
alias Pleroma.Config
alias Pleroma.Repo
alias Pleroma.Uploaders.Uploader
alias Pleroma.User
alias Pleroma.User.Backup.State
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.ActivityPub.UserView
@ -29,71 +30,111 @@ defmodule Pleroma.User.Backup do
field(:file_name, :string)
field(:file_size, :integer, default: 0)
field(:processed, :boolean, default: false)
field(:state, State, default: :invalid)
field(:processed_number, :integer, default: 0)
field(:tempdir, :string)
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
timestamps()
end
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
@doc """
Schedules a job to backup a user if the number of backup requests has not exceeded the limit.
def create(user, admin_id \\ nil) do
with :ok <- validate_limit(user, admin_id),
{:ok, backup} <- user |> new() |> Repo.insert() do
BackupWorker.process(backup, admin_id)
Admins can directly call new/1 and schedule_backup/1 to bypass the limit.
"""
@spec user(User.t()) :: {:ok, t()} | {:error, any()}
def user(user) do
days = Config.get([__MODULE__, :limit_days])
with true <- permitted?(user),
%__MODULE__{} = backup <- new(user),
{:ok, inserted_backup} <- Repo.insert(backup),
{:ok, %Oban.Job{}} <- schedule_backup(inserted_backup) do
{:ok, inserted_backup}
else
false ->
{:error,
dngettext(
"errors",
"Last export was less than a day ago",
"Last export was less than %{days} days ago",
days,
days: days
)}
e ->
{:error, e}
end
end
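
A hedged usage sketch of the reworked flow, assuming user is a %Pleroma.User{}: user/1 enforces the rate limit, while the admin path inserts and schedules manually to bypass it, as the @doc above describes.

# Rate-limited, self-service path:
{:ok, backup} = Pleroma.User.Backup.user(user)

# Admin path, bypassing the limit:
{:ok, backup} =
  user
  |> Pleroma.User.Backup.new()
  |> Pleroma.Repo.insert()

{:ok, _job} = Pleroma.User.Backup.schedule_backup(backup)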
@doc "Generates a %Backup{} for a user with a random file name"
@spec new(User.t()) :: t()
def new(user) do
rand_str = :crypto.strong_rand_bytes(32) |> Base.url_encode64(padding: false)
datetime = Calendar.NaiveDateTime.Format.iso8601_basic(NaiveDateTime.utc_now())
name = "archive-#{user.nickname}-#{datetime}-#{rand_str}.zip"
%__MODULE__{
user_id: user.id,
content_type: "application/zip",
file_name: name,
state: :pending
tempdir: tempdir(),
user: user
}
end
def delete(backup) do
uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
@doc "Schedules the execution of the provided backup"
@spec schedule_backup(t()) :: {:ok, Oban.Job.t()} | {:error, any()}
def schedule_backup(backup) do
with false <- is_nil(backup.id) do
%{"op" => "process", "backup_id" => backup.id}
|> BackupWorker.new()
|> Oban.insert()
else
true ->
{:error, "Backup is missing id. Please insert it into the Repo first."}
e ->
{:error, e}
end
end
@doc "Deletes the backup archive file and removes the database record"
@spec delete_archive(t()) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()}
def delete_archive(backup) do
uploader = Config.get([Pleroma.Upload, :uploader])
with :ok <- uploader.delete_file(Path.join("backups", backup.file_name)) do
Repo.delete(backup)
end
end
defp validate_limit(_user, admin_id) when is_binary(admin_id), do: :ok
@doc "Schedules a job to delete the backup archive"
@spec schedule_delete(t()) :: {:ok, Oban.Job.t()} | {:error, any()}
def schedule_delete(backup) do
days = Config.get([__MODULE__, :purge_after_days])
time = 60 * 60 * 24 * days
scheduled_at = Calendar.NaiveDateTime.add!(backup.inserted_at, time)
defp validate_limit(user, nil) do
case get_last(user.id) do
%__MODULE__{inserted_at: inserted_at} ->
days = Pleroma.Config.get([__MODULE__, :limit_days])
diff = Timex.diff(NaiveDateTime.utc_now(), inserted_at, :days)
%{"op" => "delete", "backup_id" => backup.id}
|> BackupWorker.new(scheduled_at: scheduled_at)
|> Oban.insert()
end
if diff > days do
:ok
else
{:error,
dngettext(
"errors",
"Last export was less than a day ago",
"Last export was less than %{days} days ago",
days,
days: days
)}
end
nil ->
:ok
defp permitted?(user) do
with {_, %__MODULE__{inserted_at: inserted_at}} <- {:last, get_last(user)},
days = Config.get([__MODULE__, :limit_days]),
diff = Timex.diff(NaiveDateTime.utc_now(), inserted_at, :days),
{_, true} <- {:diff, diff > days} do
true
else
{:last, nil} -> true
{:diff, false} -> false
end
end
def get_last(user_id) do
@doc "Returns last backup for the provided user"
@spec get_last(User.t()) :: t()
def get_last(%User{id: user_id}) do
__MODULE__
|> where(user_id: ^user_id)
|> order_by(desc: :id)
@ -101,6 +142,8 @@ defmodule Pleroma.User.Backup do
|> Repo.one()
end
@doc "Lists all existing backups for a user"
@spec list(User.t()) :: [Ecto.Schema.t() | term()]
def list(%User{id: user_id}) do
__MODULE__
|> where(user_id: ^user_id)
@ -108,149 +151,107 @@ defmodule Pleroma.User.Backup do
|> Repo.all()
end
def remove_outdated(%__MODULE__{id: latest_id, user_id: user_id}) do
__MODULE__
|> where(user_id: ^user_id)
|> where([b], b.id != ^latest_id)
|> Repo.all()
|> Enum.each(&BackupWorker.delete/1)
@doc "Schedules deletion of all but the the most recent backup"
@spec remove_outdated(User.t()) :: :ok
def remove_outdated(user) do
with %__MODULE__{} = latest_backup <- get_last(user) do
__MODULE__
|> where(user_id: ^user.id)
|> where([b], b.id != ^latest_backup.id)
|> Repo.all()
|> Enum.each(&schedule_delete/1)
else
_ -> :ok
end
end
def get(id), do: Repo.get(__MODULE__, id)
defp set_state(backup, state, processed_number \\ nil) do
struct =
%{state: state}
|> Pleroma.Maps.put_if_present(:processed_number, processed_number)
def get_by_id(id), do: Repo.get(__MODULE__, id)
@doc "Generates changeset for %Pleroma.User.Backup{}"
@spec changeset(%__MODULE__{}, map()) :: %Ecto.Changeset{}
def changeset(backup \\ %__MODULE__{}, attrs) do
backup
|> cast(struct, [:state, :processed_number])
|> cast(attrs, [:content_type, :file_name, :file_size, :processed, :tempdir])
end
@doc "Updates the backup record"
@spec update_record(%__MODULE__{}, map()) :: {:ok, %__MODULE__{}} | {:error, %Ecto.Changeset{}}
def update_record(%__MODULE__{} = backup, attrs) do
backup
|> changeset(attrs)
|> Repo.update()
end
def process(
%__MODULE__{} = backup,
processor_module \\ __MODULE__.Processor
) do
set_state(backup, :running, 0)
current_pid = self()
task =
Task.Supervisor.async_nolink(
Pleroma.TaskSupervisor,
processor_module,
:do_process,
[backup, current_pid]
)
wait_backup(backup, backup.processed_number, task)
end
defp wait_backup(backup, current_processed, task) do
wait_time = @config_impl.get([__MODULE__, :process_wait_time])
receive do
{:progress, new_processed} ->
total_processed = current_processed + new_processed
set_state(backup, :running, total_processed)
wait_backup(backup, total_processed, task)
{:DOWN, _ref, _proc, _pid, reason} ->
backup = get(backup.id)
if reason != :normal do
Logger.error("Backup #{backup.id} process ended abnormally: #{inspect(reason)}")
{:ok, backup} = set_state(backup, :failed)
cleanup(backup)
{:error,
%{
backup: backup,
reason: :exit,
details: reason
}}
else
{:ok, backup}
end
after
wait_time ->
Logger.error(
"Backup #{backup.id} timed out after no response for #{wait_time}ms, terminating"
)
Task.Supervisor.terminate_child(Pleroma.TaskSupervisor, task.pid)
{:ok, backup} = set_state(backup, :failed)
cleanup(backup)
{:error,
%{
backup: backup,
reason: :timeout
}}
end
end
@files [
'actor.json',
'outbox.json',
'likes.json',
'bookmarks.json',
'followers.json',
'following.json'
~c"actor.json",
~c"outbox.json",
~c"likes.json",
~c"bookmarks.json",
~c"followers.json",
~c"following.json"
]
@spec export(Pleroma.User.Backup.t(), pid()) :: {:ok, String.t()} | :error
def export(%__MODULE__{} = backup, caller_pid) do
backup = Repo.preload(backup, :user)
dir = backup_tempdir(backup)
with :ok <- File.mkdir(dir),
:ok <- actor(dir, backup.user, caller_pid),
:ok <- statuses(dir, backup.user, caller_pid),
:ok <- likes(dir, backup.user, caller_pid),
:ok <- bookmarks(dir, backup.user, caller_pid),
:ok <- followers(dir, backup.user, caller_pid),
:ok <- following(dir, backup.user, caller_pid),
{:ok, zip_path} <- :zip.create(backup.file_name, @files, cwd: dir),
{:ok, _} <- File.rm_rf(dir) do
{:ok, zip_path}
@spec run(t()) :: {:ok, t()} | {:error, :failed}
def run(%__MODULE__{} = backup) do
backup = Repo.preload(backup, :user)
tempfile = Path.join([backup.tempdir, backup.file_name])
with {_, :ok} <- {:mkdir, File.mkdir_p(backup.tempdir)},
{_, :ok} <- {:actor, actor(backup.tempdir, backup.user)},
{_, :ok} <- {:statuses, statuses(backup.tempdir, backup.user)},
{_, :ok} <- {:likes, likes(backup.tempdir, backup.user)},
{_, :ok} <- {:bookmarks, bookmarks(backup.tempdir, backup.user)},
{_, :ok} <- {:followers, followers(backup.tempdir, backup.user)},
{_, :ok} <- {:following, following(backup.tempdir, backup.user)},
{_, {:ok, _zip_path}} <-
{:zip, :zip.create(to_charlist(tempfile), @files, cwd: to_charlist(backup.tempdir))},
{_, {:ok, %File.Stat{size: zip_size}}} <- {:filestat, File.stat(tempfile)},
{:ok, updated_backup} <- update_record(backup, %{file_size: zip_size}) do
{:ok, updated_backup}
else
_ -> :error
_ ->
File.rm_rf(backup.tempdir)
{:error, :failed}
end
end
def dir(name) do
dir = Pleroma.Config.get([__MODULE__, :dir]) || System.tmp_dir!()
Path.join(dir, name)
defp tempdir do
rand = :crypto.strong_rand_bytes(8) |> Base.url_encode64(padding: false)
subdir = "backup-#{rand}"
case Config.get([__MODULE__, :tempdir]) do
nil ->
Path.join([System.tmp_dir!(), subdir])
path ->
Path.join([path, subdir])
end
end
def upload(%__MODULE__{} = backup, zip_path) do
uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
@doc "Uploads the completed backup and marks it as processed"
@spec upload(t()) :: {:ok, t()}
def upload(%__MODULE__{tempdir: tempdir} = backup) when is_binary(tempdir) do
uploader = Config.get([Pleroma.Upload, :uploader])
upload = %Pleroma.Upload{
name: backup.file_name,
tempfile: zip_path,
tempfile: Path.join([tempdir, backup.file_name]),
content_type: backup.content_type,
path: Path.join("backups", backup.file_name)
}
with {:ok, _} <- Pleroma.Uploaders.Uploader.put_file(uploader, upload),
:ok <- File.rm(zip_path) do
{:ok, upload}
with {:ok, _} <- Uploader.put_file(uploader, upload),
{:ok, uploaded_backup} <- update_record(backup, %{processed: true}),
{:ok, _} <- File.rm_rf(tempdir) do
{:ok, uploaded_backup}
end
end
defp actor(dir, user, caller_pid) do
defp actor(dir, user) do
with {:ok, json} <-
UserView.render("user.json", %{user: user})
|> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"})
|> Jason.encode() do
send(caller_pid, {:progress, 1})
File.write(Path.join(dir, "actor.json"), json)
end
end
@ -269,22 +270,10 @@ defmodule Pleroma.User.Backup do
)
end
defp should_report?(num, chunk_size), do: rem(num, chunk_size) == 0
defp backup_tempdir(backup) do
name = String.trim_trailing(backup.file_name, ".zip")
dir(name)
end
defp cleanup(backup) do
dir = backup_tempdir(backup)
File.rm_rf(dir)
end
defp write(query, dir, name, fun, caller_pid) do
defp write(query, dir, name, fun) do
path = Path.join(dir, "#{name}.json")
chunk_size = Pleroma.Config.get([__MODULE__, :process_chunk_size])
chunk_size = Config.get([__MODULE__, :process_chunk_size])
with {:ok, file} <- File.open(path, [:write, :utf8]),
:ok <- write_header(file, name) do
@ -300,10 +289,6 @@ defmodule Pleroma.User.Backup do
end),
{:ok, str} <- Jason.encode(data),
:ok <- IO.write(file, str <> ",\n") do
if should_report?(acc + 1, chunk_size) do
send(caller_pid, {:progress, chunk_size})
end
acc + 1
else
{:error, e} ->
@ -318,31 +303,29 @@ defmodule Pleroma.User.Backup do
end
end)
send(caller_pid, {:progress, rem(total, chunk_size)})
with :ok <- :file.pwrite(file, {:eof, -2}, "\n],\n \"totalItems\": #{total}}") do
File.close(file)
end
end
end
defp bookmarks(dir, %{id: user_id} = _user, caller_pid) do
defp bookmarks(dir, %{id: user_id} = _user) do
Bookmark
|> where(user_id: ^user_id)
|> join(:inner, [b], activity in assoc(b, :activity))
|> select([b, a], %{id: b.id, object: fragment("(?)->>'object'", a.data)})
|> write(dir, "bookmarks", fn a -> {:ok, a.object} end, caller_pid)
|> write(dir, "bookmarks", fn a -> {:ok, a.object} end)
end
defp likes(dir, user, caller_pid) do
defp likes(dir, user) do
user.ap_id
|> Activity.Queries.by_actor()
|> Activity.Queries.by_type("Like")
|> select([like], %{id: like.id, object: fragment("(?)->>'object'", like.data)})
|> write(dir, "likes", fn a -> {:ok, a.object} end, caller_pid)
|> write(dir, "likes", fn a -> {:ok, a.object} end)
end
defp statuses(dir, user, caller_pid) do
defp statuses(dir, user) do
opts =
%{}
|> Map.put(:type, ["Create", "Announce"])
@ -362,52 +345,17 @@ defmodule Pleroma.User.Backup do
with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do
{:ok, Map.delete(activity, "@context")}
end
end,
caller_pid
end
)
end
defp followers(dir, user, caller_pid) do
defp followers(dir, user) do
User.get_followers_query(user)
|> write(dir, "followers", fn a -> {:ok, a.ap_id} end, caller_pid)
|> write(dir, "followers", fn a -> {:ok, a.ap_id} end)
end
defp following(dir, user, caller_pid) do
defp following(dir, user) do
User.get_friends_query(user)
|> write(dir, "following", fn a -> {:ok, a.ap_id} end, caller_pid)
end
end
defmodule Pleroma.User.Backup.ProcessorAPI do
@callback do_process(%Pleroma.User.Backup{}, pid()) ::
{:ok, %Pleroma.User.Backup{}} | {:error, any()}
end
defmodule Pleroma.User.Backup.Processor do
@behaviour Pleroma.User.Backup.ProcessorAPI
alias Pleroma.Repo
alias Pleroma.User.Backup
import Ecto.Changeset
@impl true
def do_process(backup, current_pid) do
with {:ok, zip_file} <- Backup.export(backup, current_pid),
{:ok, %{size: size}} <- File.stat(zip_file),
{:ok, _upload} <- Backup.upload(backup, zip_file) do
backup
|> cast(
%{
file_size: size,
processed: true,
state: :complete
},
[:file_size, :processed, :state]
)
|> Repo.update()
else
e -> {:error, e}
end
|> write(dir, "following", fn a -> {:ok, a.ap_id} end)
end
end

View file

@ -31,7 +31,7 @@ defmodule Pleroma.User.Import do
identifiers,
fn identifier ->
with {:ok, %User{} = blocked} <- User.get_or_fetch(identifier),
{:ok, _block} <- CommonAPI.block(blocker, blocked) do
{:ok, _block} <- CommonAPI.block(blocked, blocker) do
blocked
else
error -> handle_error(:blocks_import, identifier, error)
@ -46,7 +46,7 @@ defmodule Pleroma.User.Import do
fn identifier ->
with {:ok, %User{} = followed} <- User.get_or_fetch(identifier),
{:ok, follower, followed} <- User.maybe_direct_follow(follower, followed),
{:ok, _, _, _} <- CommonAPI.follow(follower, followed) do
{:ok, _, _, _} <- CommonAPI.follow(followed, follower) do
followed
else
error -> handle_error(:follow_import, identifier, error)

View file

@ -163,7 +163,7 @@ defmodule Pleroma.Web do
"""
def safe_render_many(collection, view, template, assigns \\ %{}) do
Enum.map(collection, fn resource ->
as = Map.get(assigns, :as) || view.__resource__
as = Map.get(assigns, :as) || view.__resource__()
assigns = Map.put(assigns, as, resource)
safe_render(view, template, assigns)
end)

View file

@ -201,7 +201,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
def notify_and_stream(activity) do
{:ok, notifications} = Notification.create_notifications(activity)
Notification.send(notifications)
Notification.stream(notifications)
original_activity =
case activity do
@ -979,8 +979,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
defp restrict_replies(query, %{exclude_replies: true}) do
from(
[_activity, object] in query,
where: fragment("?->>'inReplyTo' is null", object.data)
[activity, object] in query,
where:
fragment("?->>'inReplyTo' is null or ?->>'type' = 'Announce'", object.data, activity.data)
)
end
@ -1660,7 +1661,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
}}
else
{:error, _} = e -> e
e -> {:error, e}
end
end
@ -1793,24 +1793,25 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
end
end
def pinned_fetch_task(nil), do: nil
def pinned_fetch_task(%{pinned_objects: pins}) do
if Enum.all?(pins, fn {ap_id, _} ->
Object.get_cached_by_ap_id(ap_id) ||
match?({:ok, _object}, Fetcher.fetch_object_from_id(ap_id))
end) do
:ok
else
:error
end
def enqueue_pin_fetches(%{pinned_objects: pins}) do
# enqueue jobs to fetch any pinned objects that are not already cached
Enum.each(pins, fn {ap_id, _} ->
if is_nil(Object.get_cached_by_ap_id(ap_id)) do
Pleroma.Workers.RemoteFetcherWorker.enqueue("fetch_remote", %{
"id" => ap_id,
"depth" => 1
})
end
end)
end
def enqueue_pin_fetches(_), do: nil
def make_user_from_ap_id(ap_id, additional \\ []) do
user = User.get_cached_by_ap_id(ap_id)
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
{:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
enqueue_pin_fetches(data)
if user do
user

View file

@ -52,6 +52,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
when action in [:activity, :object]
)
plug(:log_inbox_metadata when action in [:inbox])
plug(:set_requester_reachable when action in [:inbox])
plug(:relay_active? when action in [:relay])
@ -292,8 +293,15 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
json(conn, "ok")
end
def inbox(%{assigns: %{valid_signature: false}, req_headers: req_headers} = conn, params) do
Federator.incoming_ap_doc(%{req_headers: req_headers, params: params})
def inbox(%{assigns: %{valid_signature: false}} = conn, params) do
Federator.incoming_ap_doc(%{
method: conn.method,
req_headers: conn.req_headers,
request_path: conn.request_path,
params: params,
query_string: conn.query_string
})
json(conn, "ok")
end
@ -521,6 +529,13 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
conn
end
defp log_inbox_metadata(%{params: %{"actor" => actor, "type" => type}} = conn, _) do
Logger.metadata(actor: actor, type: type)
conn
end
defp log_inbox_metadata(conn, _), do: conn
def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} = data) do
with {:ok, object} <-
ActivityPub.upload(

View file

@ -204,7 +204,7 @@ defmodule Pleroma.Web.ActivityPub.MRF do
if function_exported?(policy, :config_description, 0) do
description =
@default_description
|> Map.merge(policy.config_description)
|> Map.merge(policy.config_description())
|> Map.put(:group, :pleroma)
|> Map.put(:tab, :mrf)
|> Map.put(:type, :group)

View file

@ -0,0 +1,87 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiMentionSpamPolicy do
alias Pleroma.Config
alias Pleroma.User
require Pleroma.Constants
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
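# Configuration sketch (illustrative): the only option read by this policy is
# `:user_age_limit`, the minimum account age in milliseconds (default 30_000).
#
#     config :pleroma, :mrf_antimentionspam, user_age_limit: 30_000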
defp user_has_posted?(%User{} = u), do: u.note_count > 0
defp user_has_age?(%User{} = u) do
user_age_limit = Config.get([:mrf_antimentionspam, :user_age_limit], 30_000)
diff = NaiveDateTime.utc_now() |> NaiveDateTime.diff(u.inserted_at, :millisecond)
diff >= user_age_limit
end
defp good_reputation?(%User{} = u) do
user_has_age?(u) and user_has_posted?(u)
end
# copied from HellthreadPolicy
defp get_recipient_count(message) do
recipients = (message["to"] || []) ++ (message["cc"] || [])
follower_collection =
User.get_cached_by_ap_id(message["actor"] || message["attributedTo"]).follower_address
if Enum.member?(recipients, Pleroma.Constants.as_public()) do
recipients =
recipients
|> List.delete(Pleroma.Constants.as_public())
|> List.delete(follower_collection)
{:public, length(recipients)}
else
recipients =
recipients
|> List.delete(follower_collection)
{:not_public, length(recipients)}
end
end
defp object_has_recipients?(%{"object" => object} = activity) do
{_, object_count} = get_recipient_count(object)
{_, activity_count} = get_recipient_count(activity)
object_count + activity_count > 0
end
defp object_has_recipients?(object) do
{_, count} = get_recipient_count(object)
count > 0
end
@impl true
def filter(%{"type" => "Create", "actor" => actor} = activity) do
with {:ok, %User{local: false} = u} <- User.get_or_fetch_by_ap_id(actor),
{:has_mentions, true} <- {:has_mentions, object_has_recipients?(activity)},
{:good_reputation, true} <- {:good_reputation, good_reputation?(u)} do
{:ok, activity}
else
{:ok, %User{local: true}} ->
{:ok, activity}
{:has_mentions, false} ->
{:ok, activity}
{:good_reputation, false} ->
{:reject, "[AntiMentionSpamPolicy] User rejected"}
{:error, _} ->
{:reject, "[AntiMentionSpamPolicy] Failed to get or fetch user by ap_id"}
e ->
{:reject, "[AntiMentionSpamPolicy] Unhandled error #{inspect(e)}"}
end
end
# in all other cases, pass through
def filter(message), do: {:ok, message}
@impl true
def describe, do: {:ok, %{}}
end

View file

@ -0,0 +1,146 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy do
@moduledoc """
Dynamic activity filtering based on an RBL database
This MRF makes queries to a custom DNS server which will
respond with values indicating the classification of the domain
the activity originated from. This method has been widely used
in the email anti-spam industry for very fast reputation checks.
e.g., if the DNS response is 127.0.0.1 or empty, the domain is OK
Other values such as 127.0.0.2 may be used for specific classifications.
Information about why the host is blocked can be stored in a corresponding TXT record.
This method is fail-open, so if the queries fail the activities are accepted.
An example of software meant for this purpose is rbldnsd, which can be found
at http://www.corpit.ru/mjt/rbldnsd.html or mirrored at
https://git.pleroma.social/feld/rbldnsd
It is highly recommended that you run your own copy of rbldnsd and use an
external mechanism to sync/share the contents of the zone file. This is
important to keep the latency on the queries as low as possible and to prevent
your DNS server from being taken down by an attack, since a failure would cause content to be permitted.
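A minimal configuration sketch (the nameserver, port and zone values are
illustrative; see `config_description/0` for the available options):

config :pleroma, :mrf_dnsrbl,
nameserver: "127.0.0.1",
port: "53",
zone: "bl.pleroma.com"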
"""
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
alias Pleroma.Config
require Logger
@query_retries 1
@query_timeout 500
@impl true
def filter(%{"actor" => actor} = object) do
actor_info = URI.parse(actor)
with {:ok, object} <- check_rbl(actor_info, object) do
{:ok, object}
else
_ -> {:reject, "[DNSRBLPolicy]"}
end
end
@impl true
def filter(object), do: {:ok, object}
@impl true
def describe do
mrf_dnsrbl =
Config.get(:mrf_dnsrbl)
|> Enum.into(%{})
{:ok, %{mrf_dnsrbl: mrf_dnsrbl}}
end
@impl true
def config_description do
%{
key: :mrf_dnsrbl,
related_policy: "Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy",
label: "MRF DNSRBL",
description: "DNS RealTime Blackhole Policy",
children: [
%{
key: :nameserver,
type: {:string},
description: "DNSRBL Nameserver to Query (IP or hostame)",
suggestions: ["127.0.0.1"]
},
%{
key: :port,
type: {:string},
description: "Nameserver port",
suggestions: ["53"]
},
%{
key: :zone,
type: {:string},
description: "Root zone for querying",
suggestions: ["bl.pleroma.com"]
}
]
}
end
defp check_rbl(%{host: actor_host}, object) do
with false <- match?(^actor_host, Pleroma.Web.Endpoint.host()),
zone when not is_nil(zone) <- Keyword.get(Config.get([:mrf_dnsrbl]), :zone) do
query =
Enum.join([actor_host, zone], ".")
|> String.to_charlist()
rbl_response = rblquery(query)
if Enum.empty?(rbl_response) do
{:ok, object}
else
Task.start(fn ->
reason =
case rblquery(query, :txt) do
[[result]] -> result
_ -> "undefined"
end
Logger.warning(
"DNSRBL Rejected activity from #{actor_host} for reason: #{inspect(reason)}"
)
end)
:error
end
else
_ -> {:ok, object}
end
end
defp get_rblhost_ip(rblhost) do
case rblhost |> String.to_charlist() |> :inet_parse.address() do
{:ok, _} -> rblhost |> String.to_charlist() |> :inet_parse.address()
_ -> {:ok, rblhost |> String.to_charlist() |> :inet_res.lookup(:in, :a) |> Enum.random()}
end
end
defp rblquery(query, type \\ :a) do
config = Config.get([:mrf_dnsrbl])
case get_rblhost_ip(config[:nameserver]) do
{:ok, rblnsip} ->
:inet_res.lookup(query, :in, type,
nameservers: [{rblnsip, config[:port]}],
timeout: @query_timeout,
retry: @query_retries
)
_ ->
[]
end
end
end

View file

@ -49,7 +49,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do
"#{__MODULE__}: Follow request from #{follower.nickname} to #{user.nickname}"
)
CommonAPI.follow(follower, user)
CommonAPI.follow(user, follower)
end
end)

View file

@ -11,11 +11,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
require Logger
@adapter_options [
pool: :media,
recv_timeout: 10_000
]
@impl true
def history_awareness, do: :auto
@ -27,17 +22,14 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
Logger.debug("Prefetching #{inspect(url)} as #{inspect(prefetch_url)}")
if Pleroma.Config.get(:env) == :test do
fetch(prefetch_url)
else
ConcurrentLimiter.limit(__MODULE__, fn ->
Task.start(fn -> fetch(prefetch_url) end)
end)
end
fetch(prefetch_url)
end
end
defp fetch(url), do: HTTP.get(url, [], @adapter_options)
defp fetch(url) do
http_client_opts = Pleroma.Config.get([:media_proxy, :proxy_opts, :http], pool: :media)
HTTP.get(url, [], http_client_opts)
end
defp preload(%{"object" => %{"attachment" => attachments}} = _message) do
Enum.each(attachments, fn

View file

@ -0,0 +1,264 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy do
@moduledoc """
Hide, delete, or mark NSFW content as sensitive using artificial intelligence.
Requires an NSFW API server, configured like so:
config :pleroma, Pleroma.Web.ActivityPub.MRF.NsfwMRF,
url: "http://127.0.0.1:5000/",
threshold: 0.7,
mark_sensitive: true,
unlist: false,
reject: false
The NSFW API server must implement an HTTP endpoint like this:
curl http://localhost:5000/?url=https://fedi.com/images/001.jpg
Returning a response like this:
{"score", 0.314}
Where a score is 0-1, with `1` being definitely NSFW.
A good API server is here: https://github.com/EugenCepoi/nsfw_api
You can run it with Docker with a one-liner:
docker run -it -p 127.0.0.1:5000:5000/tcp --env PORT=5000 eugencepoi/nsfw_api:latest
Options:
- `url`: Base URL of the API server. Default: "http://127.0.0.1:5000/"
- `threshold`: Lowest score to take action on. Default: `0.7`
- `mark_sensitive`: Mark all detected NSFW content as sensitive? Default: `true`
- `unlist`: Unlist all detected NSFW content? Default: `false`
- `reject`: Reject all detected NSFW content (takes precedence)? Default: `false`
"""
alias Pleroma.Config
alias Pleroma.Constants
alias Pleroma.HTTP
alias Pleroma.User
require Logger
require Pleroma.Constants
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
@policy :mrf_nsfw_api
def build_request_url(url) do
Config.get([@policy, :url])
|> URI.parse()
|> fix_path()
|> Map.put(:query, "url=#{url}")
|> URI.to_string()
end
def parse_url(url) do
request = build_request_url(url)
with {:ok, %Tesla.Env{body: body}} <- HTTP.get(request) do
Jason.decode(body)
else
error ->
Logger.warning("""
[NsfwApiPolicy]: The API server failed. Skipping.
#{inspect(error)}
""")
error
end
end
def check_url_nsfw(url) when is_binary(url) do
threshold = Config.get([@policy, :threshold])
case parse_url(url) do
{:ok, %{"score" => score}} when score >= threshold ->
{:nsfw, %{url: url, score: score, threshold: threshold}}
{:ok, %{"score" => score}} ->
{:sfw, %{url: url, score: score, threshold: threshold}}
_ ->
{:sfw, %{url: url, score: nil, threshold: threshold}}
end
end
def check_url_nsfw(%{"href" => url}) when is_binary(url) do
check_url_nsfw(url)
end
def check_url_nsfw(url) do
threshold = Config.get([@policy, :threshold])
{:sfw, %{url: url, score: nil, threshold: threshold}}
end
def check_attachment_nsfw(%{"url" => urls} = attachment) when is_list(urls) do
if Enum.all?(urls, &match?({:sfw, _}, check_url_nsfw(&1))) do
{:sfw, attachment}
else
{:nsfw, attachment}
end
end
def check_attachment_nsfw(%{"url" => url} = attachment) when is_binary(url) do
case check_url_nsfw(url) do
{:sfw, _} -> {:sfw, attachment}
{:nsfw, _} -> {:nsfw, attachment}
end
end
def check_attachment_nsfw(attachment), do: {:sfw, attachment}
def check_object_nsfw(%{"attachment" => attachments} = object) when is_list(attachments) do
if Enum.all?(attachments, &match?({:sfw, _}, check_attachment_nsfw(&1))) do
{:sfw, object}
else
{:nsfw, object}
end
end
def check_object_nsfw(%{"object" => %{} = child_object} = object) do
case check_object_nsfw(child_object) do
{:sfw, _} -> {:sfw, object}
{:nsfw, _} -> {:nsfw, object}
end
end
def check_object_nsfw(object), do: {:sfw, object}
@impl true
def filter(object) do
with {:sfw, object} <- check_object_nsfw(object) do
{:ok, object}
else
{:nsfw, _data} -> handle_nsfw(object)
end
end
defp handle_nsfw(object) do
if Config.get([@policy, :reject]) do
{:reject, object}
else
{:ok,
object
|> maybe_unlist()
|> maybe_mark_sensitive()}
end
end
defp maybe_unlist(object) do
if Config.get([@policy, :unlist]) do
unlist(object)
else
object
end
end
defp maybe_mark_sensitive(object) do
if Config.get([@policy, :mark_sensitive]) do
mark_sensitive(object)
else
object
end
end
def unlist(%{"to" => to, "cc" => cc, "actor" => actor} = object) do
with %User{} = user <- User.get_cached_by_ap_id(actor) do
to =
[user.follower_address | to]
|> List.delete(Constants.as_public())
|> Enum.uniq()
cc =
[Constants.as_public() | cc]
|> List.delete(user.follower_address)
|> Enum.uniq()
object
|> Map.put("to", to)
|> Map.put("cc", cc)
else
_ -> raise "[NsfwApiPolicy]: Could not find user #{actor}"
end
end
def mark_sensitive(%{"object" => child_object} = object) when is_map(child_object) do
Map.put(object, "object", mark_sensitive(child_object))
end
def mark_sensitive(object) when is_map(object) do
tags = (object["tag"] || []) ++ ["nsfw"]
object
|> Map.put("tag", tags)
|> Map.put("sensitive", true)
end
# Hackney needs a trailing slash
defp fix_path(%URI{path: path} = uri) when is_binary(path) do
path = String.trim_trailing(path, "/") <> "/"
Map.put(uri, :path, path)
end
defp fix_path(%URI{path: nil} = uri), do: Map.put(uri, :path, "/")
@impl true
def describe do
options = %{
threshold: Config.get([@policy, :threshold]),
mark_sensitive: Config.get([@policy, :mark_sensitive]),
unlist: Config.get([@policy, :unlist]),
reject: Config.get([@policy, :reject])
}
{:ok, %{@policy => options}}
end
@impl true
def config_description do
%{
key: @policy,
related_policy: to_string(__MODULE__),
label: "NSFW API Policy",
description:
"Hide, delete, or mark sensitive NSFW content with artificial intelligence. Requires running an external API server.",
children: [
%{
key: :url,
type: :string,
description: "Base URL of the API server.",
suggestions: ["http://127.0.0.1:5000/"]
},
%{
key: :threshold,
type: :float,
description: "Lowest score to take action on. Between 0 and 1.",
suggestions: [0.7]
},
%{
key: :mark_sensitive,
type: :boolean,
description: "Mark sensitive all detected NSFW content?",
suggestions: [true]
},
%{
key: :unlist,
type: :boolean,
description: "Unlist sensitive all detected NSFW content?",
suggestions: [false]
},
%{
key: :reject,
type: :boolean,
description: "Reject sensitive all detected NSFW content (takes precedence)?",
suggestions: [false]
}
]
}
end
end

View file

@ -220,9 +220,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicy do
{:ok, object} <- check_object(object) do
{:ok, object}
else
{:reject, nil} -> {:reject, "[SimplePolicy]"}
{:reject, _} = e -> e
_ -> {:reject, "[SimplePolicy]"}
end
end
@ -236,9 +234,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicy do
{:ok, object} <- check_banner_removal(actor_info, object) do
{:ok, object}
else
{:reject, nil} -> {:reject, "[SimplePolicy]"}
{:reject, _} = e -> e
_ -> {:reject, "[SimplePolicy]"}
end
end
@ -249,9 +245,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicy do
{:ok, object} <- check_reject(uri, object) do
{:ok, object}
else
{:reject, nil} -> {:reject, "[SimplePolicy]"}
{:reject, _} = e -> e
_ -> {:reject, "[SimplePolicy]"}
end
end

View file

@ -31,7 +31,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.VocabularyPolicy do
{:reject, _} = e -> e
{:accepted, _} -> {:reject, "[VocabularyPolicy] #{message_type} not in accept list"}
{:rejected, _} -> {:reject, "[VocabularyPolicy] #{message_type} in reject list"}
_ -> {:reject, "[VocabularyPolicy]"}
end
end

View file

@ -15,6 +15,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
field(:type, :string, default: "Link")
field(:mediaType, ObjectValidators.MIME, default: "application/octet-stream")
field(:name, :string)
field(:summary, :string)
field(:blurhash, :string)
embeds_many :url, UrlObjectValidator, primary_key: false do
@ -44,7 +45,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
|> fix_url()
struct
|> cast(data, [:id, :type, :mediaType, :name, :blurhash])
|> cast(data, [:id, :type, :mediaType, :name, :summary, :blurhash])
|> cast_embed(:url, with: &url_changeset/2, required: true)
|> validate_inclusion(:type, ~w[Link Document Audio Image Video])
|> validate_required([:type, :mediaType])

View file

@ -23,7 +23,7 @@ defmodule Pleroma.Web.ActivityPub.Pipeline do
defp config, do: Config.get([:pipeline, :config], Config)
@spec common_pipeline(map(), keyword()) ::
{:ok, Activity.t() | Object.t(), keyword()} | {:error, any()}
{:ok, Activity.t() | Object.t(), keyword()} | {:error | :reject, any()}
def common_pipeline(object, meta) do
case Repo.transaction(fn -> do_common_pipeline(object, meta) end, Utils.query_timeout()) do
{:ok, {:ok, activity, meta}} ->

View file

@ -80,13 +80,26 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
parameters set:
* `inbox`: the inbox to publish to
* `json`: the JSON message body representing the ActivityPub message
* `actor`: the actor which is signing the message
* `id`: the ActivityStreams URI of the message
* `activity_id`: the internal activity id
* `cc`: the cc recipients relevant to this inbox (optional)
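For example (illustrative values; `activity` stands for an activity that is
already stored locally):

publish_one(%{
inbox: "https://example.com/users/alice/inbox",
activity_id: activity.id,
cc: ["https://example.com/users/bob"]
})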
"""
def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = params) do
Logger.debug("Federating #{id} to #{inbox}")
def publish_one(%{inbox: inbox, activity_id: activity_id} = params) do
activity = Activity.get_by_id_with_user_actor(activity_id)
actor = activity.user_actor
ap_id = activity.data["id"]
Logger.debug("Federating #{ap_id} to #{inbox}")
uri = %{path: path} = URI.parse(inbox)
{:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
cc = Map.get(params, :cc)
json =
data
|> Map.put("cc", cc)
|> Jason.encode!()
digest = "SHA-256=" <> (:crypto.hash(:sha256, json) |> Base.encode64())
date = Pleroma.Signature.signed_date()
@ -119,36 +132,34 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
else
{_post_result, %{status: code} = response} = e ->
unless params[:unreachable_since], do: Instances.set_unreachable(inbox)
Logger.metadata(activity: id, inbox: inbox, status: code)
Logger.metadata(activity: activity_id, inbox: inbox, status: code)
Logger.error("Publisher failed to inbox #{inbox} with status #{code}")
case response do
%{status: 403} -> {:discard, :forbidden}
%{status: 404} -> {:discard, :not_found}
%{status: 410} -> {:discard, :not_found}
%{status: 400} -> {:cancel, :bad_request}
%{status: 403} -> {:cancel, :forbidden}
%{status: 404} -> {:cancel, :not_found}
%{status: 410} -> {:cancel, :not_found}
_ -> {:error, e}
end
{:error, {:already_started, _}} ->
Logger.debug("Publisher snoozing worker job due worker :already_started race condition")
connection_pool_snooze()
{:error, :pool_full} ->
Logger.debug("Publisher snoozing worker job due to full connection pool")
{:snooze, 30}
connection_pool_snooze()
e ->
unless params[:unreachable_since], do: Instances.set_unreachable(inbox)
Logger.metadata(activity: id, inbox: inbox)
Logger.metadata(activity: activity_id, inbox: inbox)
Logger.error("Publisher failed to inbox #{inbox} #{inspect(e)}")
{:error, e}
end
end
def publish_one(%{actor_id: actor_id} = params) do
actor = User.get_cached_by_id(actor_id)
params
|> Map.delete(:actor_id)
|> Map.put(:actor, actor)
|> publish_one()
end
defp connection_pool_snooze, do: {:snooze, 3}
defp signature_host(%URI{port: port, scheme: scheme, host: host}) do
if port == URI.default_port(scheme) do
@ -250,7 +261,6 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
def publish(%User{} = actor, %{data: %{"bcc" => bcc}} = activity)
when is_list(bcc) and bcc != [] do
public = public?(activity)
{:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
[priority_recipients, recipients] = recipients(actor, activity)
@ -275,16 +285,10 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
# instance would only accept a first message for the first recipient and ignore the rest.
cc = get_cc_ap_ids(ap_id, recipients)
json =
data
|> Map.put("cc", cc)
|> Jason.encode!()
__MODULE__.enqueue_one(%{
inbox: inbox,
json: json,
actor_id: actor.id,
id: activity.data["id"],
cc: cc,
activity_id: activity.id,
unreachable_since: unreachable_since
})
end)
@ -301,9 +305,6 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
Relay.publish(activity)
end
{:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
json = Jason.encode!(data)
[priority_inboxes, inboxes] =
recipients(actor, activity)
|> Enum.map(fn recipients ->
@ -325,9 +326,7 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
__MODULE__.enqueue_one(
%{
inbox: inbox,
json: json,
actor_id: actor.id,
id: activity.data["id"],
activity_id: activity.id,
unreachable_since: unreachable_since
},
priority: priority

View file

@ -22,7 +22,7 @@ defmodule Pleroma.Web.ActivityPub.Relay do
def follow(target_instance) do
with %User{} = local_user <- get_actor(),
{:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_instance),
{:ok, _, _, activity} <- CommonAPI.follow(local_user, target_user) do
{:ok, _, _, activity} <- CommonAPI.follow(target_user, local_user) do
Logger.info("relay: followed instance: #{target_instance}; id=#{activity.data["id"]}")
{:ok, activity}
else

View file

@ -453,7 +453,7 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
) do
orig_object_ap_id = updated_object["id"]
orig_object = Object.get_by_ap_id(orig_object_ap_id)
orig_object_data = orig_object.data
orig_object_data = Map.get(orig_object, :data)
updated_object =
if meta[:local] do
@ -592,9 +592,9 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
with {:ok, _} <- Repo.delete(object), do: :ok
end
defp send_notifications(meta) do
defp stream_notifications(meta) do
Keyword.get(meta, :notifications, [])
|> Notification.send()
|> Notification.stream()
meta
end
@ -625,7 +625,7 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
@impl true
def handle_after_transaction(meta) do
meta
|> send_notifications()
|> stream_notifications()
|> send_streamables()
end
end

View file

@ -530,6 +530,9 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
else
_ -> e
end
e ->
{:error, e}
end
end
@ -913,9 +916,11 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
def add_emoji_tags(object), do: object
defp build_emoji_tag({name, url}) do
def build_emoji_tag({name, url}) do
url = URI.encode(url)
%{
"icon" => %{"url" => "#{URI.encode(url)}", "type" => "Image"},
"icon" => %{"url" => "#{url}", "type" => "Image"},
"name" => ":" <> name <> ":",
"type" => "Emoji",
"updated" => "1970-01-01T00:00:00Z",

View file

@ -939,26 +939,14 @@ defmodule Pleroma.Web.ActivityPub.Utils do
|> Repo.all()
end
@spec maybe_handle_group_posts(Activity.t()) :: :ok
@doc "Automatically repeats posts for local group actor recipients"
def maybe_handle_group_posts(activity) do
poster = User.get_cached_by_ap_id(activity.actor)
mentions =
activity.data["to"]
|> Enum.filter(&(&1 != activity.actor))
mentioned_local_groups =
User.get_all_by_ap_id(mentions)
|> Enum.filter(fn user ->
user.actor_type == "Group" and
user.local and
not User.blocks?(user, poster)
end)
mentioned_local_groups
|> Enum.each(fn group ->
Pleroma.Web.CommonAPI.repeat(activity.id, group)
end)
:ok
User.get_recipients_from_activity(activity)
|> Enum.filter(&match?("Group", &1.actor_type))
|> Enum.reject(&User.blocks?(&1, poster))
|> Enum.each(&Pleroma.Web.CommonAPI.repeat(activity.id, &1))
end
end

View file

@ -13,6 +13,7 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
alias Pleroma.ModerationLog
alias Pleroma.Stats
alias Pleroma.User
alias Pleroma.User.Backup
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.AdminAPI
alias Pleroma.Web.AdminAPI.AccountView
@ -429,7 +430,9 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
def create_backup(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do
with %User{} = user <- User.get_by_nickname(nickname),
{:ok, _} <- Pleroma.User.Backup.create(user, admin.id) do
%Backup{} = backup <- Backup.new(user),
{:ok, inserted_backup} <- Pleroma.Repo.insert(backup),
{:ok, %Oban.Job{}} <- Backup.schedule_backup(inserted_backup) do
ModerationLog.insert_log(%{actor: admin, subject: user, action: "create_backup"})
json(conn, "")

View file

@ -46,7 +46,6 @@ defmodule Pleroma.Web.AdminAPI.InviteController do
render(conn, "show.json", invite: updated_invite)
else
nil -> {:error, :not_found}
error -> error
end
end

View file

@ -24,7 +24,7 @@ defmodule Pleroma.Web.AdminAPI.RuleController do
plug(OAuthScopesPlug, %{scopes: ["admin:read"]} when action == :index)
action_fallback(AdminAPI.FallbackController)
action_fallback(Pleroma.Web.AdminAPI.FallbackController)
defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.Admin.RuleOperation

View file

@ -18,6 +18,8 @@ defmodule Pleroma.Web.ApiSpec.CastAndValidate do
alias OpenApiSpex.Plug.PutApiSpec
alias Plug.Conn
require Logger
@impl Plug
def init(opts) do
opts
@ -51,6 +53,10 @@ defmodule Pleroma.Web.ApiSpec.CastAndValidate do
conn
{:error, reason} ->
Logger.error(
"Strict ApiSpec: request denied to #{conn.request_path} with params #{inspect(conn.params)}"
)
opts = render_error.init(reason)
conn

View file

@ -203,7 +203,10 @@ defmodule Pleroma.Web.ApiSpec.NotificationOperation do
"move",
"follow_request",
"poll",
"status"
"status",
"update",
"admin.sign_up",
"admin.report"
],
description: """
The type of event that resulted in the notification.
@ -218,6 +221,9 @@ defmodule Pleroma.Web.ApiSpec.NotificationOperation do
- `pleroma:chat_mention` - Someone mentioned you in a chat message
- `pleroma:report` - Someone was reported
- `status` - Someone you are subscribed to created a status
- `update` - A status you boosted has been edited
- `admin.sign_up` - Someone signed up (optionally sent to admins)
- `admin.report` - A new report has been filed
"""
}
end

View file

@ -65,12 +65,7 @@ defmodule Pleroma.Web.ApiSpec.PleromaBackupOperation do
file_name: %Schema{type: :string},
file_size: %Schema{type: :integer},
processed: %Schema{type: :boolean, description: "whether this backup has succeeded"},
state: %Schema{
type: :string,
description: "the state of the backup",
enum: ["pending", "running", "complete", "failed"]
},
processed_number: %Schema{type: :integer, description: "the number of records processed"}
tempdir: %Schema{type: :string}
},
example: %{
"content_type" => "application/zip",
@ -79,8 +74,7 @@ defmodule Pleroma.Web.ApiSpec.PleromaBackupOperation do
"file_size" => 4105,
"inserted_at" => "2020-09-08T16:42:07.000Z",
"processed" => true,
"state" => "complete",
"processed_number" => 20
"tempdir" => "/tmp/PZIMw40vmpM"
}
}
end

View file

@ -79,7 +79,9 @@ defmodule Pleroma.Web.ApiSpec.SearchOperation do
%Schema{type: :string, enum: ["accounts", "hashtags", "statuses"]},
"Search type"
),
Operation.parameter(:q, :query, %Schema{type: :string}, "The search query", required: true),
Operation.parameter(:q, :query, %Schema{type: :string}, "The search query",
required: true
),
Operation.parameter(
:resolve,
:query,

View file

@ -31,11 +31,17 @@ defmodule Pleroma.Web.ApiSpec.StatusOperation do
security: [%{"oAuth" => ["read:statuses"]}],
parameters: [
Operation.parameter(
:ids,
:id,
:query,
%Schema{type: :array, items: FlakeID},
"Array of status IDs"
),
Operation.parameter(
:ids,
:query,
%Schema{type: :array, items: FlakeID},
"Deprecated, use `id` instead"
),
Operation.parameter(
:with_muted,
:query,

View file

@ -139,7 +139,7 @@ defmodule Pleroma.Web.ApiSpec.StreamingOperation do
end
defp get_schema(%Schema{} = schema), do: schema
defp get_schema(schema), do: schema.schema
defp get_schema(schema), do: schema.schema()
defp server_sent_event_helper(name, description, type, payload, opts \\ []) do
payload_type = Keyword.get(opts, :payload_type, :json)

View file

@ -50,7 +50,11 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Attachment do
pleroma: %Schema{
type: :object,
properties: %{
mime_type: %Schema{type: :string, description: "mime type of the attachment"}
mime_type: %Schema{type: :string, description: "mime type of the attachment"},
name: %Schema{
type: :string,
description: "Name of the attachment, typically the filename"
}
}
}
},

View file

@ -68,7 +68,7 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Chat do
},
"id" => "1",
"unread" => 2,
"last_message" => ChatMessage.schema().example(),
"last_message" => ChatMessage.schema().example,
"updated_at" => "2020-04-21T15:06:45.000Z"
}
})

View file

@ -91,7 +91,8 @@ defmodule Pleroma.Web.Auth.LDAPAuthenticator do
end
error ->
error
Logger.error("Could not bind LDAP user #{name}: #{inspect(error)}")
{:error, {:ldap_bind_error, error}}
end
end
@ -102,28 +103,38 @@ defmodule Pleroma.Web.Auth.LDAPAuthenticator do
{:scope, :eldap.wholeSubtree()},
{:timeout, @search_timeout}
]) do
{:ok, {:eldap_search_result, [{:eldap_entry, _, attributes}], _}} ->
params = %{
name: name,
nickname: name,
password: nil
}
# The :eldap_search_result record structure changed in OTP 24.3 and added a controls field
# https://github.com/erlang/otp/pull/5538
{:ok, {:eldap_search_result, [{:eldap_entry, _object, attributes}], _referrals}} ->
try_register(name, attributes)
params =
case List.keyfind(attributes, 'mail', 0) do
{_, [mail]} -> Map.put_new(params, :email, :erlang.list_to_binary(mail))
_ -> params
end
changeset = User.register_changeset_ldap(%User{}, params)
case User.register(changeset) do
{:ok, user} -> user
error -> error
end
{:ok, {:eldap_search_result, [{:eldap_entry, _object, attributes}], _referrals, _controls}} ->
try_register(name, attributes)
error ->
error
Logger.error("Couldn't register user because LDAP search failed: #{inspect(error)}")
{:error, {:ldap_search_error, error}}
end
end
defp try_register(name, attributes) do
params = %{
name: name,
nickname: name,
password: nil
}
params =
case List.keyfind(attributes, ~c"mail", 0) do
{_, [mail]} -> Map.put_new(params, :email, :erlang.list_to_binary(mail))
_ -> params
end
changeset = User.register_changeset_ldap(%User{}, params)
case User.register(changeset) do
{:ok, user} -> user
error -> error
end
end
end

View file

@ -19,19 +19,23 @@ defmodule Pleroma.Web.CommonAPI do
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.CommonAPI.ActivityDraft
import Ecto.Query, only: [where: 3]
import Pleroma.Web.Gettext
import Pleroma.Web.CommonAPI.Utils
require Pleroma.Constants
require Logger
def block(blocker, blocked) do
@spec block(User.t(), User.t()) :: {:ok, Activity.t()} | {:error, any()}
def block(blocked, blocker) do
with {:ok, block_data, _} <- Builder.block(blocker, blocked),
{:ok, block, _} <- Pipeline.common_pipeline(block_data, local: true) do
{:ok, block}
end
end
@spec post_chat_message(User.t(), User.t(), String.t(), list()) ::
{:ok, Activity.t()} | {:error, any()}
def post_chat_message(%User{} = user, %User{} = recipient, content, opts \\ []) do
with maybe_attachment <- opts[:media_id] && Object.get_by_id(opts[:media_id]),
:ok <- validate_chat_attachment_attribution(maybe_attachment, user),
@ -95,7 +99,8 @@ defmodule Pleroma.Web.CommonAPI do
end
end
def unblock(blocker, blocked) do
@spec unblock(User.t(), User.t()) :: {:ok, Activity.t()} | {:error, any()}
def unblock(blocked, blocker) do
with {_, %Activity{} = block} <- {:fetch_block, Utils.fetch_latest_block(blocker, blocked)},
{:ok, unblock_data, _} <- Builder.undo(blocker, block),
{:ok, unblock, _} <- Pipeline.common_pipeline(unblock_data, local: true) do
@ -114,7 +119,9 @@ defmodule Pleroma.Web.CommonAPI do
end
end
def follow(follower, followed) do
@spec follow(User.t(), User.t()) ::
{:ok, User.t(), User.t(), Activity.t() | Object.t()} | {:error, :rejected}
def follow(followed, follower) do
timeout = Pleroma.Config.get([:activitypub, :follow_handshake_timeout])
with {:ok, follow_data, _} <- Builder.follow(follower, followed),
@ -128,7 +135,8 @@ defmodule Pleroma.Web.CommonAPI do
end
end
def unfollow(follower, unfollowed) do
@spec unfollow(User.t(), User.t()) :: {:ok, User.t()} | {:error, any()}
def unfollow(unfollowed, follower) do
with {:ok, follower, _follow_activity} <- User.unfollow(follower, unfollowed),
{:ok, _activity} <- ActivityPub.unfollow(follower, unfollowed),
{:ok, _subscription} <- User.unsubscribe(follower, unfollowed),
@ -137,6 +145,7 @@ defmodule Pleroma.Web.CommonAPI do
end
end
@spec accept_follow_request(User.t(), User.t()) :: {:ok, User.t()} | {:error, any()}
def accept_follow_request(follower, followed) do
with %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed),
{:ok, accept_data, _} <- Builder.accept(followed, follow_activity),
@ -145,6 +154,7 @@ defmodule Pleroma.Web.CommonAPI do
end
end
@spec reject_follow_request(User.t(), User.t()) :: {:ok, User.t()} | {:error, any()} | nil
def reject_follow_request(follower, followed) do
with %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed),
{:ok, reject_data, _} <- Builder.reject(followed, follow_activity),
@ -153,9 +163,11 @@ defmodule Pleroma.Web.CommonAPI do
end
end
@spec delete(String.t(), User.t()) :: {:ok, Activity.t()} | {:error, any()}
def delete(activity_id, user) do
with {_, %Activity{data: %{"object" => _, "type" => "Create"}} = activity} <-
{:find_activity, Activity.get_by_id(activity_id, filter: [])},
{_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(activity)},
{_, %Object{} = object, _} <-
{:find_object, Object.normalize(activity, fetch: false), activity},
true <- User.privileged?(user, :messages_delete) || user.ap_id == object.data["actor"],
@ -201,6 +213,7 @@ defmodule Pleroma.Web.CommonAPI do
end
end
@spec repeat(String.t(), User.t(), map()) :: {:ok, Activity.t()} | {:error, any()}
def repeat(id, user, params \\ %{}) do
with %Activity{data: %{"type" => "Create"}} = activity <- Activity.get_by_id(id),
object = %Object{} <- Object.normalize(activity, fetch: false),
@ -218,11 +231,13 @@ defmodule Pleroma.Web.CommonAPI do
end
end
@spec unrepeat(String.t(), User.t()) :: {:ok, Activity.t()} | {:error, any()}
def unrepeat(id, user) do
with {_, %Activity{data: %{"type" => "Create"}} = activity} <-
{:find_activity, Activity.get_by_id(id)},
%Object{} = note <- Object.normalize(activity, fetch: false),
%Activity{} = announce <- Utils.get_existing_announce(user.ap_id, note),
{_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(announce)},
{:ok, undo, _} <- Builder.undo(user, announce),
{:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do
{:ok, activity}
@ -232,8 +247,8 @@ defmodule Pleroma.Web.CommonAPI do
end
end
@spec favorite(User.t(), binary()) :: {:ok, Activity.t() | :already_liked} | {:error, any()}
def favorite(%User{} = user, id) do
@spec favorite(String.t(), User.t()) :: {:ok, Activity.t()} | {:error, any()}
def favorite(id, %User{} = user) do
case favorite_helper(user, id) do
{:ok, _} = res ->
res
@ -247,7 +262,7 @@ defmodule Pleroma.Web.CommonAPI do
end
end
def favorite_helper(user, id) do
defp favorite_helper(user, id) do
with {_, %Activity{object: object}} <- {:find_object, Activity.get_by_id_with_object(id)},
{_, {:ok, like_object, meta}} <- {:build_object, Builder.like(user, object)},
{_, {:ok, %Activity{} = activity, _meta}} <-
@ -270,11 +285,13 @@ defmodule Pleroma.Web.CommonAPI do
end
end
@spec unfavorite(String.t(), User.t()) :: {:ok, Activity.t()} | {:error, any()}
def unfavorite(id, user) do
with {_, %Activity{data: %{"type" => "Create"}} = activity} <-
{:find_activity, Activity.get_by_id(id)},
%Object{} = note <- Object.normalize(activity, fetch: false),
%Activity{} = like <- Utils.get_existing_like(user.ap_id, note),
{_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(like)},
{:ok, undo, _} <- Builder.undo(user, like),
{:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do
{:ok, activity}
@ -284,6 +301,8 @@ defmodule Pleroma.Web.CommonAPI do
end
end
@spec react_with_emoji(String.t(), User.t(), String.t()) ::
{:ok, Activity.t()} | {:error, any()}
def react_with_emoji(id, user, emoji) do
with %Activity{} = activity <- Activity.get_by_id(id),
object <- Object.normalize(activity, fetch: false),
@ -296,8 +315,11 @@ defmodule Pleroma.Web.CommonAPI do
end
end
@spec unreact_with_emoji(String.t(), User.t(), String.t()) ::
{:ok, Activity.t()} | {:error, any()}
def unreact_with_emoji(id, user, emoji) do
with %Activity{} = reaction_activity <- Utils.get_latest_reaction(id, user, emoji),
{_, {:ok, _}} <- {:cancel_jobs, maybe_cancel_jobs(reaction_activity)},
{:ok, undo, _} <- Builder.undo(user, reaction_activity),
{:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do
{:ok, activity}
@ -307,7 +329,8 @@ defmodule Pleroma.Web.CommonAPI do
end
end
def vote(user, %{data: %{"type" => "Question"}} = object, choices) do
@spec vote(Object.t(), User.t(), list()) :: {:ok, list(), Object.t()} | {:error, any()}
def vote(%Object{data: %{"type" => "Question"}} = object, %User{} = user, choices) do
with :ok <- validate_not_author(object, user),
:ok <- validate_existing_votes(user, object),
{:ok, options, choices} <- normalize_and_validate_choices(choices, object) do
@ -368,14 +391,16 @@ defmodule Pleroma.Web.CommonAPI do
end
end
def public_announce?(_, %{visibility: visibility})
when visibility in ~w{public unlisted private direct},
do: visibility in ~w(public unlisted)
defp public_announce?(_, %{visibility: visibility})
when visibility in ~w{public unlisted private direct},
do: visibility in ~w(public unlisted)
def public_announce?(object, _) do
defp public_announce?(object, _) do
Visibility.public?(object)
end
@spec get_visibility(map(), map() | nil, Participation.t() | nil) ::
{String.t() | nil, String.t() | nil}
def get_visibility(_, _, %Participation{}), do: {"direct", "direct"}
def get_visibility(%{visibility: visibility}, in_reply_to, _)
@ -394,6 +419,7 @@ defmodule Pleroma.Web.CommonAPI do
def get_visibility(_, in_reply_to, _), do: {"public", get_replied_to_visibility(in_reply_to)}
@spec get_replied_to_visibility(Activity.t() | nil) :: String.t() | nil
def get_replied_to_visibility(nil), do: nil
def get_replied_to_visibility(activity) do
@ -402,6 +428,8 @@ defmodule Pleroma.Web.CommonAPI do
end
end
@spec check_expiry_date({:ok, nil | integer()} | String.t()) ::
{:ok, boolean() | nil} | {:error, String.t()}
def check_expiry_date({:ok, nil} = res), do: res
def check_expiry_date({:ok, in_seconds}) do
@ -419,19 +447,22 @@ defmodule Pleroma.Web.CommonAPI do
|> check_expiry_date()
end
@spec listen(User.t(), map()) :: {:ok, Activity.t()} | {:error, any()}
def listen(user, data) do
with {:ok, draft} <- ActivityDraft.listen(user, data) do
ActivityPub.listen(draft.changes)
end
end
@spec post(User.t(), map()) :: {:ok, Activity.t()} | {:error, any()}
def post(user, %{status: _} = data) do
with {:ok, draft} <- ActivityDraft.create(user, data) do
ActivityPub.create(draft.changes, draft.preview?)
end
end
def update(user, orig_activity, changes) do
@spec update(Activity.t(), User.t(), map()) :: {:ok, Activity.t()} | {:error, any()}
def update(orig_activity, %User{} = user, changes) do
with orig_object <- Object.normalize(orig_activity),
{:ok, new_object} <- make_update_data(user, orig_object, changes),
{:ok, update_data, _} <- Builder.update(user, new_object),
@ -521,7 +552,8 @@ defmodule Pleroma.Web.CommonAPI do
end
end
def add_mute(user, activity, params \\ %{}) do
@spec add_mute(Activity.t(), User.t(), map()) :: {:ok, Activity.t()} | {:error, any()}
def add_mute(activity, user, params \\ %{}) do
expires_in = Map.get(params, :expires_in, 0)
with {:ok, _} <- ThreadMute.add_mute(user.id, activity.data["context"]),
@ -540,15 +572,17 @@ defmodule Pleroma.Web.CommonAPI do
end
end
def remove_mute(%User{} = user, %Activity{} = activity) do
@spec remove_mute(Activity.t(), User.t()) :: {:ok, Activity.t()} | {:error, any()}
def remove_mute(%Activity{} = activity, %User{} = user) do
ThreadMute.remove_mute(user.id, activity.data["context"])
{:ok, activity}
end
def remove_mute(user_id, activity_id) do
@spec remove_mute(String.t(), String.t()) :: {:ok, Activity.t()} | {:error, any()}
def remove_mute(activity_id, user_id) do
with {:user, %User{} = user} <- {:user, User.get_by_id(user_id)},
{:activity, %Activity{} = activity} <- {:activity, Activity.get_by_id(activity_id)} do
remove_mute(user, activity)
remove_mute(activity, user)
else
{what, result} = error ->
Logger.warning(
@ -559,13 +593,15 @@ defmodule Pleroma.Web.CommonAPI do
end
end
def thread_muted?(%User{id: user_id}, %{data: %{"context" => context}})
@spec thread_muted?(Activity.t(), User.t()) :: boolean()
def thread_muted?(%{data: %{"context" => context}}, %User{id: user_id})
when is_binary(context) do
ThreadMute.exists?(user_id, context)
end
def thread_muted?(_, _), do: false
@spec report(User.t(), map()) :: {:ok, Activity.t()} | {:error, any()}
def report(user, data) do
with {:ok, account} <- get_reported_account(data.account_id),
{:ok, {content_html, _, _}} <- make_report_content_html(data[:comment]),
@ -599,6 +635,8 @@ defmodule Pleroma.Web.CommonAPI do
|> Enum.filter(&Rule.exists?/1)
end
@spec update_report_state(String.t() | [String.t()], String.t()) ::
{:ok, any()} | {:error, any()}
def update_report_state(activity_ids, state) when is_list(activity_ids) do
case Utils.update_report_state(activity_ids, state) do
:ok -> {:ok, activity_ids}
@ -611,17 +649,16 @@ defmodule Pleroma.Web.CommonAPI do
Utils.update_report_state(activity, state)
else
nil -> {:error, :not_found}
_ -> {:error, dgettext("errors", "Could not update state")}
end
end
@spec update_activity_scope(String.t(), map()) :: {:ok, any()} | {:error, any()}
def update_activity_scope(activity_id, opts \\ %{}) do
with %Activity{} = activity <- Activity.get_by_id_with_object(activity_id),
{:ok, activity} <- toggle_sensitive(activity, opts) do
set_visibility(activity, opts)
else
nil -> {:error, :not_found}
{:error, reason} -> {:error, reason}
end
end
@ -649,14 +686,17 @@ defmodule Pleroma.Web.CommonAPI do
defp set_visibility(activity, _), do: {:ok, activity}
def hide_reblogs(%User{} = user, %User{} = target) do
@spec hide_reblogs(User.t(), User.t()) :: {:ok, any()} | {:error, any()}
def hide_reblogs(%User{} = target, %User{} = user) do
UserRelationship.create_reblog_mute(user, target)
end
def show_reblogs(%User{} = user, %User{} = target) do
@spec show_reblogs(User.t(), User.t()) :: {:ok, any()} | {:error, any()}
def show_reblogs(%User{} = target, %User{} = user) do
UserRelationship.delete_reblog_mute(user, target)
end
@spec get_user(String.t(), boolean()) :: User.t() | nil
def get_user(ap_id, fake_record_fallback \\ true) do
cond do
user = User.get_cached_by_ap_id(ap_id) ->
@ -673,4 +713,14 @@ defmodule Pleroma.Web.CommonAPI do
nil
end
end
defp maybe_cancel_jobs(%Activity{id: activity_id}) do
Oban.Job
|> where([j], j.worker == "Pleroma.Workers.PublisherWorker")
|> where([j], j.args["op"] == "publish_one")
|> where([j], j.args["params"]["activity_id"] == ^activity_id)
|> Oban.cancel_all_jobs()
end
defp maybe_cancel_jobs(_), do: {:ok, 0}
end

View file

@ -129,8 +129,22 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
defp in_reply_to(%{params: %{in_reply_to_status_id: ""}} = draft), do: draft
defp in_reply_to(%{params: %{in_reply_to_status_id: id}} = draft) when is_binary(id) do
%__MODULE__{draft | in_reply_to: Activity.get_by_id(id)}
defp in_reply_to(%{params: %{in_reply_to_status_id: :deleted}} = draft) do
add_error(draft, dgettext("errors", "Cannot reply to a deleted status"))
end
defp in_reply_to(%{params: %{in_reply_to_status_id: id} = params} = draft) when is_binary(id) do
activity = Activity.get_by_id(id)
params =
if is_nil(activity) do
# Deleted activities are returned as nil
Map.put(params, :in_reply_to_status_id, :deleted)
else
Map.put(params, :in_reply_to_status_id, activity)
end
in_reply_to(%{draft | params: params})
end
defp in_reply_to(%{params: %{in_reply_to_status_id: %Activity{} = in_reply_to}} = draft) do

View file

@ -38,6 +38,8 @@ defmodule Pleroma.Web.Endpoint do
plug(Plug.Telemetry, event_prefix: [:phoenix, :endpoint])
plug(Pleroma.Web.Plugs.LoggerMetadataPath)
plug(Pleroma.Web.Plugs.SetLocalePlug)
plug(CORSPlug)
plug(Pleroma.Web.Plugs.HTTPSecurityPlug)

View file

@ -35,16 +35,18 @@ defmodule Pleroma.Web.Federator do
end
# Client API
def incoming_ap_doc(%{params: params, req_headers: req_headers}) do
def incoming_ap_doc(%{params: _params, req_headers: _req_headers} = args) do
job_args = Enum.into(args, %{}, fn {k, v} -> {Atom.to_string(k), v} end)
ReceiverWorker.enqueue(
"incoming_ap_doc",
%{"req_headers" => req_headers, "params" => params, "timeout" => :timer.seconds(20)},
Map.put(job_args, "timeout", :timer.seconds(20)),
priority: 2
)
end
def incoming_ap_doc(%{"type" => "Delete"} = params) do
ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}, priority: 3)
ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params}, priority: 3, queue: :slow)
end
def incoming_ap_doc(params) do

View file

@ -460,7 +460,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do
end
def unfollow(%{assigns: %{user: follower, account: followed}} = conn, _params) do
with {:ok, follower} <- CommonAPI.unfollow(follower, followed) do
with {:ok, follower} <- CommonAPI.unfollow(followed, follower) do
render(conn, "relationship.json", user: follower, target: followed)
end
end
@ -495,7 +495,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do
@doc "POST /api/v1/accounts/:id/block"
def block(%{assigns: %{user: blocker, account: blocked}} = conn, _params) do
with {:ok, _activity} <- CommonAPI.block(blocker, blocked) do
with {:ok, _activity} <- CommonAPI.block(blocked, blocker) do
render(conn, "relationship.json", user: blocker, target: blocked)
else
{:error, message} -> json_response(conn, :forbidden, %{error: message})
@ -504,7 +504,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do
@doc "POST /api/v1/accounts/:id/unblock"
def unblock(%{assigns: %{user: blocker, account: blocked}} = conn, _params) do
with {:ok, _activity} <- CommonAPI.unblock(blocker, blocked) do
with {:ok, _activity} <- CommonAPI.unblock(blocked, blocker) do
render(conn, "relationship.json", user: blocker, target: blocked)
else
{:error, message} -> json_response(conn, :forbidden, %{error: message})

View file

@ -51,7 +51,7 @@ defmodule Pleroma.Web.MastodonAPI.PollController do
with %Object{data: %{"type" => "Question"}} = object <- Object.get_by_id(id),
%Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
true <- Visibility.visible_for_user?(activity, user),
{:ok, _activities, object} <- get_cached_vote_or_vote(user, object, choices) do
{:ok, _activities, object} <- get_cached_vote_or_vote(object, user, choices) do
try_render(conn, "show.json", %{object: object, for: user})
else
nil -> render_error(conn, :not_found, "Record not found")
@ -60,11 +60,11 @@ defmodule Pleroma.Web.MastodonAPI.PollController do
end
end
defp get_cached_vote_or_vote(user, object, choices) do
defp get_cached_vote_or_vote(object, user, choices) do
idempotency_key = "polls:#{user.id}:#{object.data["id"]}"
@cachex.fetch!(:idempotency_cache, idempotency_key, fn _ ->
case CommonAPI.vote(user, object, choices) do
case CommonAPI.vote(object, user, choices) do
{:error, _message} = res -> {:ignore, res}
res -> {:commit, res}
end

View file

@ -111,10 +111,11 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
`ids` query param is required
"""
def index(
%{assigns: %{user: user}, private: %{open_api_spex: %{params: %{ids: ids} = params}}} =
%{assigns: %{user: user}, private: %{open_api_spex: %{params: params}}} =
conn,
_
) do
ids = Map.get(params, :id, Map.get(params, :ids))
limit = 100
activities =
@ -276,7 +277,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
actor <- Activity.user_actor(activity),
{_, true} <- {:own_status, actor.id == user.id},
changes <- body_params |> put_application(conn),
{_, {:ok, _update_activity}} <- {:pipeline, CommonAPI.update(user, activity, changes)},
{_, {:ok, _update_activity}} <- {:pipeline, CommonAPI.update(activity, user, changes)},
{_, %Activity{}} = {_, activity} <- {:refetched, Activity.get_by_id_with_object(id)} do
try_render(conn, "show.json",
activity: activity,
@ -357,7 +358,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
conn,
_
) do
with {:ok, _fav} <- CommonAPI.favorite(user, activity_id),
with {:ok, _fav} <- CommonAPI.favorite(activity_id, user),
%Activity{} = activity <- Activity.get_by_id(activity_id) do
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
end
@ -453,7 +454,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
_
) do
with %Activity{} = activity <- Activity.get_by_id(id),
{:ok, activity} <- CommonAPI.add_mute(user, activity, params) do
{:ok, activity} <- CommonAPI.add_mute(activity, user, params) do
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
end
end
@ -467,7 +468,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
_
) do
with %Activity{} = activity <- Activity.get_by_id(id),
{:ok, activity} <- CommonAPI.remove_mute(user, activity) do
{:ok, activity} <- CommonAPI.remove_mute(activity, user) do
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
end
end

View file

@ -16,7 +16,7 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPI do
def follow(follower, followed, params \\ %{}) do
result =
if not User.following?(follower, followed) do
CommonAPI.follow(follower, followed)
CommonAPI.follow(followed, follower)
else
{:ok, follower, followed, nil}
end
@ -30,11 +30,11 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPI do
end
defp set_reblogs_visibility(false, {:ok, follower, followed, _}) do
CommonAPI.hide_reblogs(follower, followed)
CommonAPI.hide_reblogs(followed, follower)
end
defp set_reblogs_visibility(_, {:ok, follower, followed, _}) do
CommonAPI.show_reblogs(follower, followed)
CommonAPI.show_reblogs(followed, follower)
end
defp set_subscription(true, {:ok, follower, followed, _}) do

View file

@ -152,6 +152,7 @@ defmodule Pleroma.Web.MastodonAPI.InstanceView do
def federation do
quarantined = Config.get([:instance, :quarantined_instances], [])
rejected = Config.get([:instance, :rejected_instances], [])
if Config.get([:mrf, :transparency]) do
{:ok, data} = MRF.describe()
@ -171,6 +172,12 @@ defmodule Pleroma.Web.MastodonAPI.InstanceView do
|> Enum.map(fn {instance, reason} -> {instance, %{"reason" => reason}} end)
|> Map.new()
})
|> Map.put(
:rejected_instances,
rejected
|> Enum.map(fn {instance, reason} -> {instance, %{"reason" => reason}} end)
|> Map.new()
)
else
%{}
end

View file

@ -30,7 +30,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
# pagination is restricted to 40 activities at a time
defp fetch_rich_media_for_activities(activities) do
Enum.each(activities, fn activity ->
spawn(fn -> Card.get_by_activity(activity) end)
Card.get_by_activity(activity)
end)
end
@ -293,7 +293,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
cond do
is_nil(opts[:for]) -> false
is_boolean(activity.thread_muted?) -> activity.thread_muted?
true -> CommonAPI.thread_muted?(opts[:for], activity)
true -> CommonAPI.thread_muted?(activity, opts[:for])
end
attachment_data = object.data["attachment"] || []
@ -624,6 +624,19 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
to_string(attachment["id"] || hash_id)
end
description =
if attachment["summary"] do
HTML.strip_tags(attachment["summary"])
else
attachment["name"]
end
name = if attachment["summary"], do: attachment["name"]
pleroma =
%{mime_type: media_type}
|> Maps.put_if_present(:name, name)
%{
id: attachment_id,
url: href,
@ -631,8 +644,8 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
preview_url: href_preview,
text_url: href,
type: type,
description: attachment["name"],
pleroma: %{mime_type: media_type},
description: description,
pleroma: pleroma,
blurhash: attachment["blurhash"]
}
|> Maps.put_if_present(:meta, meta)
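
A minimal sketch (invented values, not from the diff) of the new description/name precedence above, assuming `Pleroma.HTML.strip_tags/1` returns the plain text of its argument:
attachment = %{"name" => "cat.png", "summary" => "<p>A cat</p>"}

description =
  if attachment["summary"],
    do: Pleroma.HTML.strip_tags(attachment["summary"]),
    else: attachment["name"]

name = if attachment["summary"], do: attachment["name"]
# description => "A cat", name => "cat.png"; without a "summary" the
# description falls back to "cat.png" and pleroma.name is left unset.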

View file

@ -104,7 +104,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
end
def handle_info(:close, state) do
{:stop, {:closed, 'connection closed by server'}, state}
{:stop, {:closed, ~c"connection closed by server"}, state}
end
def handle_info(msg, state) do

View file

@ -75,8 +75,7 @@ defmodule Pleroma.Web.MediaProxy do
%{host: domain} = URI.parse(url)
mediaproxy_whitelist_domains =
[:media_proxy, :whitelist]
|> Config.get()
Config.get([:media_proxy, :whitelist], [])
|> Kernel.++(["#{Upload.base_url()}"])
|> Enum.map(&maybe_get_domain_from_url/1)

View file

@ -54,9 +54,10 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do
defp handle_preview(conn, url) do
media_proxy_url = MediaProxy.url(url)
http_client_opts = Pleroma.Config.get([:media_proxy, :proxy_opts, :http], pool: :media)
with {:ok, %{status: status} = head_response} when status in 200..299 <-
Pleroma.HTTP.request(:head, media_proxy_url, "", [], pool: :media) do
Pleroma.HTTP.request(:head, media_proxy_url, "", [], http_client_opts) do
content_type = Tesla.get_header(head_response, "content-type")
content_length = Tesla.get_header(head_response, "content-length")
content_length = content_length && String.to_integer(content_length)
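
The HTTP options for the preview HEAD request now come from configuration instead of a hard-coded pool. A hypothetical override; whatever keyword list is configured here is passed straight through as the options argument of `Pleroma.HTTP.request/5`:
config :pleroma, :media_proxy,
  proxy_opts: [
    # defaults to [pool: :media] when unset
    http: [pool: :media]
  ]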

View file

@ -25,11 +25,14 @@ defmodule Pleroma.Web.Metadata.Utils do
|> scrub_html_and_truncate_object_field(object)
end
def scrub_html_and_truncate(%{data: %{"content" => content}} = object) do
def scrub_html_and_truncate(%{data: %{"content" => content}} = object)
when is_binary(content) and content != "" do
content
|> scrub_html_and_truncate_object_field(object)
end
def scrub_html_and_truncate(%{}), do: ""
def scrub_html_and_truncate(content, max_length \\ 200, omission \\ "...")
when is_binary(content) do
content
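
A quick sketch of how the tightened guards dispatch (illustrative inputs, not from the diff):
alias Pleroma.Web.Metadata.Utils

Utils.scrub_html_and_truncate(%{data: %{"content" => "<p>hello</p>"}})
# matches the non-empty binary clause and returns the scrubbed, truncated text

Utils.scrub_html_and_truncate(%{data: %{"content" => ""}})
# skips that clause and hits the new catch-all, returning ""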

View file

@ -62,7 +62,7 @@ defmodule Pleroma.Web.OAuth.App do
|> Repo.insert()
end
@spec update(pos_integer(), map()) :: {:ok, t()} | {:error, Ecto.Changeset.t()}
@spec update(pos_integer(), map()) :: {:ok, t()} | {:error, Ecto.Changeset.t()} | nil
def update(id, params) do
with %__MODULE__{} = app <- Repo.get(__MODULE__, id) do
app

View file

@ -96,7 +96,7 @@ defmodule Pleroma.Web.OAuth.Token do
|> validate_required([:valid_until])
end
@spec create(App.t(), User.t(), map()) :: {:ok, Token} | {:error, Ecto.Changeset.t()}
@spec create(App.t(), User.t(), map()) :: {:ok, Token.t()} | {:error, Ecto.Changeset.t()}
def create(%App{} = app, %User{} = user, attrs \\ %{}) do
with {:ok, token} <- do_create(app, user, attrs) do
if Pleroma.Config.get([:oauth2, :clean_expired_tokens]) do

View file

@ -20,7 +20,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupController do
end
def create(%{assigns: %{user: user}} = conn, _params) do
with {:ok, _} <- Backup.create(user) do
with {:ok, _} <- Backup.user(user) do
backups = Backup.list(user)
render(conn, "index.json", backups: backups)
end

View file

@ -9,22 +9,12 @@ defmodule Pleroma.Web.PleromaAPI.BackupView do
alias Pleroma.Web.CommonAPI.Utils
def render("show.json", %{backup: %Backup{} = backup}) do
# To deal with records before the migration
state =
if backup.state == :invalid do
if backup.processed, do: :complete, else: :failed
else
backup.state
end
%{
id: backup.id,
content_type: backup.content_type,
url: download_url(backup),
file_size: backup.file_size,
processed: backup.processed,
state: to_string(state),
processed_number: backup.processed_number,
inserted_at: Utils.to_masto_date(backup.inserted_at)
}
end

View file

@ -3,26 +3,27 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
alias Pleroma.Config
import Plug.Conn
require Logger
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
def init(opts), do: opts
def call(conn, _options) do
if Config.get([:http_security, :enabled]) do
if @config_impl.get([:http_security, :enabled]) do
conn
|> merge_resp_headers(headers())
|> maybe_send_sts_header(Config.get([:http_security, :sts]))
|> maybe_send_sts_header(@config_impl.get([:http_security, :sts]))
else
conn
end
end
def primary_frontend do
with %{"name" => frontend} <- Config.get([:frontends, :primary]),
available <- Config.get([:frontends, :available]),
with %{"name" => frontend} <- @config_impl.get([:frontends, :primary]),
available <- @config_impl.get([:frontends, :available]),
%{} = primary_frontend <- Map.get(available, frontend) do
{:ok, primary_frontend}
end
@ -37,8 +38,8 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
end
def headers do
referrer_policy = Config.get([:http_security, :referrer_policy])
report_uri = Config.get([:http_security, :report_uri])
referrer_policy = @config_impl.get([:http_security, :referrer_policy])
report_uri = @config_impl.get([:http_security, :report_uri])
custom_http_frontend_headers = custom_http_frontend_headers()
headers = [
@ -86,10 +87,10 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
@csp_start [Enum.join(static_csp_rules, ";") <> ";"]
defp csp_string do
scheme = Config.get([Pleroma.Web.Endpoint, :url])[:scheme]
scheme = @config_impl.get([Pleroma.Web.Endpoint, :url])[:scheme]
static_url = Pleroma.Web.Endpoint.static_url()
websocket_url = Pleroma.Web.Endpoint.websocket_url()
report_uri = Config.get([:http_security, :report_uri])
report_uri = @config_impl.get([:http_security, :report_uri])
img_src = "img-src 'self' data: blob:"
media_src = "media-src 'self'"
@ -97,8 +98,8 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
# Strict multimedia CSP enforcement only when MediaProxy is enabled
{img_src, media_src, connect_src} =
if Config.get([:media_proxy, :enabled]) &&
!Config.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do
if @config_impl.get([:media_proxy, :enabled]) &&
!@config_impl.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do
sources = build_csp_multimedia_source_list()
{
@ -115,17 +116,21 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
end
connect_src =
if Config.get(:env) == :dev do
if @config_impl.get([:env]) == :dev do
[connect_src, " http://localhost:3035/"]
else
connect_src
end
script_src =
if Config.get(:env) == :dev do
"script-src 'self' 'unsafe-eval'"
if @config_impl.get([:http_security, :allow_unsafe_eval]) do
if @config_impl.get([:env]) == :dev do
"script-src 'self' 'unsafe-eval'"
else
"script-src 'self' 'wasm-unsafe-eval'"
end
else
"script-src 'self' 'wasm-unsafe-eval'"
"script-src 'self'"
end
report = if report_uri, do: ["report-uri ", report_uri, ";report-to csp-endpoint"]
@ -161,11 +166,11 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
defp build_csp_multimedia_source_list do
media_proxy_whitelist =
[:media_proxy, :whitelist]
|> Config.get()
|> @config_impl.get()
|> build_csp_from_whitelist([])
captcha_method = Config.get([Pleroma.Captcha, :method])
captcha_endpoint = Config.get([captcha_method, :endpoint])
captcha_method = @config_impl.get([Pleroma.Captcha, :method])
captcha_endpoint = @config_impl.get([captcha_method, :endpoint])
base_endpoints =
[
@ -173,7 +178,7 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
[Pleroma.Upload, :base_url],
[Pleroma.Uploaders.S3, :public_endpoint]
]
|> Enum.map(&Config.get/1)
|> Enum.map(&@config_impl.get/1)
[captcha_endpoint | base_endpoints]
|> Enum.map(&build_csp_param/1)
@ -200,7 +205,7 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
end
def warn_if_disabled do
unless Config.get([:http_security, :enabled]) do
unless Pleroma.Config.get([:http_security, :enabled]) do
Logger.warning("
.i;;;;i.
iYcviii;vXY:
@ -245,8 +250,8 @@ your instance and your users via malicious posts:
end
defp maybe_send_sts_header(conn, true) do
max_age_sts = Config.get([:http_security, :sts_max_age])
max_age_ct = Config.get([:http_security, :ct_max_age])
max_age_sts = @config_impl.get([:http_security, :sts_max_age])
max_age_ct = @config_impl.get([:http_security, :ct_max_age])
merge_resp_headers(conn, [
{"strict-transport-security", "max-age=#{max_age_sts}; includeSubDomains"},

View file

@ -3,10 +3,18 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do
alias Pleroma.Helpers.InetHelper
import Plug.Conn
import Phoenix.Controller, only: [get_format: 1, text: 2]
alias Pleroma.Signature
alias Pleroma.Web.ActivityPub.MRF
require Logger
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
def init(options) do
options
end
@ -19,54 +27,14 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do
if get_format(conn) in ["json", "activity+json"] do
conn
|> maybe_assign_valid_signature()
|> maybe_assign_actor_id()
|> maybe_require_signature()
|> maybe_filter_requests()
else
conn
end
end
defp validate_signature(conn, request_target) do
# Newer drafts for HTTP signatures now use @request-target instead of the
# old (request-target). We'll now support both for incoming signatures.
conn =
conn
|> put_req_header("(request-target)", request_target)
|> put_req_header("@request-target", request_target)
HTTPSignatures.validate_conn(conn)
end
defp validate_signature(conn) do
# This (request-target) is non-standard, but many implementations do it
# this way due to a misinterpretation of
# https://datatracker.ietf.org/doc/html/draft-cavage-http-signatures-06
# "path" was interpreted as not having the query, though later examples
# show that it must be the absolute path + query. This behavior is kept to
# make sure most software (Pleroma itself, Mastodon, and probably others)
# do not break.
request_target = String.downcase("#{conn.method}") <> " #{conn.request_path}"
# This is the proper way to build the @request-target, as expected by
# many HTTP signature libraries, clarified in the following draft:
# https://www.ietf.org/archive/id/draft-ietf-httpbis-message-signatures-11.html#section-2.2.6
# It is the same as before, but containing the query part as well.
proper_target = request_target <> "?#{conn.query_string}"
cond do
# Normal, non-standard behavior but expected by Pleroma and more.
validate_signature(conn, request_target) ->
true
# Has query string and the previous one failed: let's try the standard.
conn.query_string != "" ->
validate_signature(conn, proper_target)
# If there's no query string and signature fails, it's rotten.
true ->
false
end
end
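
# Illustration only (not part of the diff): for a request like
# GET /objects/123?page=true, the two candidate signing strings described
# above would be
#   "get /objects/123"             # legacy (request-target), query dropped
#   "get /objects/123?page=true"   # draft-11 @request-target, query kept
# The legacy form is tried first; the query-preserving form is only attempted
# when the query string is non-empty.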
defp maybe_assign_valid_signature(conn) do
if has_signature_header?(conn) do
# we replace the digest header with the one we computed in DigestPlug
@ -76,27 +44,70 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do
conn -> conn
end
assign(conn, :valid_signature, validate_signature(conn))
assign(conn, :valid_signature, Signature.validate_signature(conn))
else
Logger.debug("No signature header!")
conn
end
end
defp maybe_assign_actor_id(%{assigns: %{valid_signature: true}} = conn) do
adapter = Application.get_env(:http_signatures, :adapter)
{:ok, actor_id} = adapter.get_actor_id(conn)
assign(conn, :actor_id, actor_id)
end
defp maybe_assign_actor_id(conn), do: conn
defp has_signature_header?(conn) do
conn |> get_req_header("signature") |> Enum.at(0, false)
end
defp maybe_require_signature(%{assigns: %{valid_signature: true}} = conn), do: conn
defp maybe_require_signature(conn) do
if Pleroma.Config.get([:activitypub, :authorized_fetch_mode], false) do
conn
|> put_status(:unauthorized)
|> text("Request not signed")
|> halt()
defp maybe_require_signature(%{remote_ip: remote_ip} = conn) do
if @config_impl.get([:activitypub, :authorized_fetch_mode], false) do
exceptions =
@config_impl.get([:activitypub, :authorized_fetch_mode_exceptions], [])
|> Enum.map(&InetHelper.parse_cidr/1)
if Enum.any?(exceptions, fn x -> InetCidr.contains?(x, remote_ip) end) do
conn
else
conn
|> put_status(:unauthorized)
|> text("Request not signed")
|> halt()
end
else
conn
end
end
defp maybe_filter_requests(%{halted: true} = conn), do: conn
defp maybe_filter_requests(conn) do
if @config_impl.get([:activitypub, :authorized_fetch_mode], false) and
conn.assigns[:actor_id] do
%{host: host} = URI.parse(conn.assigns.actor_id)
if MRF.subdomain_match?(rejected_domains(), host) do
conn
|> put_status(:unauthorized)
|> halt()
else
conn
end
else
conn
end
end
defp rejected_domains do
@config_impl.get([:instance, :rejected_instances])
|> Pleroma.Web.ActivityPub.MRF.instance_list_from_tuples()
|> Pleroma.Web.ActivityPub.MRF.subdomains_regex()
end
end
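
`authorized_fetch_mode_exceptions` is new here: every entry is parsed with `InetHelper.parse_cidr/1` and checked against the request IP with `InetCidr.contains?/2`. A hypothetical configuration (the ranges are examples only):
config :pleroma, :activitypub,
  authorized_fetch_mode: true,
  authorized_fetch_mode_exceptions: ["10.0.0.0/8", "192.168.0.0/16"]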

View file

@ -0,0 +1,12 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.LoggerMetadataPath do
def init(opts), do: opts
def call(conn, _) do
Logger.metadata(path: conn.request_path)
conn
end
end

View file

@ -0,0 +1,18 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Plugs.LoggerMetadataUser do
alias Pleroma.User
def init(opts), do: opts
def call(%{assigns: %{user: user = %User{}}} = conn, _) do
Logger.metadata(user: user.nickname)
conn
end
def call(conn, _) do
conn
end
end
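
These two plugs only attach metadata; it becomes visible once the plugs are mounted in the endpoint/router pipelines (not shown in this excerpt) and the logger formatter is told to print the keys. A sketch of the logger side:
config :logger, :console, metadata: [:request_id, :path, :user]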

View file

@ -52,7 +52,7 @@ defmodule Pleroma.Web.Plugs.OAuthPlug do
where: t.token == ^token
)
with %Token{user_id: user_id} = token_record <- Repo.one(token_query),
with %Token{user_id: user_id} = token_record <- Repo.one(token_query) |> Repo.preload(:user),
false <- is_nil(user_id),
%User{} = user <- User.get_cached_by_id(user_id) do
{:ok, user, token_record}

View file

@ -34,7 +34,9 @@ defmodule Pleroma.Web.Plugs.OAuthScopesPlug do
permissions = Enum.join(missing_scopes, " #{op} ")
error_message =
dgettext("errors", "Insufficient permissions: %{permissions}.", permissions: permissions)
dgettext("errors", "Insufficient permissions: %{permissions}.",
permissions: permissions
)
conn
|> put_resp_content_type("application/json")

View file

@ -8,6 +8,7 @@ defmodule Pleroma.Web.Plugs.RemoteIp do
"""
alias Pleroma.Config
alias Pleroma.Helpers.InetHelper
import Plug.Conn
@behaviour Plug
@ -30,19 +31,8 @@ defmodule Pleroma.Web.Plugs.RemoteIp do
proxies =
Config.get([__MODULE__, :proxies], [])
|> Enum.concat(reserved)
|> Enum.map(&maybe_add_cidr/1)
|> Enum.map(&InetHelper.parse_cidr/1)
{headers, proxies}
end
defp maybe_add_cidr(proxy) when is_binary(proxy) do
proxy =
cond do
"/" in String.codepoints(proxy) -> proxy
InetCidr.v4?(InetCidr.parse_address!(proxy)) -> proxy <> "/32"
InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128"
end
InetCidr.parse_cidr!(proxy, true)
end
end
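
The removed `maybe_add_cidr/1` logic (appending /32 or /128 to bare addresses) presumably now lives in `InetHelper.parse_cidr/1`; illustrative expectations under that assumption:
alias Pleroma.Helpers.InetHelper

InetHelper.parse_cidr("127.0.0.1")  # assumed to be treated as "127.0.0.1/32"
InetHelper.parse_cidr("fc00::/7")   # already a range, parsed as-is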

Some files were not shown because too many files have changed in this diff.