Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into image-description-summary
commit f4c0a01f09
791 changed files with 19327 additions and 5997 deletions
@@ -1,113 +0,0 @@
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Mix.Tasks.Pleroma.Benchmark do
|
||||
import Mix.Pleroma
|
||||
use Mix.Task
|
||||
|
||||
def run(["search"]) do
|
||||
start_pleroma()
|
||||
|
||||
Benchee.run(%{
|
||||
"search" => fn ->
|
||||
Pleroma.Activity.search(nil, "cofe")
|
||||
end
|
||||
})
|
||||
end
|
||||
|
||||
def run(["tag"]) do
|
||||
start_pleroma()
|
||||
|
||||
Benchee.run(%{
|
||||
"tag" => fn ->
|
||||
%{"type" => "Create", "tag" => "cofe"}
|
||||
|> Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities()
|
||||
end
|
||||
})
|
||||
end
|
||||
|
||||
def run(["render_timeline", nickname | _] = args) do
|
||||
start_pleroma()
|
||||
user = Pleroma.User.get_by_nickname(nickname)
|
||||
|
||||
activities =
|
||||
%{}
|
||||
|> Map.put("type", ["Create", "Announce"])
|
||||
|> Map.put("blocking_user", user)
|
||||
|> Map.put("muting_user", user)
|
||||
|> Map.put("user", user)
|
||||
|> Map.put("limit", 4096)
|
||||
|> Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities()
|
||||
|> Enum.reverse()
|
||||
|
||||
inputs = %{
|
||||
"1 activity" => Enum.take_random(activities, 1),
|
||||
"10 activities" => Enum.take_random(activities, 10),
|
||||
"20 activities" => Enum.take_random(activities, 20),
|
||||
"40 activities" => Enum.take_random(activities, 40),
|
||||
"80 activities" => Enum.take_random(activities, 80)
|
||||
}
|
||||
|
||||
inputs =
|
||||
if Enum.at(args, 2) == "extended" do
|
||||
Map.merge(inputs, %{
|
||||
"200 activities" => Enum.take_random(activities, 200),
|
||||
"500 activities" => Enum.take_random(activities, 500),
|
||||
"2000 activities" => Enum.take_random(activities, 2000),
|
||||
"4096 activities" => Enum.take_random(activities, 4096)
|
||||
})
|
||||
else
|
||||
inputs
|
||||
end
|
||||
|
||||
Benchee.run(
|
||||
%{
|
||||
"Standart rendering" => fn activities ->
|
||||
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
|
||||
activities: activities,
|
||||
for: user,
|
||||
as: :activity
|
||||
})
|
||||
end
|
||||
},
|
||||
inputs: inputs
|
||||
)
|
||||
end
|
||||
|
||||
def run(["adapters"]) do
|
||||
start_pleroma()
|
||||
|
||||
:ok =
|
||||
Pleroma.Gun.Conn.open(
|
||||
"https://httpbin.org/stream-bytes/1500",
|
||||
:gun_connections
|
||||
)
|
||||
|
||||
Process.sleep(1_500)
|
||||
|
||||
Benchee.run(
|
||||
%{
|
||||
"Without conn and without pool" => fn ->
|
||||
{:ok, %Tesla.Env{}} =
|
||||
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
|
||||
pool: :no_pool,
|
||||
receive_conn: false
|
||||
)
|
||||
end,
|
||||
"Without conn and with pool" => fn ->
|
||||
{:ok, %Tesla.Env{}} =
|
||||
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], receive_conn: false)
|
||||
end,
|
||||
"With reused conn and without pool" => fn ->
|
||||
{:ok, %Tesla.Env{}} =
|
||||
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], pool: :no_pool)
|
||||
end,
|
||||
"With reused conn and with pool" => fn ->
|
||||
{:ok, %Tesla.Env{}} = Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500")
|
||||
end
|
||||
},
|
||||
parallel: 10
|
||||
)
|
||||
end
|
||||
end
|
||||
@@ -193,7 +193,7 @@ defmodule Mix.Tasks.Pleroma.Database do
"ALTER DATABASE #{db} SET default_text_search_config = '#{tsconfig}';"
|
||||
)
|
||||
|
||||
# non-exist config will not raise excpetion but only give >0 messages
|
||||
# non-exist config will not raise exception but only give >0 messages
|
||||
if length(msg) > 0 do
|
||||
shell_info("Error: #{inspect(msg, pretty: true)}")
|
||||
else
|
||||
|
|
|
|||
@@ -30,7 +30,7 @@ defmodule Mix.Tasks.Pleroma.Digest do
shell_info("Digest email have been sent to #{nickname} (#{user.email})")
|
||||
else
|
||||
_ ->
|
||||
shell_info("Cound't find any mentions for #{nickname} since #{last_digest_emailed_at}")
|
||||
shell_info("Couldn't find any mentions for #{nickname} since #{last_digest_emailed_at}")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
@@ -61,7 +61,7 @@ defmodule Mix.Tasks.Pleroma.Ecto.Rollback do
Logger.configure(level: :info)
|
||||
|
||||
if opts[:env] == "test" do
|
||||
Logger.info("Rollback succesfully")
|
||||
Logger.info("Rollback successfully")
|
||||
else
|
||||
{:ok, _, _} =
|
||||
Ecto.Migrator.with_repo(Pleroma.Repo, &Ecto.Migrator.run(&1, path, :down, opts))
|
||||
|
|
|
|||
@@ -111,7 +111,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
|
||||
{:ok, _} =
|
||||
:zip.unzip(binary_archive,
|
||||
cwd: pack_path,
|
||||
cwd: String.to_charlist(pack_path),
|
||||
file_list: files_to_unzip
|
||||
)
|
||||
|
||||
|
|
|
|||
@@ -266,12 +266,20 @@ defmodule Mix.Tasks.Pleroma.Instance do
config_dir = Path.dirname(config_path)
|
||||
psql_dir = Path.dirname(psql_path)
|
||||
|
||||
# Note: Distros requiring group read (0o750) on those directories should
|
||||
# pre-create the directories.
|
||||
[config_dir, psql_dir, static_dir, uploads_dir]
|
||||
|> Enum.reject(&File.exists?/1)
|
||||
|> Enum.map(&File.mkdir_p!/1)
|
||||
|> Enum.each(fn dir ->
|
||||
File.mkdir_p!(dir)
|
||||
File.chmod!(dir, 0o700)
|
||||
end)
|
||||
|
||||
shell_info("Writing config to #{config_path}.")
|
||||
|
||||
# Sadly no fchmod(2) equivalent in Elixir…
|
||||
File.touch!(config_path)
|
||||
File.chmod!(config_path, 0o640)
|
||||
File.write(config_path, result_config)
|
||||
shell_info("Writing the postgres script to #{psql_path}.")
|
||||
File.write(psql_path, result_psql)
|
||||
@@ -284,14 +292,13 @@ defmodule Mix.Tasks.Pleroma.Instance do
|
||||
if db_configurable? do
|
||||
shell_info(
|
||||
" Please transfer your config to the database after running database migrations. Refer to \"Transfering the config to/from the database\" section of the docs for more information."
|
||||
" Please transfer your config to the database after running database migrations. Refer to \"Transferring the config to/from the database\" section of the docs for more information."
|
||||
)
|
||||
end
|
||||
else
|
||||
shell_error(
|
||||
"The task would have overwritten the following files:\n" <>
|
||||
(Enum.map(will_overwrite, &"- #{&1}\n") |> Enum.join("")) <>
|
||||
"Rerun with `--force` to overwrite them."
|
||||
Enum.map_join(will_overwrite, &"- #{&1}\n") <> "Rerun with `--force` to overwrite them."
|
||||
)
|
||||
end
|
||||
end
|
||||
@@ -345,6 +352,4 @@ defmodule Mix.Tasks.Pleroma.Instance do
|
||||
enabled_filters
|
||||
end
|
||||
|
||||
defp upload_filters(_), do: []
|
||||
end
|
||||
|
|
|
|||
145 lib/mix/tasks/pleroma/search/meilisearch.ex Normal file
@@ -0,0 +1,145 @@
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Mix.Tasks.Pleroma.Search.Meilisearch do
|
||||
require Pleroma.Constants
|
||||
|
||||
import Mix.Pleroma
|
||||
import Ecto.Query
|
||||
|
||||
import Pleroma.Search.Meilisearch,
|
||||
only: [meili_post: 2, meili_put: 2, meili_get: 1, meili_delete: 1]
|
||||
|
||||
def run(["index"]) do
|
||||
start_pleroma()
|
||||
Pleroma.HTML.compile_scrubbers()
|
||||
|
||||
meili_version =
|
||||
(
|
||||
{:ok, result} = meili_get("/version")
|
||||
|
||||
result["pkgVersion"]
|
||||
)
|
||||
|
||||
# The ranking rule syntax was changed but nothing about that is mentioned in the changelog
|
||||
if not Version.match?(meili_version, ">= 0.25.0") do
|
||||
raise "Meilisearch <0.24.0 not supported"
|
||||
end
|
||||
|
||||
{:ok, _} =
|
||||
meili_post(
|
||||
"/indexes/objects/settings/ranking-rules",
|
||||
[
|
||||
"published:desc",
|
||||
"words",
|
||||
"exactness",
|
||||
"proximity",
|
||||
"typo",
|
||||
"attribute",
|
||||
"sort"
|
||||
]
|
||||
)
|
||||
|
||||
{:ok, _} =
|
||||
meili_post(
|
||||
"/indexes/objects/settings/searchable-attributes",
|
||||
[
|
||||
"content"
|
||||
]
|
||||
)
|
||||
|
||||
IO.puts("Created indices. Starting to insert posts.")
|
||||
|
||||
chunk_size = Pleroma.Config.get([Pleroma.Search.Meilisearch, :initial_indexing_chunk_size])
|
||||
|
||||
Pleroma.Repo.transaction(
|
||||
fn ->
|
||||
query =
|
||||
from(Pleroma.Object,
|
||||
# Only index public and unlisted posts which are notes and have some text
|
||||
where:
|
||||
fragment("data->>'type' = 'Note'") and
|
||||
(fragment("data->'to' \\? ?", ^Pleroma.Constants.as_public()) or
|
||||
fragment("data->'cc' \\? ?", ^Pleroma.Constants.as_public())),
|
||||
order_by: [desc: fragment("data->'published'")]
|
||||
)
|
||||
|
||||
count = query |> Pleroma.Repo.aggregate(:count, :data)
|
||||
IO.puts("Entries to index: #{count}")
|
||||
|
||||
Pleroma.Repo.stream(
|
||||
query,
|
||||
timeout: :infinity
|
||||
)
|
||||
|> Stream.map(&Pleroma.Search.Meilisearch.object_to_search_data/1)
|
||||
|> Stream.filter(fn o -> not is_nil(o) end)
|
||||
|> Stream.chunk_every(chunk_size)
|
||||
|> Stream.transform(0, fn objects, acc ->
|
||||
new_acc = acc + Enum.count(objects)
|
||||
|
||||
# Reset to the beginning of the line and rewrite it
|
||||
IO.write("\r")
|
||||
IO.write("Indexed #{new_acc} entries")
|
||||
|
||||
{[objects], new_acc}
|
||||
end)
|
||||
|> Stream.each(fn objects ->
|
||||
result =
|
||||
meili_put(
|
||||
"/indexes/objects/documents",
|
||||
objects
|
||||
)
|
||||
|
||||
with {:ok, res} <- result do
|
||||
if not Map.has_key?(res, "uid") do
|
||||
IO.puts("\nFailed to index: #{inspect(result)}")
|
||||
end
|
||||
else
|
||||
e -> IO.puts("\nFailed to index due to network error: #{inspect(e)}")
|
||||
end
|
||||
end)
|
||||
|> Stream.run()
|
||||
end,
|
||||
timeout: :infinity
|
||||
)
|
||||
|
||||
IO.write("\n")
|
||||
end
|
||||
|
||||
def run(["clear"]) do
|
||||
start_pleroma()
|
||||
|
||||
meili_delete("/indexes/objects/documents")
|
||||
end
|
||||
|
||||
def run(["show-keys", master_key]) do
|
||||
start_pleroma()
|
||||
|
||||
endpoint = Pleroma.Config.get([Pleroma.Search.Meilisearch, :url])
|
||||
|
||||
{:ok, result} =
|
||||
Pleroma.HTTP.get(
|
||||
Path.join(endpoint, "/keys"),
|
||||
[{"Authorization", "Bearer #{master_key}"}]
|
||||
)
|
||||
|
||||
decoded = Jason.decode!(result.body)
|
||||
|
||||
if decoded["results"] do
|
||||
Enum.each(decoded["results"], fn %{"description" => desc, "key" => key} ->
|
||||
IO.puts("#{desc}: #{key}")
|
||||
end)
|
||||
else
|
||||
IO.puts("Error fetching the keys, check the master key is correct: #{inspect(decoded)}")
|
||||
end
|
||||
end
|
||||
|
||||
def run(["stats"]) do
|
||||
start_pleroma()
|
||||
|
||||
{:ok, result} = meili_get("/indexes/objects/stats")
|
||||
IO.puts("Number of entries: #{result["numberOfDocuments"]}")
|
||||
IO.puts("Indexing? #{result["isIndexing"]}")
|
||||
end
|
||||
end
|
||||
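The new file above defines four `run/1` clauses; as a rough usage sketch (assuming the standard Mix task naming derived from the module name, i.e. `mix pleroma.search.meilisearch ...`), they correspond to calls like:

    # sketch: direct invocations of the task clauses defined above
    Mix.Tasks.Pleroma.Search.Meilisearch.run(["index"])   # create settings and index existing public Notes
    Mix.Tasks.Pleroma.Search.Meilisearch.run(["stats"])   # print document count and indexing state
    Mix.Tasks.Pleroma.Search.Meilisearch.run(["clear"])   # drop all indexed documents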
@@ -1,94 +0,0 @@
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Phoenix.Transports.WebSocket.Raw do
|
||||
import Plug.Conn,
|
||||
only: [
|
||||
fetch_query_params: 1,
|
||||
send_resp: 3
|
||||
]
|
||||
|
||||
alias Phoenix.Socket.Transport
|
||||
|
||||
def default_config do
|
||||
[
|
||||
timeout: 60_000,
|
||||
transport_log: false,
|
||||
cowboy: Phoenix.Endpoint.CowboyWebSocket
|
||||
]
|
||||
end
|
||||
|
||||
def init(%Plug.Conn{method: "GET"} = conn, {endpoint, handler, transport}) do
|
||||
{_, opts} = handler.__transport__(transport)
|
||||
|
||||
conn =
|
||||
conn
|
||||
|> fetch_query_params
|
||||
|> Transport.transport_log(opts[:transport_log])
|
||||
|> Transport.force_ssl(handler, endpoint, opts)
|
||||
|> Transport.check_origin(handler, endpoint, opts)
|
||||
|
||||
case conn do
|
||||
%{halted: false} = conn ->
|
||||
case handler.connect(%{
|
||||
endpoint: endpoint,
|
||||
transport: transport,
|
||||
options: [serializer: nil],
|
||||
params: conn.params
|
||||
}) do
|
||||
{:ok, socket} ->
|
||||
{:ok, conn, {__MODULE__, {socket, opts}}}
|
||||
|
||||
:error ->
|
||||
send_resp(conn, :forbidden, "")
|
||||
{:error, conn}
|
||||
end
|
||||
|
||||
_ ->
|
||||
{:error, conn}
|
||||
end
|
||||
end
|
||||
|
||||
def init(conn, _) do
|
||||
send_resp(conn, :bad_request, "")
|
||||
{:error, conn}
|
||||
end
|
||||
|
||||
def ws_init({socket, config}) do
|
||||
Process.flag(:trap_exit, true)
|
||||
{:ok, %{socket: socket}, config[:timeout]}
|
||||
end
|
||||
|
||||
def ws_handle(op, data, state) do
|
||||
state.socket.handler
|
||||
|> apply(:handle, [op, data, state])
|
||||
|> case do
|
||||
{op, data} ->
|
||||
{:reply, {op, data}, state}
|
||||
|
||||
{op, data, state} ->
|
||||
{:reply, {op, data}, state}
|
||||
|
||||
%{} = state ->
|
||||
{:ok, state}
|
||||
|
||||
_ ->
|
||||
{:ok, state}
|
||||
end
|
||||
end
|
||||
|
||||
def ws_info({_, _} = tuple, state) do
|
||||
{:reply, tuple, state}
|
||||
end
|
||||
|
||||
def ws_info(_tuple, state), do: {:ok, state}
|
||||
|
||||
def ws_close(state) do
|
||||
ws_handle(:closed, :normal, state)
|
||||
end
|
||||
|
||||
def ws_terminate(reason, state) do
|
||||
ws_handle(:closed, reason, state)
|
||||
end
|
||||
end
|
||||
@@ -368,7 +368,7 @@ defmodule Pleroma.Activity do
)
|
||||
end
|
||||
|
||||
defdelegate search(user, query, options \\ []), to: Pleroma.Activity.Search
|
||||
defdelegate search(user, query, options \\ []), to: Pleroma.Search.DatabaseSearch
|
||||
|
||||
def direct_conversation_id(activity, for_user) do
|
||||
alias Pleroma.Conversation.Participation
|
||||
|
|
|
|||
@@ -28,7 +28,7 @@ defmodule Pleroma.Activity.HTML do
end
|
||||
end
|
||||
|
||||
defp add_cache_key_for(activity_id, additional_key) do
|
||||
def add_cache_key_for(activity_id, additional_key) do
|
||||
current = get_cache_keys_for(activity_id)
|
||||
|
||||
unless additional_key in current do
|
||||
|
|
|
|||
@@ -9,7 +9,7 @@ defmodule Pleroma.Activity.Queries do
|
||||
import Ecto.Query, only: [from: 2, where: 3]
|
||||
|
||||
@type query :: Ecto.Queryable.t() | Activity.t()
|
||||
@type query :: Ecto.Queryable.t() | Pleroma.Activity.t()
|
||||
|
||||
alias Pleroma.Activity
|
||||
alias Pleroma.User
|
||||
|
|
|
|||
@@ -23,19 +23,21 @@ defmodule Pleroma.Announcement do
timestamps(type: :utc_datetime)
|
||||
end
|
||||
|
||||
def change(struct, params \\ %{}) do
|
||||
struct
|
||||
|> cast(validate_params(struct, params), [:data, :starts_at, :ends_at, :rendered])
|
||||
@doc "Generates changeset for %Pleroma.Announcement{}"
|
||||
@spec changeset(%__MODULE__{}, map()) :: %Ecto.Changeset{}
|
||||
def changeset(announcement \\ %__MODULE__{}, params \\ %{data: %{}}) do
|
||||
announcement
|
||||
|> cast(validate_params(announcement, params), [:data, :starts_at, :ends_at, :rendered])
|
||||
|> validate_required([:data])
|
||||
end
|
||||
|
||||
defp validate_params(struct, params) do
|
||||
defp validate_params(announcement, params) do
|
||||
base_data =
|
||||
%{
|
||||
"content" => "",
|
||||
"all_day" => false
|
||||
}
|
||||
|> Map.merge((struct && struct.data) || %{})
|
||||
|> Map.merge((announcement && announcement.data) || %{})
|
||||
|
||||
merged_data =
|
||||
Map.merge(base_data, params.data)
|
||||
@@ -61,13 +63,13 @@ defmodule Pleroma.Announcement do
end
|
||||
|
||||
def add(params) do
|
||||
changeset = change(%__MODULE__{}, params)
|
||||
changeset = changeset(%__MODULE__{}, params)
|
||||
|
||||
Repo.insert(changeset)
|
||||
end
|
||||
|
||||
def update(announcement, params) do
|
||||
changeset = change(announcement, params)
|
||||
changeset = changeset(announcement, params)
|
||||
|
||||
Repo.update(changeset)
|
||||
end
|
||||
|
|
|
|||
@@ -14,7 +14,6 @@ defmodule Pleroma.Application do
@name Mix.Project.config()[:name]
|
||||
@version Mix.Project.config()[:version]
|
||||
@repository Mix.Project.config()[:source_url]
|
||||
@mix_env Mix.env()
|
||||
|
||||
def name, do: @name
|
||||
def version, do: @version
|
||||
@@ -54,7 +53,6 @@ defmodule Pleroma.Application do
Config.DeprecationWarnings.warn()
|
||||
Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
|
||||
Pleroma.ApplicationRequirements.verify!()
|
||||
setup_instrumenters()
|
||||
load_custom_modules()
|
||||
Pleroma.Docs.JSON.compile()
|
||||
limiters_setup()
|
||||
@@ -91,6 +89,7 @@ defmodule Pleroma.Application do
# Define workers and child supervisors to be supervised
|
||||
children =
|
||||
[
|
||||
Pleroma.PromEx,
|
||||
Pleroma.Repo,
|
||||
Config.TransferTask,
|
||||
Pleroma.Emoji,
|
||||
@@ -98,7 +97,7 @@ defmodule Pleroma.Application do
{Task.Supervisor, name: Pleroma.TaskSupervisor}
|
||||
] ++
|
||||
cachex_children() ++
|
||||
http_children(adapter, @mix_env) ++
|
||||
http_children(adapter) ++
|
||||
[
|
||||
Pleroma.Stats,
|
||||
Pleroma.JobQueueMonitor,
|
||||
@@ -106,46 +105,22 @@ defmodule Pleroma.Application do
{Oban, Config.get(Oban)},
|
||||
Pleroma.Web.Endpoint
|
||||
] ++
|
||||
task_children(@mix_env) ++
|
||||
dont_run_in_test(@mix_env) ++
|
||||
task_children() ++
|
||||
streamer_registry() ++
|
||||
background_migrators() ++
|
||||
shout_child(shout_enabled?()) ++
|
||||
[Pleroma.Gopher.Server]
|
||||
[Pleroma.Gopher.Server] ++
|
||||
[Pleroma.Search.Healthcheck]
|
||||
|
||||
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
|
||||
# for other strategies and supported options
|
||||
# If we have a lot of caches, default max_restarts can cause test
|
||||
# resets to fail.
|
||||
# Go for the default 3 unless we're in test
|
||||
max_restarts =
|
||||
if @mix_env == :test do
|
||||
100
|
||||
else
|
||||
3
|
||||
end
|
||||
max_restarts = Application.get_env(:pleroma, __MODULE__)[:max_restarts]
|
||||
|
||||
opts = [strategy: :one_for_one, name: Pleroma.Supervisor, max_restarts: max_restarts]
|
||||
result = Supervisor.start_link(children, opts)
|
||||
|
||||
set_postgres_server_version()
|
||||
|
||||
result
|
||||
end
|
||||
|
||||
defp set_postgres_server_version do
|
||||
version =
|
||||
with %{rows: [[version]]} <- Ecto.Adapters.SQL.query!(Pleroma.Repo, "show server_version"),
|
||||
{num, _} <- Float.parse(version) do
|
||||
num
|
||||
else
|
||||
e ->
|
||||
Logger.warn(
|
||||
"Could not get the postgres version: #{inspect(e)}.\nSetting the default value of 9.6"
|
||||
)
|
||||
|
||||
9.6
|
||||
end
|
||||
|
||||
:persistent_term.put({Pleroma.Repo, :postgres_version}, version)
|
||||
Supervisor.start_link(children, opts)
|
||||
end
|
||||
|
||||
def load_custom_modules do
|
||||
@@ -159,7 +134,7 @@ defmodule Pleroma.Application do
raise "Invalid custom modules"
|
||||
|
||||
{:ok, modules, _warnings} ->
|
||||
if @mix_env != :test do
|
||||
if Application.get_env(:pleroma, __MODULE__)[:load_custom_modules] do
|
||||
Enum.each(modules, fn mod ->
|
||||
Logger.info("Custom module loaded: #{inspect(mod)}")
|
||||
end)
|
||||
@@ -170,29 +145,6 @@ defmodule Pleroma.Application do
end
|
||||
end
|
||||
|
||||
defp setup_instrumenters do
|
||||
require Prometheus.Registry
|
||||
|
||||
if Application.get_env(:prometheus, Pleroma.Repo.Instrumenter) do
|
||||
:ok =
|
||||
:telemetry.attach(
|
||||
"prometheus-ecto",
|
||||
[:pleroma, :repo, :query],
|
||||
&Pleroma.Repo.Instrumenter.handle_event/4,
|
||||
%{}
|
||||
)
|
||||
|
||||
Pleroma.Repo.Instrumenter.setup()
|
||||
end
|
||||
|
||||
Pleroma.Web.Endpoint.MetricsExporter.setup()
|
||||
Pleroma.Web.Endpoint.PipelineInstrumenter.setup()
|
||||
|
||||
# Note: disabled until prometheus-phx is integrated into prometheus-phoenix:
|
||||
# Pleroma.Web.Endpoint.Instrumenter.setup()
|
||||
PrometheusPhx.setup()
|
||||
end
|
||||
|
||||
defp cachex_children do
|
||||
[
|
||||
build_cachex("used_captcha", ttl_interval: seconds_valid_interval()),
|
||||
@@ -205,12 +157,14 @@ defmodule Pleroma.Application do
build_cachex("web_resp", limit: 2500),
|
||||
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
|
||||
build_cachex("failed_proxy_url", limit: 2500),
|
||||
build_cachex("failed_media_helper_url", default_ttl: :timer.minutes(15), limit: 2_500),
|
||||
build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
|
||||
build_cachex("chat_message_id_idempotency_key",
|
||||
expiration: chat_message_id_idempotency_key_expiration(),
|
||||
limit: 500_000
|
||||
),
|
||||
build_cachex("rel_me", limit: 2500)
|
||||
build_cachex("rel_me", limit: 2500),
|
||||
build_cachex("host_meta", default_ttl: :timer.minutes(120), limit: 5000)
|
||||
]
|
||||
end
|
||||
|
||||
@@ -236,24 +190,30 @@ defmodule Pleroma.Application do
|
||||
defp shout_enabled?, do: Config.get([:shout, :enabled])
|
||||
|
||||
defp dont_run_in_test(env) when env in [:test, :benchmark], do: []
|
||||
|
||||
defp dont_run_in_test(_) do
|
||||
[
|
||||
{Registry,
|
||||
[
|
||||
name: Pleroma.Web.Streamer.registry(),
|
||||
keys: :duplicate,
|
||||
partitions: System.schedulers_online()
|
||||
]}
|
||||
] ++ background_migrators()
|
||||
defp streamer_registry do
|
||||
if Application.get_env(:pleroma, __MODULE__)[:streamer_registry] do
|
||||
[
|
||||
{Registry,
|
||||
[
|
||||
name: Pleroma.Web.Streamer.registry(),
|
||||
keys: :duplicate,
|
||||
partitions: System.schedulers_online()
|
||||
]}
|
||||
]
|
||||
else
|
||||
[]
|
||||
end
|
||||
end
|
||||
|
||||
defp background_migrators do
|
||||
[
|
||||
Pleroma.Migrators.HashtagsTableMigrator,
|
||||
Pleroma.Migrators.ContextObjectsDeletionMigrator
|
||||
]
|
||||
if Application.get_env(:pleroma, __MODULE__)[:background_migrators] do
|
||||
[
|
||||
Pleroma.Migrators.HashtagsTableMigrator,
|
||||
Pleroma.Migrators.ContextObjectsDeletionMigrator
|
||||
]
|
||||
else
|
||||
[]
|
||||
end
|
||||
end
|
||||
|
||||
defp shout_child(true) do
|
||||
@@ -265,37 +225,43 @@ defmodule Pleroma.Application do
|
||||
defp shout_child(_), do: []
|
||||
|
||||
defp task_children(:test) do
|
||||
[
|
||||
defp task_children do
|
||||
children = [
|
||||
%{
|
||||
id: :web_push_init,
|
||||
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
|
||||
restart: :temporary
|
||||
}
|
||||
]
|
||||
end
|
||||
|
||||
defp task_children(_) do
|
||||
[
|
||||
%{
|
||||
id: :web_push_init,
|
||||
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
|
||||
restart: :temporary
|
||||
},
|
||||
%{
|
||||
id: :internal_fetch_init,
|
||||
start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]},
|
||||
restart: :temporary
|
||||
}
|
||||
]
|
||||
if Application.get_env(:pleroma, __MODULE__)[:internal_fetch] do
|
||||
children ++
|
||||
[
|
||||
%{
|
||||
id: :internal_fetch_init,
|
||||
start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]},
|
||||
restart: :temporary
|
||||
}
|
||||
]
|
||||
else
|
||||
children
|
||||
end
|
||||
end
|
||||
|
||||
# start hackney and gun pools in tests
|
||||
defp http_children(_, :test) do
|
||||
http_children(Tesla.Adapter.Hackney, nil) ++ http_children(Tesla.Adapter.Gun, nil)
|
||||
defp http_children(adapter) do
|
||||
if Application.get_env(:pleroma, __MODULE__)[:test_http_pools] do
|
||||
http_children_hackney() ++ http_children_gun()
|
||||
else
|
||||
cond do
|
||||
match?(Tesla.Adapter.Hackney, adapter) -> http_children_hackney()
|
||||
match?(Tesla.Adapter.Gun, adapter) -> http_children_gun()
|
||||
true -> []
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
defp http_children(Tesla.Adapter.Hackney, _) do
|
||||
defp http_children_hackney do
|
||||
pools = [:federation, :media]
|
||||
|
||||
pools =
|
||||
@@ -311,18 +277,20 @@ defmodule Pleroma.Application do
end
|
||||
end
|
||||
|
||||
defp http_children(Tesla.Adapter.Gun, _) do
|
||||
defp http_children_gun do
|
||||
Pleroma.Gun.ConnectionPool.children() ++
|
||||
[{Task, &Pleroma.HTTP.AdapterHelper.Gun.limiter_setup/0}]
|
||||
end
|
||||
|
||||
defp http_children(_, _), do: []
|
||||
|
||||
@spec limiters_setup() :: :ok
|
||||
def limiters_setup do
|
||||
config = Config.get(ConcurrentLimiter, [])
|
||||
|
||||
[Pleroma.Web.RichMedia.Helpers, Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy]
|
||||
[
|
||||
Pleroma.Web.RichMedia.Helpers,
|
||||
Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy,
|
||||
Pleroma.Search
|
||||
]
|
||||
|> Enum.each(fn module ->
|
||||
mod_config = Keyword.get(config, module, [])
|
||||
|
||||
|
|
|
|||
@@ -7,7 +7,10 @@ defmodule Pleroma.ApplicationRequirements do
The module represents the collection of validations to runs before start server.
|
||||
"""
|
||||
|
||||
defmodule VerifyError, do: defexception([:message])
|
||||
defmodule VerifyError do
|
||||
defexception([:message])
|
||||
@type t :: %__MODULE__{}
|
||||
end
|
||||
|
||||
alias Pleroma.Config
|
||||
alias Pleroma.Helpers.MediaHelper
|
||||
@@ -25,6 +28,7 @@ defmodule Pleroma.ApplicationRequirements do
|> check_welcome_message_config!()
|
||||
|> check_rum!()
|
||||
|> check_repo_pool_size!()
|
||||
|> check_mrfs()
|
||||
|> handle_result()
|
||||
end
|
||||
|
||||
@@ -34,7 +38,7 @@ defmodule Pleroma.ApplicationRequirements do
defp check_welcome_message_config!(:ok) do
|
||||
if Pleroma.Config.get([:welcome, :email, :enabled], false) and
|
||||
not Pleroma.Emails.Mailer.enabled?() do
|
||||
Logger.warn("""
|
||||
Logger.warning("""
|
||||
To send welcome emails, you need to enable the mailer.
|
||||
Welcome emails will NOT be sent with the current config.
|
||||
|
||||
@@ -53,7 +57,7 @@ defmodule Pleroma.ApplicationRequirements do
def check_confirmation_accounts!(:ok) do
|
||||
if Pleroma.Config.get([:instance, :account_activation_required]) &&
|
||||
not Pleroma.Emails.Mailer.enabled?() do
|
||||
Logger.warn("""
|
||||
Logger.warning("""
|
||||
Account activation is required, but the mailer is disabled.
|
||||
Users will NOT be able to confirm their accounts with this config.
|
||||
Either disable account activation or enable the mailer.
|
||||
@@ -168,8 +172,6 @@ defmodule Pleroma.ApplicationRequirements do
check_filter(Pleroma.Upload.Filter.Exiftool.ReadDescription, "exiftool"),
|
||||
check_filter(Pleroma.Upload.Filter.Mogrify, "mogrify"),
|
||||
check_filter(Pleroma.Upload.Filter.Mogrifun, "mogrify"),
|
||||
check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "mogrify"),
|
||||
check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "convert"),
|
||||
check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "ffprobe")
|
||||
]
|
||||
|
||||
@@ -195,8 +197,6 @@ defmodule Pleroma.ApplicationRequirements do
end
|
||||
end
|
||||
|
||||
defp check_system_commands!(result), do: result
|
||||
|
||||
defp check_repo_pool_size!(:ok) do
|
||||
if Pleroma.Config.get([Pleroma.Repo, :pool_size], 10) != 10 and
|
||||
not Pleroma.Config.get([:dangerzone, :override_repo_pool_size], false) do
|
||||
@@ -235,4 +235,25 @@ defmodule Pleroma.ApplicationRequirements do
true
|
||||
end
|
||||
end
|
||||
|
||||
defp check_mrfs(:ok) do
|
||||
mrfs = Config.get!([:mrf, :policies])
|
||||
|
||||
missing_mrfs =
|
||||
Enum.reduce(mrfs, [], fn x, acc ->
|
||||
if Code.ensure_compiled(x) do
|
||||
acc
|
||||
else
|
||||
acc ++ [x]
|
||||
end
|
||||
end)
|
||||
|
||||
if Enum.empty?(missing_mrfs) do
|
||||
:ok
|
||||
else
|
||||
{:error, "The following MRF modules are configured but missing: #{inspect(missing_mrfs)}"}
|
||||
end
|
||||
end
|
||||
|
||||
defp check_mrfs(result), do: result
|
||||
end
|
||||
|
|
|
|||
@@ -1,20 +0,0 @@
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.BBS.Authenticator do
|
||||
use Sshd.PasswordAuthenticator
|
||||
alias Pleroma.User
|
||||
alias Pleroma.Web.Plugs.AuthenticationPlug
|
||||
|
||||
def authenticate(username, password) do
|
||||
username = to_string(username)
|
||||
password = to_string(password)
|
||||
|
||||
with %User{} = user <- User.get_by_nickname(username) do
|
||||
AuthenticationPlug.checkpw(password, user.password_hash)
|
||||
else
|
||||
_e -> false
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -1,246 +0,0 @@
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.BBS.Handler do
|
||||
use Sshd.ShellHandler
|
||||
alias Pleroma.Activity
|
||||
alias Pleroma.HTML
|
||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||
alias Pleroma.Web.CommonAPI
|
||||
|
||||
def on_shell(username, _pubkey, _ip, _port) do
|
||||
:ok = IO.puts("Welcome to #{Pleroma.Config.get([:instance, :name])}!")
|
||||
user = Pleroma.User.get_cached_by_nickname(to_string(username))
|
||||
Logger.debug("#{inspect(user)}")
|
||||
loop(run_state(user: user))
|
||||
end
|
||||
|
||||
def on_connect(username, ip, port, method) do
|
||||
Logger.debug(fn ->
|
||||
"""
|
||||
Incoming SSH shell #{inspect(self())} requested for #{username} from #{inspect(ip)}:#{inspect(port)} using #{inspect(method)}
|
||||
"""
|
||||
end)
|
||||
end
|
||||
|
||||
def on_disconnect(username, ip, port) do
|
||||
Logger.debug(fn ->
|
||||
"Disconnecting SSH shell for #{username} from #{inspect(ip)}:#{inspect(port)}"
|
||||
end)
|
||||
end
|
||||
|
||||
defp loop(state) do
|
||||
self_pid = self()
|
||||
counter = state.counter
|
||||
prefix = state.prefix
|
||||
user = state.user
|
||||
|
||||
input = spawn(fn -> io_get(self_pid, prefix, counter, user.nickname) end)
|
||||
wait_input(state, input)
|
||||
end
|
||||
|
||||
def puts_activity(activity) do
|
||||
status = Pleroma.Web.MastodonAPI.StatusView.render("show.json", %{activity: activity})
|
||||
|
||||
IO.puts("-- #{status.id} by #{status.account.display_name} (#{status.account.acct})")
|
||||
|
||||
status.content
|
||||
|> String.split("<br/>")
|
||||
|> Enum.map(&HTML.strip_tags/1)
|
||||
|> Enum.map(&HtmlEntities.decode/1)
|
||||
|> Enum.map(&IO.puts/1)
|
||||
end
|
||||
|
||||
def puts_notification(activity, user) do
|
||||
notification =
|
||||
Pleroma.Web.MastodonAPI.NotificationView.render("show.json", %{
|
||||
notification: activity,
|
||||
for: user
|
||||
})
|
||||
|
||||
IO.puts(
|
||||
"== (#{notification.type}) #{notification.status.id} by #{notification.account.display_name} (#{notification.account.acct})"
|
||||
)
|
||||
|
||||
notification.status.content
|
||||
|> String.split("<br/>")
|
||||
|> Enum.map(&HTML.strip_tags/1)
|
||||
|> Enum.map(&HtmlEntities.decode/1)
|
||||
|> (fn x ->
|
||||
case x do
|
||||
[content] ->
|
||||
"> " <> content
|
||||
|
||||
[head | _tail] ->
|
||||
# "> " <> hd <> "..."
|
||||
head
|
||||
|> String.slice(1, 80)
|
||||
|> (fn x -> "> " <> x <> "..." end).()
|
||||
end
|
||||
end).()
|
||||
|> IO.puts()
|
||||
|
||||
IO.puts("")
|
||||
end
|
||||
|
||||
def handle_command(state, "help") do
|
||||
IO.puts("Available commands:")
|
||||
IO.puts("help - This help")
|
||||
IO.puts("home - Show the home timeline")
|
||||
IO.puts("p <text> - Post the given text")
|
||||
IO.puts("r <id> <text> - Reply to the post with the given id")
|
||||
IO.puts("t <id> - Show a thread from the given id")
|
||||
IO.puts("n - Show notifications")
|
||||
IO.puts("n read - Mark all notifactions as read")
|
||||
IO.puts("f <id> - Favourites the post with the given id")
|
||||
IO.puts("R <id> - Repeat the post with the given id")
|
||||
IO.puts("quit - Quit")
|
||||
|
||||
state
|
||||
end
|
||||
|
||||
def handle_command(%{user: user} = state, "r " <> text) do
|
||||
text = String.trim(text)
|
||||
[activity_id, rest] = String.split(text, " ", parts: 2)
|
||||
|
||||
with %Activity{} <- Activity.get_by_id(activity_id),
|
||||
{:ok, _activity} <-
|
||||
CommonAPI.post(user, %{status: rest, in_reply_to_status_id: activity_id}) do
|
||||
IO.puts("Replied!")
|
||||
else
|
||||
_e -> IO.puts("Could not reply...")
|
||||
end
|
||||
|
||||
state
|
||||
end
|
||||
|
||||
def handle_command(%{user: user} = state, "t " <> activity_id) do
|
||||
with %Activity{} = activity <- Activity.get_by_id(activity_id) do
|
||||
activities =
|
||||
ActivityPub.fetch_activities_for_context(activity.data["context"], %{
|
||||
blocking_user: user,
|
||||
user: user,
|
||||
exclude_id: activity.id
|
||||
})
|
||||
|
||||
case activities do
|
||||
[] ->
|
||||
activity_id
|
||||
|> Activity.get_by_id()
|
||||
|> puts_activity()
|
||||
|
||||
_ ->
|
||||
activities
|
||||
|> Enum.reverse()
|
||||
|> Enum.each(&puts_activity/1)
|
||||
end
|
||||
else
|
||||
_e -> IO.puts("Could not show this thread...")
|
||||
end
|
||||
|
||||
state
|
||||
end
|
||||
|
||||
def handle_command(%{user: user} = state, "n read") do
|
||||
Pleroma.Notification.clear(user)
|
||||
IO.puts("All notifications were marked as read")
|
||||
|
||||
state
|
||||
end
|
||||
|
||||
def handle_command(%{user: user} = state, "n") do
|
||||
user
|
||||
|> Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(%{})
|
||||
|> Enum.each(&puts_notification(&1, user))
|
||||
|
||||
state
|
||||
end
|
||||
|
||||
def handle_command(%{user: user} = state, "p " <> text) do
|
||||
text = String.trim(text)
|
||||
|
||||
with {:ok, activity} <- CommonAPI.post(user, %{status: text}) do
|
||||
IO.puts("Posted! ID: #{activity.id}")
|
||||
else
|
||||
_e -> IO.puts("Could not post...")
|
||||
end
|
||||
|
||||
state
|
||||
end
|
||||
|
||||
def handle_command(%{user: user} = state, "f " <> id) do
|
||||
id = String.trim(id)
|
||||
|
||||
with %Activity{} = activity <- Activity.get_by_id(id),
|
||||
{:ok, _activity} <- CommonAPI.favorite(user, activity) do
|
||||
IO.puts("Favourited!")
|
||||
else
|
||||
_e -> IO.puts("Could not Favourite...")
|
||||
end
|
||||
|
||||
state
|
||||
end
|
||||
|
||||
def handle_command(state, "home") do
|
||||
user = state.user
|
||||
|
||||
params =
|
||||
%{}
|
||||
|> Map.put(:type, ["Create"])
|
||||
|> Map.put(:blocking_user, user)
|
||||
|> Map.put(:muting_user, user)
|
||||
|> Map.put(:user, user)
|
||||
|
||||
activities =
|
||||
[user.ap_id | Pleroma.User.following(user)]
|
||||
|> ActivityPub.fetch_activities(params)
|
||||
|
||||
Enum.each(activities, fn activity ->
|
||||
puts_activity(activity)
|
||||
end)
|
||||
|
||||
state
|
||||
end
|
||||
|
||||
def handle_command(state, command) do
|
||||
IO.puts("Unknown command '#{command}'")
|
||||
state
|
||||
end
|
||||
|
||||
defp wait_input(state, input) do
|
||||
receive do
|
||||
{:input, ^input, "quit\n"} ->
|
||||
IO.puts("Exiting...")
|
||||
|
||||
{:input, ^input, code} when is_binary(code) ->
|
||||
code = String.trim(code)
|
||||
|
||||
state = handle_command(state, code)
|
||||
|
||||
loop(%{state | counter: state.counter + 1})
|
||||
|
||||
{:input, ^input, {:error, :interrupted}} ->
|
||||
IO.puts("Caught Ctrl+C...")
|
||||
loop(%{state | counter: state.counter + 1})
|
||||
|
||||
{:input, ^input, msg} ->
|
||||
:ok = Logger.warn("received unknown message: #{inspect(msg)}")
|
||||
loop(%{state | counter: state.counter + 1})
|
||||
end
|
||||
end
|
||||
|
||||
defp run_state(opts) do
|
||||
%{prefix: "pleroma", counter: 1, user: opts[:user]}
|
||||
end
|
||||
|
||||
defp io_get(pid, prefix, counter, username) do
|
||||
prompt = prompt(prefix, counter, username)
|
||||
send(pid, {:input, self(), IO.gets(:stdio, prompt)})
|
||||
end
|
||||
|
||||
defp prompt(prefix, counter, username) do
|
||||
prompt = "#{username}@#{prefix}:#{counter}>"
|
||||
prompt <> " "
|
||||
end
|
||||
end
|
||||
@@ -10,6 +10,7 @@ defmodule Pleroma.Bookmark do
|
||||
alias Pleroma.Activity
|
||||
alias Pleroma.Bookmark
|
||||
alias Pleroma.BookmarkFolder
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.User
|
||||
|
||||
@@ -18,33 +19,46 @@ defmodule Pleroma.Bookmark do
schema "bookmarks" do
|
||||
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
|
||||
belongs_to(:activity, Activity, type: FlakeId.Ecto.CompatType)
|
||||
belongs_to(:folder, BookmarkFolder, type: FlakeId.Ecto.CompatType)
|
||||
|
||||
timestamps()
|
||||
end
|
||||
|
||||
@spec create(FlakeId.Ecto.CompatType.t(), FlakeId.Ecto.CompatType.t()) ::
|
||||
{:ok, Bookmark.t()} | {:error, Changeset.t()}
|
||||
def create(user_id, activity_id) do
|
||||
@spec create(Ecto.UUID.t(), Ecto.UUID.t()) ::
|
||||
{:ok, Bookmark.t()} | {:error, Ecto.Changeset.t()}
|
||||
def create(user_id, activity_id, folder_id \\ nil) do
|
||||
attrs = %{
|
||||
user_id: user_id,
|
||||
activity_id: activity_id
|
||||
activity_id: activity_id,
|
||||
folder_id: folder_id
|
||||
}
|
||||
|
||||
%Bookmark{}
|
||||
|> cast(attrs, [:user_id, :activity_id])
|
||||
|> cast(attrs, [:user_id, :activity_id, :folder_id])
|
||||
|> validate_required([:user_id, :activity_id])
|
||||
|> unique_constraint(:activity_id, name: :bookmarks_user_id_activity_id_index)
|
||||
|> Repo.insert()
|
||||
|> Repo.insert(
|
||||
on_conflict: [set: [folder_id: folder_id]],
|
||||
conflict_target: [:user_id, :activity_id]
|
||||
)
|
||||
end
|
||||
|
||||
@spec for_user_query(FlakeId.Ecto.CompatType.t()) :: Ecto.Query.t()
|
||||
def for_user_query(user_id) do
|
||||
@spec for_user_query(Ecto.UUID.t()) :: Ecto.Query.t()
|
||||
def for_user_query(user_id, folder_id \\ nil) do
|
||||
Bookmark
|
||||
|> where(user_id: ^user_id)
|
||||
|> maybe_filter_by_folder(folder_id)
|
||||
|> join(:inner, [b], activity in assoc(b, :activity))
|
||||
|> preload([b, a], activity: a)
|
||||
end
|
||||
|
||||
defp maybe_filter_by_folder(query, nil), do: query
|
||||
|
||||
defp maybe_filter_by_folder(query, folder_id) do
|
||||
query
|
||||
|> where(folder_id: ^folder_id)
|
||||
end
|
||||
|
||||
def get(user_id, activity_id) do
|
||||
Bookmark
|
||||
|> where(user_id: ^user_id)
|
||||
@@ -52,8 +66,8 @@ defmodule Pleroma.Bookmark do
|> Repo.one()
|
||||
end
|
||||
|
||||
@spec destroy(FlakeId.Ecto.CompatType.t(), FlakeId.Ecto.CompatType.t()) ::
|
||||
{:ok, Bookmark.t()} | {:error, Changeset.t()}
|
||||
@spec destroy(Ecto.UUID.t(), Ecto.UUID.t()) ::
|
||||
{:ok, Bookmark.t()} | {:error, Ecto.Changeset.t()}
|
||||
def destroy(user_id, activity_id) do
|
||||
from(b in Bookmark,
|
||||
where: b.user_id == ^user_id,
|
||||
@@ -62,4 +76,11 @@ defmodule Pleroma.Bookmark do
|> Repo.one()
|
||||
|> Repo.delete()
|
||||
end
|
||||
|
||||
def set_folder(bookmark, folder_id) do
|
||||
bookmark
|
||||
|> cast(%{folder_id: folder_id}, [:folder_id])
|
||||
|> validate_required([:folder_id])
|
||||
|> Repo.update()
|
||||
end
|
||||
end
|
||||
|
|
|
|||
115 lib/pleroma/bookmark_folder.ex Normal file
@@ -0,0 +1,115 @@
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.BookmarkFolder do
|
||||
use Ecto.Schema
|
||||
|
||||
import Ecto.Changeset
|
||||
import Ecto.Query
|
||||
|
||||
alias Pleroma.BookmarkFolder
|
||||
alias Pleroma.Emoji
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.User
|
||||
|
||||
@type t :: %__MODULE__{}
|
||||
@primary_key {:id, FlakeId.Ecto.CompatType, autogenerate: true}
|
||||
|
||||
schema "bookmark_folders" do
|
||||
field(:name, :string)
|
||||
field(:emoji, :string)
|
||||
|
||||
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
|
||||
|
||||
timestamps()
|
||||
end
|
||||
|
||||
def get_by_id(id), do: Repo.get_by(BookmarkFolder, id: id)
|
||||
|
||||
def create(user_id, name, emoji \\ nil) do
|
||||
%BookmarkFolder{}
|
||||
|> cast(
|
||||
%{
|
||||
user_id: user_id,
|
||||
name: name,
|
||||
emoji: emoji
|
||||
},
|
||||
[:user_id, :name, :emoji]
|
||||
)
|
||||
|> validate_required([:user_id, :name])
|
||||
|> fix_emoji()
|
||||
|> validate_emoji()
|
||||
|> unique_constraint([:user_id, :name])
|
||||
|> Repo.insert()
|
||||
end
|
||||
|
||||
def update(folder_id, name, emoji \\ nil) do
|
||||
get_by_id(folder_id)
|
||||
|> cast(
|
||||
%{
|
||||
name: name,
|
||||
emoji: emoji
|
||||
},
|
||||
[:name, :emoji]
|
||||
)
|
||||
|> fix_emoji()
|
||||
|> validate_emoji()
|
||||
|> unique_constraint([:user_id, :name])
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
defp fix_emoji(changeset) do
|
||||
with {:emoji_field, emoji} when is_binary(emoji) <-
|
||||
{:emoji_field, get_field(changeset, :emoji)},
|
||||
{:fixed_emoji, emoji} <-
|
||||
{:fixed_emoji,
|
||||
emoji
|
||||
|> Pleroma.Emoji.fully_qualify_emoji()
|
||||
|> Pleroma.Emoji.maybe_quote()} do
|
||||
put_change(changeset, :emoji, emoji)
|
||||
else
|
||||
{:emoji_field, _} -> changeset
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_emoji(changeset) do
|
||||
validate_change(changeset, :emoji, fn
|
||||
:emoji, nil ->
|
||||
[]
|
||||
|
||||
:emoji, emoji ->
|
||||
if Emoji.unicode?(emoji) or valid_local_custom_emoji?(emoji) do
|
||||
[]
|
||||
else
|
||||
[emoji: "Invalid emoji"]
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
defp valid_local_custom_emoji?(emoji) do
|
||||
with %{file: _path} <- Emoji.get(emoji) do
|
||||
true
|
||||
else
|
||||
_ -> false
|
||||
end
|
||||
end
|
||||
|
||||
def delete(folder_id) do
|
||||
BookmarkFolder
|
||||
|> Repo.get_by(id: folder_id)
|
||||
|> Repo.delete()
|
||||
end
|
||||
|
||||
def for_user(user_id) do
|
||||
BookmarkFolder
|
||||
|> where(user_id: ^user_id)
|
||||
|> Repo.all()
|
||||
end
|
||||
|
||||
def belongs_to_user?(folder_id, user_id) do
|
||||
BookmarkFolder
|
||||
|> where(id: ^folder_id, user_id: ^user_id)
|
||||
|> Repo.exists?()
|
||||
end
|
||||
end
|
||||
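As a quick sketch of how the new module above composes with the extended `Pleroma.Bookmark.create/3` shown earlier (the `user` and `activity` bindings here are hypothetical, e.g. from an IEx session):

    # sketch: create a folder, file a bookmark into it, then list the user's folders
    {:ok, folder} = Pleroma.BookmarkFolder.create(user.id, "Recipes")
    {:ok, _bookmark} = Pleroma.Bookmark.create(user.id, activity.id, folder.id)
    Pleroma.BookmarkFolder.for_user(user.id)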
@@ -8,10 +8,13 @@ defmodule Pleroma.Caching do
@callback put(Cachex.cache(), any(), any(), Keyword.t()) :: {Cachex.status(), boolean()}
|
||||
@callback put(Cachex.cache(), any(), any()) :: {Cachex.status(), boolean()}
|
||||
@callback fetch!(Cachex.cache(), any(), function() | nil) :: any()
|
||||
@callback fetch(Cachex.cache(), any(), function() | nil) ::
|
||||
{atom(), any()} | {atom(), any(), any()}
|
||||
# @callback del(Cachex.cache(), any(), Keyword.t()) :: {Cachex.status(), boolean()}
|
||||
@callback del(Cachex.cache(), any()) :: {Cachex.status(), boolean()}
|
||||
@callback stream!(Cachex.cache(), any()) :: Enumerable.t()
|
||||
@callback expire_at(Cachex.cache(), binary(), number()) :: {Cachex.status(), boolean()}
|
||||
@callback expire(Cachex.cache(), binary(), number()) :: {Cachex.status(), boolean()}
|
||||
@callback exists?(Cachex.cache(), any()) :: {Cachex.status(), boolean()}
|
||||
@callback execute!(Cachex.cache(), function()) :: any()
|
||||
@callback get_and_update(Cachex.cache(), any(), function()) ::
|
||||
|
|
|
|||
@@ -29,7 +29,7 @@ defmodule Pleroma.Captcha.Kocaptcha do
|
||||
@impl Service
|
||||
def validate(_token, captcha, answer_data) do
|
||||
# Here the token is unsed, because the unencrypted captcha answer is just passed to method
|
||||
# Here the token is unused, because the unencrypted captcha answer is just passed to method
|
||||
if not is_nil(captcha) and
|
||||
:crypto.hash(:md5, captcha) |> Base.encode16() == String.upcase(answer_data),
|
||||
do: :ok,
|
||||
|
|
|
|||
@@ -42,7 +42,7 @@ defmodule Pleroma.Chat do
|> unique_constraint(:user_id, name: :chats_user_id_recipient_index)
|
||||
end
|
||||
|
||||
@spec get_by_user_and_id(User.t(), FlakeId.Ecto.CompatType.t()) ::
|
||||
@spec get_by_user_and_id(User.t(), Ecto.UUID.t()) ::
|
||||
{:ok, t()} | {:error, :not_found}
|
||||
def get_by_user_and_id(%User{id: user_id}, id) do
|
||||
from(c in __MODULE__,
|
||||
@@ -52,17 +52,17 @@ defmodule Pleroma.Chat do
|> Repo.find_resource()
|
||||
end
|
||||
|
||||
@spec get_by_id(FlakeId.Ecto.CompatType.t()) :: t() | nil
|
||||
@spec get_by_id(Ecto.UUID.t()) :: t() | nil
|
||||
def get_by_id(id) do
|
||||
Repo.get(__MODULE__, id)
|
||||
end
|
||||
|
||||
@spec get(FlakeId.Ecto.CompatType.t(), String.t()) :: t() | nil
|
||||
@spec get(Ecto.UUID.t(), String.t()) :: t() | nil
|
||||
def get(user_id, recipient) do
|
||||
Repo.get_by(__MODULE__, user_id: user_id, recipient: recipient)
|
||||
end
|
||||
|
||||
@spec get_or_create(FlakeId.Ecto.CompatType.t(), String.t()) ::
|
||||
@spec get_or_create(Ecto.UUID.t(), String.t()) ::
|
||||
{:ok, t()} | {:error, Ecto.Changeset.t()}
|
||||
def get_or_create(user_id, recipient) do
|
||||
%__MODULE__{}
|
||||
@@ -75,7 +75,7 @@ defmodule Pleroma.Chat do
)
|
||||
end
|
||||
|
||||
@spec bump_or_create(FlakeId.Ecto.CompatType.t(), String.t()) ::
|
||||
@spec bump_or_create(Ecto.UUID.t(), String.t()) ::
|
||||
{:ok, t()} | {:error, Ecto.Changeset.t()}
|
||||
def bump_or_create(user_id, recipient) do
|
||||
%__MODULE__{}
|
||||
@@ -87,7 +87,7 @@ defmodule Pleroma.Chat do
)
|
||||
end
|
||||
|
||||
@spec for_user_query(FlakeId.Ecto.CompatType.t()) :: Ecto.Query.t()
|
||||
@spec for_user_query(Ecto.UUID.t()) :: Ecto.Query.t()
|
||||
def for_user_query(user_id) do
|
||||
from(c in Chat,
|
||||
where: c.user_id == ^user_id,
|
||||
|
|
|
|||
@@ -24,7 +24,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
filters = Config.get([Pleroma.Upload]) |> Keyword.get(:filters, [])
|
||||
|
||||
if Pleroma.Upload.Filter.Exiftool in filters do
|
||||
Logger.warn("""
|
||||
Logger.warning("""
|
||||
!!!DEPRECATION WARNING!!!
|
||||
Your config is using Exiftool as a filter instead of Exiftool.StripLocation. This should work for now, but you are advised to change to the new configuration to prevent possible issues later:
|
||||
|
||||
@@ -63,7 +63,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
|> Enum.any?(fn {_, v} -> Enum.any?(v, &is_binary/1) end)
|
||||
|
||||
if has_strings do
|
||||
Logger.warn("""
|
||||
Logger.warning("""
|
||||
!!!DEPRECATION WARNING!!!
|
||||
Your config is using strings in the SimplePolicy configuration instead of tuples. They should work for now, but you are advised to change to the new configuration to prevent possible issues later:
|
||||
|
||||
@@ -121,7 +121,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
has_strings = Config.get([:instance, :quarantined_instances]) |> Enum.any?(&is_binary/1)
|
||||
|
||||
if has_strings do
|
||||
Logger.warn("""
|
||||
Logger.warning("""
|
||||
!!!DEPRECATION WARNING!!!
|
||||
Your config is using strings in the quarantined_instances configuration instead of tuples. They should work for now, but you are advised to change to the new configuration to prevent possible issues later:
|
||||
|
||||
@@ -158,7 +158,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
has_strings = Config.get([:mrf, :transparency_exclusions]) |> Enum.any?(&is_binary/1)
|
||||
|
||||
if has_strings do
|
||||
Logger.warn("""
|
||||
Logger.warning("""
|
||||
!!!DEPRECATION WARNING!!!
|
||||
Your config is using strings in the transparency_exclusions configuration instead of tuples. They should work for now, but you are advised to change to the new configuration to prevent possible issues later:
|
||||
|
||||
@@ -172,7 +172,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
|
||||
```
|
||||
config :pleroma, :mrf,
|
||||
transparency_exclusions: [{"instance.tld", "Reason to exlude transparency"}]
|
||||
transparency_exclusions: [{"instance.tld", "Reason to exclude transparency"}]
|
||||
```
|
||||
""")
|
||||
|
||||
@@ -193,7 +193,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
|
||||
def check_hellthread_threshold do
|
||||
if Config.get([:mrf_hellthread, :threshold]) do
|
||||
Logger.warn("""
|
||||
Logger.warning("""
|
||||
!!!DEPRECATION WARNING!!!
|
||||
You are using the old configuration mechanism for the hellthread filter. Please check config.md.
|
||||
""")
|
||||
@@ -213,7 +213,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
check_gun_pool_options(),
|
||||
check_activity_expiration_config(),
|
||||
check_remote_ip_plug_name(),
|
||||
check_uploders_s3_public_endpoint(),
|
||||
check_uploaders_s3_public_endpoint(),
|
||||
check_old_chat_shoutbox(),
|
||||
check_quarantined_instances_tuples(),
|
||||
check_transparency_exclusions_tuples(),
|
||||
@@ -256,7 +256,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
move_namespace_and_warn(@mrf_config_map, warning_preface)
|
||||
end
|
||||
|
||||
@spec move_namespace_and_warn([config_map()], String.t()) :: :ok | nil
|
||||
@spec move_namespace_and_warn([config_map()], String.t()) :: :ok | :error
|
||||
def move_namespace_and_warn(config_map, warning_preface) do
|
||||
warning =
|
||||
Enum.reduce(config_map, "", fn
|
||||
@@ -274,17 +274,17 @@ defmodule Pleroma.Config.DeprecationWarnings do
if warning == "" do
|
||||
:ok
|
||||
else
|
||||
Logger.warn(warning_preface <> warning)
|
||||
Logger.warning(warning_preface <> warning)
|
||||
:error
|
||||
end
|
||||
end
|
||||
|
||||
@spec check_media_proxy_whitelist_config() :: :ok | nil
|
||||
@spec check_media_proxy_whitelist_config() :: :ok | :error
|
||||
def check_media_proxy_whitelist_config do
|
||||
whitelist = Config.get([:media_proxy, :whitelist])
|
||||
|
||||
if Enum.any?(whitelist, &(not String.starts_with?(&1, "http"))) do
|
||||
Logger.warn("""
|
||||
Logger.warning("""
|
||||
!!!DEPRECATION WARNING!!!
|
||||
Your config is using old format (only domain) for MediaProxy whitelist option. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
|
||||
""")
|
||||
@@ -299,7 +299,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
pool_config = Config.get(:connections_pool)
|
||||
|
||||
if timeout = pool_config[:await_up_timeout] do
|
||||
Logger.warn("""
|
||||
Logger.warning("""
|
||||
!!!DEPRECATION WARNING!!!
|
||||
Your config is using old setting `config :pleroma, :connections_pool, await_up_timeout`. Please change to `config :pleroma, :connections_pool, connect_timeout` to ensure compatibility with future releases.
|
||||
""")
|
||||
@@ -331,7 +331,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
"\n* `:timeout` options in #{pool_name} pool is now `:recv_timeout`"
|
||||
end)
|
||||
|
||||
Logger.warn(Enum.join([warning_preface | pool_warnings]))
|
||||
Logger.warning(Enum.join([warning_preface | pool_warnings]))
|
||||
|
||||
Config.put(:pools, updated_config)
|
||||
:error
|
||||
@@ -340,7 +340,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
end
|
||||
end
|
||||
|
||||
@spec check_activity_expiration_config() :: :ok | nil
|
||||
@spec check_activity_expiration_config() :: :ok | :error
|
||||
def check_activity_expiration_config do
|
||||
warning_preface = """
|
||||
!!!DEPRECATION WARNING!!!
|
||||
@@ -356,7 +356,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
)
|
||||
end
|
||||
|
||||
@spec check_remote_ip_plug_name() :: :ok | nil
|
||||
@spec check_remote_ip_plug_name() :: :ok | :error
|
||||
def check_remote_ip_plug_name do
|
||||
warning_preface = """
|
||||
!!!DEPRECATION WARNING!!!
|
||||
@@ -372,8 +372,8 @@ defmodule Pleroma.Config.DeprecationWarnings do
)
|
||||
end
|
||||
|
||||
@spec check_uploders_s3_public_endpoint() :: :ok | nil
|
||||
def check_uploders_s3_public_endpoint do
|
||||
@spec check_uploaders_s3_public_endpoint() :: :ok | :error
|
||||
def check_uploaders_s3_public_endpoint do
|
||||
s3_config = Pleroma.Config.get([Pleroma.Uploaders.S3])
|
||||
|
||||
use_old_config = Keyword.has_key?(s3_config, :public_endpoint)
|
||||
@@ -393,7 +393,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
end
|
||||
end
|
||||
|
||||
@spec check_old_chat_shoutbox() :: :ok | nil
|
||||
@spec check_old_chat_shoutbox() :: :ok | :error
|
||||
def check_old_chat_shoutbox do
|
||||
instance_config = Pleroma.Config.get([:instance])
|
||||
chat_config = Pleroma.Config.get([:chat]) || []
|
||||
|
|
|
|||
@@ -5,4 +5,11 @@
defmodule Pleroma.Config.Getting do
|
||||
@callback get(any()) :: any()
|
||||
@callback get(any(), any()) :: any()
|
||||
|
||||
def get(key), do: get(key, nil)
|
||||
def get(key, default), do: impl().get(key, default)
|
||||
|
||||
def impl do
|
||||
Application.get_env(:pleroma, :config_impl, Pleroma.Config)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
@@ -23,7 +23,7 @@ defmodule Pleroma.Config.Oban do
You are using old workers in Oban crontab settings, which were removed.
|
||||
Please, remove setting from crontab in your config file (prod.secret.exs): #{inspect(setting)}
|
||||
"""
|
||||
|> Logger.warn()
|
||||
|> Logger.warning()
|
||||
|
||||
List.delete(acc, setting)
|
||||
else
|
||||
|
|
|
|||
@@ -20,6 +20,20 @@ defmodule Pleroma.Config.ReleaseRuntimeProvider do
|
||||
with_runtime_config =
|
||||
if File.exists?(config_path) do
|
||||
# <https://git.pleroma.social/pleroma/pleroma/-/issues/3135>
|
||||
%File.Stat{mode: mode} = File.stat!(config_path)
|
||||
|
||||
if Bitwise.band(mode, 0o007) > 0 do
|
||||
raise "Configuration at #{config_path} has world-permissions, execute the following: chmod o= #{config_path}"
|
||||
end
|
||||
|
||||
if Bitwise.band(mode, 0o020) > 0 do
|
||||
raise "Configuration at #{config_path} has group-wise write permissions, execute the following: chmod g-w #{config_path}"
|
||||
end
|
||||
|
||||
# Note: Elixir doesn't provides a getuid(2)
|
||||
# so cannot forbid group-read only when config is owned by us
|
||||
|
||||
runtime_config = Config.Reader.read!(config_path)
|
||||
|
||||
with_defaults
|
||||
|
|
|
|||
@@ -55,8 +55,7 @@ defmodule Pleroma.Config.TransferTask do
|
||||
started_applications = Application.started_applications()
|
||||
|
||||
# TODO: some problem with prometheus after restart!
|
||||
reject = [nil, :prometheus, :postgrex]
|
||||
reject = [nil, :postgrex]
|
||||
|
||||
reject =
|
||||
if restart_pleroma? do
|
||||
@@ -145,7 +144,7 @@ defmodule Pleroma.Config.TransferTask do
error_msg =
|
||||
"updating env causes error, group: #{inspect(group)}, key: #{inspect(key)}, value: #{inspect(value)} error: #{inspect(error)}"
|
||||
|
||||
Logger.warn(error_msg)
|
||||
Logger.warning(error_msg)
|
||||
|
||||
nil
|
||||
end
|
||||
@@ -179,12 +178,12 @@ defmodule Pleroma.Config.TransferTask do
:ok = Application.start(app)
|
||||
else
|
||||
nil ->
|
||||
Logger.warn("#{app} is not started.")
|
||||
Logger.warning("#{app} is not started.")
|
||||
|
||||
error ->
|
||||
error
|
||||
|> inspect()
|
||||
|> Logger.warn()
|
||||
|> Logger.warning()
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
@@ -54,7 +54,7 @@ defmodule Pleroma.ConfigDB do
@spec get_by_params(map()) :: ConfigDB.t() | nil
|
||||
def get_by_params(%{group: _, key: _} = params), do: Repo.get_by(ConfigDB, params)
|
||||
|
||||
@spec changeset(ConfigDB.t(), map()) :: Changeset.t()
|
||||
@spec changeset(ConfigDB.t(), map()) :: Ecto.Changeset.t()
|
||||
def changeset(config, params \\ %{}) do
|
||||
config
|
||||
|> cast(params, [:key, :group, :value])
|
||||
@@ -138,7 +138,7 @@ defmodule Pleroma.ConfigDB do
end
|
||||
end
|
||||
|
||||
@spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||
@spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Ecto.Changeset.t()}
|
||||
def update_or_create(params) do
|
||||
params = Map.put(params, :value, to_elixir_types(params[:value]))
|
||||
search_opts = Map.take(params, [:group, :key])
|
||||
@@ -175,7 +175,7 @@ defmodule Pleroma.ConfigDB do
end)
|
||||
end
|
||||
|
||||
@spec delete(ConfigDB.t() | map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||
@spec delete(ConfigDB.t() | map()) :: {:ok, ConfigDB.t()} | {:error, Ecto.Changeset.t()}
|
||||
def delete(%ConfigDB{} = config), do: Repo.delete(config)
|
||||
|
||||
def delete(params) do
|
||||
|
|
|
|||
@@ -19,7 +19,8 @@ defmodule Pleroma.Constants do
"context_id",
|
||||
"deleted_activity_id",
|
||||
"pleroma_internal",
|
||||
"generator"
|
||||
"generator",
|
||||
"rules"
|
||||
]
|
||||
)
|
||||
|
||||
@@ -42,6 +43,18 @@ defmodule Pleroma.Constants do
]
|
||||
)
|
||||
|
||||
const(status_object_types,
|
||||
do: [
|
||||
"Note",
|
||||
"Question",
|
||||
"Audio",
|
||||
"Video",
|
||||
"Event",
|
||||
"Article",
|
||||
"Page"
|
||||
]
|
||||
)
|
||||
|
||||
const(updatable_object_types,
|
||||
do: [
|
||||
"Note",
|
||||
@@ -64,9 +77,34 @@ defmodule Pleroma.Constants do
]
|
||||
)
|
||||
|
||||
const(allowed_user_actor_types,
|
||||
do: [
|
||||
"Person",
|
||||
"Service",
|
||||
"Group"
|
||||
]
|
||||
)
|
||||
|
||||
# basic regex, just there to weed out potential mistakes
|
||||
# https://datatracker.ietf.org/doc/html/rfc2045#section-5.1
|
||||
const(mime_regex,
|
||||
do: ~r/^[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+\/[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+(; .*)?$/
|
||||
)
|
||||
|
||||
const(upload_object_types, do: ["Document", "Image"])
|
||||
|
||||
const(activity_json_canonical_mime_type,
|
||||
do: "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""
|
||||
)
|
||||
|
||||
const(activity_json_mime_types,
|
||||
do: [
|
||||
"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"",
|
||||
"application/activity+json"
|
||||
]
|
||||
)
|
||||
|
||||
const(public_streams,
|
||||
do: ["public", "public:local", "public:media", "public:local:media"]
|
||||
)
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -57,7 +57,7 @@ defmodule Pleroma.Conversation do
  3. Bump all relevant participations to 'unread'
  """
  def create_or_bump_for(activity, opts \\ []) do
-    with true <- Pleroma.Web.ActivityPub.Visibility.is_direct?(activity),
+    with true <- Pleroma.Web.ActivityPub.Visibility.direct?(activity),
         "Create" <- activity.data["type"],
         %Object{} = object <- Object.normalize(activity, fetch: false),
         true <- object.data["type"] in ["Note", "Question"],
@@ -12,6 +12,8 @@ defmodule Pleroma.DataMigration do
  import Ecto.Changeset
  import Ecto.Query

  @type t :: %__MODULE__{}

  schema "data_migrations" do
    field(:name, :string)
    field(:state, State, default: :pending)
@@ -15,8 +15,10 @@ defmodule Pleroma.Docs.Generator do
    :code.all_loaded()
    |> Enum.filter(fn {module, _} ->
      # This shouldn't be needed as all modules are expected to have module_info/1,
-      # but in test enviroments some transient modules `:elixir_compiler_XX`
+      # but in test environments some transient modules `:elixir_compiler_XX`
      # are loaded for some reason (where XX is a random integer).
      Code.ensure_loaded(module)

      if function_exported?(module, :module_info, 1) do
        module.module_info(:attributes)
        |> Keyword.get_values(:behaviour)
@@ -18,7 +18,7 @@ defmodule Pleroma.Docs.JSON do
    :persistent_term.put(@term, Pleroma.Docs.Generator.convert_to_strings(descriptions))
  end

-  @spec compiled_descriptions :: Map.t()
+  @spec compiled_descriptions :: map()
  def compiled_descriptions do
    :persistent_term.get(@term)
  end
@@ -27,3 +27,11 @@ defenum(Pleroma.DataMigration.State,
  failed: 4,
  manual: 5
)
+
+defenum(Pleroma.User.Backup.State,
+  pending: 1,
+  running: 2,
+  complete: 3,
+  failed: 4,
+  invalid: 5
+)
@@ -0,0 +1,25 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.BareUri do
+  use Ecto.Type
+
+  def type, do: :string
+
+  def cast(uri) when is_binary(uri) do
+    parsed = URI.parse(uri)
+
+    if is_nil(parsed.scheme) do
+      :error
+    else
+      {:ok, uri}
+    end
+  end
+
+  def cast(_), do: :error
+
+  def dump(data), do: {:ok, data}
+
+  def load(data), do: {:ok, data}
+end
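A quick illustration of how the new BareUri type above behaves (a sketch only, assuming the module is compiled inside an application that ships Ecto): any binary with a URI scheme casts unchanged, anything else is rejected.

# Illustrative only; BareUri is the module added in the hunk above.
alias Pleroma.EctoType.ActivityPub.ObjectValidators.BareUri

{:ok, "https://example.com/ap/object/1"} = Ecto.Type.cast(BareUri, "https://example.com/ap/object/1")
{:ok, "dat://site.example/"} = Ecto.Type.cast(BareUri, "dat://site.example/")
:error = Ecto.Type.cast(BareUri, "not a uri")
:error = Ecto.Type.cast(BareUri, 123)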
@@ -24,6 +24,8 @@ defmodule Pleroma.Emoji do

  defstruct [:code, :file, :tags, :safe_code, :safe_file]

+  @type t :: %__MODULE__{}
+
  @doc "Build emoji struct"
  def build({code, file, tags}) do
    %__MODULE__{
@@ -49,10 +51,12 @@ defmodule Pleroma.Emoji do
  end

  @doc "Returns the path of the emoji `name`."
-  @spec get(String.t()) :: String.t() | nil
+  @spec get(String.t()) :: Pleroma.Emoji.t() | nil
  def get(name) do
    name = maybe_strip_name(name)

    case :ets.lookup(@ets, name) do
-      [{_, path}] -> path
+      [{_, emoji}] -> emoji
      _ -> nil
    end
  end
@@ -134,10 +138,61 @@ defmodule Pleroma.Emoji do
  emojis = emojis ++ regional_indicators

  for emoji <- emojis do
-    def is_unicode_emoji?(unquote(emoji)), do: true
+    def unicode?(unquote(emoji)), do: true
  end

-  def is_unicode_emoji?(_), do: false
+  def unicode?(_), do: false
+
+  @emoji_regex ~r/:[A-Za-z0-9_-]+(@.+)?:/
+
+  def custom?(s) when is_binary(s), do: Regex.match?(@emoji_regex, s)
+
+  def custom?(_), do: false
+
+  def maybe_strip_name(name) when is_binary(name), do: String.trim(name, ":")
+
+  def maybe_strip_name(name), do: name
+
+  def maybe_quote(name) when is_binary(name) do
+    if unicode?(name) do
+      name
+    else
+      if String.starts_with?(name, ":") do
+        name
+      else
+        ":#{name}:"
+      end
+    end
+  end
+
+  def maybe_quote(name), do: name
+
+  def emoji_url(%{"type" => "EmojiReact", "content" => _, "tag" => []}), do: nil
+
+  def emoji_url(%{"type" => "EmojiReact", "content" => emoji, "tag" => tags}) do
+    emoji = maybe_strip_name(emoji)
+
+    tag =
+      tags
+      |> Enum.find(fn tag ->
+        tag["type"] == "Emoji" && !is_nil(tag["name"]) && tag["name"] == emoji
+      end)
+
+    if is_nil(tag) do
+      nil
+    else
+      tag
+      |> Map.get("icon")
+      |> Map.get("url")
+    end
+  end
+
+  def emoji_url(_), do: nil
+
+  def emoji_name_with_instance(name, url) do
+    url = url |> URI.parse() |> Map.get(:host)
+    "#{name}@#{url}"
+  end

  emoji_qualification_map =
    emojis
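Expected behaviour of the emoji name helpers added above (illustrative only, assuming Pleroma.Emoji is loaded with its unicode emoji table compiled in):

Pleroma.Emoji.maybe_strip_name(":blank:")   #=> "blank"
Pleroma.Emoji.maybe_quote("blank")          #=> ":blank:"
Pleroma.Emoji.maybe_quote("\u{1F600}")      #=> "\u{1F600}" (unicode emoji are left unquoted)
Pleroma.Emoji.custom?(":blank@remote.tld:") #=> true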
@@ -15,8 +15,6 @@ defmodule Pleroma.Emoji.Loader do

  require Logger

-  @mix_env Mix.env()
-
  @type pattern :: Regex.t() | module() | String.t()
  @type patterns :: pattern() | [pattern()]
  @type group_patterns :: keyword(patterns())
@@ -59,7 +57,7 @@ defmodule Pleroma.Emoji.Loader do
    Logger.info("Found emoji packs: #{Enum.join(packs, ", ")}")

    if not Enum.empty?(files) do
-      Logger.warn(
+      Logger.warning(
        "Found files in the emoji folder. These will be ignored, please move them to a subdirectory\nFound files: #{Enum.join(files, ", ")}"
      )
    end
@@ -79,7 +77,7 @@ defmodule Pleroma.Emoji.Loader do

    # for testing emoji.txt entries we do not want exposed in normal operation
    test_emoji =
-      if @mix_env == :test do
+      if Application.get_env(:pleroma, __MODULE__)[:test_emoji] do
        load_from_file("test/config/emoji.txt", emoji_groups)
      else
        []
@@ -100,7 +100,7 @@ defmodule Pleroma.Emoji.Pack do
      {:ok, _emoji_files} =
        :zip.unzip(
          to_charlist(file.path),
-          [{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, tmp_dir}]
+          [{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, String.to_charlist(tmp_dir)}]
        )

      {_, updated_pack} =
@@ -209,7 +209,9 @@ defmodule Pleroma.Emoji.Pack do

    with :ok <- validate_shareable_packs_available(uri) do
      uri
-      |> URI.merge("/api/pleroma/emoji/packs?page=#{opts[:page]}&page_size=#{opts[:page_size]}")
+      |> URI.merge(
+        "/api/v1/pleroma/emoji/packs?page=#{opts[:page]}&page_size=#{opts[:page_size]}"
+      )
      |> http_get()
    end
  end
@@ -249,8 +251,12 @@ defmodule Pleroma.Emoji.Pack do
    uri = url |> String.trim() |> URI.parse()

    with :ok <- validate_shareable_packs_available(uri),
+         {:ok, %{"files_count" => files_count}} <-
+           uri |> URI.merge("/api/v1/pleroma/emoji/pack?name=#{name}&page_size=0") |> http_get(),
         {:ok, remote_pack} <-
-           uri |> URI.merge("/api/pleroma/emoji/pack?name=#{name}") |> http_get(),
+           uri
+           |> URI.merge("/api/v1/pleroma/emoji/pack?name=#{name}&page_size=#{files_count}")
+           |> http_get(),
         {:ok, %{sha: sha, url: url} = pack_info} <- fetch_pack_info(remote_pack, uri, name),
         {:ok, archive} <- download_archive(url, sha),
         pack <- copy_as(remote_pack, as || name),
@@ -285,6 +291,7 @@ defmodule Pleroma.Emoji.Pack do

  @spec load_pack(String.t()) :: {:ok, t()} | {:error, :file.posix()}
  def load_pack(name) do
+    name = Path.basename(name)
    pack_file = Path.join([emoji_path(), name, "pack.json"])

    with {:ok, _} <- File.stat(pack_file),
@@ -591,7 +598,7 @@ defmodule Pleroma.Emoji.Pack do
        {:ok,
         %{
           sha: sha,
-           url: URI.merge(uri, "/api/pleroma/emoji/packs/archive?name=#{name}") |> to_string()
+           url: URI.merge(uri, "/api/v1/pleroma/emoji/packs/archive?name=#{name}") |> to_string()
         }}

      %{"fallback-src" => src, "fallback-src-sha256" => sha} when is_binary(src) ->
@@ -216,9 +216,6 @@ defmodule Pleroma.Filter do

      :re ->
        ~r/\b#{phrases}\b/i

      _ ->
        nil
    end
  end
@@ -241,13 +241,13 @@ defmodule Pleroma.FollowingRelationship do
  end

  @doc """
-  For a query with joined activity,
-  keeps rows where activity's actor is followed by user -or- is NOT domain-blocked by user.
+  For a query with joined activity's actor,
+  keeps rows where actor is followed by user -or- is NOT domain-blocked by user.
  """
  def keep_following_or_not_domain_blocked(query, user) do
    where(
      query,
-      [_, activity],
+      [_, user_actor: user_actor],
      fragment(
        # "(actor's domain NOT in domain_blocks) OR (actor IS in followed AP IDs)"
        """
@@ -255,9 +255,9 @@ defmodule Pleroma.FollowingRelationship do
        ? = ANY(SELECT ap_id FROM users AS u INNER JOIN following_relationships AS fr
        ON u.id = fr.following_id WHERE fr.follower_id = ? AND fr.state = ?)
        """,
-        activity.actor,
+        user_actor.ap_id,
        ^user.domain_blocks,
-        activity.actor,
+        user_actor.ap_id,
        ^User.binary_id(user.id),
        ^accept_state_code()
    )
@@ -124,7 +124,7 @@ defmodule Pleroma.Formatter do
  end

  def markdown_to_html(text) do
-    Earmark.as_html!(text, %Earmark.Options{compact_output: true})
+    Earmark.as_html!(text, %Earmark.Options{compact_output: true, smartypants: false})
  end

  def html_escape({text, mentions, hashtags}, type) do
@@ -114,7 +114,7 @@ defmodule Pleroma.Gopher.Server.ProtocolHandler do

  def response("/notices/" <> id) do
    with %Activity{} = activity <- Activity.get_by_id(id),
-         true <- Visibility.is_public?(activity) do
+         true <- Visibility.public?(activity) do
      activities =
        ActivityPub.fetch_activities_for_context(activity.data["context"])
        |> render_activities
@@ -56,7 +56,7 @@ defmodule Pleroma.Gun.Conn do
      {:ok, conn, protocol}
    else
      error ->
-        Logger.warn(
+        Logger.warning(
          "Opening proxied connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
        )

@@ -90,7 +90,7 @@ defmodule Pleroma.Gun.Conn do
      {:ok, conn, protocol}
    else
      error ->
-        Logger.warn(
+        Logger.warning(
          "Opening socks proxied connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
        )

@@ -106,7 +106,7 @@ defmodule Pleroma.Gun.Conn do
      {:ok, conn, protocol}
    else
      error ->
-        Logger.warn(
+        Logger.warning(
          "Opening connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
        )
@@ -9,7 +9,7 @@ defmodule Pleroma.Gun.ConnectionPool.Reclaimer do

  def start_monitor do
    pid =
-      case :gen_server.start(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
+      case GenServer.start_link(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
        {:ok, pid} ->
          pid
@@ -18,10 +18,12 @@ defmodule Pleroma.Gun.ConnectionPool.WorkerSupervisor do
    )
  end

-  def start_worker(opts, retry \\ false) do
+  def start_worker(opts, last_attempt \\ false) do
    case DynamicSupervisor.start_child(__MODULE__, {Pleroma.Gun.ConnectionPool.Worker, opts}) do
      {:error, :max_children} ->
-        if retry or free_pool() == :error do
+        funs = [fn -> last_attempt end, fn -> match?(:error, free_pool()) end]
+
+        if Enum.any?(funs, fn fun -> fun.() end) do
          :telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
          {:error, :pool_full}
        else
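A small standalone sketch of the short-circuit pattern used above: Enum.any?/2 stops at the first fun returning true, so free_pool/0 is only evaluated when last_attempt is false (free_pool is stubbed here for illustration).

last_attempt = false
free_pool = fn -> :error end

funs = [fn -> last_attempt end, fn -> match?(:error, free_pool.()) end]
true = Enum.any?(funs, fn fun -> fun.() end)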
@@ -8,11 +8,14 @@ defmodule Pleroma.Helpers.MediaHelper do
  """

  alias Pleroma.HTTP
+  alias Vix.Vips.Operation
+
+  require Logger

  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)

  def missing_dependencies do
-    Enum.reduce([imagemagick: "convert", ffmpeg: "ffmpeg"], [], fn {sym, executable}, acc ->
+    Enum.reduce([ffmpeg: "ffmpeg"], [], fn {sym, executable}, acc ->
      if Pleroma.Utils.command_available?(executable) do
        acc
      else
@@ -22,141 +25,63 @@ defmodule Pleroma.Helpers.MediaHelper do
  end

  def image_resize(url, options) do
-    with executable when is_binary(executable) <- System.find_executable("convert"),
-         {:ok, args} <- prepare_image_resize_args(options),
-         {:ok, env} <- HTTP.get(url, [], pool: :media),
-         {:ok, fifo_path} <- mkfifo() do
-      args = List.flatten([fifo_path, args])
-      run_fifo(fifo_path, env, executable, args)
+    with {:ok, env} <- HTTP.get(url, [], pool: :media),
+         {:ok, resized} <-
+           Operation.thumbnail_buffer(env.body, options.max_width,
+             height: options.max_height,
+             size: :VIPS_SIZE_DOWN
+           ) do
+      if options[:format] == "png" do
+        Operation.pngsave_buffer(resized, Q: options[:quality])
+      else
+        Operation.jpegsave_buffer(resized, Q: options[:quality], interlace: true)
+      end
    else
-      nil -> {:error, {:convert, :command_not_found}}
      {:error, _} = error -> error
    end
  end

-  defp prepare_image_resize_args(
-         %{max_width: max_width, max_height: max_height, format: "png"} = options
-       ) do
-    quality = options[:quality] || 85
-    resize = Enum.join([max_width, "x", max_height, ">"])
-
-    args = [
-      "-resize",
-      resize,
-      "-quality",
-      to_string(quality),
-      "png:-"
-    ]
-
-    {:ok, args}
-  end
-
-  defp prepare_image_resize_args(%{max_width: max_width, max_height: max_height} = options) do
-    quality = options[:quality] || 85
-    resize = Enum.join([max_width, "x", max_height, ">"])
-
-    args = [
-      "-interlace",
-      "Plane",
-      "-resize",
-      resize,
-      "-quality",
-      to_string(quality),
-      "jpg:-"
-    ]
-
-    {:ok, args}
-  end
-
-  defp prepare_image_resize_args(_), do: {:error, :missing_options}
-
  # Note: video thumbnail is intentionally not resized (always has original dimensions)
  @spec video_framegrab(String.t()) :: {:ok, binary()} | {:error, any()}
  def video_framegrab(url) do
    with executable when is_binary(executable) <- System.find_executable("ffmpeg"),
         false <- @cachex.exists?(:failed_media_helper_cache, url),
         {:ok, env} <- HTTP.get(url, [], pool: :media),
-         {:ok, fifo_path} <- mkfifo(),
-         args = [
-           "-y",
-           "-i",
-           fifo_path,
-           "-vframes",
-           "1",
-           "-f",
-           "mjpeg",
-           "-loglevel",
-           "error",
-           "-"
-         ] do
-      run_fifo(fifo_path, env, executable, args)
+         {:ok, pid} <- StringIO.open(env.body) do
+      body_stream = IO.binstream(pid, 1)
+
+      task =
+        Task.async(fn ->
+          Exile.stream!(
+            [
+              executable,
+              "-i",
+              "pipe:0",
+              "-vframes",
+              "1",
+              "-f",
+              "mjpeg",
+              "pipe:1"
+            ],
+            input: body_stream,
+            ignore_epipe: true,
+            stderr: :disable
+          )
+          |> Enum.into(<<>>)
+        end)
+
+      case Task.yield(task, 5_000) do
+        nil ->
+          Task.shutdown(task)
+          @cachex.put(:failed_media_helper_cache, url, nil)
+          {:error, {:ffmpeg, :timeout}}
+
+        result ->
+          {:ok, result}
+      end
    else
      nil -> {:error, {:ffmpeg, :command_not_found}}
      {:error, _} = error -> error
    end
  end

-  defp run_fifo(fifo_path, env, executable, args) do
-    pid =
-      Port.open({:spawn_executable, executable}, [
-        :use_stdio,
-        :stream,
-        :exit_status,
-        :binary,
-        args: args
-      ])
-
-    fifo = Port.open(to_charlist(fifo_path), [:eof, :binary, :stream, :out])
-    fix = Pleroma.Helpers.QtFastStart.fix(env.body)
-    true = Port.command(fifo, fix)
-    :erlang.port_close(fifo)
-    loop_recv(pid)
-  after
-    File.rm(fifo_path)
-  end
-
-  defp mkfifo do
-    path = Path.join(System.tmp_dir!(), "pleroma-media-preview-pipe-#{Ecto.UUID.generate()}")
-
-    case System.cmd("mkfifo", [path]) do
-      {_, 0} ->
-        spawn(fifo_guard(path))
-        {:ok, path}
-
-      {_, err} ->
-        {:error, {:fifo_failed, err}}
-    end
-  end
-
-  defp fifo_guard(path) do
-    pid = self()
-
-    fn ->
-      ref = Process.monitor(pid)
-
-      receive do
-        {:DOWN, ^ref, :process, ^pid, _} ->
-          File.rm(path)
-      end
-    end
-  end
-
-  defp loop_recv(pid) do
-    loop_recv(pid, <<>>)
-  end
-
-  defp loop_recv(pid, acc) do
-    receive do
-      {^pid, {:data, data}} ->
-        loop_recv(pid, acc <> data)
-
-      {^pid, {:exit_status, 0}} ->
-        {:ok, acc}
-
-      {^pid, {:exit_status, status}} ->
-        {:error, status}
-    after
-      5000 ->
-        :erlang.port_close(pid)
-        {:error, :timeout}
-    end
-  end
end
@@ -40,16 +40,21 @@ defmodule Pleroma.Helpers.QtFastStart do
         got_mdat,
         acc
       ) do
-    full_size = (size - 8) * 8
-    <<data::bits-size(full_size), rest::bits>> = rest
+    try do
+      full_size = (size - 8) * 8
+      <<data::bits-size(full_size), rest::bits>> = rest

-    acc = [
-      {fourcc, pos, pos + size, size,
-       <<size::integer-big-size(32), fourcc::bits-size(32), data::bits>>}
-      | acc
-    ]
+      acc = [
+        {fourcc, pos, pos + size, size,
+         <<size::integer-big-size(32), fourcc::bits-size(32), data::bits>>}
+        | acc
+      ]

-    fix(rest, pos + size, got_moov || fourcc == "moov", got_mdat || fourcc == "mdat", acc)
+      fix(rest, pos + size, got_moov || fourcc == "moov", got_mdat || fourcc == "mdat", acc)
+    rescue
+      _ ->
+        :abort
+    end
  end

  defp fix(<<>>, _pos, _, _, acc) do
@@ -121,9 +126,15 @@ defmodule Pleroma.Helpers.QtFastStart do
         <<pos::integer-big-size(unquote(size)), rest::bits>>,
         acc
       ) do
-      rewrite_entries(unquote(size), offset, rest, [
-        acc | <<pos + offset::integer-big-size(unquote(size))>>
-      ])
+      rewrite_entries(
+        unquote(size),
+        offset,
+        rest,
+        acc ++
+          [
+            <<pos + offset::integer-big-size(unquote(size))>>
+          ]
+      )
    end
  end
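A standalone sketch of the difference between the two accumulator constructions in the rewrite_entries change above: `[acc | item]` builds an improper list (head is the old accumulator, tail is a bare binary), while `acc ++ [item]` keeps a proper list.

acc = [<<1>>, <<2>>]

improper = [acc | <<3>>]
proper = acc ++ [<<3>>]

true = List.improper?(improper)
false = List.improper?(proper)
[<<1>>, <<2>>, <<3>>] = proper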
@@ -6,8 +6,6 @@ defmodule Pleroma.HTML do
  # Scrubbers are compiled on boot so they can be configured in OTP releases
  #  @on_load :compile_scrubbers

-  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
-
  def compile_scrubbers do
    dir = Path.join(:code.priv_dir(:pleroma), "scrubbers")

@@ -67,22 +65,9 @@ defmodule Pleroma.HTML do
    end
  end

-  def extract_first_external_url_from_object(%{data: %{"content" => content}} = object)
+  @spec extract_first_external_url_from_object(Pleroma.Object.t()) :: String.t() | nil
+  def extract_first_external_url_from_object(%{data: %{"content" => content}})
      when is_binary(content) do
-    unless object.data["fake"] do
-      key = "URL|#{object.id}"
-
-      @cachex.fetch!(:scrubber_cache, key, fn _key ->
-        {:commit, {:ok, extract_first_external_url(content)}}
-      end)
-    else
-      {:ok, extract_first_external_url(content)}
-    end
-  end
-
-  def extract_first_external_url_from_object(_), do: {:error, :no_content}
-
-  def extract_first_external_url(content) do
    content
    |> Floki.parse_fragment!()
    |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
@@ -90,4 +75,6 @@ defmodule Pleroma.HTML do
    |> Floki.attribute("href")
    |> Enum.at(0)
  end
+
+  def extract_first_external_url_from_object(_), do: nil
end
@@ -106,6 +106,10 @@ defmodule Pleroma.HTTP do
    [Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool]
  end

+  defp adapter_middlewares({Tesla.Adapter.Finch, _}) do
+    [Tesla.Middleware.FollowRedirects]
+  end
+
  defp adapter_middlewares(_) do
    if Pleroma.Config.get(:env) == :test do
      # Emulate redirects in test env, which are handled by adapters in other environments
@@ -15,8 +15,8 @@ defmodule Pleroma.HTTP.AdapterHelper do
  require Logger

  @type proxy ::
-          {Connection.host(), pos_integer()}
-          | {Connection.proxy_type(), Connection.host(), pos_integer()}
+          {host(), pos_integer()}
+          | {proxy_type(), host(), pos_integer()}

  @callback options(keyword(), URI.t()) :: keyword()

@@ -70,15 +70,15 @@ defmodule Pleroma.HTTP.AdapterHelper do
      {:ok, parse_host(host), port}
    else
      {_, _} ->
-        Logger.warn("Parsing port failed #{inspect(proxy)}")
+        Logger.warning("Parsing port failed #{inspect(proxy)}")
        {:error, :invalid_proxy_port}

      :error ->
-        Logger.warn("Parsing port failed #{inspect(proxy)}")
+        Logger.warning("Parsing port failed #{inspect(proxy)}")
        {:error, :invalid_proxy_port}

      _ ->
-        Logger.warn("Parsing proxy failed #{inspect(proxy)}")
+        Logger.warning("Parsing proxy failed #{inspect(proxy)}")
        {:error, :invalid_proxy}
    end
  end
@@ -88,7 +88,7 @@ defmodule Pleroma.HTTP.AdapterHelper do
      {:ok, type, parse_host(host), port}
    else
      _ ->
-        Logger.warn("Parsing proxy failed #{inspect(proxy)}")
+        Logger.warning("Parsing proxy failed #{inspect(proxy)}")
        {:error, :invalid_proxy}
    end
  end
@@ -54,12 +54,12 @@ defmodule Pleroma.HTTP.RequestBuilder do
  @doc """
  Add optional parameters to the request
  """
-  @spec add_param(Request.t(), atom(), atom(), any()) :: Request.t()
+  @spec add_param(Request.t(), atom(), atom() | String.t(), any()) :: Request.t()
  def add_param(request, :query, :query, values), do: %{request | query: values}

  def add_param(request, :body, :body, value), do: %{request | body: value}

-  def add_param(request, :body, key, value) do
+  def add_param(request, :body, key, value) when is_binary(key) do
    request
    |> Map.put(:body, Multipart.new())
    |> Map.update!(
@@ -6,7 +6,11 @@ defmodule Pleroma.HTTP.WebPush do
  @moduledoc false

  def post(url, payload, headers, options \\ []) do
-    list_headers = Map.to_list(headers)
+    list_headers =
+      headers
+      |> Map.to_list()
+      |> Kernel.++([{"content-type", "octet-stream"}])

    Pleroma.HTTP.post(url, payload, list_headers, options)
  end
end
@@ -7,16 +7,15 @@ defmodule Pleroma.Instances do

  alias Pleroma.Instances.Instance

-  def filter_reachable(urls_or_hosts), do: Instance.filter_reachable(urls_or_hosts)
+  defdelegate filter_reachable(urls_or_hosts), to: Instance

-  def reachable?(url_or_host), do: Instance.reachable?(url_or_host)
+  defdelegate reachable?(url_or_host), to: Instance

-  def set_reachable(url_or_host), do: Instance.set_reachable(url_or_host)
+  defdelegate set_reachable(url_or_host), to: Instance

-  def set_unreachable(url_or_host, unreachable_since \\ nil),
-    do: Instance.set_unreachable(url_or_host, unreachable_since)
+  defdelegate set_unreachable(url_or_host, unreachable_since \\ nil), to: Instance

-  def get_consistently_unreachable, do: Instance.get_consistently_unreachable()
+  defdelegate get_consistently_unreachable, to: Instance

  def set_consistently_unreachable(url_or_host),
    do: set_unreachable(url_or_host, reachability_datetime_threshold())
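The defdelegate form above generates the same one-line wrapper functions the old code wrote by hand. A minimal standalone sketch (module names are hypothetical, for illustration only):

defmodule DelegateExample do
  defmodule Impl do
    def reachable?(_url_or_host), do: true
  end

  # equivalent to: def reachable?(url_or_host), do: Impl.reachable?(url_or_host)
  defdelegate reachable?(url_or_host), to: Impl
end

true = DelegateExample.reachable?("https://example.com")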
@@ -7,6 +7,7 @@ defmodule Pleroma.Instances.Instance do

  alias Pleroma.Instances
  alias Pleroma.Instances.Instance
+  alias Pleroma.Maps
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.Workers.BackgroundWorker
@@ -24,6 +25,14 @@ defmodule Pleroma.Instances.Instance do
    field(:favicon, :string)
    field(:favicon_updated_at, :naive_datetime)

+    embeds_one :metadata, Pleroma.Instances.Metadata, primary_key: false do
+      field(:software_name, :string)
+      field(:software_version, :string)
+      field(:software_repository, :string)
+    end
+
+    field(:metadata_updated_at, :utc_datetime)
+
    timestamps()
  end

@@ -31,11 +40,17 @@ defmodule Pleroma.Instances.Instance do

  def changeset(struct, params \\ %{}) do
    struct
-    |> cast(params, [:host, :unreachable_since, :favicon, :favicon_updated_at])
+    |> cast(params, __schema__(:fields) -- [:metadata])
+    |> cast_embed(:metadata, with: &metadata_changeset/2)
    |> validate_required([:host])
    |> unique_constraint(:host)
  end

+  def metadata_changeset(struct, params \\ %{}) do
+    struct
+    |> cast(params, [:software_name, :software_version, :software_repository])
+  end
+
  def filter_reachable([]), do: %{}

  def filter_reachable(urls_or_hosts) when is_list(urls_or_hosts) do
@@ -82,13 +97,9 @@ defmodule Pleroma.Instances.Instance do
  def reachable?(url_or_host) when is_binary(url_or_host), do: true

  def set_reachable(url_or_host) when is_binary(url_or_host) do
-    with host <- host(url_or_host),
-         %Instance{} = existing_record <- Repo.get_by(Instance, %{host: host}) do
-      {:ok, _instance} =
-        existing_record
-        |> changeset(%{unreachable_since: nil})
-        |> Repo.update()
-    end
+    %Instance{host: host(url_or_host)}
+    |> changeset(%{unreachable_since: nil})
+    |> Repo.insert(on_conflict: {:replace, [:unreachable_since]}, conflict_target: :host)
  end

  def set_reachable(_), do: {:error, nil}
@@ -162,7 +173,7 @@ defmodule Pleroma.Instances.Instance do
    end
  rescue
    e ->
-      Logger.warn("Instance.get_or_update_favicon(\"#{host}\") error: #{inspect(e)}")
+      Logger.warning("Instance.get_or_update_favicon(\"#{host}\") error: #{inspect(e)}")
      nil
  end

@@ -190,7 +201,7 @@ defmodule Pleroma.Instances.Instance do
    end
  rescue
    e ->
-      Logger.warn(
+      Logger.warning(
        "Instance.scrape_favicon(\"#{to_string(instance_uri)}\") error: #{inspect(e)}"
      )

@@ -198,6 +209,89 @@ defmodule Pleroma.Instances.Instance do
    end
  end

+  def get_or_update_metadata(%URI{host: host} = instance_uri) do
+    existing_record = Repo.get_by(Instance, %{host: host})
+    now = NaiveDateTime.utc_now()
+
+    if existing_record && existing_record.metadata_updated_at &&
+         NaiveDateTime.diff(now, existing_record.metadata_updated_at) < 86_400 do
+      existing_record.metadata
+    else
+      metadata = scrape_metadata(instance_uri)
+
+      if existing_record do
+        existing_record
+        |> changeset(%{metadata: metadata, metadata_updated_at: now})
+        |> Repo.update()
+      else
+        %Instance{}
+        |> changeset(%{host: host, metadata: metadata, metadata_updated_at: now})
+        |> Repo.insert()
+      end
+
+      metadata
+    end
+  end
+
+  defp get_nodeinfo_uri(well_known) do
+    links = Map.get(well_known, "links", [])
+
+    nodeinfo21 =
+      Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.1"))["href"]
+
+    nodeinfo20 =
+      Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.0"))["href"]
+
+    cond do
+      is_binary(nodeinfo21) -> {:ok, nodeinfo21}
+      is_binary(nodeinfo20) -> {:ok, nodeinfo20}
+      true -> {:error, :no_links}
+    end
+  end
+
+  defp scrape_metadata(%URI{} = instance_uri) do
+    try do
+      with {_, true} <- {:reachable, reachable?(instance_uri.host)},
+           {:ok, %Tesla.Env{body: well_known_body}} <-
+             instance_uri
+             |> URI.merge("/.well-known/nodeinfo")
+             |> to_string()
+             |> Pleroma.HTTP.get([{"accept", "application/json"}]),
+           {:ok, well_known_json} <- Jason.decode(well_known_body),
+           {:ok, nodeinfo_uri} <- get_nodeinfo_uri(well_known_json),
+           {:ok, %Tesla.Env{body: nodeinfo_body}} <-
+             Pleroma.HTTP.get(nodeinfo_uri, [{"accept", "application/json"}]),
+           {:ok, nodeinfo} <- Jason.decode(nodeinfo_body) do
+        # Can extract more metadata from NodeInfo but need to be careful about it's size,
+        # can't just dump the entire thing
+        software = Map.get(nodeinfo, "software", %{})
+
+        %{
+          software_name: software["name"],
+          software_version: software["version"]
+        }
+        |> Maps.put_if_present(:software_repository, software["repository"])
+      else
+        {:reachable, false} ->
+          Logger.debug(
+            "Instance.scrape_metadata(\"#{to_string(instance_uri)}\") ignored unreachable host"
+          )

+          nil
+
+        _ ->
+          nil
+      end
+    rescue
+      e ->
+        Logger.warning(
+          "Instance.scrape_metadata(\"#{to_string(instance_uri)}\") error: #{inspect(e)}"
+        )
+
+        nil
+    end
+  end
+
  @doc """
  Deletes all users from an instance in a background task, thus also deleting
  all of those users' activities and notifications.
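For reference, the shape of the .well-known/nodeinfo document that the new scrape_metadata/1 walks, and how get_nodeinfo_uri/1 prefers schema 2.1 over 2.0 (hypothetical host, illustrative only):

well_known = %{
  "links" => [
    %{
      "rel" => "http://nodeinfo.diaspora.software/ns/schema/2.0",
      "href" => "https://instance.example/nodeinfo/2.0.json"
    },
    %{
      "rel" => "http://nodeinfo.diaspora.software/ns/schema/2.1",
      "href" => "https://instance.example/nodeinfo/2.1.json"
    }
  ]
}

links = Map.get(well_known, "links", [])

# 2.1 wins over 2.0, mirroring the cond in get_nodeinfo_uri/1 above
nodeinfo21 =
  Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.1"))["href"]

"https://instance.example/nodeinfo/2.1.json" = nodeinfo21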
@@ -20,7 +20,7 @@ defmodule Pleroma.Maintenance do
      "full" ->
        Logger.info("Running VACUUM FULL.")

-        Logger.warn(
+        Logger.warning(
          "Re-packing your entire database may take a while and will consume extra disk space during the process."
        )
@@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
+# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Maps do
@@ -18,4 +18,17 @@ defmodule Pleroma.Maps do
  rescue
    _ -> data
  end
+
+  def filter_empty_values(data) do
+    # TODO: Change to Map.filter in Elixir 1.13+
+    data
+    |> Enum.filter(fn
+      {_k, nil} -> false
+      {_k, ""} -> false
+      {_k, []} -> false
+      {_k, %{} = v} -> Map.keys(v) != []
+      {_k, _v} -> true
+    end)
+    |> Map.new()
+  end
end
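What the new filter_empty_values/1 is expected to do, as a standalone sketch of the same filter logic: nil, "", [] and empty maps are dropped, everything else is kept.

data = %{"name" => "lain", "bio" => "", "fields" => [], "pleroma" => %{}, "tags" => ["a"]}

filtered =
  data
  |> Enum.filter(fn
    {_k, nil} -> false
    {_k, ""} -> false
    {_k, []} -> false
    {_k, %{} = v} -> Map.keys(v) != []
    {_k, _v} -> true
  end)
  |> Map.new()

%{"name" => "lain", "tags" => ["a"]} = filtered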
@@ -77,7 +77,7 @@ defmodule Pleroma.MFA do
      {:ok, codes}
    else
      {:error, msg} ->
-        %{error: msg}
+        {:error, msg}
    end
  end
@@ -14,6 +14,7 @@ defmodule Pleroma.MFA.TOTP do
  @doc """
  https://github.com/google/google-authenticator/wiki/Key-Uri-Format
  """
+  @spec provisioning_uri(String.t(), String.t(), list()) :: String.t()
  def provisioning_uri(secret, label, opts \\ []) do
    query =
      %{
@@ -27,7 +28,7 @@ defmodule Pleroma.MFA.TOTP do
      |> URI.encode_query()

    %URI{scheme: "otpauth", host: "totp", path: "/" <> label, query: query}
-    |> URI.to_string()
+    |> to_string()
  end

  defp default_period, do: Config.get(@config_ns ++ [:period])
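The switch from URI.to_string/1 to Kernel.to_string/1 above is behaviour-preserving, since URI implements the String.Chars protocol. Rough shape of the otpauth URI this builds, with illustrative values:

secret = "JBSWY3DPEHPK3PXP"
label = "pleroma:lain@example.com"
query = URI.encode_query(%{secret: secret, issuer: "Pleroma", digits: 6, period: 30})

uri = %URI{scheme: "otpauth", host: "totp", path: "/" <> label, query: query} |> to_string()
# e.g. "otpauth://totp/pleroma:lain@example.com?digits=6&issuer=Pleroma&period=30&secret=..."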
@@ -100,7 +100,7 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
    |> where([_o, hashtags_objects], is_nil(hashtags_objects.object_id))
  end

-  @spec transfer_object_hashtags(Map.t()) :: {:noop | :ok | :error, integer()}
+  @spec transfer_object_hashtags(map()) :: {:noop | :ok | :error, integer()}
  defp transfer_object_hashtags(object) do
    embedded_tags = if Map.has_key?(object, :tag), do: object.tag, else: object.data["tag"]
    hashtags = Object.object_data_hashtags(%{"tag" => embedded_tags})
@@ -73,7 +73,7 @@ defmodule Pleroma.Migrators.Support.BaseMigrator do
      data_migration.state == :manual or data_migration.name in manual_migrations ->
        message = "Data migration is in manual execution or manual fix mode."
        update_status(:manual, message)
-        Logger.warn("#{__MODULE__}: #{message}")
+        Logger.warning("#{__MODULE__}: #{message}")

      data_migration.state == :complete ->
        on_complete(data_migration)
@@ -109,7 +109,7 @@ defmodule Pleroma.Migrators.Support.BaseMigrator do
      Putting data migration to manual fix mode. Try running `#{__MODULE__}.retry_failed/0`.
      """

-      Logger.warn("#{__MODULE__}: #{message}")
+      Logger.warning("#{__MODULE__}: #{message}")
      update_status(:manual, message)
      on_complete(data_migration())

@@ -125,7 +125,7 @@ defmodule Pleroma.Migrators.Support.BaseMigrator do

  defp on_complete(data_migration) do
    if data_migration.feature_lock || feature_state() == :disabled do
-      Logger.warn(
+      Logger.warning(
        "#{__MODULE__}: migration complete but feature is locked; consider enabling."
      )

@@ -188,10 +188,11 @@ defmodule Pleroma.Migrators.Support.BaseMigrator do
  end

  defp fault_rate do
-    with failures_count when is_integer(failures_count) <- failures_count() do
+    with failures_count when is_integer(failures_count) <- failures_count(),
+         true <- failures_count > 0 do
      failures_count / Enum.max([get_stat(:affected_count, 0), 1])
    else
-      _ -> :error
+      _ -> 0
    end
  end
@@ -121,7 +121,7 @@ defmodule Pleroma.ModerationLog do

  defp prepare_log_data(attrs), do: attrs

-  @spec insert_log(log_params()) :: {:ok, ModerationLog} | {:error, any}
+  @spec insert_log(log_params()) :: {:ok, ModerationLog.t()} | {:error, any}
  def insert_log(%{actor: %User{}, subject: subjects, permission: permission} = attrs) do
    data =
      attrs
@@ -248,7 +248,8 @@ defmodule Pleroma.ModerationLog do
    |> insert_log_entry_with_message()
  end

-  @spec insert_log_entry_with_message(ModerationLog) :: {:ok, ModerationLog} | {:error, any}
+  @spec insert_log_entry_with_message(ModerationLog.t()) ::
+          {:ok, ModerationLog.t()} | {:error, any}
  defp insert_log_entry_with_message(entry) do
    entry.data["message"]
    |> put_in(get_log_entry_message(entry))
@@ -73,6 +73,7 @@ defmodule Pleroma.Notification do
      pleroma:report
      reblog
+      poll
      status
    }

  def changeset(%Notification{} = notification, attrs) do
@@ -88,7 +89,7 @@ defmodule Pleroma.Notification do
      where: q.seen == true,
      select: type(q.id, :string),
      limit: 1,
-      order_by: [desc: :id]
+      order_by: fragment("? desc nulls last", q.id)
    )
  end

@@ -137,7 +138,7 @@ defmodule Pleroma.Notification do
    blocked_ap_ids = opts[:blocked_users_ap_ids] || User.blocked_users_ap_ids(user)

    query
-    |> where([n, a], a.actor not in ^blocked_ap_ids)
+    |> where([..., user_actor: user_actor], user_actor.ap_id not in ^blocked_ap_ids)
    |> FollowingRelationship.keep_following_or_not_domain_blocked(user)
  end

@@ -148,7 +149,7 @@ defmodule Pleroma.Notification do
    blocker_ap_ids = User.incoming_relationships_ungrouped_ap_ids(user, [:block])

    query
-    |> where([n, a], a.actor not in ^blocker_ap_ids)
+    |> where([..., user_actor: user_actor], user_actor.ap_id not in ^blocker_ap_ids)
    end
  end

@@ -161,7 +162,7 @@ defmodule Pleroma.Notification do
      opts[:notification_muted_users_ap_ids] || User.notification_muted_users_ap_ids(user)

    query
-    |> where([n, a], a.actor not in ^notification_muted_ap_ids)
+    |> where([..., user_actor: user_actor], user_actor.ap_id not in ^notification_muted_ap_ids)
    |> join(:left, [n, a], tm in ThreadMute,
      on: tm.user_id == ^user.id and tm.context == fragment("?->>'context'", a.data),
      as: :thread_mute
@@ -280,15 +281,10 @@ defmodule Pleroma.Notification do
        select: n.id
      )

-    {:ok, %{ids: {_, notification_ids}}} =
-      Multi.new()
-      |> Multi.update_all(:ids, query, set: [seen: true, updated_at: NaiveDateTime.utc_now()])
-      |> Marker.multi_set_last_read_id(user, "notifications")
-      |> Repo.transaction()
-
-    for_user_query(user)
-    |> where([n], n.id in ^notification_ids)
-    |> Repo.all()
+    Multi.new()
+    |> Multi.update_all(:ids, query, set: [seen: true, updated_at: NaiveDateTime.utc_now()])
+    |> Marker.multi_set_last_read_id(user, "notifications")
+    |> Repo.transaction()
  end

  @spec read_one(User.t(), String.t()) ::
@@ -299,10 +295,6 @@ defmodule Pleroma.Notification do
    |> Multi.update(:update, changeset(notification, %{seen: true}))
    |> Marker.multi_set_last_read_id(user, "notifications")
    |> Repo.transaction()
-    |> case do
-      {:ok, %{update: notification}} -> {:ok, notification}
-      {:error, :update, changeset, _} -> {:error, changeset}
-    end
  end
end

@@ -361,37 +353,38 @@ defmodule Pleroma.Notification do
    end
  end

-  @spec create_notifications(Activity.t(), keyword()) :: {:ok, [Notification.t()] | []}
-  def create_notifications(activity, options \\ [])
+  @spec create_notifications(Activity.t()) :: {:ok, [Notification.t()] | []}
+  def create_notifications(activity)

-  def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity, options) do
+  def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
    object = Object.normalize(activity, fetch: false)

    if object && object.data["type"] == "Answer" do
      {:ok, []}
    else
-      do_create_notifications(activity, options)
+      do_create_notifications(activity)
    end
  end

-  def create_notifications(%Activity{data: %{"type" => type}} = activity, options)
+  def create_notifications(%Activity{data: %{"type" => type}} = activity)
      when type in ["Follow", "Like", "Announce", "Move", "EmojiReact", "Flag", "Update"] do
-    do_create_notifications(activity, options)
+    do_create_notifications(activity)
  end

-  def create_notifications(_, _), do: {:ok, []}
+  def create_notifications(_), do: {:ok, []}

-  defp do_create_notifications(%Activity{} = activity, options) do
-    do_send = Keyword.get(options, :do_send, true)
-
-    {enabled_receivers, disabled_receivers} = get_notified_from_activity(activity)
-    potential_receivers = enabled_receivers ++ disabled_receivers
+  defp do_create_notifications(%Activity{} = activity) do
+    enabled_receivers = get_notified_from_activity(activity)
+    enabled_subscribers = get_notified_subscribers_from_activity(activity)

    notifications =
-      Enum.map(potential_receivers, fn user ->
-        do_send = do_send && user in enabled_receivers
-        create_notification(activity, user, do_send: do_send)
-      end)
+      (Enum.map(enabled_receivers, fn user ->
+         create_notification(activity, user)
+       end) ++
+         Enum.map(enabled_subscribers -- enabled_receivers, fn user ->
+           create_notification(activity, user, type: "status")
+         end))
      |> Enum.reject(&is_nil/1)

    {:ok, notifications}
@@ -450,7 +443,6 @@ defmodule Pleroma.Notification do

  # TODO move to sql, too.
  def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
-    do_send = Keyword.get(opts, :do_send, true)
    type = Keyword.get(opts, :type, type_from_activity(activity))

    unless skip?(activity, user, opts) do
@@ -465,11 +457,6 @@ defmodule Pleroma.Notification do
        |> Marker.multi_set_last_read_id(user, "notifications")
        |> Repo.transaction()

-      if do_send do
-        Streamer.stream(["user", "user:notification"], notification)
-        Push.send(notification)
-      end
-
      notification
    end
  end
@@ -527,13 +514,28 @@ defmodule Pleroma.Notification do
      |> exclude_relationship_restricted_ap_ids(activity)
      |> exclude_thread_muter_ap_ids(activity)

-    notification_enabled_users =
-      Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
-
-    {notification_enabled_users, potential_receivers -- notification_enabled_users}
+    Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
  end

-  def get_notified_from_activity(_, _local_only), do: {[], []}
+  def get_notified_from_activity(_, _local_only), do: []
+
+  def get_notified_subscribers_from_activity(activity, local_only \\ true)
+
+  def get_notified_subscribers_from_activity(
+        %Activity{data: %{"type" => "Create"}} = activity,
+        local_only
+      ) do
+    notification_enabled_ap_ids =
+      []
+      |> Utils.maybe_notify_subscribers(activity)
+
+    potential_receivers =
+      User.get_users_from_set(notification_enabled_ap_ids, local_only: local_only)
+
+    Enum.filter(potential_receivers, fn u -> u.ap_id in notification_enabled_ap_ids end)
+  end
+
+  def get_notified_subscribers_from_activity(_, _), do: []

  # For some activities, only notify the author of the object
  def get_potential_receiver_ap_ids(%{data: %{"type" => type, "object" => object_id}})
@@ -576,7 +578,6 @@ defmodule Pleroma.Notification do
    []
    |> Utils.maybe_notify_to_recipients(activity)
    |> Utils.maybe_notify_mentioned_recipients(activity)
-    |> Utils.maybe_notify_subscribers(activity)
    |> Utils.maybe_notify_followers(activity)
    |> Enum.uniq()
  end
@@ -643,6 +644,7 @@ defmodule Pleroma.Notification do
  def skip?(%Activity{} = activity, %User{} = user, opts) do
    [
      :self,
+      :internal,
      :invisible,
      :block_from_strangers,
      :recently_followed,
@@ -662,6 +664,12 @@ defmodule Pleroma.Notification do
    end
  end

+  def skip?(:internal, %Activity{} = activity, _user, _opts) do
+    actor = activity.data["actor"]
+    user = User.get_cached_by_ap_id(actor)
+    User.internal?(user)
+  end
+
  def skip?(:invisible, %Activity{} = activity, _user, _opts) do
    actor = activity.data["actor"]
    user = User.get_cached_by_ap_id(actor)
@@ -748,4 +756,12 @@ defmodule Pleroma.Notification do
    )
    |> Repo.update_all(set: [seen: true])
  end
+
+  @spec send(list(Notification.t())) :: :ok
+  def send(notifications) do
+    Enum.each(notifications, fn notification ->
+      Streamer.stream(["user", "user:notification"], notification)
+      Push.send(notification)
+    end)
+  end
end
@@ -177,7 +177,10 @@ defmodule Pleroma.Object do
        ap_id

      Keyword.get(options, :fetch) ->
-        Fetcher.fetch_object_from_id!(ap_id, options)
+        case Fetcher.fetch_object_from_id(ap_id, options) do
+          {:ok, object} -> object
+          _ -> nil
+        end

      true ->
        get_cached_by_ap_id(ap_id)
@@ -239,17 +242,17 @@ defmodule Pleroma.Object do
         {:ok, _} <- invalid_object_cache(object) do
      cleanup_attachments(
        Config.get([:instance, :cleanup_attachments]),
-        %{"object" => object}
+        object
      )

      {:ok, object, deleted_activity}
    end
  end

-  @spec cleanup_attachments(boolean(), %{required(:object) => map()}) ::
+  @spec cleanup_attachments(boolean(), Object.t()) ::
          {:ok, Oban.Job.t() | nil}
-  def cleanup_attachments(true, %{"object" => _} = params) do
-    AttachmentsCleanupWorker.enqueue("cleanup_attachments", params)
+  def cleanup_attachments(true, %Object{} = object) do
+    AttachmentsCleanupWorker.enqueue("cleanup_attachments", %{"object" => object})
  end

  def cleanup_attachments(_, _), do: {:ok, nil}
@@ -328,6 +331,52 @@ defmodule Pleroma.Object do
    end
  end

+  def increase_quotes_count(ap_id) do
+    Object
+    |> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id)))
+    |> update([o],
+      set: [
+        data:
+          fragment(
+            """
+            safe_jsonb_set(?, '{quotesCount}',
+              (coalesce((?->>'quotesCount')::int, 0) + 1)::varchar::jsonb, true)
+            """,
+            o.data,
+            o.data
+          )
+      ]
+    )
+    |> Repo.update_all([])
+    |> case do
+      {1, [object]} -> set_cache(object)
+      _ -> {:error, "Not found"}
+    end
+  end
+
+  def decrease_quotes_count(ap_id) do
+    Object
+    |> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id)))
+    |> update([o],
+      set: [
+        data:
+          fragment(
+            """
+            safe_jsonb_set(?, '{quotesCount}',
+              (greatest(0, (?->>'quotesCount')::int - 1))::varchar::jsonb, true)
+            """,
+            o.data,
+            o.data
+          )
+      ]
+    )
+    |> Repo.update_all([])
+    |> case do
+      {1, [object]} -> set_cache(object)
+      _ -> {:error, "Not found"}
+    end
+  end
+
  def increase_vote_count(ap_id, name, actor) do
    with %Object{} = object <- Object.normalize(ap_id, fetch: false),
         "Question" <- object.data["type"] do
@@ -425,4 +474,30 @@ defmodule Pleroma.Object do
  end

  def object_data_hashtags(_), do: []
+
+  def get_emoji_reactions(object) do
+    reactions = object.data["reactions"]
+
+    if is_list(reactions) or is_map(reactions) do
+      reactions
+      |> Enum.map(fn
+        [_emoji, users, _maybe_url] = item when is_list(users) ->
+          item
+
+        [emoji, users] when is_list(users) ->
+          [emoji, users, nil]
+
+        # This case is here to process the Map situation, which will happen
+        # only with the legacy two-value format.
+        {emoji, users} when is_list(users) ->
+          [emoji, users, nil]
+
+        _ ->
+          nil
+      end)
+      |> Enum.reject(&is_nil/1)
+    else
+      []
+    end
+  end
end
@@ -8,77 +8,30 @@ defmodule Pleroma.Object.Fetcher do
  alias Pleroma.Maps
  alias Pleroma.Object
  alias Pleroma.Object.Containment
  alias Pleroma.Repo
  alias Pleroma.Signature
  alias Pleroma.Web.ActivityPub.InternalFetchActor
  alias Pleroma.Web.ActivityPub.MRF
  alias Pleroma.Web.ActivityPub.ObjectValidator
  alias Pleroma.Web.ActivityPub.Pipeline
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.Federator

  require Logger
  require Pleroma.Constants

-  defp touch_changeset(changeset) do
-    updated_at =
-      NaiveDateTime.utc_now()
-      |> NaiveDateTime.truncate(:second)
-
-    Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
-  end
-
-  defp maybe_reinject_internal_fields(%{data: %{} = old_data}, new_data) do
-    has_history? = fn
-      %{"formerRepresentations" => %{"orderedItems" => list}} when is_list(list) -> true
-      _ -> false
-    end
-
-    internal_fields = Map.take(old_data, Pleroma.Constants.object_internal_fields())
-
-    remote_history_exists? = has_history?.(new_data)
-
-    # If the remote history exists, we treat that as the only source of truth.
-    new_data =
-      if has_history?.(old_data) and not remote_history_exists? do
-        Map.put(new_data, "formerRepresentations", old_data["formerRepresentations"])
-      else
-        new_data
-      end
-
-    # If the remote does not have history information, we need to manage it ourselves
-    new_data =
-      if not remote_history_exists? do
-        changed? =
-          Pleroma.Constants.status_updatable_fields()
-          |> Enum.any?(fn field -> Map.get(old_data, field) != Map.get(new_data, field) end)
-
-        %{updated_object: updated_object} =
-          new_data
-          |> Object.Updater.maybe_update_history(old_data,
-            updated: changed?,
-            use_history_in_new_object?: false
-          )
-
-        updated_object
-      else
-        new_data
-      end
-
-    Map.merge(new_data, internal_fields)
-  end
-
-  defp maybe_reinject_internal_fields(_, new_data), do: new_data
-
  @spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()}
-  defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data) do
+  defp reinject_object(%Object{data: %{}} = object, new_data) do
    Logger.debug("Reinjecting object #{new_data["id"]}")

-    with data <- maybe_reinject_internal_fields(object, new_data),
-         {:ok, data, _} <- ObjectValidator.validate(data, %{}),
-         changeset <- Object.change(object, %{data: data}),
-         changeset <- touch_changeset(changeset),
-         {:ok, object} <- Repo.insert_or_update(changeset),
-         {:ok, object} <- Object.set_cache(object) do
-      {:ok, object}
+    with {:ok, new_data, _} <- ObjectValidator.validate(new_data, %{}),
+         {:ok, new_data} <- MRF.filter(new_data),
+         {:ok, new_object, _} <-
+           Object.Updater.do_update_and_invalidate_cache(
+             object,
+             new_data,
+             _touch_changeset? = true
+           ) do
+      {:ok, new_object}
    else
      e ->
        Logger.error("Error while processing object: #{inspect(e)}")
@@ -86,20 +39,11 @@ defmodule Pleroma.Object.Fetcher do
    end
  end

-  defp reinject_object(%Object{} = object, new_data) do
-    Logger.debug("Reinjecting object #{new_data["id"]}")
-
-    with new_data <- Transmogrifier.fix_object(new_data),
-         data <- maybe_reinject_internal_fields(object, new_data),
-         changeset <- Object.change(object, %{data: data}),
-         changeset <- touch_changeset(changeset),
-         {:ok, object} <- Repo.insert_or_update(changeset),
-         {:ok, object} <- Object.set_cache(object) do
+  defp reinject_object(_, new_data) do
+    with {:ok, object, _} <- Pipeline.common_pipeline(new_data, local: false) do
      {:ok, object}
    else
-      e ->
-        Logger.error("Error while processing object: #{inspect(e)}")
-        {:error, e}
+      e -> e
    end
  end

@@ -128,20 +72,25 @@ defmodule Pleroma.Object.Fetcher do
         {:object, data, Object.normalize(activity, fetch: false)} do
      {:ok, object}
    else
-      {:allowed_depth, false} ->
-        {:error, "Max thread distance exceeded."}
+      {:allowed_depth, false} = e ->
+        log_fetch_error(id, e)
+        {:error, :allowed_depth}

-      {:containment, _} ->
-        {:error, "Object containment failed."}
+      {:containment, reason} = e ->
+        log_fetch_error(id, e)
+        {:error, reason}

-      {:transmogrifier, {:error, {:reject, e}}} ->
-        {:reject, e}
+      {:transmogrifier, {:error, {:reject, reason}}} = e ->
+        log_fetch_error(id, e)
+        {:reject, reason}

-      {:transmogrifier, {:reject, e}} ->
-        {:reject, e}
+      {:transmogrifier, {:reject, reason}} = e ->
+        log_fetch_error(id, e)
+        {:reject, reason}

-      {:transmogrifier, _} = e ->
-        {:error, e}
+      {:transmogrifier, reason} = e ->
+        log_fetch_error(id, e)
+        {:error, reason}

      {:object, data, nil} ->
        reinject_object(%Object{}, data)
@@ -152,14 +101,21 @@ defmodule Pleroma.Object.Fetcher do
      {:fetch_object, %Object{} = object} ->
        {:ok, object}

-      {:fetch, {:error, error}} ->
-        {:error, error}
+      {:fetch, {:error, reason}} = e ->
+        log_fetch_error(id, e)
+        {:error, reason}

      e ->
-        e
+        log_fetch_error(id, e)
+        {:error, e}
    end
  end

+  defp log_fetch_error(id, error) do
+    Logger.metadata(object: id)
+    Logger.error("Object rejected while fetching #{id} #{inspect(error)}")
+  end
+
  defp prepare_activity_params(data) do
    %{
      "type" => "Create",
@@ -173,26 +129,6 @@ defmodule Pleroma.Object.Fetcher do
    |> Maps.put_if_present("bcc", data["bcc"])
  end

-  def fetch_object_from_id!(id, options \\ []) do
-    with {:ok, object} <- fetch_object_from_id(id, options) do
-      object
-    else
-      {:error, %Tesla.Mock.Error{}} ->
-        nil
-
-      {:error, "Object has been deleted"} ->
-        nil
-
-      {:reject, reason} ->
-        Logger.info("Rejected #{id} while fetching: #{inspect(reason)}")
-        nil
-
-      e ->
-        Logger.error("Error while fetching #{id}: #{inspect(e)}")
-        nil
-    end
-  end
-
  defp make_signature(id, date) do
    uri = URI.parse(id)

@@ -283,8 +219,11 @@ defmodule Pleroma.Object.Fetcher do
        {:error, {:content_type, nil}}
      end

+      {:ok, %{status: code}} when code in [401, 403] ->
+        {:error, :forbidden}
+
      {:ok, %{status: code}} when code in [404, 410] ->
-        {:error, "Object has been deleted"}
+        {:error, :not_found}

      {:error, e} ->
        {:error, e}
@ -5,6 +5,9 @@
defmodule Pleroma.Object.Updater do
require Pleroma.Constants

alias Pleroma.Object
alias Pleroma.Repo

def update_content_fields(orig_object_data, updated_object) do
Pleroma.Constants.status_updatable_fields()
|> Enum.reduce(

@ -97,12 +100,14 @@ defmodule Pleroma.Object.Updater do
end

defp maybe_update_poll(to_be_updated, updated_object) do
choice_key = fn data ->
if Map.has_key?(data, "anyOf"), do: "anyOf", else: "oneOf"
choice_key = fn
%{"anyOf" => [_ | _]} -> "anyOf"
%{"oneOf" => [_ | _]} -> "oneOf"
_ -> nil
end

with true <- to_be_updated["type"] == "Question",
key <- choice_key.(updated_object),
key when not is_nil(key) <- choice_key.(updated_object),
true <- key == choice_key.(to_be_updated),
orig_choices <- to_be_updated[key] |> Enum.map(&Map.drop(&1, ["replies"])),
new_choices <- updated_object[key] |> Enum.map(&Map.drop(&1, ["replies"])),

@ -237,4 +242,49 @@ defmodule Pleroma.Object.Updater do
{:history_items, e} -> e
end
end

defp maybe_touch_changeset(changeset, true) do
updated_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.truncate(:second)

Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
end

defp maybe_touch_changeset(changeset, _), do: changeset

def do_update_and_invalidate_cache(orig_object, updated_object, touch_changeset? \\ false) do
orig_object_ap_id = updated_object["id"]
orig_object_data = orig_object.data

%{
updated_data: updated_object_data,
updated: updated,
used_history_in_new_object?: used_history_in_new_object?
} = make_new_object_data_from_update_object(orig_object_data, updated_object)

changeset =
orig_object
|> Repo.preload(:hashtags)
|> Object.change(%{data: updated_object_data})
|> maybe_touch_changeset(touch_changeset?)

with {:ok, new_object} <- Repo.update(changeset),
{:ok, _} <- Object.invalid_object_cache(new_object),
{:ok, _} <- Object.set_cache(new_object),
# The metadata/utils.ex uses the object id for the cache.
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(new_object.id) do
if used_history_in_new_object? do
with create_activity when not is_nil(create_activity) <-
Pleroma.Activity.get_create_by_object_ap_id(orig_object_ap_id),
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(create_activity.id) do
nil
else
_ -> nil
end
end

{:ok, new_object, updated}
end
end
end
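A hypothetical usage sketch for the new do_update_and_invalidate_cache/3 added above (the bound variable names are illustrative):

# orig_object is the stored %Pleroma.Object{}, updated_object is the incoming
# "object" map from an Update activity; passing true also bumps :updated_at.
{:ok, new_object, updated?} =
  Pleroma.Object.Updater.do_update_and_invalidate_cache(orig_object, updated_object, true)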
@ -61,15 +61,16 @@ defmodule Pleroma.Pagination do
|> Repo.all()
end

@spec paginate(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
def paginate(query, options, method \\ :keyset, table_binding \\ nil)

def paginate(list, options, _method, _table_binding) when is_list(list) do
@spec paginate_list(list(), keyword()) :: list()
def paginate_list(list, options) do
offset = options[:offset] || 0
limit = options[:limit] || 0
Enum.slice(list, offset, limit)
end

@spec paginate(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
def paginate(query, options, method \\ :keyset, table_binding \\ nil)

def paginate(query, options, :keyset, table_binding) do
query
|> restrict(:min_id, options, table_binding)
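Illustrative call for the paginate_list/2 helper split out above:

# Slices an in-memory list the same way offset pagination treats a query.
Pleroma.Pagination.paginate_list(users, offset: 20, limit: 10)
# => elements 21..30 of the list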
@ -28,7 +28,7 @@ defmodule Pleroma.Password.Pbkdf2 do

iterations = String.to_integer(iterations)

digest = String.to_atom(digest)
digest = String.to_existing_atom(digest)

binary_hash =
KeyGenerator.generate(password, salt, digest: digest, iterations: iterations, length: 64)
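Why this one-line change matters (illustrative, not from the diff): String.to_existing_atom/1 refuses to mint new atoms from untrusted digest names, so a malformed hash string can no longer grow the atom table:

String.to_existing_atom("sha512")   # => :sha512, when that atom is already known to the VM
String.to_existing_atom("bogus123") # ** (ArgumentError) if no such atom exists yet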
49 lib/pleroma/prom_ex.ex Normal file
@ -0,0 +1,49 @@
defmodule Pleroma.PromEx do
use PromEx, otp_app: :pleroma

alias PromEx.Plugins

@impl true
def plugins do
[
# PromEx built in plugins
Plugins.Application,
Plugins.Beam,
{Plugins.Phoenix, router: Pleroma.Web.Router, endpoint: Pleroma.Web.Endpoint},
Plugins.Ecto,
Plugins.Oban
# Plugins.PhoenixLiveView,
# Plugins.Absinthe,
# Plugins.Broadway,

# Add your own PromEx metrics plugins
# Pleroma.Users.PromExPlugin
]
end

@impl true
def dashboard_assigns do
[
datasource_id: Pleroma.Config.get([Pleroma.PromEx, :datasource]),
default_selected_interval: "30s"
]
end

@impl true
def dashboards do
[
# PromEx built in Grafana dashboards
{:prom_ex, "application.json"},
{:prom_ex, "beam.json"},
{:prom_ex, "phoenix.json"},
{:prom_ex, "ecto.json"},
{:prom_ex, "oban.json"}
# {:prom_ex, "phoenix_live_view.json"},
# {:prom_ex, "absinthe.json"},
# {:prom_ex, "broadway.json"},

# Add your dashboard definitions here with the format: {:otp_app, "path_in_priv"}
# {:pleroma, "/grafana_dashboards/user_metrics.json"}
]
end
end
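A hypothetical config entry wiring up the Grafana datasource id read by dashboard_assigns/0 above (the value is an example):

config :pleroma, Pleroma.PromEx, datasource: "Prometheus"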
@ -55,12 +55,6 @@ defmodule Pleroma.ReleaseTasks do

{:error, term} when is_binary(term) ->
IO.puts(:stderr, "The database for #{inspect(@repo)} couldn't be created: #{term}")

{:error, term} ->
IO.puts(
:stderr,
"The database for #{inspect(@repo)} couldn't be created: #{inspect(term)}"
)
end
end
end
@ -11,8 +11,6 @@ defmodule Pleroma.Repo do
import Ecto.Query
require Logger

defmodule Instrumenter, do: use(Prometheus.EctoInstrumenter)

@doc """
Dynamically loads the repository url from the
DATABASE_URL environment variable.
@ -23,8 +23,8 @@ defmodule Pleroma.ReportNote do
timestamps()
end

@spec create(FlakeId.Ecto.CompatType.t(), FlakeId.Ecto.CompatType.t(), String.t()) ::
{:ok, ReportNote.t()} | {:error, Changeset.t()}
@spec create(Ecto.UUID.t(), Ecto.UUID.t(), String.t()) ::
{:ok, ReportNote.t()} | {:error, Ecto.Changeset.t()}
def create(user_id, activity_id, content) do
attrs = %{
user_id: user_id,

@ -38,8 +38,8 @@ defmodule Pleroma.ReportNote do
|> Repo.insert()
end

@spec destroy(FlakeId.Ecto.CompatType.t()) ::
{:ok, ReportNote.t()} | {:error, Changeset.t()}
@spec destroy(Ecto.UUID.t()) ::
{:ok, ReportNote.t()} | {:error, Ecto.Changeset.t()}
def destroy(id) do
from(r in ReportNote, where: r.id == ^id)
|> Repo.one()
@ -8,7 +8,7 @@ defmodule Pleroma.ReverseProxy do
~w(if-unmodified-since if-none-match) ++ @range_headers
@resp_cache_headers ~w(etag date last-modified)
@keep_resp_headers @resp_cache_headers ++
~w(content-length content-type content-disposition content-encoding) ++
~w(content-type content-disposition content-encoding) ++
~w(content-range accept-ranges vary)
@default_cache_control_header "public, max-age=1209600"
@valid_resp_codes [200, 206, 304]

@ -81,16 +81,16 @@ defmodule Pleroma.ReverseProxy do
import Plug.Conn

@type option() ::
{:max_read_duration, :timer.time() | :infinity}
{:max_read_duration, non_neg_integer() | :infinity}
| {:max_body_length, non_neg_integer() | :infinity}
| {:failed_request_ttl, :timer.time() | :infinity}
| {:http, []}
| {:failed_request_ttl, non_neg_integer() | :infinity}
| {:http, keyword()}
| {:req_headers, [{String.t(), String.t()}]}
| {:resp_headers, [{String.t(), String.t()}]}
| {:inline_content_types, boolean() | [String.t()]}
| {:inline_content_types, boolean() | list(String.t())}
| {:redirect_on_failure, boolean()}

@spec call(Plug.Conn.t(), url :: String.t(), [option()]) :: Plug.Conn.t()
@spec call(Plug.Conn.t(), String.t(), list(option())) :: Plug.Conn.t()
def call(_conn, _url, _opts \\ [])

def call(conn = %{method: method}, url, opts) when method in @methods do

@ -192,7 +192,7 @@ defmodule Pleroma.ReverseProxy do
halt(conn)

{:error, error, conn} ->
Logger.warn(
Logger.warning(
"#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}"
)

@ -388,8 +388,6 @@ defmodule Pleroma.ReverseProxy do

defp body_size_constraint(_, _), do: :ok

defp check_read_duration(nil = _duration, max), do: check_read_duration(@max_read_duration, max)

defp check_read_duration(duration, max)
when is_integer(duration) and is_integer(max) and max > 0 do
if duration > max do

@ -407,10 +405,6 @@ defmodule Pleroma.ReverseProxy do
{:ok, previous_duration + duration}
end

defp increase_read_duration(_) do
{:ok, :no_duration_limit, :no_duration_limit}
end

defp client, do: Pleroma.ReverseProxy.Client.Wrapper

defp track_failed_url(url, error, opts) do
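An illustrative call using the retyped options from the @type above (all values are examples, not defaults from the diff):

Pleroma.ReverseProxy.call(conn, "https://example.com/media/image.png",
  max_read_duration: 30_000,
  max_body_length: 25 * 1_048_576,
  redirect_on_failure: true
)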
68 lib/pleroma/rule.ex Normal file
@ -0,0 +1,68 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Rule do
use Ecto.Schema

import Ecto.Changeset
import Ecto.Query

alias Pleroma.Repo
alias Pleroma.Rule

schema "rules" do
field(:priority, :integer, default: 0)
field(:text, :string)
field(:hint, :string)

timestamps()
end

def changeset(%Rule{} = rule, params \\ %{}) do
rule
|> cast(params, [:priority, :text, :hint])
|> validate_required([:text])
end

def query do
Rule
|> order_by(asc: :priority)
|> order_by(asc: :id)
end

def get(ids) when is_list(ids) do
from(r in __MODULE__, where: r.id in ^ids)
|> Repo.all()
end

def get(id), do: Repo.get(__MODULE__, id)

def exists?(id) do
from(r in __MODULE__, where: r.id == ^id)
|> Repo.exists?()
end

def create(params) do
{:ok, rule} =
%Rule{}
|> changeset(params)
|> Repo.insert()

rule
end

def update(params, id) do
{:ok, rule} =
get(id)
|> changeset(params)
|> Repo.update()

rule
end

def delete(id) do
get(id)
|> Repo.delete()
end
end
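Illustrative usage of the new Pleroma.Rule module (values are examples):

rule = Pleroma.Rule.create(%{text: "No spam", hint: "Unsolicited ads are removed", priority: 1})
rules = Pleroma.Rule.query() |> Pleroma.Repo.all()
Pleroma.Rule.update(%{hint: "Updated hint"}, rule.id)
Pleroma.Rule.delete(rule.id)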
@ -6,7 +6,6 @@ defmodule Pleroma.ScheduledActivity do
|
|||
use Ecto.Schema
|
||||
|
||||
alias Ecto.Multi
|
||||
alias Pleroma.Config
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.ScheduledActivity
|
||||
alias Pleroma.User
|
||||
|
|
@ -20,6 +19,8 @@ defmodule Pleroma.ScheduledActivity do
|
|||
|
||||
@min_offset :timer.minutes(5)
|
||||
|
||||
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
|
||||
|
||||
schema "scheduled_activities" do
|
||||
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
|
||||
field(:scheduled_at, :naive_datetime)
|
||||
|
|
@ -40,7 +41,11 @@ defmodule Pleroma.ScheduledActivity do
|
|||
%{changes: %{params: %{"media_ids" => media_ids} = params}} = changeset
|
||||
)
|
||||
when is_list(media_ids) do
|
||||
media_attachments = Utils.attachments_from_ids(%{media_ids: media_ids})
|
||||
media_attachments =
|
||||
Utils.attachments_from_ids(
|
||||
%{media_ids: media_ids},
|
||||
User.get_cached_by_id(changeset.data.user_id)
|
||||
)
|
||||
|
||||
params =
|
||||
params
|
||||
|
|
@ -83,7 +88,7 @@ defmodule Pleroma.ScheduledActivity do
|
|||
|> where([sa], type(sa.scheduled_at, :date) == type(^scheduled_at, :date))
|
||||
|> select([sa], count(sa.id))
|
||||
|> Repo.one()
|
||||
|> Kernel.>=(Config.get([ScheduledActivity, :daily_user_limit]))
|
||||
|> Kernel.>=(@config_impl.get([ScheduledActivity, :daily_user_limit]))
|
||||
end
|
||||
|
||||
def exceeds_total_user_limit?(user_id) do
|
||||
|
|
@ -91,7 +96,7 @@ defmodule Pleroma.ScheduledActivity do
|
|||
|> where(user_id: ^user_id)
|
||||
|> select([sa], count(sa.id))
|
||||
|> Repo.one()
|
||||
|> Kernel.>=(Config.get([ScheduledActivity, :total_user_limit]))
|
||||
|> Kernel.>=(@config_impl.get([ScheduledActivity, :total_user_limit]))
|
||||
end
|
||||
|
||||
def far_enough?(scheduled_at) when is_binary(scheduled_at) do
|
||||
|
|
@ -119,7 +124,7 @@ defmodule Pleroma.ScheduledActivity do
|
|||
def create(%User{} = user, attrs) do
|
||||
Multi.new()
|
||||
|> Multi.insert(:scheduled_activity, new(user, attrs))
|
||||
|> maybe_add_jobs(Config.get([ScheduledActivity, :enabled]))
|
||||
|> maybe_add_jobs(@config_impl.get([ScheduledActivity, :enabled]))
|
||||
|> Repo.transaction()
|
||||
|> transaction_response
|
||||
end
|
||||
|
|
|
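A note on the @config_impl pattern introduced in the ScheduledActivity hunks above (and reused in several modules later in this diff): the config module is resolved with Application.compile_env/3 so tests can swap in a mock. A hypothetical test config entry (the mock module name is an assumption for illustration):

# config/test.exs
config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.ConfigMock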
|||
21 lib/pleroma/search.ex Normal file
@ -0,0 +1,21 @@
defmodule Pleroma.Search do
alias Pleroma.Workers.SearchIndexingWorker

def add_to_index(%Pleroma.Activity{id: activity_id}) do
SearchIndexingWorker.enqueue("add_to_index", %{"activity" => activity_id})
end

def remove_from_index(%Pleroma.Object{id: object_id}) do
SearchIndexingWorker.enqueue("remove_from_index", %{"object" => object_id})
end

def search(query, options) do
search_module = Pleroma.Config.get([Pleroma.Search, :module])
search_module.search(options[:for_user], query, options)
end

def healthcheck_endpoints do
search_module = Pleroma.Config.get([Pleroma.Search, :module])
search_module.healthcheck_endpoints
end
end
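The facade above resolves the backend from config at call time, so switching engines is a configuration change. A hedged sketch (the Meilisearch backend appears later in this diff):

# config.exs
config :pleroma, Pleroma.Search, module: Pleroma.Search.DatabaseSearch
# or: config :pleroma, Pleroma.Search, module: Pleroma.Search.Meilisearch

# at runtime
Pleroma.Search.search("cofe", for_user: user, limit: 20)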
@ -1,9 +1,10 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Activity.Search do
|
||||
defmodule Pleroma.Search.DatabaseSearch do
|
||||
alias Pleroma.Activity
|
||||
alias Pleroma.Config
|
||||
alias Pleroma.Object.Fetcher
|
||||
alias Pleroma.Pagination
|
||||
alias Pleroma.User
|
||||
|
|
@ -13,25 +14,21 @@ defmodule Pleroma.Activity.Search do
|
|||
|
||||
import Ecto.Query
|
||||
|
||||
@behaviour Pleroma.Search.SearchBackend
|
||||
|
||||
@impl true
|
||||
def search(user, search_query, options \\ []) do
|
||||
index_type = if Pleroma.Config.get([:database, :rum_enabled]), do: :rum, else: :gin
|
||||
index_type = if Config.get([:database, :rum_enabled]), do: :rum, else: :gin
|
||||
limit = Enum.min([Keyword.get(options, :limit), 40])
|
||||
offset = Keyword.get(options, :offset, 0)
|
||||
author = Keyword.get(options, :author)
|
||||
|
||||
search_function =
|
||||
if :persistent_term.get({Pleroma.Repo, :postgres_version}) >= 11 do
|
||||
:websearch
|
||||
else
|
||||
:plain
|
||||
end
|
||||
|
||||
try do
|
||||
Activity
|
||||
|> Activity.with_preloaded_object()
|
||||
|> Activity.restrict_deactivated_users()
|
||||
|> restrict_public(user)
|
||||
|> query_with(index_type, search_query, search_function)
|
||||
|> query_with(index_type, search_query, :websearch)
|
||||
|> maybe_restrict_local(user)
|
||||
|> maybe_restrict_author(author)
|
||||
|> maybe_restrict_blocked(user)
|
||||
|
|
@ -45,6 +42,15 @@ defmodule Pleroma.Activity.Search do
|
|||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def add_to_index(_activity), do: :ok
|
||||
|
||||
@impl true
|
||||
def remove_from_index(_object), do: :ok
|
||||
|
||||
@impl true
|
||||
def healthcheck_endpoints, do: nil
|
||||
|
||||
def maybe_restrict_author(query, %User{} = author) do
|
||||
Activity.Queries.by_author(query, author)
|
||||
end
|
||||
|
|
@ -136,8 +142,8 @@ defmodule Pleroma.Activity.Search do
|
|||
)
|
||||
end
|
||||
|
||||
defp maybe_restrict_local(q, user) do
|
||||
limit = Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
|
||||
def maybe_restrict_local(q, user) do
|
||||
limit = Config.get([:instance, :limit_to_local_content], :unauthenticated)
|
||||
|
||||
case {limit, user} do
|
||||
{:all, _} -> restrict_local(q)
|
||||
|
|
@ -149,7 +155,7 @@ defmodule Pleroma.Activity.Search do
|
|||
|
||||
defp restrict_local(q), do: where(q, local: true)
|
||||
|
||||
defp maybe_fetch(activities, user, search_query) do
|
||||
def maybe_fetch(activities, user, search_query) do
|
||||
with true <- Regex.match?(~r/https?:/, search_query),
|
||||
{:ok, object} <- Fetcher.fetch_object_from_id(search_query),
|
||||
%Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
|
||||
86 lib/pleroma/search/healthcheck.ex Normal file
@ -0,0 +1,86 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
defmodule Pleroma.Search.Healthcheck do
|
||||
@doc """
|
||||
Monitors health of search backend to control processing of events based on health and availability.
|
||||
"""
|
||||
use GenServer
|
||||
require Logger
|
||||
|
||||
@queue :search_indexing
|
||||
@tick :timer.seconds(5)
|
||||
@timeout :timer.seconds(2)
|
||||
|
||||
def start_link(_) do
|
||||
GenServer.start_link(__MODULE__, [], name: __MODULE__)
|
||||
end
|
||||
|
||||
@impl true
|
||||
def init(_) do
|
||||
state = %{healthy: false}
|
||||
{:ok, state, {:continue, :start}}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_continue(:start, state) do
|
||||
tick()
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_info(:check, state) do
|
||||
urls = Pleroma.Search.healthcheck_endpoints()
|
||||
|
||||
new_state =
|
||||
if check(urls) do
|
||||
Oban.resume_queue(queue: @queue)
|
||||
Map.put(state, :healthy, true)
|
||||
else
|
||||
Oban.pause_queue(queue: @queue)
|
||||
Map.put(state, :healthy, false)
|
||||
end
|
||||
|
||||
maybe_log_state_change(state, new_state)
|
||||
|
||||
tick()
|
||||
{:noreply, new_state}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_call(:state, _from, state) do
|
||||
{:reply, state, state, :hibernate}
|
||||
end
|
||||
|
||||
def state, do: GenServer.call(__MODULE__, :state)
|
||||
|
||||
def check([]), do: true
|
||||
|
||||
def check(urls) when is_list(urls) do
|
||||
Enum.all?(
|
||||
urls,
|
||||
fn url ->
|
||||
case Pleroma.HTTP.get(url, [], recv_timeout: @timeout) do
|
||||
{:ok, %{status: 200}} -> true
|
||||
_ -> false
|
||||
end
|
||||
end
|
||||
)
|
||||
end
|
||||
|
||||
def check(_), do: true
|
||||
|
||||
defp tick do
|
||||
Process.send_after(self(), :check, @tick)
|
||||
end
|
||||
|
||||
defp maybe_log_state_change(%{healthy: true}, %{healthy: false}) do
|
||||
Logger.error("Pausing Oban queue #{@queue} due to search backend healthcheck failure")
|
||||
end
|
||||
|
||||
defp maybe_log_state_change(%{healthy: false}, %{healthy: true}) do
|
||||
Logger.info("Resuming Oban queue #{@queue} due to search backend healthcheck pass")
|
||||
end
|
||||
|
||||
defp maybe_log_state_change(_, _), do: :ok
|
||||
end
|
||||
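A minimal sketch, assuming the usual OTP setup, of how the Healthcheck GenServer defined above could be attached to a supervision tree and inspected:

children = [Pleroma.Search.Healthcheck]
{:ok, _sup} = Supervisor.start_link(children, strategy: :one_for_one)

Pleroma.Search.Healthcheck.state()
# => %{healthy: true} once every configured endpoint returns HTTP 200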
192 lib/pleroma/search/meilisearch.ex Normal file
@ -0,0 +1,192 @@
|
|||
defmodule Pleroma.Search.Meilisearch do
|
||||
require Logger
|
||||
require Pleroma.Constants
|
||||
|
||||
alias Pleroma.Activity
|
||||
alias Pleroma.Config.Getting, as: Config
|
||||
|
||||
import Pleroma.Search.DatabaseSearch
|
||||
import Ecto.Query
|
||||
|
||||
@behaviour Pleroma.Search.SearchBackend
|
||||
|
||||
defp meili_headers do
|
||||
private_key = Config.get([Pleroma.Search.Meilisearch, :private_key])
|
||||
|
||||
[{"Content-Type", "application/json"}] ++
|
||||
if is_nil(private_key), do: [], else: [{"Authorization", "Bearer #{private_key}"}]
|
||||
end
|
||||
|
||||
def meili_get(path) do
|
||||
endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
|
||||
|
||||
result =
|
||||
Pleroma.HTTP.get(
|
||||
Path.join(endpoint, path),
|
||||
meili_headers()
|
||||
)
|
||||
|
||||
with {:ok, res} <- result do
|
||||
{:ok, Jason.decode!(res.body)}
|
||||
end
|
||||
end
|
||||
|
||||
def meili_post(path, params) do
|
||||
endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
|
||||
|
||||
result =
|
||||
Pleroma.HTTP.post(
|
||||
Path.join(endpoint, path),
|
||||
Jason.encode!(params),
|
||||
meili_headers()
|
||||
)
|
||||
|
||||
with {:ok, res} <- result do
|
||||
{:ok, Jason.decode!(res.body)}
|
||||
end
|
||||
end
|
||||
|
||||
def meili_put(path, params) do
|
||||
endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
|
||||
|
||||
result =
|
||||
Pleroma.HTTP.request(
|
||||
:put,
|
||||
Path.join(endpoint, path),
|
||||
Jason.encode!(params),
|
||||
meili_headers(),
|
||||
[]
|
||||
)
|
||||
|
||||
with {:ok, res} <- result do
|
||||
{:ok, Jason.decode!(res.body)}
|
||||
end
|
||||
end
|
||||
|
||||
def meili_delete(path) do
|
||||
endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
|
||||
|
||||
with {:ok, _} <-
|
||||
Pleroma.HTTP.request(
|
||||
:delete,
|
||||
Path.join(endpoint, path),
|
||||
"",
|
||||
meili_headers(),
|
||||
[]
|
||||
) do
|
||||
:ok
|
||||
else
|
||||
_ -> {:error, "Could not remove from index"}
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def search(user, query, options \\ []) do
|
||||
limit = Enum.min([Keyword.get(options, :limit), 40])
|
||||
offset = Keyword.get(options, :offset, 0)
|
||||
author = Keyword.get(options, :author)
|
||||
|
||||
res =
|
||||
meili_post(
|
||||
"/indexes/objects/search",
|
||||
%{q: query, offset: offset, limit: limit}
|
||||
)
|
||||
|
||||
with {:ok, result} <- res do
|
||||
hits = result["hits"] |> Enum.map(& &1["ap"])
|
||||
|
||||
try do
|
||||
hits
|
||||
|> Activity.create_by_object_ap_id()
|
||||
|> Activity.with_preloaded_object()
|
||||
|> Activity.restrict_deactivated_users()
|
||||
|> maybe_restrict_local(user)
|
||||
|> maybe_restrict_author(author)
|
||||
|> maybe_restrict_blocked(user)
|
||||
|> maybe_fetch(user, query)
|
||||
|> order_by([object: obj], desc: obj.data["published"])
|
||||
|> Pleroma.Repo.all()
|
||||
rescue
|
||||
_ -> maybe_fetch([], user, query)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def object_to_search_data(object) do
|
||||
# Only index public or unlisted Notes
|
||||
if not is_nil(object) and object.data["type"] == "Note" and
|
||||
not is_nil(object.data["content"]) and
|
||||
(Pleroma.Constants.as_public() in object.data["to"] or
|
||||
Pleroma.Constants.as_public() in object.data["cc"]) and
|
||||
object.data["content"] not in ["", "."] do
|
||||
data = object.data
|
||||
|
||||
content_str =
|
||||
case data["content"] do
|
||||
[nil | rest] -> to_string(rest)
|
||||
str -> str
|
||||
end
|
||||
|
||||
content =
|
||||
with {:ok, scrubbed} <-
|
||||
FastSanitize.Sanitizer.scrub(content_str, Pleroma.HTML.Scrubber.SearchIndexing),
|
||||
trimmed <- String.trim(scrubbed) do
|
||||
trimmed
|
||||
end
|
||||
|
||||
# Make sure we have a non-empty string
|
||||
if content != "" do
|
||||
{:ok, published, _} = DateTime.from_iso8601(data["published"])
|
||||
|
||||
%{
|
||||
id: object.id,
|
||||
content: content,
|
||||
ap: data["id"],
|
||||
published: published |> DateTime.to_unix()
|
||||
}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def add_to_index(activity) do
|
||||
maybe_search_data = object_to_search_data(activity.object)
|
||||
|
||||
if activity.data["type"] == "Create" and maybe_search_data do
|
||||
result =
|
||||
meili_put(
|
||||
"/indexes/objects/documents",
|
||||
[maybe_search_data]
|
||||
)
|
||||
|
||||
with {:ok, %{"status" => "enqueued"}} <- result do
|
||||
# Added successfully
|
||||
:ok
|
||||
else
|
||||
_ ->
|
||||
# There was an error, report it
|
||||
Logger.error("Failed to add activity #{activity.id} to index: #{inspect(result)}")
|
||||
{:error, result}
|
||||
end
|
||||
else
|
||||
# The post isn't something we can search, that's ok
|
||||
:ok
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def remove_from_index(object) do
|
||||
meili_delete("/indexes/objects/documents/#{object.id}")
|
||||
end
|
||||
|
||||
@impl true
|
||||
def healthcheck_endpoints do
|
||||
endpoint =
|
||||
Config.get([Pleroma.Search.Meilisearch, :url])
|
||||
|> URI.parse()
|
||||
|> Map.put(:path, "/health")
|
||||
|> URI.to_string()
|
||||
|
||||
[endpoint]
|
||||
end
|
||||
end
|
||||
32 lib/pleroma/search/search_backend.ex Normal file
@ -0,0 +1,32 @@
defmodule Pleroma.Search.SearchBackend do
@doc """
Search statuses with a query, restricting to only those the user should have access to.
"""
@callback search(user :: Pleroma.User.t(), query :: String.t(), options :: [any()]) :: [
Pleroma.Activity.t()
]

@doc """
Add the object associated with the activity to the search index.

The whole activity is passed, to allow filtering on things such as scope.
"""
@callback add_to_index(activity :: Pleroma.Activity.t()) :: :ok | {:error, any()}

@doc """
Remove the object from the index.

Just the object, as opposed to the whole activity, is passed, since the object
is what contains the actual content and there is no need for filtering when removing
from index.
"""
@callback remove_from_index(object :: Pleroma.Object.t()) :: :ok | {:error, any()}

@doc """
Healthcheck endpoints of search backend infrastructure to monitor for controlling
processing of jobs in the Oban queue.

It is expected a 200 response is healthy and other responses are unhealthy.
"""
@callback healthcheck_endpoints :: list() | nil
end
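A hypothetical no-op backend showing the minimum a module must implement to satisfy the behaviour above:

defmodule MyApp.Search.NullBackend do
  @behaviour Pleroma.Search.SearchBackend

  @impl true
  def search(_user, _query, _options), do: []

  @impl true
  def add_to_index(_activity), do: :ok

  @impl true
  def remove_from_index(_object), do: :ok

  @impl true
  def healthcheck_endpoints, do: nil
end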
@ -27,7 +27,7 @@ defmodule Pleroma.Signature do

_ ->
case Pleroma.Web.WebFinger.finger(maybe_ap_id) do
%{"ap_id" => ap_id} -> {:ok, ap_id}
{:ok, %{"ap_id" => ap_id}} -> {:ok, ap_id}
_ -> {:error, maybe_ap_id}
end
end
@ -59,7 +59,7 @@ defmodule Pleroma.Telemetry.Logger do
_,
_
) do
Logger.error(fn ->
Logger.debug(fn ->
"Connection pool had to refuse opening a connection to #{key} due to connection limit exhaustion"
end)
end

@ -70,7 +70,7 @@ defmodule Pleroma.Telemetry.Logger do
%{key: key},
_
) do
Logger.warn(fn ->
Logger.warning(fn ->
"Pool worker for #{key}: Client #{inspect(client_pid)} died before releasing the connection with #{inspect(reason)}"
end)
end

@ -81,7 +81,7 @@ defmodule Pleroma.Telemetry.Logger do
%{key: key, protocol: :http},
_
) do
Logger.info(fn ->
Logger.debug(fn ->
"Pool worker for #{key}: #{length(clients)} clients are using an HTTP1 connection at the same time, head-of-line blocking might occur."
end)
end
@ -34,7 +34,6 @@ defmodule Pleroma.Upload do
|
|||
|
||||
"""
|
||||
alias Ecto.UUID
|
||||
alias Pleroma.Config
|
||||
alias Pleroma.Maps
|
||||
alias Pleroma.Web.ActivityPub.Utils
|
||||
require Logger
|
||||
|
|
@ -52,6 +51,7 @@ defmodule Pleroma.Upload do
|
|||
| {:size_limit, nil | non_neg_integer()}
|
||||
| {:uploader, module()}
|
||||
| {:filters, [module()]}
|
||||
| {:actor, String.t()}
|
||||
|
||||
@type t :: %__MODULE__{
|
||||
id: String.t(),
|
||||
|
|
@ -76,6 +76,8 @@ defmodule Pleroma.Upload do
|
|||
:path
|
||||
]
|
||||
|
||||
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
|
||||
|
||||
defp get_description(upload) do
|
||||
case {upload.description, Pleroma.Config.get([Pleroma.Upload, :default_description])} do
|
||||
{description, _} when is_binary(description) -> description
|
||||
|
|
@ -85,7 +87,7 @@ defmodule Pleroma.Upload do
|
|||
end
|
||||
end
|
||||
|
||||
@spec store(source, options :: [option()]) :: {:ok, Map.t()} | {:error, any()}
|
||||
@spec store(source, options :: [option()]) :: {:ok, map()} | {:error, any()}
|
||||
@doc "Store a file. If using a `Plug.Upload{}` as the source, be sure to use `Majic.Plug` to ensure its content_type and filename is correct."
|
||||
def store(upload, opts \\ []) do
|
||||
opts = get_opts(opts)
|
||||
|
|
@ -174,7 +176,7 @@ defmodule Pleroma.Upload do
|
|||
defp prepare_upload(%{img: "data:image/" <> image_data}, opts) do
|
||||
parsed = Regex.named_captures(~r/(?<filetype>jpeg|png|gif);base64,(?<data>.*)/, image_data)
|
||||
data = Base.decode64!(parsed["data"], ignore: :whitespace)
|
||||
hash = Base.encode16(:crypto.hash(:sha256, data), lower: true)
|
||||
hash = Base.encode16(:crypto.hash(:sha256, data), case: :upper)
|
||||
|
||||
with :ok <- check_binary_size(data, opts.size_limit),
|
||||
tmp_path <- tempfile_for_image(data),
|
||||
|
|
@ -237,25 +239,29 @@ defmodule Pleroma.Upload do
|
|||
""
|
||||
end
|
||||
|
||||
[base_url, path]
|
||||
|> Path.join()
|
||||
if String.contains?(base_url, Pleroma.Uploaders.IPFS.placeholder()) do
|
||||
String.replace(base_url, Pleroma.Uploaders.IPFS.placeholder(), path)
|
||||
else
|
||||
[base_url, path]
|
||||
|> Path.join()
|
||||
end
|
||||
end
|
||||
|
||||
defp url_from_spec(_upload, _base_url, {:url, url}), do: url
|
||||
|
||||
def base_url do
|
||||
uploader = Config.get([Pleroma.Upload, :uploader])
|
||||
upload_base_url = Config.get([Pleroma.Upload, :base_url])
|
||||
public_endpoint = Config.get([uploader, :public_endpoint])
|
||||
uploader = @config_impl.get([Pleroma.Upload, :uploader])
|
||||
upload_base_url = @config_impl.get([Pleroma.Upload, :base_url])
|
||||
public_endpoint = @config_impl.get([uploader, :public_endpoint])
|
||||
|
||||
case uploader do
|
||||
Pleroma.Uploaders.Local ->
|
||||
upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
|
||||
|
||||
Pleroma.Uploaders.S3 ->
|
||||
bucket = Config.get([Pleroma.Uploaders.S3, :bucket])
|
||||
truncated_namespace = Config.get([Pleroma.Uploaders.S3, :truncated_namespace])
|
||||
namespace = Config.get([Pleroma.Uploaders.S3, :bucket_namespace])
|
||||
bucket = @config_impl.get([Pleroma.Uploaders.S3, :bucket])
|
||||
truncated_namespace = @config_impl.get([Pleroma.Uploaders.S3, :truncated_namespace])
|
||||
namespace = @config_impl.get([Pleroma.Uploaders.S3, :bucket_namespace])
|
||||
|
||||
bucket_with_namespace =
|
||||
cond do
|
||||
|
|
@ -275,6 +281,9 @@ defmodule Pleroma.Upload do
|
|||
Path.join([upload_base_url, bucket_with_namespace])
|
||||
end
|
||||
|
||||
Pleroma.Uploaders.IPFS ->
|
||||
@config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url])
|
||||
|
||||
_ ->
|
||||
public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
|
||||
end
|
||||
|
|
|
|||
|
|
@ -38,9 +38,9 @@ defmodule Pleroma.Upload.Filter do
|
|||
{:ok, :noop} ->
|
||||
filter(rest, upload)
|
||||
|
||||
error ->
|
||||
Logger.error("#{__MODULE__}: Filter #{filter} failed: #{inspect(error)}")
|
||||
error
|
||||
{:error, e} ->
|
||||
Logger.error("#{__MODULE__}: Filter #{filter} failed: #{inspect(e)}")
|
||||
{:error, e}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,27 +8,28 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
|
|||
"""
|
||||
require Logger
|
||||
|
||||
alias Vix.Vips.Image
|
||||
alias Vix.Vips.Operation
|
||||
|
||||
@behaviour Pleroma.Upload.Filter
|
||||
|
||||
@spec filter(Pleroma.Upload.t()) ::
|
||||
{:ok, :filtered, Pleroma.Upload.t()} | {:ok, :noop} | {:error, String.t()}
|
||||
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _} = upload) do
|
||||
try do
|
||||
image =
|
||||
file
|
||||
|> Mogrify.open()
|
||||
|> Mogrify.verbose()
|
||||
{:ok, image} = Image.new_from_file(file)
|
||||
{width, height} = {Image.width(image), Image.height(image)}
|
||||
|
||||
upload =
|
||||
upload
|
||||
|> Map.put(:width, image.width)
|
||||
|> Map.put(:height, image.height)
|
||||
|> Map.put(:blurhash, get_blurhash(file))
|
||||
|> Map.put(:width, width)
|
||||
|> Map.put(:height, height)
|
||||
|> Map.put(:blurhash, get_blurhash(image))
|
||||
|
||||
{:ok, :filtered, upload}
|
||||
rescue
|
||||
e in ErlangError ->
|
||||
Logger.warn("#{__MODULE__}: #{inspect(e)}")
|
||||
Logger.warning("#{__MODULE__}: #{inspect(e)}")
|
||||
{:ok, :noop}
|
||||
end
|
||||
end
|
||||
|
|
@ -45,7 +46,7 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
|
|||
{:ok, :filtered, upload}
|
||||
rescue
|
||||
e in ErlangError ->
|
||||
Logger.warn("#{__MODULE__}: #{inspect(e)}")
|
||||
Logger.warning("#{__MODULE__}: #{inspect(e)}")
|
||||
{:ok, :noop}
|
||||
end
|
||||
end
|
||||
|
|
@ -53,7 +54,7 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
|
|||
def filter(_), do: {:ok, :noop}
|
||||
|
||||
defp get_blurhash(file) do
|
||||
with {:ok, blurhash} <- :eblurhash.magick(file) do
|
||||
with {:ok, blurhash} <- vips_blurhash(file) do
|
||||
blurhash
|
||||
else
|
||||
_ -> nil
|
||||
|
|
@ -77,7 +78,28 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
|
|||
%{width: width, height: height}
|
||||
else
|
||||
nil -> {:error, {:ffprobe, :command_not_found}}
|
||||
{:error, _} = error -> error
|
||||
error -> {:error, error}
|
||||
end
|
||||
end
|
||||
|
||||
defp vips_blurhash(%Vix.Vips.Image{} = image) do
|
||||
with {:ok, resized_image} <- Operation.thumbnail_image(image, 100),
|
||||
{height, width} <- {Image.height(resized_image), Image.width(resized_image)},
|
||||
max <- max(height, width),
|
||||
{x, y} <- {max(round(width * 5 / max), 1), max(round(height * 5 / max), 1)} do
|
||||
{:ok, rgb} =
|
||||
if Image.has_alpha?(resized_image) do
|
||||
# remove alpha channel
|
||||
resized_image
|
||||
|> Operation.extract_band!(0, n: 3)
|
||||
|> Image.write_to_binary()
|
||||
else
|
||||
Image.write_to_binary(resized_image)
|
||||
end
|
||||
|
||||
Blurhash.encode(rgb, width, height, x, y)
|
||||
else
|
||||
_ -> nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -10,8 +10,6 @@ defmodule Pleroma.Upload.Filter.Exiftool.ReadDescription do
"""
@behaviour Pleroma.Upload.Filter

@spec filter(Pleroma.Upload.t()) :: {:ok, any()} | {:error, String.t()}

def filter(%Pleroma.Upload{description: description})
when is_binary(description),
do: {:ok, :noop}
20 lib/pleroma/upload/filter/only_media.ex Normal file
@ -0,0 +1,20 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Upload.Filter.OnlyMedia do
@behaviour Pleroma.Upload.Filter
alias Pleroma.Upload

def filter(%Upload{content_type: content_type}) do
[type, _subtype] = String.split(content_type, "/")

if type in ["image", "video", "audio"] do
{:ok, :noop}
else
{:error, "Disallowed content-type: #{content_type}"}
end
end

def filter(_), do: {:ok, :noop}
end
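A hypothetical config entry enabling the new filter alongside the existing metadata filter (the filter list is an example):

config :pleroma, Pleroma.Upload,
  filters: [Pleroma.Upload.Filter.AnalyzeMetadata, Pleroma.Upload.Filter.OnlyMedia]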
77 lib/pleroma/uploaders/ipfs.ex Normal file
@ -0,0 +1,77 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Uploaders.IPFS do
|
||||
@behaviour Pleroma.Uploaders.Uploader
|
||||
require Logger
|
||||
|
||||
alias Tesla.Multipart
|
||||
|
||||
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
|
||||
|
||||
defp get_final_url(method) do
|
||||
config = @config_impl.get([__MODULE__])
|
||||
post_base_url = Keyword.get(config, :post_gateway_url)
|
||||
|
||||
Path.join([post_base_url, method])
|
||||
end
|
||||
|
||||
def put_file_endpoint do
|
||||
get_final_url("/api/v0/add")
|
||||
end
|
||||
|
||||
def delete_file_endpoint do
|
||||
get_final_url("/api/v0/files/rm")
|
||||
end
|
||||
|
||||
@placeholder "{CID}"
|
||||
def placeholder, do: @placeholder
|
||||
|
||||
@impl true
|
||||
def get_file(file) do
|
||||
b_url = Pleroma.Upload.base_url()
|
||||
|
||||
if String.contains?(b_url, @placeholder) do
|
||||
{:ok, {:url, String.replace(b_url, @placeholder, URI.decode(file))}}
|
||||
else
|
||||
{:error, "IPFS Get URL doesn't contain 'cid' placeholder"}
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def put_file(%Pleroma.Upload{} = upload) do
|
||||
mp =
|
||||
Multipart.new()
|
||||
|> Multipart.add_content_type_param("charset=utf-8")
|
||||
|> Multipart.add_file(upload.tempfile)
|
||||
|
||||
case Pleroma.HTTP.post(put_file_endpoint(), mp, [], params: ["cid-version": "1"]) do
|
||||
{:ok, ret} ->
|
||||
case Jason.decode(ret.body) do
|
||||
{:ok, ret} ->
|
||||
if Map.has_key?(ret, "Hash") do
|
||||
{:ok, {:file, ret["Hash"]}}
|
||||
else
|
||||
{:error, "JSON doesn't contain Hash key"}
|
||||
end
|
||||
|
||||
error ->
|
||||
Logger.error("#{__MODULE__}: #{inspect(error)}")
|
||||
{:error, "JSON decode failed"}
|
||||
end
|
||||
|
||||
error ->
|
||||
Logger.error("#{__MODULE__}: #{inspect(error)}")
|
||||
{:error, "IPFS Gateway upload failed"}
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def delete_file(file) do
|
||||
case Pleroma.HTTP.post(delete_file_endpoint(), "", [], params: [arg: file]) do
|
||||
{:ok, %{status: 204}} -> :ok
|
||||
error -> {:error, inspect(error)}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
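A hypothetical configuration for the new IPFS uploader above (URLs are examples); the {CID} placeholder in base_url is substituted with the stored file's content id by get_file/1:

config :pleroma, Pleroma.Uploaders.IPFS, post_gateway_url: "http://localhost:5001"

config :pleroma, Pleroma.Upload,
  uploader: Pleroma.Uploaders.IPFS,
  base_url: "https://ipfs.example.com/ipfs/{CID}"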
@ -6,7 +6,8 @@ defmodule Pleroma.Uploaders.S3 do
|
|||
@behaviour Pleroma.Uploaders.Uploader
|
||||
require Logger
|
||||
|
||||
alias Pleroma.Config
|
||||
@ex_aws_impl Application.compile_env(:pleroma, [__MODULE__, :ex_aws_impl], ExAws)
|
||||
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
|
||||
|
||||
# The file name is re-encoded with S3's constraints here to comply with previous
|
||||
# links with less strict filenames
|
||||
|
|
@ -22,7 +23,7 @@ defmodule Pleroma.Uploaders.S3 do
|
|||
|
||||
@impl true
|
||||
def put_file(%Pleroma.Upload{} = upload) do
|
||||
config = Config.get([__MODULE__])
|
||||
config = @config_impl.get([__MODULE__])
|
||||
bucket = Keyword.get(config, :bucket)
|
||||
streaming = Keyword.get(config, :streaming_enabled)
|
||||
|
||||
|
|
@ -56,7 +57,7 @@ defmodule Pleroma.Uploaders.S3 do
|
|||
])
|
||||
end
|
||||
|
||||
case ExAws.request(op) do
|
||||
case @ex_aws_impl.request(op) do
|
||||
{:ok, _} ->
|
||||
{:ok, {:file, s3_name}}
|
||||
|
||||
|
|
@ -69,9 +70,9 @@ defmodule Pleroma.Uploaders.S3 do
|
|||
@impl true
|
||||
def delete_file(file) do
|
||||
[__MODULE__, :bucket]
|
||||
|> Config.get()
|
||||
|> @config_impl.get()
|
||||
|> ExAws.S3.delete_object(file)
|
||||
|> ExAws.request()
|
||||
|> @ex_aws_impl.request()
|
||||
|> case do
|
||||
{:ok, %{status_code: 204}} -> :ok
|
||||
error -> {:error, inspect(error)}
|
||||
|
|
@ -83,3 +84,7 @@ defmodule Pleroma.Uploaders.S3 do
|
|||
String.replace(name, @regex, "-")
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Pleroma.Uploaders.S3.ExAwsAPI do
|
||||
@callback request(op :: ExAws.Operation.t()) :: {:ok, ExAws.Operation.t()} | {:error, term()}
|
||||
end
|
||||
|
|
|
|||
|
|
@ -5,8 +5,6 @@
|
|||
defmodule Pleroma.Uploaders.Uploader do
|
||||
import Pleroma.Web.Gettext
|
||||
|
||||
@mix_env Mix.env()
|
||||
|
||||
@moduledoc """
|
||||
Defines the contract to put and get an uploaded file to any backend.
|
||||
"""
|
||||
|
|
@ -40,7 +38,7 @@ defmodule Pleroma.Uploaders.Uploader do
|
|||
|
||||
@callback delete_file(file :: String.t()) :: :ok | {:error, String.t()}
|
||||
|
||||
@callback http_callback(Plug.Conn.t(), Map.t()) ::
|
||||
@callback http_callback(Plug.Conn.t(), map()) ::
|
||||
{:ok, Plug.Conn.t()}
|
||||
| {:ok, Plug.Conn.t(), file_spec()}
|
||||
| {:error, Plug.Conn.t(), String.t()}
|
||||
|
|
@ -75,10 +73,5 @@ defmodule Pleroma.Uploaders.Uploader do
|
|||
end
|
||||
end
|
||||
|
||||
defp callback_timeout do
|
||||
case @mix_env do
|
||||
:test -> 1_000
|
||||
_ -> 30_000
|
||||
end
|
||||
end
|
||||
defp callback_timeout, do: Application.get_env(:pleroma, __MODULE__)[:timeout]
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ defmodule Pleroma.User do
|
|||
import Ecto.Changeset
|
||||
import Ecto.Query
|
||||
import Ecto, only: [assoc: 2]
|
||||
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
|
||||
|
||||
alias Ecto.Multi
|
||||
alias Pleroma.Activity
|
||||
|
|
@ -39,6 +40,7 @@ defmodule Pleroma.User do
|
|||
alias Pleroma.Workers.BackgroundWorker
|
||||
|
||||
require Logger
|
||||
require Pleroma.Constants
|
||||
|
||||
@type t :: %__MODULE__{}
|
||||
@type account_status ::
|
||||
|
|
@ -124,7 +126,6 @@ defmodule Pleroma.User do
|
|||
field(:domain_blocks, {:array, :string}, default: [])
|
||||
field(:is_active, :boolean, default: true)
|
||||
field(:no_rich_text, :boolean, default: false)
|
||||
field(:ap_enabled, :boolean, default: false)
|
||||
field(:is_moderator, :boolean, default: false)
|
||||
field(:is_admin, :boolean, default: false)
|
||||
field(:show_role, :boolean, default: true)
|
||||
|
|
@ -488,7 +489,6 @@ defmodule Pleroma.User do
|
|||
:nickname,
|
||||
:public_key,
|
||||
:avatar,
|
||||
:ap_enabled,
|
||||
:banner,
|
||||
:is_locked,
|
||||
:last_refreshed_at,
|
||||
|
|
@ -581,7 +581,7 @@ defmodule Pleroma.User do
|
|||
|> validate_format(:nickname, local_nickname_regex())
|
||||
|> validate_length(:bio, max: bio_limit)
|
||||
|> validate_length(:name, min: 1, max: name_limit)
|
||||
|> validate_inclusion(:actor_type, ["Person", "Service"])
|
||||
|> validate_inclusion(:actor_type, Pleroma.Constants.allowed_user_actor_types())
|
||||
|> put_fields()
|
||||
|> put_emoji()
|
||||
|> put_change_if_present(:bio, &{:ok, parse_bio(&1, struct)})
|
||||
|
|
@ -597,9 +597,23 @@ defmodule Pleroma.User do
|
|||
|
||||
defp put_fields(changeset) do
|
||||
if raw_fields = get_change(changeset, :raw_fields) do
|
||||
old_fields = changeset.data.raw_fields
|
||||
|
||||
raw_fields =
|
||||
raw_fields
|
||||
|> Enum.filter(fn %{"name" => n} -> n != "" end)
|
||||
|> Enum.map(fn field ->
|
||||
previous =
|
||||
old_fields
|
||||
|> Enum.find(fn %{"value" => value} -> field["value"] == value end)
|
||||
|
||||
if previous && Map.has_key?(previous, "verified_at") do
|
||||
field
|
||||
|> Map.put("verified_at", previous["verified_at"])
|
||||
else
|
||||
field
|
||||
end
|
||||
end)
|
||||
|
||||
fields =
|
||||
raw_fields
|
||||
|
|
@ -673,7 +687,7 @@ defmodule Pleroma.User do
|
|||
|> validate_inclusion(:actor_type, ["Person", "Service"])
|
||||
end
|
||||
|
||||
@spec update_as_admin(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
|
||||
@spec update_as_admin(User.t(), map()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
|
||||
def update_as_admin(user, params) do
|
||||
params = Map.put(params, "password_confirmation", params["password"])
|
||||
changeset = update_as_admin_changeset(user, params)
|
||||
|
|
@ -694,7 +708,7 @@ defmodule Pleroma.User do
|
|||
|> put_change(:password_reset_pending, false)
|
||||
end
|
||||
|
||||
@spec reset_password(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
|
||||
@spec reset_password(User.t(), map()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
|
||||
def reset_password(%User{} = user, params) do
|
||||
reset_password(user, user, params)
|
||||
end
|
||||
|
|
@ -1012,7 +1026,7 @@ defmodule Pleroma.User do
|
|||
|
||||
def maybe_send_confirmation_email(_), do: {:ok, :noop}
|
||||
|
||||
@spec send_confirmation_email(Uset.t()) :: User.t()
|
||||
@spec send_confirmation_email(User.t()) :: User.t()
|
||||
def send_confirmation_email(%User{} = user) do
|
||||
user
|
||||
|> Pleroma.Emails.UserEmail.account_confirmation_email()
|
||||
|
|
@ -1049,7 +1063,8 @@ defmodule Pleroma.User do
|
|||
|
||||
def needs_update?(_), do: true
|
||||
|
||||
@spec maybe_direct_follow(User.t(), User.t()) :: {:ok, User.t()} | {:error, String.t()}
|
||||
@spec maybe_direct_follow(User.t(), User.t()) ::
|
||||
{:ok, User.t(), User.t()} | {:error, String.t()}
|
||||
|
||||
# "Locked" (self-locked) users demand explicit authorization of follow requests
|
||||
def maybe_direct_follow(%User{} = follower, %User{local: true, is_locked: true} = followed) do
|
||||
|
|
@ -1061,11 +1076,7 @@ defmodule Pleroma.User do
|
|||
end
|
||||
|
||||
def maybe_direct_follow(%User{} = follower, %User{} = followed) do
|
||||
if not ap_enabled?(followed) do
|
||||
follow(follower, followed)
|
||||
else
|
||||
{:ok, follower, followed}
|
||||
end
|
||||
{:ok, follower, followed}
|
||||
end
|
||||
|
||||
@doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
|
||||
|
|
@ -1204,6 +1215,10 @@ defmodule Pleroma.User do
|
|||
|
||||
def update_and_set_cache(changeset) do
|
||||
with {:ok, user} <- Repo.update(changeset, stale_error_field: :id) do
|
||||
if get_change(changeset, :raw_fields) do
|
||||
BackgroundWorker.enqueue("verify_fields_links", %{"user_id" => user.id})
|
||||
end
|
||||
|
||||
set_cache(user)
|
||||
end
|
||||
end
|
||||
|
|
@ -1389,6 +1404,40 @@ defmodule Pleroma.User do
|
|||
|> Repo.all()
|
||||
end
|
||||
|
||||
@spec get_familiar_followers_query(User.t(), User.t(), pos_integer() | nil) :: Ecto.Query.t()
|
||||
def get_familiar_followers_query(%User{} = user, %User{} = current_user, nil) do
|
||||
friends =
|
||||
get_friends_query(current_user)
|
||||
|> where([u], not u.hide_follows)
|
||||
|> select([u], u.id)
|
||||
|
||||
User.Query.build(%{is_active: true})
|
||||
|> where([u], u.id not in ^[user.id, current_user.id])
|
||||
|> join(:inner, [u], r in FollowingRelationship,
|
||||
as: :followers_relationships,
|
||||
on: r.following_id == ^user.id and r.follower_id == u.id
|
||||
)
|
||||
|> where([followers_relationships: r], r.state == ^:follow_accept)
|
||||
|> where([followers_relationships: r], r.follower_id in subquery(friends))
|
||||
end
|
||||
|
||||
def get_familiar_followers_query(%User{} = user, %User{} = current_user, page) do
|
||||
user
|
||||
|> get_familiar_followers_query(current_user, nil)
|
||||
|> User.Query.paginate(page, 20)
|
||||
end
|
||||
|
||||
@spec get_familiar_followers_query(User.t(), User.t()) :: Ecto.Query.t()
|
||||
def get_familiar_followers_query(%User{} = user, %User{} = current_user),
|
||||
do: get_familiar_followers_query(user, current_user, nil)
|
||||
|
||||
@spec get_familiar_followers(User.t(), User.t(), pos_integer() | nil) :: {:ok, list(User.t())}
|
||||
def get_familiar_followers(%User{} = user, %User{} = current_user, page \\ nil) do
|
||||
user
|
||||
|> get_familiar_followers_query(current_user, page)
|
||||
|> Repo.all()
|
||||
end
|
||||
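Illustrative calls for the familiar-followers queries added above: the result is the set of `user`'s accepted followers that `current_user` also follows, 20 per page when a page number is given:

Pleroma.User.get_familiar_followers(user, current_user)
Pleroma.User.get_familiar_followers(user, current_user, 1)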
|
||||
def increase_note_count(%User{} = user) do
|
||||
User
|
||||
|> where(id: ^user.id)
|
||||
|
|
@ -1566,7 +1615,7 @@ defmodule Pleroma.User do
|
|||
unmute(muter, mutee)
|
||||
else
|
||||
{who, result} = error ->
|
||||
Logger.warn(
|
||||
Logger.warning(
|
||||
"User.unmute/2 failed. #{who}: #{result}, muter_id: #{muter_id}, mutee_id: #{mutee_id}"
|
||||
)
|
||||
|
||||
|
|
@ -1788,14 +1837,17 @@ defmodule Pleroma.User do
|
|||
BackgroundWorker.enqueue("user_activation", %{"user_id" => user.id, "status" => status})
|
||||
end
|
||||
|
||||
@spec set_activation([User.t()], boolean()) :: {:ok, User.t()} | {:error, Changeset.t()}
|
||||
@spec set_activation([User.t()], boolean()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
|
||||
def set_activation(users, status) when is_list(users) do
|
||||
Repo.transaction(fn ->
|
||||
for user <- users, do: set_activation(user, status)
|
||||
for user <- users do
|
||||
{:ok, user} = set_activation(user, status)
|
||||
user
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
@spec set_activation(User.t(), boolean()) :: {:ok, User.t()} | {:error, Changeset.t()}
|
||||
@spec set_activation(User.t(), boolean()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
|
||||
def set_activation(%User{} = user, status) do
|
||||
with {:ok, user} <- set_activation_status(user, status) do
|
||||
user
|
||||
|
|
@ -1873,7 +1925,7 @@ defmodule Pleroma.User do
|
|||
|> update_and_set_cache()
|
||||
end
|
||||
|
||||
@spec purge_user_changeset(User.t()) :: Changeset.t()
|
||||
@spec purge_user_changeset(User.t()) :: Ecto.Changeset.t()
|
||||
def purge_user_changeset(user) do
|
||||
# "Right to be forgotten"
|
||||
# https://gdpr.eu/right-to-be-forgotten/
|
||||
|
|
@ -1898,7 +1950,6 @@ defmodule Pleroma.User do
|
|||
confirmation_token: nil,
|
||||
domain_blocks: [],
|
||||
is_active: false,
|
||||
ap_enabled: false,
|
||||
is_moderator: false,
|
||||
is_admin: false,
|
||||
mascot: nil,
|
||||
|
|
@ -1977,8 +2028,45 @@ defmodule Pleroma.User do
|
|||
maybe_delete_from_db(user)
|
||||
end
|
||||
|
||||
def perform(:verify_fields_links, user) do
|
||||
profile_urls = [user.ap_id]
|
||||
|
||||
fields =
|
||||
user.raw_fields
|
||||
|> Enum.map(&verify_field_link(&1, profile_urls))
|
||||
|
||||
changeset =
|
||||
user
|
||||
|> update_changeset(%{raw_fields: fields})
|
||||
|
||||
with {:ok, user} <- Repo.update(changeset, stale_error_field: :id) do
|
||||
set_cache(user)
|
||||
end
|
||||
end
|
||||
|
||||
def perform(:set_activation_async, user, status), do: set_activation(user, status)
|
||||
|
||||
defp verify_field_link(field, profile_urls) do
|
||||
verified_at =
|
||||
with %{"value" => value} <- field,
|
||||
{:verified_at, nil} <- {:verified_at, Map.get(field, "verified_at")},
|
||||
%{scheme: scheme, userinfo: nil, host: host}
|
||||
when not_empty_string(host) and scheme in ["http", "https"] <-
|
||||
URI.parse(value),
|
||||
{:not_idn, true} <- {:not_idn, to_string(:idna.encode(host)) == host},
|
||||
"me" <- Pleroma.Web.RelMe.maybe_put_rel_me(value, profile_urls) do
|
||||
CommonUtils.to_masto_date(NaiveDateTime.utc_now())
|
||||
else
|
||||
{:verified_at, value} when not_empty_string(value) ->
|
||||
value
|
||||
|
||||
_ ->
|
||||
nil
|
||||
end
|
||||
|
||||
Map.put(field, "verified_at", verified_at)
|
||||
end
|
||||
|
||||
@spec external_users_query() :: Ecto.Query.t()
|
||||
def external_users_query do
|
||||
User.Query.build(%{
|
||||
|
|
@ -2143,7 +2231,7 @@ defmodule Pleroma.User do
|
|||
def public_key(_), do: {:error, "key not found"}
|
||||
|
||||
def get_public_key_for_ap_id(ap_id) do
|
||||
with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id),
|
||||
with %User{} = user <- get_cached_by_ap_id(ap_id),
|
||||
{:ok, public_key} <- public_key(user) do
|
||||
{:ok, public_key}
|
||||
else
|
||||
|
|
@ -2151,10 +2239,6 @@ defmodule Pleroma.User do
|
|||
end
|
||||
end
|
||||
|
||||
def ap_enabled?(%User{local: true}), do: true
|
||||
def ap_enabled?(%User{ap_enabled: ap_enabled}), do: ap_enabled
|
||||
def ap_enabled?(_), do: false
|
||||
|
||||
@doc "Gets or fetch a user by uri or nickname."
|
||||
@spec get_or_fetch(String.t()) :: {:ok, User.t()} | {:error, String.t()}
|
||||
def get_or_fetch("http://" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
|
||||
|
|
@ -2263,7 +2347,7 @@ defmodule Pleroma.User do
|
|||
if String.contains?(user.nickname, "@") do
|
||||
user.nickname
|
||||
else
|
||||
%{host: host} = URI.parse(user.ap_id)
|
||||
host = Pleroma.Web.WebFinger.host()
|
||||
user.nickname <> "@" <> host
|
||||
end
|
||||
end
|
||||
|
|
@ -2369,7 +2453,7 @@ defmodule Pleroma.User do
|
|||
updated_user
|
||||
end
|
||||
|
||||
@spec set_confirmation(User.t(), boolean()) :: {:ok, User.t()} | {:error, Changeset.t()}
|
||||
@spec set_confirmation(User.t(), boolean()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
|
||||
def set_confirmation(%User{} = user, bool) do
|
||||
user
|
||||
|> confirmation_changeset(set_confirmation: bool)
|
||||
|
|
@ -2413,9 +2497,9 @@ defmodule Pleroma.User do
|
|||
|
||||
defp put_password_hash(changeset), do: changeset
|
||||
|
||||
def is_internal_user?(%User{nickname: nil}), do: true
|
||||
def is_internal_user?(%User{local: true, nickname: "internal." <> _}), do: true
|
||||
def is_internal_user?(_), do: false
|
||||
def internal?(%User{nickname: nil}), do: true
|
||||
def internal?(%User{local: true, nickname: "internal." <> _}), do: true
|
||||
def internal?(_), do: false
|
||||
|
||||
# A hack because user delete activities have a fake id for whatever reason
|
||||
# TODO: Get rid of this
|
||||
|
|
@ -2547,7 +2631,7 @@ defmodule Pleroma.User do
|
|||
|> update_and_set_cache()
|
||||
end
|
||||
|
||||
@spec confirmation_changeset(User.t(), keyword()) :: Changeset.t()
|
||||
@spec confirmation_changeset(User.t(), keyword()) :: Ecto.Changeset.t()
|
||||
def confirmation_changeset(user, set_confirmation: confirmed?) do
|
||||
params =
|
||||
if confirmed? do
|
||||
|
|
@ -2565,9 +2649,9 @@ defmodule Pleroma.User do
|
|||
cast(user, params, [:is_confirmed, :confirmation_token])
|
||||
end
|
||||
|
||||
@spec approval_changeset(User.t(), keyword()) :: Changeset.t()
|
||||
def approval_changeset(user, set_approval: approved?) do
|
||||
cast(user, %{is_approved: approved?}, [:is_approved])
|
||||
@spec approval_changeset(Ecto.Changeset.t(), keyword()) :: Ecto.Changeset.t()
|
||||
def approval_changeset(changeset, set_approval: approved?) do
|
||||
cast(changeset, %{is_approved: approved?}, [:is_approved])
|
||||
end
|
||||
|
||||
@spec add_pinned_object_id(User.t(), String.t()) :: {:ok, User.t()} | {:error, term()}
|
||||
|
|
@ -2670,10 +2754,11 @@ defmodule Pleroma.User do
|
|||
# - display name
|
||||
def sanitize_html(%User{} = user, filter) do
|
||||
fields =
|
||||
Enum.map(user.fields, fn %{"name" => name, "value" => value} ->
|
||||
Enum.map(user.fields, fn %{"name" => name, "value" => value} = fields ->
|
||||
%{
|
||||
"name" => name,
|
||||
"value" => HTML.filter_tags(value, Pleroma.HTML.Scrubber.LinksOnly)
|
||||
"value" => HTML.filter_tags(value, Pleroma.HTML.Scrubber.LinksOnly),
|
||||
"verified_at" => Map.get(fields, "verified_at")
|
||||
}
|
||||
end)
|
||||
|
||||
|
|
@ -2692,6 +2777,8 @@ defmodule Pleroma.User do
|
|||
|> update_and_set_cache()
|
||||
end
|
||||
|
||||
def update_last_active_at(user), do: user
|
||||
|
||||
def active_user_count(days \\ 30) do
|
||||
active_after = Timex.shift(NaiveDateTime.utc_now(), days: -days)
|
||||
|
||||
|
|
|
|||
|
|
@ -9,28 +9,36 @@ defmodule Pleroma.User.Backup do
|
|||
import Ecto.Query
|
||||
import Pleroma.Web.Gettext
|
||||
|
||||
require Logger
|
||||
require Pleroma.Constants
|
||||
|
||||
alias Pleroma.Activity
|
||||
alias Pleroma.Bookmark
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.User
|
||||
alias Pleroma.User.Backup.State
|
||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||
alias Pleroma.Web.ActivityPub.Transmogrifier
|
||||
alias Pleroma.Web.ActivityPub.UserView
|
||||
alias Pleroma.Workers.BackupWorker
|
||||
|
||||
@type t :: %__MODULE__{}
|
||||
|
||||
schema "backups" do
|
||||
field(:content_type, :string)
|
||||
field(:file_name, :string)
|
||||
field(:file_size, :integer, default: 0)
|
||||
field(:processed, :boolean, default: false)
|
||||
field(:state, State, default: :invalid)
|
||||
field(:processed_number, :integer, default: 0)
|
||||
|
||||
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
|
||||
|
||||
timestamps()
|
||||
end
|
||||
|
||||
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
|
||||
|
||||
def create(user, admin_id \\ nil) do
|
||||
with :ok <- validate_limit(user, admin_id),
|
||||
{:ok, backup} <- user |> new() |> Repo.insert() do
|
||||
|
|
@ -46,7 +54,8 @@ defmodule Pleroma.User.Backup do
|
|||
%__MODULE__{
|
||||
user_id: user.id,
|
||||
content_type: "application/zip",
|
||||
file_name: name
|
||||
file_name: name,
|
||||
state: :pending
|
||||
}
|
||||
end
|
||||
|
||||
|
|
@@ -109,30 +118,109 @@ defmodule Pleroma.User.Backup do
 
   def get(id), do: Repo.get(__MODULE__, id)
 
-  def process(%__MODULE__{} = backup) do
-    with {:ok, zip_file} <- export(backup),
-         {:ok, %{size: size}} <- File.stat(zip_file),
-         {:ok, _upload} <- upload(backup, zip_file) do
-      backup
-      |> cast(%{file_size: size, processed: true}, [:file_size, :processed])
-      |> Repo.update()
+  defp set_state(backup, state, processed_number \\ nil) do
+    struct =
+      %{state: state}
+      |> Pleroma.Maps.put_if_present(:processed_number, processed_number)
+
+    backup
+    |> cast(struct, [:state, :processed_number])
+    |> Repo.update()
+  end
+
+  def process(
+        %__MODULE__{} = backup,
+        processor_module \\ __MODULE__.Processor
+      ) do
+    set_state(backup, :running, 0)
+
+    current_pid = self()
+
+    task =
+      Task.Supervisor.async_nolink(
+        Pleroma.TaskSupervisor,
+        processor_module,
+        :do_process,
+        [backup, current_pid]
+      )
+
+    wait_backup(backup, backup.processed_number, task)
+  end
+
+  defp wait_backup(backup, current_processed, task) do
+    wait_time = @config_impl.get([__MODULE__, :process_wait_time])
+
+    receive do
+      {:progress, new_processed} ->
+        total_processed = current_processed + new_processed
+
+        set_state(backup, :running, total_processed)
+        wait_backup(backup, total_processed, task)
+
+      {:DOWN, _ref, _proc, _pid, reason} ->
+        backup = get(backup.id)
+
+        if reason != :normal do
+          Logger.error("Backup #{backup.id} process ended abnormally: #{inspect(reason)}")
+
+          {:ok, backup} = set_state(backup, :failed)
+
+          cleanup(backup)
+
+          {:error,
+           %{
+             backup: backup,
+             reason: :exit,
+             details: reason
+           }}
+        else
+          {:ok, backup}
+        end
+    after
+      wait_time ->
+        Logger.error(
+          "Backup #{backup.id} timed out after no response for #{wait_time}ms, terminating"
+        )
+
+        Task.Supervisor.terminate_child(Pleroma.TaskSupervisor, task.pid)
+
+        {:ok, backup} = set_state(backup, :failed)
+
+        cleanup(backup)
+
+        {:error,
+         %{
+           backup: backup,
+           reason: :timeout
+         }}
+    end
+  end
 
-  @files ['actor.json', 'outbox.json', 'likes.json', 'bookmarks.json']
-  def export(%__MODULE__{} = backup) do
+  @files [
+    'actor.json',
+    'outbox.json',
+    'likes.json',
+    'bookmarks.json',
+    'followers.json',
+    'following.json'
+  ]
+
+  @spec export(Pleroma.User.Backup.t(), pid()) :: {:ok, String.t()} | :error
+  def export(%__MODULE__{} = backup, caller_pid) do
     backup = Repo.preload(backup, :user)
-    name = String.trim_trailing(backup.file_name, ".zip")
-    dir = dir(name)
+    dir = backup_tempdir(backup)
 
     with :ok <- File.mkdir(dir),
-         :ok <- actor(dir, backup.user),
-         :ok <- statuses(dir, backup.user),
-         :ok <- likes(dir, backup.user),
-         :ok <- bookmarks(dir, backup.user),
-         {:ok, zip_path} <- :zip.create(String.to_charlist(dir <> ".zip"), @files, cwd: dir),
+         :ok <- actor(dir, backup.user, caller_pid),
+         :ok <- statuses(dir, backup.user, caller_pid),
+         :ok <- likes(dir, backup.user, caller_pid),
+         :ok <- bookmarks(dir, backup.user, caller_pid),
+         :ok <- followers(dir, backup.user, caller_pid),
+         :ok <- following(dir, backup.user, caller_pid),
+         {:ok, zip_path} <- :zip.create(backup.file_name, @files, cwd: dir),
          {:ok, _} <- File.rm_rf(dir) do
-      {:ok, to_string(zip_path)}
+      {:ok, zip_path}
     else
       _ -> :error
     end
   end
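The state machine above leans on two runtime settings: `process_wait_time` (the `receive ... after` timeout in `wait_backup/3`) and `process_chunk_size` (used further down when streaming records into the export files). A minimal configuration sketch, assuming the conventional `config :pleroma, Pleroma.User.Backup` key layout; the values shown are illustrative, not authoritative defaults.

```elixir
# config/config.exs — illustrative values only
config :pleroma, Pleroma.User.Backup,
  # how long wait_backup/3 waits for a :progress or :DOWN message before
  # terminating the processor task and marking the backup :failed
  process_wait_time: 30_000,
  # how many records are streamed per chunk while exporting collections
  process_chunk_size: 100
```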
@@ -157,11 +245,12 @@ defmodule Pleroma.User.Backup do
     end
   end
 
-  defp actor(dir, user) do
+  defp actor(dir, user, caller_pid) do
     with {:ok, json} <-
            UserView.render("user.json", %{user: user})
            |> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"})
            |> Jason.encode() do
+      send(caller_pid, {:progress, 1})
       File.write(Path.join(dir, "actor.json"), json)
     end
   end
@@ -180,47 +269,80 @@ defmodule Pleroma.User.Backup do
     )
   end
 
-  defp write(query, dir, name, fun) do
+  defp should_report?(num, chunk_size), do: rem(num, chunk_size) == 0
+
+  defp backup_tempdir(backup) do
+    name = String.trim_trailing(backup.file_name, ".zip")
+    dir(name)
+  end
+
+  defp cleanup(backup) do
+    dir = backup_tempdir(backup)
+    File.rm_rf(dir)
+  end
+
+  defp write(query, dir, name, fun, caller_pid) do
     path = Path.join(dir, "#{name}.json")
 
+    chunk_size = Pleroma.Config.get([__MODULE__, :process_chunk_size])
+
     with {:ok, file} <- File.open(path, [:write, :utf8]),
          :ok <- write_header(file, name) do
       total =
         query
-        |> Pleroma.Repo.chunk_stream(100)
+        |> Pleroma.Repo.chunk_stream(chunk_size, _returns_as = :one, timeout: :infinity)
         |> Enum.reduce(0, fn i, acc ->
-          with {:ok, data} <- fun.(i),
+          with {:ok, data} <-
+                 (try do
+                    fun.(i)
+                  rescue
+                    e -> {:error, e}
+                  end),
                {:ok, str} <- Jason.encode(data),
                :ok <- IO.write(file, str <> ",\n") do
+            if should_report?(acc + 1, chunk_size) do
+              send(caller_pid, {:progress, chunk_size})
+            end
+
             acc + 1
           else
-            _ -> acc
+            {:error, e} ->
+              Logger.warning(
+                "Error processing backup item: #{inspect(e)}\n The item is: #{inspect(i)}"
+              )
+
+              acc
+
+            _ ->
+              acc
           end
         end)
 
+      send(caller_pid, {:progress, rem(total, chunk_size)})
+
       with :ok <- :file.pwrite(file, {:eof, -2}, "\n],\n \"totalItems\": #{total}}") do
         File.close(file)
       end
     end
   end
 
-  defp bookmarks(dir, %{id: user_id} = _user) do
+  defp bookmarks(dir, %{id: user_id} = _user, caller_pid) do
     Bookmark
     |> where(user_id: ^user_id)
     |> join(:inner, [b], activity in assoc(b, :activity))
     |> select([b, a], %{id: b.id, object: fragment("(?)->>'object'", a.data)})
-    |> write(dir, "bookmarks", fn a -> {:ok, a.object} end)
+    |> write(dir, "bookmarks", fn a -> {:ok, a.object} end, caller_pid)
   end
 
-  defp likes(dir, user) do
+  defp likes(dir, user, caller_pid) do
     user.ap_id
     |> Activity.Queries.by_actor()
     |> Activity.Queries.by_type("Like")
     |> select([like], %{id: like.id, object: fragment("(?)->>'object'", like.data)})
-    |> write(dir, "likes", fn a -> {:ok, a.object} end)
+    |> write(dir, "likes", fn a -> {:ok, a.object} end, caller_pid)
   end
 
-  defp statuses(dir, user) do
+  defp statuses(dir, user, caller_pid) do
     opts =
       %{}
       |> Map.put(:type, ["Create", "Announce"])
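The `write/5` helper above appends every encoded record as `<json>,\n` and then, once the total is known, rewinds two bytes from the end of the file so the trailing `,\n` is overwritten by the closing bracket and a `totalItems` count. A standalone sketch of that trick, not project code; the header line is an assumption standing in for whatever `write_header/2` emits:

```elixir
# Minimal reproduction of the ",\n" + :file.pwrite({:eof, -2}, ...) trailer trick.
{:ok, file} = File.open("outbox.json", [:write, :utf8])
:ok = IO.write(file, ~s({"@context": "https://www.w3.org/ns/activitystreams", "orderedItems": [\n))

for item <- [%{"id" => 1}, %{"id" => 2}] do
  # every record ends with ",\n", including the last one
  :ok = IO.write(file, Jason.encode!(item) <> ",\n")
end

# overwrite the final ",\n" (2 bytes) with the array terminator and the item count
:ok = :file.pwrite(file, {:eof, -2}, "\n],\n  \"totalItems\": 2}")
File.close(file)
```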
@@ -233,10 +355,59 @@ defmodule Pleroma.User.Backup do
     ]
     |> Enum.concat()
     |> ActivityPub.fetch_activities_query(opts)
-    |> write(dir, "outbox", fn a ->
-      with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do
-        {:ok, Map.delete(activity, "@context")}
-      end
-    end)
+    |> write(
+      dir,
+      "outbox",
+      fn a ->
+        with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do
+          {:ok, Map.delete(activity, "@context")}
+        end
+      end,
+      caller_pid
+    )
   end
+
+  defp followers(dir, user, caller_pid) do
+    User.get_followers_query(user)
+    |> write(dir, "followers", fn a -> {:ok, a.ap_id} end, caller_pid)
+  end
+
+  defp following(dir, user, caller_pid) do
+    User.get_friends_query(user)
+    |> write(dir, "following", fn a -> {:ok, a.ap_id} end, caller_pid)
+  end
 end
+
+defmodule Pleroma.User.Backup.ProcessorAPI do
+  @callback do_process(%Pleroma.User.Backup{}, pid()) ::
+              {:ok, %Pleroma.User.Backup{}} | {:error, any()}
+end
+
+defmodule Pleroma.User.Backup.Processor do
+  @behaviour Pleroma.User.Backup.ProcessorAPI
+
+  alias Pleroma.Repo
+  alias Pleroma.User.Backup
+
+  import Ecto.Changeset
+
+  @impl true
+  def do_process(backup, current_pid) do
+    with {:ok, zip_file} <- Backup.export(backup, current_pid),
+         {:ok, %{size: size}} <- File.stat(zip_file),
+         {:ok, _upload} <- Backup.upload(backup, zip_file) do
+      backup
+      |> cast(
+        %{
+          file_size: size,
+          processed: true,
+          state: :complete
+        },
+        [:file_size, :processed, :state]
+      )
+      |> Repo.update()
+    else
+      e -> {:error, e}
+    end
+  end
+end
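Because `process/2` takes the processor module as an argument (defaulting to `Pleroma.User.Backup.Processor`), anything implementing the `ProcessorAPI` behaviour can be swapped in, for example in tests. A hedged sketch using a hypothetical module name:

```elixir
# Hypothetical stand-in processor: reports one unit of progress to the caller
# and returns the backup unchanged, matching the ProcessorAPI callback above.
defmodule MyApp.NoopBackupProcessor do
  @behaviour Pleroma.User.Backup.ProcessorAPI

  @impl true
  def do_process(backup, caller_pid) do
    send(caller_pid, {:progress, 1})
    {:ok, backup}
  end
end

# Usage:
# {:ok, backup} = Pleroma.User.Backup.process(backup, MyApp.NoopBackupProcessor)
```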
@@ -22,7 +22,7 @@ defmodule Pleroma.User.Query do
     - pass non empty string
     - e.g. Pleroma.User.Query.build(%{email: "email@example.com"})
   - *contains criteria*
-    - add field to @containns_criteria list
+    - add field to @contains_criteria list
     - pass values list
     - e.g. Pleroma.User.Query.build(%{ap_id: ["http://ap_id1", "http://ap_id2"]})
   """

@@ -71,7 +71,7 @@ defmodule Pleroma.User.Query do
   @equal_criteria [:email]
   @contains_criteria [:ap_id, :nickname]
 
-  @spec build(Query.t(), criteria()) :: Query.t()
+  @spec build(Ecto.Query.t(), criteria()) :: Ecto.Query.t()
   def build(query \\ base_query(), criteria) do
     prepare_query(query, criteria)
   end
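Putting the two criteria styles from the corrected moduledoc together — equal criteria take a single value, contains criteria take a list — a usage sketch (the nicknames and email are made up):

```elixir
# Build a query mixing an equal criterion (:email) with a contains criterion (:nickname).
query =
  Pleroma.User.Query.build(%{
    email: "lain@example.com",
    nickname: ["lain", "tenshi"]
  })

# users = Pleroma.Repo.all(query)
```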
@@ -64,7 +64,7 @@ defmodule Pleroma.UserInviteToken do
   end
 
   @spec update_invite(UserInviteToken.t(), map()) ::
-          {:ok, UserInviteToken.t()} | {:error, Changeset.t()}
+          {:ok, UserInviteToken.t()} | {:error, Ecto.Changeset.t()}
   def update_invite(invite, changes) do
     change(invite, changes) |> Repo.update()
   end
@@ -14,6 +14,8 @@ defmodule Pleroma.UserRelationship do
   alias Pleroma.User
   alias Pleroma.UserRelationship
 
+  @type t :: %__MODULE__{}
+
   schema "user_relationships" do
     belongs_to(:source, User, type: FlakeId.Ecto.CompatType)
     belongs_to(:target, User, type: FlakeId.Ecto.CompatType)
@@ -136,7 +136,7 @@ defmodule Pleroma.Web do
       namespace: Pleroma.Web
 
       # Import convenience functions from controllers
-      import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
+      import Phoenix.Controller, only: [get_csrf_token: 0, view_module: 1]
 
       import Pleroma.Web.ErrorHelpers
       import Pleroma.Web.Gettext
@@ -74,29 +74,40 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
   defp check_remote_limit(_), do: true
 
   def increase_note_count_if_public(actor, object) do
-    if is_public?(object), do: User.increase_note_count(actor), else: {:ok, actor}
+    if public?(object), do: User.increase_note_count(actor), else: {:ok, actor}
   end
 
   def decrease_note_count_if_public(actor, object) do
-    if is_public?(object), do: User.decrease_note_count(actor), else: {:ok, actor}
+    if public?(object), do: User.decrease_note_count(actor), else: {:ok, actor}
   end
 
   def update_last_status_at_if_public(actor, object) do
-    if is_public?(object), do: User.update_last_status_at(actor), else: {:ok, actor}
+    if public?(object), do: User.update_last_status_at(actor), else: {:ok, actor}
   end
 
   defp increase_replies_count_if_reply(%{
          "object" => %{"inReplyTo" => reply_ap_id} = object,
          "type" => "Create"
        }) do
-    if is_public?(object) do
+    if public?(object) do
       Object.increase_replies_count(reply_ap_id)
     end
   end
 
   defp increase_replies_count_if_reply(_create_data), do: :noop
 
-  @object_types ~w[ChatMessage Question Answer Audio Video Event Article Note Page]
+  defp increase_quotes_count_if_quote(%{
+         "object" => %{"quoteUrl" => quote_ap_id} = object,
+         "type" => "Create"
+       }) do
+    if public?(object) do
+      Object.increase_quotes_count(quote_ap_id)
+    end
+  end
+
+  defp increase_quotes_count_if_quote(_create_data), do: :noop
+
+  @object_types ~w[ChatMessage Question Answer Audio Video Image Event Article Note Page]
   @impl true
   def persist(%{"type" => type} = object, meta) when type in @object_types do
     with {:ok, object} <- Object.create(object) do
@ -136,9 +147,10 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
# Splice in the child object if we have one.
|
||||
activity = Maps.put_if_present(activity, :object, object)
|
||||
|
||||
ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
|
||||
Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
|
||||
end)
|
||||
Pleroma.Web.RichMedia.Card.get_by_activity(activity)
|
||||
|
||||
# Add local posts to search index
|
||||
if local, do: Pleroma.Search.add_to_index(activity)
|
||||
|
||||
{:ok, activity}
|
||||
else
|
||||
|
|
@ -163,7 +175,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
id: "pleroma:fakeid"
|
||||
}
|
||||
|
||||
Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
|
||||
Pleroma.Web.RichMedia.Card.get_by_activity(activity)
|
||||
{:ok, activity}
|
||||
|
||||
{:remote_limit_pass, _} ->
|
||||
|
|
@ -188,7 +200,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
end
|
||||
|
||||
def notify_and_stream(activity) do
|
||||
Notification.create_notifications(activity)
|
||||
{:ok, notifications} = Notification.create_notifications(activity)
|
||||
Notification.send(notifications)
|
||||
|
||||
original_activity =
|
||||
case activity do
|
||||
|
|
@ -299,11 +312,13 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
with {:ok, activity} <- insert(create_data, local, fake),
|
||||
{:fake, false, activity} <- {:fake, fake, activity},
|
||||
_ <- increase_replies_count_if_reply(create_data),
|
||||
_ <- increase_quotes_count_if_quote(create_data),
|
||||
{:quick_insert, false, activity} <- {:quick_insert, quick_insert?, activity},
|
||||
{:ok, _actor} <- increase_note_count_if_public(actor, activity),
|
||||
{:ok, _actor} <- update_last_status_at_if_public(actor, activity),
|
||||
_ <- notify_and_stream(activity),
|
||||
:ok <- maybe_schedule_poll_notifications(activity),
|
||||
:ok <- maybe_handle_group_posts(activity),
|
||||
:ok <- maybe_federate(activity) do
|
||||
{:ok, activity}
|
||||
else
|
||||
|
|
@ -455,6 +470,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
|> maybe_preload_objects(opts)
|
||||
|> maybe_preload_bookmarks(opts)
|
||||
|> maybe_set_thread_muted_field(opts)
|
||||
|> restrict_unauthenticated(opts[:user])
|
||||
|> restrict_blocked(opts)
|
||||
|> restrict_blockers_visibility(opts)
|
||||
|> restrict_recipients(recipients, opts[:user])
|
||||
|
|
@ -482,7 +498,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
end
|
||||
|
||||
@spec fetch_latest_direct_activity_id_for_context(String.t(), keyword() | map()) ::
|
||||
FlakeId.Ecto.CompatType.t() | nil
|
||||
Ecto.UUID.t() | nil
|
||||
def fetch_latest_direct_activity_id_for_context(context, opts \\ %{}) do
|
||||
context
|
||||
|> fetch_activities_for_context_query(Map.merge(%{skip_preload: true}, opts))
|
||||
|
|
@@ -963,8 +979,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
 
   defp restrict_replies(query, %{exclude_replies: true}) do
     from(
-      [_activity, object] in query,
-      where: fragment("?->>'inReplyTo' is null", object.data)
+      [activity, object] in query,
+      where:
+        fragment("?->>'inReplyTo' is null or ?->>'type' = 'Announce'", object.data, activity.data)
     )
   end
@@ -1215,6 +1232,44 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
 
   defp restrict_filtered(query, _), do: query
 
+  defp restrict_unauthenticated(query, nil) do
+    local = Config.restrict_unauthenticated_access?(:activities, :local)
+    remote = Config.restrict_unauthenticated_access?(:activities, :remote)
+
+    cond do
+      local and remote ->
+        from(activity in query, where: false)
+
+      local ->
+        from(activity in query, where: activity.local == false)
+
+      remote ->
+        from(activity in query, where: activity.local == true)
+
+      true ->
+        query
+    end
+  end
+
+  defp restrict_unauthenticated(query, _), do: query
+
+  defp restrict_quote_url(query, %{quote_url: quote_url}) do
+    from([_activity, object] in query,
+      where: fragment("(?)->'quoteUrl' = ?", object.data, ^quote_url)
+    )
+  end
+
+  defp restrict_quote_url(query, _), do: query
+
+  defp restrict_rule(query, %{rule_id: rule_id}) do
+    from(
+      activity in query,
+      where: fragment("(?)->'rules' \\? (?)", activity.data, ^rule_id)
+    )
+  end
+
+  defp restrict_rule(query, _), do: query
+
   defp exclude_poll_votes(query, %{include_poll_votes: true}), do: query
 
   defp exclude_poll_votes(query, _) do
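`restrict_unauthenticated/2` above reads `Config.restrict_unauthenticated_access?(:activities, :local | :remote)`, so whether anonymous clients see local activities, remote activities, both, or neither is driven by the `:restrict_unauthenticated` setting. A sketch of the shape that implies — the exact key layout and defaults may differ:

```elixir
# Hide remote activities (but keep local ones) from unauthenticated requests.
config :pleroma, :restrict_unauthenticated,
  activities: %{local: false, remote: true}
```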
@ -1377,6 +1432,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
|> restrict_instance(opts)
|
||||
|> restrict_announce_object_actor(opts)
|
||||
|> restrict_filtered(opts)
|
||||
|> restrict_rule(opts)
|
||||
|> restrict_quote_url(opts)
|
||||
|> maybe_restrict_deactivated_users(opts)
|
||||
|> exclude_poll_votes(opts)
|
||||
|> exclude_chat_messages(opts)
|
||||
|
|
@ -1547,7 +1604,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
%{
|
||||
ap_id: data["id"],
|
||||
uri: get_actor_url(data["url"]),
|
||||
ap_enabled: true,
|
||||
banner: normalize_image(data["image"]),
|
||||
fields: fields,
|
||||
emoji: emojis,
|
||||
|
|
@ -1652,9 +1708,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
Fetcher.fetch_and_contain_remote_object_from_id(first) do
|
||||
{:ok, false}
|
||||
else
|
||||
{:error, {:ok, %{status: code}}} when code in [401, 403] -> {:ok, true}
|
||||
{:error, _} = e -> e
|
||||
e -> {:error, e}
|
||||
{:error, _} -> {:ok, true}
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -1668,7 +1722,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
end
|
||||
end
|
||||
|
||||
def fetch_and_prepare_user_from_ap_id(ap_id, additional \\ []) do
|
||||
defp fetch_and_prepare_user_from_ap_id(ap_id, additional) do
|
||||
with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id),
|
||||
{:ok, data} <- user_data_from_user_object(data, additional) do
|
||||
{:ok, maybe_update_follow_information(data)}
|
||||
|
|
@ -1721,6 +1775,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
end)
|
||||
end
|
||||
|
||||
def pin_data_from_featured_collection(obj) do
|
||||
Logger.error("Could not parse featured collection #{inspect(obj)}")
|
||||
%{}
|
||||
end
|
||||
|
||||
def fetch_and_prepare_featured_from_ap_id(nil) do
|
||||
{:ok, %{}}
|
||||
end
|
||||
|
|
@ -1751,24 +1810,20 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
def make_user_from_ap_id(ap_id, additional \\ []) do
|
||||
user = User.get_cached_by_ap_id(ap_id)
|
||||
|
||||
if user && !User.ap_enabled?(user) do
|
||||
Transmogrifier.upgrade_user_from_ap_id(ap_id)
|
||||
else
|
||||
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
|
||||
{:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
|
||||
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
|
||||
{:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
|
||||
|
||||
if user do
|
||||
user
|
||||
|> User.remote_user_changeset(data)
|
||||
|> User.update_and_set_cache()
|
||||
else
|
||||
maybe_handle_clashing_nickname(data)
|
||||
if user do
|
||||
user
|
||||
|> User.remote_user_changeset(data)
|
||||
|> User.update_and_set_cache()
|
||||
else
|
||||
maybe_handle_clashing_nickname(data)
|
||||
|
||||
data
|
||||
|> User.remote_user_changeset()
|
||||
|> Repo.insert()
|
||||
|> User.set_cache()
|
||||
end
|
||||
data
|
||||
|> User.remote_user_changeset()
|
||||
|> Repo.insert()
|
||||
|> User.set_cache()
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@@ -273,12 +273,17 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
   end
 
   def inbox(%{assigns: %{valid_signature: true}} = conn, %{"nickname" => nickname} = params) do
-    with %User{} = recipient <- User.get_cached_by_nickname(nickname),
-         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(params["actor"]),
+    with %User{is_active: true} = recipient <- User.get_cached_by_nickname(nickname),
+         {:ok, %User{is_active: true} = actor} <- User.get_or_fetch_by_ap_id(params["actor"]),
          true <- Utils.recipient_in_message(recipient, actor, params),
          params <- Utils.maybe_splice_recipient(recipient.ap_id, params) do
       Federator.incoming_ap_doc(params)
       json(conn, "ok")
+    else
+      _ ->
+        conn
+        |> put_status(:bad_request)
+        |> json("Invalid request.")
     end
   end
 
@@ -287,10 +292,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
     json(conn, "ok")
   end
 
-  def inbox(%{assigns: %{valid_signature: false}} = conn, _params) do
-    conn
-    |> put_status(:bad_request)
-    |> json("Invalid HTTP Signature")
+  def inbox(%{assigns: %{valid_signature: false}, req_headers: req_headers} = conn, params) do
+    Federator.incoming_ap_doc(%{req_headers: req_headers, params: params})
+    json(conn, "ok")
   end
 
   # POST /relay/inbox -or- POST /internal/fetch/inbox

@@ -476,7 +480,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
         |> json(message)
 
       e ->
-        Logger.warn(fn -> "AP C2S: #{inspect(e)}" end)
+        Logger.warning(fn -> "AP C2S: #{inspect(e)}" end)
 
         conn
         |> put_status(:bad_request)
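With the rewritten unsigned-inbox clause above, a request whose signature could not be validated is no longer rejected outright; the raw headers travel along so the signature can be re-checked once the actor's key is available. Roughly the shape handed to the federator — header and parameter values here are placeholders:

```elixir
# Sketch of the payload produced by the valid_signature: false clause.
Federator.incoming_ap_doc(%{
  req_headers: [{"signature", "keyId=\"https://remote.example/users/alice#main-key\""}],
  params: %{"type" => "Create", "actor" => "https://remote.example/users/alice"}
})
```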
@@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
   This module encodes our addressing policies and general shape of our objects.
   """
 
   alias Pleroma.Activity
+  alias Pleroma.Emoji
   alias Pleroma.Object
   alias Pleroma.User

@@ -16,6 +17,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.ActivityPub.Visibility
   alias Pleroma.Web.CommonAPI.ActivityDraft
   alias Pleroma.Web.Endpoint
 
   require Pleroma.Constants
@@ -54,13 +56,87 @@ defmodule Pleroma.Web.ActivityPub.Builder do
     {:ok, data, []}
   end
 
+  defp unicode_emoji_react(_object, data, emoji) do
+    data
+    |> Map.put("content", emoji)
+    |> Map.put("type", "EmojiReact")
+  end
+
+  defp add_emoji_content(data, emoji, url) do
+    tag = [
+      %{
+        "id" => url,
+        "type" => "Emoji",
+        "name" => Emoji.maybe_quote(emoji),
+        "icon" => %{
+          "type" => "Image",
+          "url" => url
+        }
+      }
+    ]
+
+    data
+    |> Map.put("content", Emoji.maybe_quote(emoji))
+    |> Map.put("type", "EmojiReact")
+    |> Map.put("tag", tag)
+  end
+
+  defp remote_custom_emoji_react(
+         %{data: %{"reactions" => existing_reactions}},
+         data,
+         emoji
+       ) do
+    [emoji_code, instance] = String.split(Emoji.maybe_strip_name(emoji), "@")
+
+    matching_reaction =
+      Enum.find(
+        existing_reactions,
+        fn [name, _, url] ->
+          if url != nil do
+            url = URI.parse(url)
+            url.host == instance && name == emoji_code
+          end
+        end
+      )
+
+    if matching_reaction do
+      [name, _, url] = matching_reaction
+      add_emoji_content(data, name, url)
+    else
+      {:error, "Could not react"}
+    end
+  end
+
+  defp remote_custom_emoji_react(_object, _data, _emoji) do
+    {:error, "Could not react"}
+  end
+
+  defp local_custom_emoji_react(data, emoji) do
+    with %{file: path} = emojo <- Emoji.get(emoji) do
+      url = "#{Endpoint.url()}#{path}"
+      add_emoji_content(data, emojo.code, url)
+    else
+      _ -> {:error, "Emoji does not exist"}
+    end
+  end
+
+  defp custom_emoji_react(object, data, emoji) do
+    if String.contains?(emoji, "@") do
+      remote_custom_emoji_react(object, data, emoji)
+    else
+      local_custom_emoji_react(data, emoji)
+    end
+  end
+
+  @spec emoji_react(User.t(), Object.t(), String.t()) :: {:ok, map(), keyword()}
   def emoji_react(actor, object, emoji) do
     with {:ok, data, meta} <- object_action(actor, object) do
       data =
-        data
-        |> Map.put("content", emoji)
-        |> Map.put("type", "EmojiReact")
+        if Emoji.unicode?(emoji) do
+          unicode_emoji_react(object, data, emoji)
+        else
+          custom_emoji_react(object, data, emoji)
+        end
 
       {:ok, data, meta}
     end
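How the branches above get exercised, as a hedged sketch — `actor` and `object` are assumed to be an already-loaded `%User{}` and `%Object{}`. A Unicode emoji takes `unicode_emoji_react/3`; a bare shortcode goes through `local_custom_emoji_react/2` and only succeeds if the pack emoji exists locally; a `shortcode@instance` name is matched against the object's existing remote reactions.

```elixir
# Unicode reaction — takes the unicode_emoji_react/3 branch above.
{:ok, data, _meta} = Pleroma.Web.ActivityPub.Builder.emoji_react(actor, object, "☕")
data["type"]    #=> "EmojiReact"
data["content"] #=> "☕"

# A "shortcode@instance" emoji instead goes through remote_custom_emoji_react/3,
# which only succeeds when the object already carries a matching entry in
# data["reactions"]; otherwise it returns {:error, "Could not react"}.
```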
@@ -142,6 +218,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
         "tag" => Keyword.values(draft.tags) |> Enum.uniq()
       }
       |> add_in_reply_to(draft.in_reply_to)
+      |> add_quote(draft.quote_post)
       |> Map.merge(draft.extra)
 
     {:ok, data, []}
@@ -157,6 +234,16 @@ defmodule Pleroma.Web.ActivityPub.Builder do
     end
   end
 
+  defp add_quote(object, nil), do: object
+
+  defp add_quote(object, quote_post) do
+    with %Object{} = quote_object <- Object.normalize(quote_post, fetch: false) do
+      Map.put(object, "quoteUrl", quote_object.data["id"])
+    else
+      _ -> object
+    end
+  end
+
   def chat_message(actor, recipient, content, opts \\ []) do
     basic = %{
       "id" => Utils.generate_object_id(),
@@ -261,7 +348,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
       actor.ap_id == Relay.ap_id() ->
         [actor.follower_address]
 
-      public? and Visibility.is_local_public?(object) ->
+      public? and Visibility.local_public?(object) ->
         [actor.follower_address, object.data["actor"], Utils.as_local_public()]
 
       public? ->

@@ -289,7 +376,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
 
     # Address the actor of the object, and our actor's follower collection if the post is public.
     to =
-      if Visibility.is_public?(object) do
+      if Visibility.public?(object) do
         [actor.follower_address, object.data["actor"]]
       else
         [object.data["actor"]]
@@ -1,5 +1,5 @@
 # Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
+# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.Web.ActivityPub.MRF do

@@ -54,6 +54,8 @@ defmodule Pleroma.Web.ActivityPub.MRF do
   @required_description_keys [:key, :related_policy]
 
   def filter_one(policy, message) do
+    Code.ensure_loaded(policy)
+
     should_plug_history? =
       if function_exported?(policy, :history_awareness, 0) do
         policy.history_awareness()

@@ -137,7 +139,16 @@ defmodule Pleroma.Web.ActivityPub.MRF do
 
   @spec subdomains_regex([String.t()]) :: [Regex.t()]
   def subdomains_regex(domains) when is_list(domains) do
-    for domain <- domains, do: ~r(^#{String.replace(domain, "*.", "(.*\\.)*")}$)i
+    for domain <- domains do
+      try do
+        target = String.replace(domain, "*.", "(.*\\.)*")
+        ~r<^#{target}$>i
+      rescue
+        e ->
+          Logger.error("MRF: Invalid subdomain Regex: #{domain}")
+          reraise e, __STACKTRACE__
+      end
+    end
   end
 
   @spec subdomain_match?([Regex.t()], String.t()) :: boolean()

@@ -188,6 +199,8 @@ defmodule Pleroma.Web.ActivityPub.MRF do
 
   def config_descriptions(policies) do
     Enum.reduce(policies, @mrf_config_descriptions, fn policy, acc ->
+      Code.ensure_loaded(policy)
+
       if function_exported?(policy, :config_description, 0) do
         description =
           @default_description

@@ -199,7 +212,7 @@ defmodule Pleroma.Web.ActivityPub.MRF do
       if Enum.all?(@required_description_keys, &Map.has_key?(description, &1)) do
         [description | acc]
       else
-        Logger.warn(
+        Logger.warning(
           "#{policy} config description doesn't have one or all required keys #{inspect(@required_description_keys)}"
         )
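The rewritten `subdomains_regex/1` still performs the same `*.` expansion as before; it only adds logging and a re-raise when a configured pattern fails to compile. The matching behaviour, for reference:

```elixir
regexes = Pleroma.Web.ActivityPub.MRF.subdomains_regex(["*.example.com"])

Pleroma.Web.ActivityPub.MRF.subdomain_match?(regexes, "sub.example.com") #=> true
Pleroma.Web.ActivityPub.MRF.subdomain_match?(regexes, "example.com")     #=> true ("*." matches zero labels)
Pleroma.Web.ActivityPub.MRF.subdomain_match?(regexes, "example.org")     #=> false
```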
@@ -56,8 +56,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.AntiFollowbotPolicy do
     nick_score + name_score + actor_type_score
   end
 
-  defp determine_if_followbot(_), do: 0.0
-
   defp bot_allowed?(%{"object" => target}, bot_actor) do
     %User{} = user = normalize_by_ap_id(target)
lib/pleroma/web/activity_pub/mrf/emoji_policy.ex (new file, 281 lines)
@@ -0,0 +1,281 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.MRF.EmojiPolicy do
  require Pleroma.Constants

  alias Pleroma.Object.Updater
  alias Pleroma.Web.ActivityPub.MRF.Utils

  @moduledoc "Reject or force-unlisted emojis with certain URLs or names"

  @behaviour Pleroma.Web.ActivityPub.MRF.Policy

  defp config_remove_url do
    Pleroma.Config.get([:mrf_emoji, :remove_url], [])
  end

  defp config_remove_shortcode do
    Pleroma.Config.get([:mrf_emoji, :remove_shortcode], [])
  end

  defp config_unlist_url do
    Pleroma.Config.get([:mrf_emoji, :federated_timeline_removal_url], [])
  end

  defp config_unlist_shortcode do
    Pleroma.Config.get([:mrf_emoji, :federated_timeline_removal_shortcode], [])
  end

  @impl Pleroma.Web.ActivityPub.MRF.Policy
  def history_awareness, do: :manual

  @impl Pleroma.Web.ActivityPub.MRF.Policy
  def filter(%{"type" => type, "object" => %{"type" => objtype} = object} = message)
      when type in ["Create", "Update"] and objtype in Pleroma.Constants.status_object_types() do
    with {:ok, object} <-
           Updater.do_with_history(object, fn object ->
             {:ok, process_remove(object, :url, config_remove_url())}
           end),
         {:ok, object} <-
           Updater.do_with_history(object, fn object ->
             {:ok, process_remove(object, :shortcode, config_remove_shortcode())}
           end),
         activity <- Map.put(message, "object", object),
         activity <- maybe_delist(activity) do
      {:ok, activity}
    end
  end

  @impl Pleroma.Web.ActivityPub.MRF.Policy
  def filter(%{"type" => type} = object) when type in Pleroma.Constants.actor_types() do
    with object <- process_remove(object, :url, config_remove_url()),
         object <- process_remove(object, :shortcode, config_remove_shortcode()) do
      {:ok, object}
    end
  end

  @impl Pleroma.Web.ActivityPub.MRF.Policy
  def filter(%{"type" => "EmojiReact"} = object) do
    with {:ok, _} <-
           matched_emoji_checker(config_remove_url(), config_remove_shortcode()).(object) do
      {:ok, object}
    else
      _ ->
        {:reject, "[EmojiPolicy] Rejected for having disallowed emoji"}
    end
  end

  @impl Pleroma.Web.ActivityPub.MRF.Policy
  def filter(message) do
    {:ok, message}
  end
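The four `config_*` helpers above read the `:mrf_emoji` settings. A configuration sketch with made-up patterns (plain strings match exactly, `~r//` patterns are matched as regular expressions), plus the usual step of adding the policy to the MRF pipeline:

```elixir
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.EmojiPolicy]

config :pleroma, :mrf_emoji,
  remove_url: [~r/https:\/\/bad\.example\/emoji\//iu],
  remove_shortcode: ["annoying_emoji"],
  federated_timeline_removal_url: [],
  federated_timeline_removal_shortcode: [~r/spin/iu]
```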
|
||||
|
||||
defp match_string?(string, pattern) when is_binary(pattern) do
|
||||
string == pattern
|
||||
end
|
||||
|
||||
defp match_string?(string, %Regex{} = pattern) do
|
||||
String.match?(string, pattern)
|
||||
end
|
||||
|
||||
defp match_any?(string, patterns) do
|
||||
Enum.any?(patterns, &match_string?(string, &1))
|
||||
end
|
||||
|
||||
defp url_from_tag(%{"icon" => %{"url" => url}}), do: url
|
||||
defp url_from_tag(_), do: nil
|
||||
|
||||
defp url_from_emoji({_name, url}), do: url
|
||||
|
||||
defp shortcode_from_tag(%{"name" => name}) when is_binary(name), do: String.trim(name, ":")
|
||||
defp shortcode_from_tag(_), do: nil
|
||||
|
||||
defp shortcode_from_emoji({name, _url}), do: name
|
||||
|
||||
defp process_remove(object, :url, patterns) do
|
||||
process_remove_impl(object, &url_from_tag/1, &url_from_emoji/1, patterns)
|
||||
end
|
||||
|
||||
defp process_remove(object, :shortcode, patterns) do
|
||||
process_remove_impl(object, &shortcode_from_tag/1, &shortcode_from_emoji/1, patterns)
|
||||
end
|
||||
|
||||
defp process_remove_impl(object, extract_from_tag, extract_from_emoji, patterns) do
|
||||
object =
|
||||
if object["tag"] do
|
||||
Map.put(
|
||||
object,
|
||||
"tag",
|
||||
Enum.filter(
|
||||
object["tag"],
|
||||
fn
|
||||
%{"type" => "Emoji"} = tag ->
|
||||
str = extract_from_tag.(tag)
|
||||
|
||||
if is_binary(str) do
|
||||
not match_any?(str, patterns)
|
||||
else
|
||||
true
|
||||
end
|
||||
|
||||
_ ->
|
||||
true
|
||||
end
|
||||
)
|
||||
)
|
||||
else
|
||||
object
|
||||
end
|
||||
|
||||
object =
|
||||
if object["emoji"] do
|
||||
Map.put(
|
||||
object,
|
||||
"emoji",
|
||||
object["emoji"]
|
||||
|> Enum.reduce(%{}, fn {name, url} = emoji, acc ->
|
||||
if not match_any?(extract_from_emoji.(emoji), patterns) do
|
||||
Map.put(acc, name, url)
|
||||
else
|
||||
acc
|
||||
end
|
||||
end)
|
||||
)
|
||||
else
|
||||
object
|
||||
end
|
||||
|
||||
object
|
||||
end
|
||||
|
||||
defp matched_emoji_checker(urls, shortcodes) do
|
||||
fn object ->
|
||||
if any_emoji_match?(object, &url_from_tag/1, &url_from_emoji/1, urls) or
|
||||
any_emoji_match?(
|
||||
object,
|
||||
&shortcode_from_tag/1,
|
||||
&shortcode_from_emoji/1,
|
||||
shortcodes
|
||||
) do
|
||||
{:matched, nil}
|
||||
else
|
||||
{:ok, %{}}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
defp maybe_delist(%{"object" => object, "to" => to, "type" => "Create"} = activity) do
|
||||
check = matched_emoji_checker(config_unlist_url(), config_unlist_shortcode())
|
||||
|
||||
should_delist? = fn object ->
|
||||
with {:ok, _} <- Pleroma.Object.Updater.do_with_history(object, check) do
|
||||
false
|
||||
else
|
||||
_ -> true
|
||||
end
|
||||
end
|
||||
|
||||
if Pleroma.Constants.as_public() in to and should_delist?.(object) do
|
||||
to = List.delete(to, Pleroma.Constants.as_public())
|
||||
cc = [Pleroma.Constants.as_public() | activity["cc"] || []]
|
||||
|
||||
activity
|
||||
|> Map.put("to", to)
|
||||
|> Map.put("cc", cc)
|
||||
else
|
||||
activity
|
||||
end
|
||||
end
|
||||
|
||||
defp maybe_delist(activity), do: activity
|
||||
|
||||
defp any_emoji_match?(object, extract_from_tag, extract_from_emoji, patterns) do
|
||||
Kernel.||(
|
||||
Enum.any?(
|
||||
object["tag"] || [],
|
||||
fn
|
||||
%{"type" => "Emoji"} = tag ->
|
||||
str = extract_from_tag.(tag)
|
||||
|
||||
if is_binary(str) do
|
||||
match_any?(str, patterns)
|
||||
else
|
||||
false
|
||||
end
|
||||
|
||||
_ ->
|
||||
false
|
||||
end
|
||||
),
|
||||
(object["emoji"] || [])
|
||||
|> Enum.any?(fn emoji -> match_any?(extract_from_emoji.(emoji), patterns) end)
|
||||
)
|
||||
end
|
||||
|
||||
@impl Pleroma.Web.ActivityPub.MRF.Policy
|
||||
def describe do
|
||||
mrf_emoji =
|
||||
Pleroma.Config.get(:mrf_emoji, [])
|
||||
|> Enum.map(fn {key, value} ->
|
||||
{key, Enum.map(value, &Utils.describe_regex_or_string/1)}
|
||||
end)
|
||||
|> Enum.into(%{})
|
||||
|
||||
{:ok, %{mrf_emoji: mrf_emoji}}
|
||||
end
|
||||
|
||||
@impl Pleroma.Web.ActivityPub.MRF.Policy
|
||||
def config_description do
|
||||
%{
|
||||
key: :mrf_emoji,
|
||||
related_policy: "Pleroma.Web.ActivityPub.MRF.EmojiPolicy",
|
||||
label: "MRF Emoji",
|
||||
description:
|
||||
"Reject or force-unlisted emojis whose URLs or names match a keyword or [Regex](https://hexdocs.pm/elixir/Regex.html).",
|
||||
children: [
|
||||
%{
|
||||
key: :remove_url,
|
||||
type: {:list, :string},
|
||||
description: """
|
||||
A list of patterns which result in emoji whose URL matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.
|
||||
|
||||
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
|
||||
""",
|
||||
suggestions: ["https://example.org/foo.png", ~r/example.org\/foo/iu]
|
||||
},
|
||||
%{
|
||||
key: :remove_shortcode,
|
||||
type: {:list, :string},
|
||||
description: """
|
||||
A list of patterns which result in emoji whose shortcode matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.
|
||||
|
||||
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
|
||||
""",
|
||||
suggestions: ["foo", ~r/foo/iu]
|
||||
},
|
||||
%{
|
||||
key: :federated_timeline_removal_url,
|
||||
type: {:list, :string},
|
||||
description: """
|
||||
A list of patterns which result in message with emojis whose URLs match being removed from federated timelines (a.k.a unlisted). This will apply only to statuses.
|
||||
|
||||
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
|
||||
""",
|
||||
suggestions: ["https://example.org/foo.png", ~r/example.org\/foo/iu]
|
||||
},
|
||||
%{
|
||||
key: :federated_timeline_removal_shortcode,
|
||||
type: {:list, :string},
|
||||
description: """
|
||||
A list of patterns which result in message with emojis whose shortcodes match being removed from federated timelines (a.k.a unlisted). This will apply only to statuses.
|
||||
|
||||
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
|
||||
""",
|
||||
suggestions: ["foo", ~r/foo/iu]
|
||||
}
|
||||
]
|
||||
}
|
||||
end
|
||||
end
|
||||