Merge remote-tracking branch 'origin/develop' into instance_rules

Signed-off-by: marcin mikołajczak <git@mkljczk.pl>
marcin mikołajczak 2023-12-22 14:34:30 +01:00
commit 6051715a99
1114 changed files with 53017 additions and 6853 deletions


@ -1,113 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Benchmark do
import Mix.Pleroma
use Mix.Task
def run(["search"]) do
start_pleroma()
Benchee.run(%{
"search" => fn ->
Pleroma.Activity.search(nil, "cofe")
end
})
end
def run(["tag"]) do
start_pleroma()
Benchee.run(%{
"tag" => fn ->
%{"type" => "Create", "tag" => "cofe"}
|> Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities()
end
})
end
def run(["render_timeline", nickname | _] = args) do
start_pleroma()
user = Pleroma.User.get_by_nickname(nickname)
activities =
%{}
|> Map.put("type", ["Create", "Announce"])
|> Map.put("blocking_user", user)
|> Map.put("muting_user", user)
|> Map.put("user", user)
|> Map.put("limit", 4096)
|> Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities()
|> Enum.reverse()
inputs = %{
"1 activity" => Enum.take_random(activities, 1),
"10 activities" => Enum.take_random(activities, 10),
"20 activities" => Enum.take_random(activities, 20),
"40 activities" => Enum.take_random(activities, 40),
"80 activities" => Enum.take_random(activities, 80)
}
inputs =
if Enum.at(args, 2) == "extended" do
Map.merge(inputs, %{
"200 activities" => Enum.take_random(activities, 200),
"500 activities" => Enum.take_random(activities, 500),
"2000 activities" => Enum.take_random(activities, 2000),
"4096 activities" => Enum.take_random(activities, 4096)
})
else
inputs
end
Benchee.run(
%{
"Standart rendering" => fn activities ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
activities: activities,
for: user,
as: :activity
})
end
},
inputs: inputs
)
end
def run(["adapters"]) do
start_pleroma()
:ok =
Pleroma.Gun.Conn.open(
"https://httpbin.org/stream-bytes/1500",
:gun_connections
)
Process.sleep(1_500)
Benchee.run(
%{
"Without conn and without pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
pool: :no_pool,
receive_conn: false
)
end,
"Without conn and with pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], receive_conn: false)
end,
"With reused conn and without pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], pool: :no_pool)
end,
"With reused conn and with pool" => fn ->
{:ok, %Tesla.Env{}} = Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500")
end
},
parallel: 10
)
end
end


@ -304,13 +304,8 @@ defmodule Mix.Tasks.Pleroma.Config do
System.cmd("mix", ["format", path])
end
if Code.ensure_loaded?(Config.Reader) do
defp config_header, do: "import Config\r\n\r\n"
defp read_file(config_file), do: Config.Reader.read_imports!(config_file)
else
defp config_header, do: "use Mix.Config\r\n\r\n"
defp read_file(config_file), do: Mix.Config.eval!(config_file)
end
defp config_header, do: "import Config\r\n\r\n"
defp read_file(config_file), do: Config.Reader.read_imports!(config_file)
defp write_and_delete(config, file, delete?) do
config


@ -154,9 +154,8 @@ defmodule Mix.Tasks.Pleroma.Database do
|> join(:inner, [a], o in Object,
on:
fragment(
"(?->>'id') = COALESCE((?)->'object'->> 'id', (?)->>'object')",
"(?->>'id') = associated_object_id((?))",
o.data,
a.data,
a.data
)
)


@ -34,7 +34,8 @@ defmodule Mix.Tasks.Pleroma.Instance do
static_dir: :string,
listen_ip: :string,
listen_port: :string,
strip_uploads: :string,
strip_uploads_location: :string,
read_uploads_description: :string,
anonymize_uploads: :string,
dedupe_uploads: :string
],
@ -161,7 +162,7 @@ defmodule Mix.Tasks.Pleroma.Instance do
)
|> Path.expand()
{strip_uploads_message, strip_uploads_default} =
{strip_uploads_location_message, strip_uploads_location_default} =
if Pleroma.Utils.command_available?("exiftool") do
{"Do you want to strip location (GPS) data from uploaded images? This requires exiftool, it was detected as installed. (y/n)",
"y"}
@ -170,12 +171,29 @@ defmodule Mix.Tasks.Pleroma.Instance do
"n"}
end
strip_uploads =
strip_uploads_location =
get_option(
options,
:strip_uploads,
strip_uploads_message,
strip_uploads_default
:strip_uploads_location,
strip_uploads_location_message,
strip_uploads_location_default
) === "y"
{read_uploads_description_message, read_uploads_description_default} =
if Pleroma.Utils.command_available?("exiftool") do
{"Do you want to read data from uploaded files so clients can use it to prefill fields like image description? This requires exiftool, it was detected as installed. (y/n)",
"y"}
else
{"Do you want to read data from uploaded files so clients can use it to prefill fields like image description? This requires exiftool, it was detected as not installed, please install it if you answer yes. (y/n)",
"n"}
end
read_uploads_description =
get_option(
options,
:read_uploads_description,
read_uploads_description_message,
read_uploads_description_default
) === "y"
anonymize_uploads =
@ -229,7 +247,8 @@ defmodule Mix.Tasks.Pleroma.Instance do
listen_port: listen_port,
upload_filters:
upload_filters(%{
strip: strip_uploads,
strip_location: strip_uploads_location,
read_description: read_uploads_description,
anonymize: anonymize_uploads,
dedupe: dedupe_uploads
})
@ -247,12 +266,20 @@ defmodule Mix.Tasks.Pleroma.Instance do
config_dir = Path.dirname(config_path)
psql_dir = Path.dirname(psql_path)
# Note: Distros requiring group read (0o750) on those directories should
# pre-create the directories.
[config_dir, psql_dir, static_dir, uploads_dir]
|> Enum.reject(&File.exists?/1)
|> Enum.map(&File.mkdir_p!/1)
|> Enum.each(fn dir ->
File.mkdir_p!(dir)
File.chmod!(dir, 0o700)
end)
shell_info("Writing config to #{config_path}.")
# Sadly no fchmod(2) equivalent in Elixir…
File.touch!(config_path)
File.chmod!(config_path, 0o640)
File.write(config_path, result_config)
shell_info("Writing the postgres script to #{psql_path}.")
File.write(psql_path, result_psql)
@ -271,8 +298,7 @@ defmodule Mix.Tasks.Pleroma.Instance do
else
shell_error(
"The task would have overwritten the following files:\n" <>
(Enum.map(will_overwrite, &"- #{&1}\n") |> Enum.join("")) <>
"Rerun with `--force` to overwrite them."
Enum.map_join(will_overwrite, &"- #{&1}\n") <> "Rerun with `--force` to overwrite them."
)
end
end
@ -297,12 +323,19 @@ defmodule Mix.Tasks.Pleroma.Instance do
defp upload_filters(filters) when is_map(filters) do
enabled_filters =
if filters.strip do
[Pleroma.Upload.Filter.Exiftool]
if filters.strip_location do
[Pleroma.Upload.Filter.Exiftool.StripLocation]
else
[]
end
enabled_filters =
if filters.read_description do
enabled_filters ++ [Pleroma.Upload.Filter.Exiftool.ReadDescription]
else
enabled_filters
end
enabled_filters =
if filters.anonymize do
enabled_filters ++ [Pleroma.Upload.Filter.AnonymizeFilename]


@ -6,7 +6,70 @@ defmodule Mix.Tasks.Pleroma.OpenapiSpec do
def run([path]) do
# Load Pleroma application to get version info
Application.load(:pleroma)
spec = Pleroma.Web.ApiSpec.spec(server_specific: false) |> Jason.encode!()
File.write(path, spec)
spec_json = Pleroma.Web.ApiSpec.spec(server_specific: false) |> Jason.encode!()
# to get rid of the structs
spec_regened = spec_json |> Jason.decode!()
check_specs!(spec_regened)
File.write(path, spec_json)
end
defp check_specs!(spec) do
with :ok <- check_specs(spec) do
:ok
else
{_, errors} ->
IO.puts(IO.ANSI.format([:red, :bright, "Spec check failed, errors:"]))
Enum.map(errors, &IO.puts/1)
raise "Spec check failed"
end
end
def check_specs(spec) do
errors =
spec["paths"]
|> Enum.flat_map(fn {path, %{} = endpoints} ->
Enum.map(
endpoints,
fn {method, endpoint} ->
with :ok <- check_endpoint(spec, endpoint) do
:ok
else
error ->
"#{endpoint["operationId"]} (#{method} #{path}): #{error}"
end
end
)
|> Enum.reject(fn res -> res == :ok end)
end)
if errors == [] do
:ok
else
{:error, errors}
end
end
defp check_endpoint(spec, endpoint) do
valid_tags = available_tags(spec)
with {_, [_ | _] = tags} <- {:tags, endpoint["tags"]},
{_, []} <- {:unavailable, Enum.reject(tags, &(&1 in valid_tags))} do
:ok
else
{:tags, _} ->
"No tags specified"
{:unavailable, tags} ->
"Tags #{inspect(tags)} not available. Please add it in \"x-tagGroups\" in Pleroma.Web.ApiSpec"
end
end
defp available_tags(spec) do
spec["x-tagGroups"]
|> Enum.flat_map(fn %{"tags" => tags} -> tags end)
end
end


@ -0,0 +1,145 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Search.Meilisearch do
require Pleroma.Constants
import Mix.Pleroma
import Ecto.Query
import Pleroma.Search.Meilisearch,
only: [meili_post: 2, meili_put: 2, meili_get: 1, meili_delete: 1]
def run(["index"]) do
start_pleroma()
Pleroma.HTML.compile_scrubbers()
meili_version =
(
{:ok, result} = meili_get("/version")
result["pkgVersion"]
)
# The ranking rule syntax was changed but nothing about that is mentioned in the changelog
if not Version.match?(meili_version, ">= 0.25.0") do
raise "Meilisearch <0.24.0 not supported"
end
{:ok, _} =
meili_post(
"/indexes/objects/settings/ranking-rules",
[
"published:desc",
"words",
"exactness",
"proximity",
"typo",
"attribute",
"sort"
]
)
{:ok, _} =
meili_post(
"/indexes/objects/settings/searchable-attributes",
[
"content"
]
)
IO.puts("Created indices. Starting to insert posts.")
chunk_size = Pleroma.Config.get([Pleroma.Search.Meilisearch, :initial_indexing_chunk_size])
Pleroma.Repo.transaction(
fn ->
query =
from(Pleroma.Object,
# Only index public and unlisted posts which are notes and have some text
where:
fragment("data->>'type' = 'Note'") and
(fragment("data->'to' \\? ?", ^Pleroma.Constants.as_public()) or
fragment("data->'cc' \\? ?", ^Pleroma.Constants.as_public())),
order_by: [desc: fragment("data->'published'")]
)
count = query |> Pleroma.Repo.aggregate(:count, :data)
IO.puts("Entries to index: #{count}")
Pleroma.Repo.stream(
query,
timeout: :infinity
)
|> Stream.map(&Pleroma.Search.Meilisearch.object_to_search_data/1)
|> Stream.filter(fn o -> not is_nil(o) end)
|> Stream.chunk_every(chunk_size)
|> Stream.transform(0, fn objects, acc ->
new_acc = acc + Enum.count(objects)
# Reset to the beginning of the line and rewrite it
IO.write("\r")
IO.write("Indexed #{new_acc} entries")
{[objects], new_acc}
end)
|> Stream.each(fn objects ->
result =
meili_put(
"/indexes/objects/documents",
objects
)
with {:ok, res} <- result do
if not Map.has_key?(res, "uid") do
IO.puts("\nFailed to index: #{inspect(result)}")
end
else
e -> IO.puts("\nFailed to index due to network error: #{inspect(e)}")
end
end)
|> Stream.run()
end,
timeout: :infinity
)
IO.write("\n")
end
def run(["clear"]) do
start_pleroma()
meili_delete("/indexes/objects/documents")
end
def run(["show-keys", master_key]) do
start_pleroma()
endpoint = Pleroma.Config.get([Pleroma.Search.Meilisearch, :url])
{:ok, result} =
Pleroma.HTTP.get(
Path.join(endpoint, "/keys"),
[{"Authorization", "Bearer #{master_key}"}]
)
decoded = Jason.decode!(result.body)
if decoded["results"] do
Enum.each(decoded["results"], fn %{"description" => desc, "key" => key} ->
IO.puts("#{desc}: #{key}")
end)
else
IO.puts("Error fetching the keys, check the master key is correct: #{inspect(decoded)}")
end
end
def run(["stats"]) do
start_pleroma()
{:ok, result} = meili_get("/indexes/objects/stats")
IO.puts("Number of entries: #{result["numberOfDocuments"]}")
IO.puts("Indexing? #{result["isIndexing"]}")
end
end


@ -112,9 +112,10 @@ defmodule Mix.Tasks.Pleroma.User do
{:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do
shell_info("Generated password reset token for #{user.nickname}")
IO.puts("URL: #{Pleroma.Web.Router.Helpers.reset_password_url(Pleroma.Web.Endpoint,
:reset,
token.token)}")
url =
Pleroma.Web.Router.Helpers.reset_password_url(Pleroma.Web.Endpoint, :reset, token.token)
IO.puts("URL: #{url}")
else
_ ->
shell_error("No local user #{nickname}")
@ -421,6 +422,38 @@ defmodule Mix.Tasks.Pleroma.User do
|> Stream.run()
end
def run(["fix_follow_state", local_user, remote_user]) do
start_pleroma()
with {:local, %User{} = local} <- {:local, User.get_by_nickname(local_user)},
{:remote, %User{} = remote} <- {:remote, User.get_by_nickname(remote_user)},
{:follow_data, %{data: %{"state" => request_state}}} <-
{:follow_data, Pleroma.Web.ActivityPub.Utils.fetch_latest_follow(local, remote)} do
calculated_state = User.following?(local, remote)
shell_info(
"Request state is #{request_state}, vs calculated state of following=#{calculated_state}"
)
if calculated_state == false && request_state == "accept" do
shell_info("Discrepancy found, fixing")
Pleroma.Web.CommonAPI.reject_follow_request(local, remote)
shell_info("Relationship fixed")
else
shell_info("No discrepancy found")
end
else
{:local, _} ->
shell_error("No local user #{local_user}")
{:remote, _} ->
shell_error("No remote user #{remote_user}")
{:follow_data, _} ->
shell_error("No follow data for #{local_user} and #{remote_user}")
end
end
defp set_moderator(user, value) do
{:ok, user} =
user


@ -26,7 +26,6 @@ defmodule Phoenix.Transports.WebSocket.Raw do
conn
|> fetch_query_params
|> Transport.transport_log(opts[:transport_log])
|> Transport.force_ssl(handler, endpoint, opts)
|> Transport.check_origin(handler, endpoint, opts)
case conn do


@ -53,7 +53,7 @@ defmodule Pleroma.Activity do
#
# ```
# |> join(:inner, [activity], o in Object,
# on: fragment("(?->>'id') = COALESCE((?)->'object'->> 'id', (?)->>'object')",
# on: fragment("(?->>'id') = associated_object_id((?))",
# o.data, activity.data, activity.data))
# |> preload([activity, object], [object: object])
# ```
@ -69,9 +69,8 @@ defmodule Pleroma.Activity do
join(query, join_type, [activity], o in Object,
on:
fragment(
"(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
"(?->>'id') = associated_object_id(?)",
o.data,
activity.data,
activity.data
),
as: :object
@ -362,12 +361,14 @@ defmodule Pleroma.Activity do
end
def restrict_deactivated_users(query) do
deactivated_users_query = from(u in User.Query.build(%{deactivated: true}), select: u.ap_id)
from(activity in query, where: activity.actor not in subquery(deactivated_users_query))
query
|> join(:inner, [activity], user in User,
as: :user,
on: activity.actor == user.ap_id and user.is_active == true
)
end
defdelegate search(user, query, options \\ []), to: Pleroma.Activity.Search
defdelegate search(user, query, options \\ []), to: Pleroma.Search.DatabaseSearch
def direct_conversation_id(activity, for_user) do
alias Pleroma.Conversation.Participation


@ -8,6 +8,40 @@ defmodule Pleroma.Activity.HTML do
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
# We store a list of cache keys related to an activity in a
# separate cache, scrubber_management_cache. It has the same
# size as scrubber_cache (see application.ex). Every time we add
# a cache to scrubber_cache, we update scrubber_management_cache.
#
# The most recent write of a certain key in the management cache
# is the same as the most recent write of any record related to that
# key in the main cache.
# Assuming LRW ( https://hexdocs.pm/cachex/Cachex.Policy.LRW.html ),
# this means when the management cache is evicted by cachex, all
# related records in the main cache will also have been evicted.
defp get_cache_keys_for(activity_id) do
with {:ok, list} when is_list(list) <- @cachex.get(:scrubber_management_cache, activity_id) do
list
else
_ -> []
end
end
defp add_cache_key_for(activity_id, additional_key) do
current = get_cache_keys_for(activity_id)
unless additional_key in current do
@cachex.put(:scrubber_management_cache, activity_id, [additional_key | current])
end
end
def invalidate_cache_for(activity_id) do
keys = get_cache_keys_for(activity_id)
Enum.map(keys, &@cachex.del(:scrubber_cache, &1))
@cachex.del(:scrubber_management_cache, activity_id)
end
def get_cached_scrubbed_html_for_activity(
content,
scrubbers,
@ -19,6 +53,8 @@ defmodule Pleroma.Activity.HTML do
@cachex.fetch!(:scrubber_cache, key, fn _key ->
object = Object.normalize(activity, fetch: false)
add_cache_key_for(activity.id, key)
HTML.ensure_scrubbed_html(content, scrubbers, object.data["fake"] || false, callback)
end)
end


@ -13,6 +13,14 @@ defmodule Pleroma.Activity.Ir.Topics do
|> List.flatten()
end
defp generate_topics(%{data: %{"type" => "ChatMessage"}}, %{data: %{"type" => "Delete"}}) do
["user", "user:pleroma_chat"]
end
defp generate_topics(%{data: %{"type" => "ChatMessage"}}, %{data: %{"type" => "Create"}}) do
[]
end
defp generate_topics(%{data: %{"type" => "Answer"}}, _) do
[]
end
@ -21,7 +29,7 @@ defmodule Pleroma.Activity.Ir.Topics do
["user", "list"] ++ visibility_tags(object, activity)
end
defp visibility_tags(object, activity) do
defp visibility_tags(object, %{data: %{"type" => type}} = activity) when type != "Announce" do
case Visibility.get_visibility(activity) do
"public" ->
if activity.local do
@ -31,6 +39,10 @@ defmodule Pleroma.Activity.Ir.Topics do
end
|> item_creation_tags(object, activity)
"local" ->
["public:local"]
|> item_creation_tags(object, activity)
"direct" ->
["direct"]
@ -39,6 +51,10 @@ defmodule Pleroma.Activity.Ir.Topics do
end
end
defp visibility_tags(_object, _activity) do
[]
end
defp item_creation_tags(tags, object, %{data: %{"type" => "Create"}} = activity) do
tags ++
remote_topics(activity) ++ hashtags_to_topics(object) ++ attachment_topics(object, activity)
@ -63,7 +79,18 @@ defmodule Pleroma.Activity.Ir.Topics do
defp attachment_topics(%{data: %{"attachment" => []}}, _act), do: []
defp attachment_topics(_object, %{local: true}), do: ["public:media", "public:local:media"]
defp attachment_topics(_object, %{local: true} = activity) do
case Visibility.get_visibility(activity) do
"public" ->
["public:media", "public:local:media"]
"local" ->
["public:local:media"]
_ ->
[]
end
end
defp attachment_topics(_object, %{actor: actor}) when is_binary(actor),
do: ["public:media", "public:remote:media:" <> URI.parse(actor).host]


@ -52,8 +52,7 @@ defmodule Pleroma.Activity.Queries do
activity in query,
where:
fragment(
"coalesce((?)->'object'->>'id', (?)->>'object') = ANY(?)",
activity.data,
"associated_object_id((?)) = ANY(?)",
activity.data,
^object_ids
)
@ -64,8 +63,7 @@ defmodule Pleroma.Activity.Queries do
from(activity in query,
where:
fragment(
"coalesce((?)->'object'->>'id', (?)->>'object') = ?",
activity.data,
"associated_object_id((?)) = ?",
activity.data,
^object_id
)
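The joins and where-clauses above now call a database helper, associated_object_id(), instead of inlining the COALESCE over the activity data. A hedged sketch of the kind of migration that could define such a helper; the function name is taken from the fragments, but the migration module name and exact SQL are assumptions and not part of this diff:
defmodule Pleroma.Repo.Migrations.AddAssociatedObjectIdFunction do
  use Ecto.Migration

  def up do
    # Hypothetical: wrap the previous inline expression
    # COALESCE(data->'object'->>'id', data->>'object') in a reusable SQL function.
    execute("""
    CREATE OR REPLACE FUNCTION associated_object_id(data jsonb) RETURNS text AS $$
      SELECT COALESCE(data->'object'->>'id', data->>'object')
    $$ LANGUAGE SQL IMMUTABLE
    """)
  end

  def down do
    execute("DROP FUNCTION IF EXISTS associated_object_id(jsonb)")
  end
end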

lib/pleroma/announcement.ex

@ -0,0 +1,160 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Announcement do
use Ecto.Schema
import Ecto.Changeset, only: [cast: 3, validate_required: 2]
import Ecto.Query
alias Pleroma.AnnouncementReadRelationship
alias Pleroma.Repo
@type t :: %__MODULE__{}
@primary_key {:id, FlakeId.Ecto.CompatType, autogenerate: true}
schema "announcements" do
field(:data, :map)
field(:starts_at, :utc_datetime)
field(:ends_at, :utc_datetime)
field(:rendered, :map)
timestamps(type: :utc_datetime)
end
def change(struct, params \\ %{}) do
struct
|> cast(validate_params(struct, params), [:data, :starts_at, :ends_at, :rendered])
|> validate_required([:data])
end
defp validate_params(struct, params) do
base_data =
%{
"content" => "",
"all_day" => false
}
|> Map.merge((struct && struct.data) || %{})
merged_data =
Map.merge(base_data, params.data)
|> Map.take(["content", "all_day"])
params
|> Map.merge(%{data: merged_data})
|> add_rendered_properties()
end
def add_rendered_properties(params) do
{content_html, _, _} =
Pleroma.Web.CommonAPI.Utils.format_input(params.data["content"], "text/plain",
mentions_format: :full
)
rendered = %{
"content" => content_html
}
params
|> Map.put(:rendered, rendered)
end
def add(params) do
changeset = change(%__MODULE__{}, params)
Repo.insert(changeset)
end
def update(announcement, params) do
changeset = change(announcement, params)
Repo.update(changeset)
end
def list_all do
__MODULE__
|> Repo.all()
end
def list_paginated(%{limit: limited_number, offset: offset_number}) do
__MODULE__
|> limit(^limited_number)
|> offset(^offset_number)
|> Repo.all()
end
def get_by_id(id) do
Repo.get_by(__MODULE__, id: id)
end
def delete_by_id(id) do
with announcement when not is_nil(announcement) <- get_by_id(id),
{:ok, _} <- Repo.delete(announcement) do
:ok
else
_ ->
:error
end
end
def read_by?(announcement, user) do
AnnouncementReadRelationship.exists?(user, announcement)
end
def mark_read_by(announcement, user) do
AnnouncementReadRelationship.mark_read(user, announcement)
end
def render_json(announcement, opts \\ []) do
extra_params =
case Keyword.fetch(opts, :for) do
{:ok, user} when not is_nil(user) ->
%{read: read_by?(announcement, user)}
_ ->
%{}
end
admin_extra_params =
case Keyword.fetch(opts, :admin) do
{:ok, true} ->
%{pleroma: %{raw_content: announcement.data["content"]}}
_ ->
%{}
end
base = %{
id: announcement.id,
content: announcement.rendered["content"],
starts_at: announcement.starts_at,
ends_at: announcement.ends_at,
all_day: announcement.data["all_day"],
published_at: announcement.inserted_at,
updated_at: announcement.updated_at,
mentions: [],
statuses: [],
tags: [],
emojis: [],
reactions: []
}
base
|> Map.merge(extra_params)
|> Map.merge(admin_extra_params)
end
# "visible" means:
# starts_at < time < ends_at
def list_all_visible_when(time) do
__MODULE__
|> where([a], is_nil(a.starts_at) or a.starts_at < ^time)
|> where([a], is_nil(a.ends_at) or a.ends_at > ^time)
|> Repo.all()
end
def list_all_visible do
list_all_visible_when(DateTime.now("Etc/UTC") |> elem(1))
end
end
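A minimal usage sketch for the new schema, assuming a running repo and default formatting config; the content string is made up:
# Create an announcement; data is reduced to its "content"/"all_day" keys and a
# rendered HTML version is stored alongside the raw data.
{:ok, announcement} =
  Pleroma.Announcement.add(%{data: %{"content" => "Maintenance tonight", "all_day" => false}})
# Client-facing representation; pass for: user to also include the per-user read flag.
Pleroma.Announcement.render_json(announcement)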


@ -0,0 +1,55 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.AnnouncementReadRelationship do
use Ecto.Schema
import Ecto.Changeset
alias FlakeId.Ecto.CompatType
alias Pleroma.Announcement
alias Pleroma.Repo
alias Pleroma.User
@type t :: %__MODULE__{}
schema "announcement_read_relationships" do
belongs_to(:user, User, type: CompatType)
belongs_to(:announcement, Announcement, type: CompatType)
timestamps(updated_at: false)
end
def mark_read(user, announcement) do
%__MODULE__{}
|> cast(%{user_id: user.id, announcement_id: announcement.id}, [:user_id, :announcement_id])
|> validate_required([:user_id, :announcement_id])
|> foreign_key_constraint(:user_id)
|> foreign_key_constraint(:announcement_id)
|> unique_constraint([:user_id, :announcement_id])
|> Repo.insert()
end
def mark_unread(user, announcement) do
with relationship <- get(user, announcement),
{:exists, true} <- {:exists, not is_nil(relationship)},
{:ok, _} <- Repo.delete(relationship) do
:ok
else
{:exists, false} ->
:ok
_ ->
:error
end
end
def get(user, announcement) do
Repo.get_by(__MODULE__, user_id: user.id, announcement_id: announcement.id)
end
def exists?(user, announcement) do
not is_nil(get(user, announcement))
end
end


@ -54,7 +54,6 @@ defmodule Pleroma.Application do
Config.DeprecationWarnings.warn()
Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
Pleroma.ApplicationRequirements.verify!()
setup_instrumenters()
load_custom_modules()
Pleroma.Docs.JSON.compile()
limiters_setup()
@ -91,10 +90,12 @@ defmodule Pleroma.Application do
# Define workers and child supervisors to be supervised
children =
[
Pleroma.PromEx,
Pleroma.Repo,
Config.TransferTask,
Pleroma.Emoji,
Pleroma.Web.Plugs.RateLimiter.Supervisor
Pleroma.Web.Plugs.RateLimiter.Supervisor,
{Task.Supervisor, name: Pleroma.TaskSupervisor}
] ++
cachex_children() ++
http_children(adapter, @mix_env) ++
@ -112,7 +113,17 @@ defmodule Pleroma.Application do
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Pleroma.Supervisor]
# If we have a lot of caches, default max_restarts can cause test
# resets to fail.
# Go for the default 3 unless we're in test
max_restarts =
if @mix_env == :test do
100
else
3
end
opts = [strategy: :one_for_one, name: Pleroma.Supervisor, max_restarts: max_restarts]
result = Supervisor.start_link(children, opts)
set_postgres_server_version()
@ -127,7 +138,7 @@ defmodule Pleroma.Application do
num
else
e ->
Logger.warn(
Logger.warning(
"Could not get the postgres version: #{inspect(e)}.\nSetting the default value of 9.6"
)
@ -159,29 +170,6 @@ defmodule Pleroma.Application do
end
end
defp setup_instrumenters do
require Prometheus.Registry
if Application.get_env(:prometheus, Pleroma.Repo.Instrumenter) do
:ok =
:telemetry.attach(
"prometheus-ecto",
[:pleroma, :repo, :query],
&Pleroma.Repo.Instrumenter.handle_event/4,
%{}
)
Pleroma.Repo.Instrumenter.setup()
end
Pleroma.Web.Endpoint.MetricsExporter.setup()
Pleroma.Web.Endpoint.PipelineInstrumenter.setup()
# Note: disabled until prometheus-phx is integrated into prometheus-phoenix:
# Pleroma.Web.Endpoint.Instrumenter.setup()
PrometheusPhx.setup()
end
defp cachex_children do
[
build_cachex("used_captcha", ttl_interval: seconds_valid_interval()),
@ -189,6 +177,7 @@ defmodule Pleroma.Application do
build_cachex("object", default_ttl: 25_000, ttl_interval: 1000, limit: 2500),
build_cachex("rich_media", default_ttl: :timer.minutes(120), limit: 5000),
build_cachex("scrubber", limit: 2500),
build_cachex("scrubber_management", limit: 2500),
build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
build_cachex("web_resp", limit: 2500),
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
@ -197,7 +186,8 @@ defmodule Pleroma.Application do
build_cachex("chat_message_id_idempotency_key",
expiration: chat_message_id_idempotency_key_expiration(),
limit: 500_000
)
),
build_cachex("rel_me", limit: 2500)
]
end
@ -238,7 +228,8 @@ defmodule Pleroma.Application do
defp background_migrators do
[
Pleroma.Migrators.HashtagsTableMigrator
Pleroma.Migrators.HashtagsTableMigrator,
Pleroma.Migrators.ContextObjectsDeletionMigrator
]
end
@ -308,7 +299,11 @@ defmodule Pleroma.Application do
def limiters_setup do
config = Config.get(ConcurrentLimiter, [])
[Pleroma.Web.RichMedia.Helpers, Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy]
[
Pleroma.Web.RichMedia.Helpers,
Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy,
Pleroma.Search
]
|> Enum.each(fn module ->
mod_config = Keyword.get(config, module, [])


@ -34,7 +34,7 @@ defmodule Pleroma.ApplicationRequirements do
defp check_welcome_message_config!(:ok) do
if Pleroma.Config.get([:welcome, :email, :enabled], false) and
not Pleroma.Emails.Mailer.enabled?() do
Logger.warn("""
Logger.warning("""
To send welcome emails, you need to enable the mailer.
Welcome emails will NOT be sent with the current config.
@ -53,7 +53,7 @@ defmodule Pleroma.ApplicationRequirements do
def check_confirmation_accounts!(:ok) do
if Pleroma.Config.get([:instance, :account_activation_required]) &&
not Pleroma.Emails.Mailer.enabled?() do
Logger.warn("""
Logger.warning("""
Account activation is required, but the mailer is disabled.
Users will NOT be able to confirm their accounts with this config.
Either disable account activation or enable the mailer.
@ -164,11 +164,10 @@ defmodule Pleroma.ApplicationRequirements do
defp check_system_commands!(:ok) do
filter_commands_statuses = [
check_filter(Pleroma.Upload.Filter.Exiftool, "exiftool"),
check_filter(Pleroma.Upload.Filter.Exiftool.StripLocation, "exiftool"),
check_filter(Pleroma.Upload.Filter.Exiftool.ReadDescription, "exiftool"),
check_filter(Pleroma.Upload.Filter.Mogrify, "mogrify"),
check_filter(Pleroma.Upload.Filter.Mogrifun, "mogrify"),
check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "mogrify"),
check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "convert"),
check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "ffprobe")
]


@ -1,20 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.BBS.Authenticator do
use Sshd.PasswordAuthenticator
alias Pleroma.User
alias Pleroma.Web.Plugs.AuthenticationPlug
def authenticate(username, password) do
username = to_string(username)
password = to_string(password)
with %User{} = user <- User.get_by_nickname(username) do
AuthenticationPlug.checkpw(password, user.password_hash)
else
_e -> false
end
end
end


@ -1,149 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.BBS.Handler do
use Sshd.ShellHandler
alias Pleroma.Activity
alias Pleroma.HTML
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.CommonAPI
def on_shell(username, _pubkey, _ip, _port) do
:ok = IO.puts("Welcome to #{Pleroma.Config.get([:instance, :name])}!")
user = Pleroma.User.get_cached_by_nickname(to_string(username))
Logger.debug("#{inspect(user)}")
loop(run_state(user: user))
end
def on_connect(username, ip, port, method) do
Logger.debug(fn ->
"""
Incoming SSH shell #{inspect(self())} requested for #{username} from #{inspect(ip)}:#{inspect(port)} using #{inspect(method)}
"""
end)
end
def on_disconnect(username, ip, port) do
Logger.debug(fn ->
"Disconnecting SSH shell for #{username} from #{inspect(ip)}:#{inspect(port)}"
end)
end
defp loop(state) do
self_pid = self()
counter = state.counter
prefix = state.prefix
user = state.user
input = spawn(fn -> io_get(self_pid, prefix, counter, user.nickname) end)
wait_input(state, input)
end
def puts_activity(activity) do
status = Pleroma.Web.MastodonAPI.StatusView.render("show.json", %{activity: activity})
IO.puts("-- #{status.id} by #{status.account.display_name} (#{status.account.acct})")
IO.puts(HTML.strip_tags(status.content))
IO.puts("")
end
def handle_command(state, "help") do
IO.puts("Available commands:")
IO.puts("help - This help")
IO.puts("home - Show the home timeline")
IO.puts("p <text> - Post the given text")
IO.puts("r <id> <text> - Reply to the post with the given id")
IO.puts("quit - Quit")
state
end
def handle_command(%{user: user} = state, "r " <> text) do
text = String.trim(text)
[activity_id, rest] = String.split(text, " ", parts: 2)
with %Activity{} <- Activity.get_by_id(activity_id),
{:ok, _activity} <-
CommonAPI.post(user, %{status: rest, in_reply_to_status_id: activity_id}) do
IO.puts("Replied!")
else
_e -> IO.puts("Could not reply...")
end
state
end
def handle_command(%{user: user} = state, "p " <> text) do
text = String.trim(text)
with {:ok, _activity} <- CommonAPI.post(user, %{status: text}) do
IO.puts("Posted!")
else
_e -> IO.puts("Could not post...")
end
state
end
def handle_command(state, "home") do
user = state.user
params =
%{}
|> Map.put(:type, ["Create"])
|> Map.put(:blocking_user, user)
|> Map.put(:muting_user, user)
|> Map.put(:user, user)
activities =
[user.ap_id | Pleroma.User.following(user)]
|> ActivityPub.fetch_activities(params)
Enum.each(activities, fn activity ->
puts_activity(activity)
end)
state
end
def handle_command(state, command) do
IO.puts("Unknown command '#{command}'")
state
end
defp wait_input(state, input) do
receive do
{:input, ^input, "quit\n"} ->
IO.puts("Exiting...")
{:input, ^input, code} when is_binary(code) ->
code = String.trim(code)
state = handle_command(state, code)
loop(%{state | counter: state.counter + 1})
{:error, :interrupted} ->
IO.puts("Caught Ctrl+C...")
loop(%{state | counter: state.counter + 1})
{:input, ^input, msg} ->
:ok = Logger.warn("received unknown message: #{inspect(msg)}")
loop(%{state | counter: state.counter + 1})
end
end
defp run_state(opts) do
%{prefix: "pleroma", counter: 1, user: opts[:user]}
end
defp io_get(pid, prefix, counter, username) do
prompt = prompt(prefix, counter, username)
send(pid, {:input, self(), IO.gets(:stdio, prompt)})
end
defp prompt(prefix, counter, username) do
prompt = "#{username}@#{prefix}:#{counter}>"
prompt <> " "
end
end


@ -20,13 +20,50 @@ defmodule Pleroma.Config.DeprecationWarnings do
"\n* `config :pleroma, :instance, mrf_transparency_exclusions` is now `config :pleroma, :mrf, transparency_exclusions`"}
]
def check_exiftool_filter do
filters = Config.get([Pleroma.Upload]) |> Keyword.get(:filters, [])
if Pleroma.Upload.Filter.Exiftool in filters do
Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using Exiftool as a filter instead of Exiftool.StripLocation. This should work for now, but you are advised to change to the new configuration to prevent possible issues later:
```
config :pleroma, Pleroma.Upload,
filters: [Pleroma.Upload.Filter.Exiftool]
```
Is now
```
config :pleroma, Pleroma.Upload,
filters: [Pleroma.Upload.Filter.Exiftool.StripLocation]
```
""")
new_config =
filters
|> Enum.map(fn
Pleroma.Upload.Filter.Exiftool -> Pleroma.Upload.Filter.Exiftool.StripLocation
filter -> filter
end)
Config.put([Pleroma.Upload, :filters], new_config)
:error
else
:ok
end
end
def check_simple_policy_tuples do
has_strings =
Config.get([:mrf_simple])
|> Enum.any?(fn {_, v} -> Enum.any?(v, &is_binary/1) end)
if has_strings do
Logger.warn("""
Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using strings in the SimplePolicy configuration instead of tuples. They should work for now, but you are advised to change to the new configuration to prevent possible issues later:
@ -84,7 +121,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
has_strings = Config.get([:instance, :quarantined_instances]) |> Enum.any?(&is_binary/1)
if has_strings do
Logger.warn("""
Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using strings in the quarantined_instances configuration instead of tuples. They should work for now, but you are advised to change to the new configuration to prevent possible issues later:
@ -121,7 +158,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
has_strings = Config.get([:mrf, :transparency_exclusions]) |> Enum.any?(&is_binary/1)
if has_strings do
Logger.warn("""
Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using strings in the transparency_exclusions configuration instead of tuples. They should work for now, but you are advised to change to the new configuration to prevent possible issues later:
@ -156,7 +193,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
def check_hellthread_threshold do
if Config.get([:mrf_hellthread, :threshold]) do
Logger.warn("""
Logger.warning("""
!!!DEPRECATION WARNING!!!
You are using the old configuration mechanism for the hellthread filter. Please check config.md.
""")
@ -180,7 +217,8 @@ defmodule Pleroma.Config.DeprecationWarnings do
check_old_chat_shoutbox(),
check_quarantined_instances_tuples(),
check_transparency_exclusions_tuples(),
check_simple_policy_tuples()
check_simple_policy_tuples(),
check_exiftool_filter()
]
|> Enum.reduce(:ok, fn
:ok, :ok -> :ok
@ -236,7 +274,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
if warning == "" do
:ok
else
Logger.warn(warning_preface <> warning)
Logger.warning(warning_preface <> warning)
:error
end
end
@ -246,7 +284,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
whitelist = Config.get([:media_proxy, :whitelist])
if Enum.any?(whitelist, &(not String.starts_with?(&1, "http"))) do
Logger.warn("""
Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using old format (only domain) for MediaProxy whitelist option. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
""")
@ -261,7 +299,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
pool_config = Config.get(:connections_pool)
if timeout = pool_config[:await_up_timeout] do
Logger.warn("""
Logger.warning("""
!!!DEPRECATION WARNING!!!
Your config is using old setting `config :pleroma, :connections_pool, await_up_timeout`. Please change to `config :pleroma, :connections_pool, connect_timeout` to ensure compatibility with future releases.
""")
@ -273,7 +311,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
warning_preface = """
!!!DEPRECATION WARNING!!!
Your config is using old setting name `timeout` instead of `recv_timeout` in pool settings. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
Your config is using old setting name `timeout` instead of `recv_timeout` in pool settings. The setting will not take effect until updated.
"""
updated_config =
@ -293,7 +331,7 @@ defmodule Pleroma.Config.DeprecationWarnings do
"\n* `:timeout` options in #{pool_name} pool is now `:recv_timeout`"
end)
Logger.warn(Enum.join([warning_preface | pool_warnings]))
Logger.warning(Enum.join([warning_preface | pool_warnings]))
Config.put(:pools, updated_config)
:error


@ -5,4 +5,11 @@
defmodule Pleroma.Config.Getting do
@callback get(any()) :: any()
@callback get(any(), any()) :: any()
def get(key), do: get(key, nil)
def get(key, default), do: impl().get(key, default)
def impl do
Application.get_env(:pleroma, :config_impl, Pleroma.Config)
end
end
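A hedged sketch of how this indirection can be used, for example to point configuration reads at a different module in tests; ConfigMock is a made-up placeholder name:
# Hypothetical test setup: route config reads through a stand-in module.
Application.put_env(:pleroma, :config_impl, ConfigMock)
# Callers go through the behaviour and hit whichever implementation is
# configured, falling back to Pleroma.Config by default.
Pleroma.Config.Getting.get([:instance, :name], "Pleroma")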


@ -19,21 +19,10 @@ defmodule Pleroma.Config.Loader do
:tesla
]
if Code.ensure_loaded?(Config.Reader) do
@reader Config.Reader
def read(path), do: @reader.read!(path)
else
# support for Elixir less than 1.9
@reader Mix.Config
def read(path) do
path
|> @reader.eval!()
|> elem(0)
end
end
@reader Config.Reader
@spec read(Path.t()) :: keyword()
def read(path), do: @reader.read!(path)
@spec merge(keyword(), keyword()) :: keyword()
def merge(c1, c2), do: @reader.merge(c1, c2)


@ -23,7 +23,7 @@ defmodule Pleroma.Config.Oban do
You are using old workers in Oban crontab settings, which were removed.
Please, remove setting from crontab in your config file (prod.secret.exs): #{inspect(setting)}
"""
|> Logger.warn()
|> Logger.warning()
List.delete(acc, setting)
else


@ -20,6 +20,20 @@ defmodule Pleroma.Config.ReleaseRuntimeProvider do
with_runtime_config =
if File.exists?(config_path) do
# <https://git.pleroma.social/pleroma/pleroma/-/issues/3135>
%File.Stat{mode: mode} = File.lstat!(config_path)
if Bitwise.band(mode, 0o007) > 0 do
raise "Configuration at #{config_path} has world-permissions, execute the following: chmod o= #{config_path}"
end
if Bitwise.band(mode, 0o020) > 0 do
raise "Configuration at #{config_path} has group-wise write permissions, execute the following: chmod g-w #{config_path}"
end
# Note: Elixir doesn't provide a getuid(2) equivalent,
# so we cannot forbid group-read only when the config is owned by us
runtime_config = Config.Reader.read!(config_path)
with_defaults


@ -47,7 +47,7 @@ defmodule Pleroma.Config.TransferTask do
{logger, other} =
(Repo.all(ConfigDB) ++ deleted_settings)
|> Enum.map(&merge_with_default/1)
|> Enum.split_with(fn {group, _, _, _} -> group in [:logger, :quack] end)
|> Enum.split_with(fn {group, _, _, _} -> group in [:logger] end)
logger
|> Enum.sort()
@ -55,8 +55,7 @@ defmodule Pleroma.Config.TransferTask do
started_applications = Application.started_applications()
# TODO: some problem with prometheus after restart!
reject = [nil, :prometheus, :postgrex]
reject = [nil, :postgrex]
reject =
if restart_pleroma? do
@ -104,11 +103,6 @@ defmodule Pleroma.Config.TransferTask do
end
# change logger configuration in runtime, without restart
defp configure({:quack, key, _, merged}) do
Logger.configure_backend(Quack.Logger, [{key, merged}])
:ok = update_env(:quack, key, merged)
end
defp configure({_, :backends, _, merged}) do
# removing current backends
Enum.each(Application.get_env(:logger, :backends), &Logger.remove_backend/1)
@ -150,7 +144,7 @@ defmodule Pleroma.Config.TransferTask do
error_msg =
"updating env causes error, group: #{inspect(group)}, key: #{inspect(key)}, value: #{inspect(value)} error: #{inspect(error)}"
Logger.warn(error_msg)
Logger.warning(error_msg)
nil
end
@ -184,12 +178,12 @@ defmodule Pleroma.Config.TransferTask do
:ok = Application.start(app)
else
nil ->
Logger.warn("#{app} is not started.")
Logger.warning("#{app} is not started.")
error ->
error
|> inspect()
|> Logger.warn()
|> Logger.warning()
end
end


@ -163,7 +163,6 @@ defmodule Pleroma.ConfigDB do
defp only_full_update?(%ConfigDB{group: group, key: key}) do
full_key_update = [
{:pleroma, :ecto_repos},
{:quack, :meta},
{:mime, :types},
{:cors_plug, [:max_age, :methods, :expose, :headers]},
{:swarm, :node_blacklist},
@ -386,7 +385,7 @@ defmodule Pleroma.ConfigDB do
@spec module_name?(String.t()) :: boolean()
def module_name?(string) do
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Ueberauth|Swoosh)\./, string) or
string in ["Oban", "Ueberauth", "ExSyslogger", "ConcurrentLimiter"]
end
end


@ -28,4 +28,75 @@ defmodule Pleroma.Constants do
do:
~w(index.html robots.txt static static-fe finmoji emoji packs sounds images instance sw.js sw-pleroma.js favicon.png schemas doc embed.js embed.css)
)
const(status_updatable_fields,
do: [
"source",
"tag",
"updated",
"emoji",
"content",
"summary",
"sensitive",
"attachment",
"generator"
]
)
const(status_object_types,
do: [
"Note",
"Question",
"Audio",
"Video",
"Event",
"Article",
"Page"
]
)
const(updatable_object_types,
do: [
"Note",
"Question",
"Audio",
"Video",
"Event",
"Article",
"Page"
]
)
const(actor_types,
do: [
"Application",
"Group",
"Organization",
"Person",
"Service"
]
)
# basic regex, just there to weed out potential mistakes
# https://datatracker.ietf.org/doc/html/rfc2045#section-5.1
const(mime_regex,
do: ~r/^[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+\/[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+(; .*)?$/
)
const(upload_object_types, do: ["Document", "Image"])
const(activity_json_canonical_mime_type,
do: "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""
)
const(activity_json_mime_types,
do: [
"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"",
"application/activity+json"
]
)
const(public_streams,
do: ["public", "public:local", "public:media", "public:local:media"]
)
end
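These constants are exposed as macros, so call sites need a require (as the Meilisearch task above does); a brief usage sketch under that assumption:
require Pleroma.Constants
# Membership check against one of the new constant lists:
"Person" in Pleroma.Constants.actor_types()
# => true
# The MIME sanity-check regex described in the comment above:
String.match?("image/png", Pleroma.Constants.mime_regex())
# => true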


@ -42,4 +42,5 @@ defmodule Pleroma.DataMigration do
end
def populate_hashtags_table, do: get_by_name("populate_hashtags_table")
def delete_context_objects, do: get_by_name("delete_context_objects")
end


@ -17,6 +17,8 @@ defmodule Pleroma.Docs.Generator do
# This shouldn't be needed as all modules are expected to have module_info/1,
# but in test environments some transient modules `:elixir_compiler_XX`
# are loaded for some reason (where XX is a random integer).
Code.ensure_loaded(module)
if function_exported?(module, :module_info, 1) do
module.module_info(:attributes)
|> Keyword.get_values(:behaviour)


@ -0,0 +1,10 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Docs.Translator do
require Pleroma.Docs.Translator.Compiler
require Pleroma.Web.Gettext
@before_compile Pleroma.Docs.Translator.Compiler
end


@ -0,0 +1,119 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Docs.Translator.Compiler do
@external_resource "config/description.exs"
@raw_config Pleroma.Config.Loader.read("config/description.exs")
@raw_descriptions @raw_config[:pleroma][:config_description]
defmacro __before_compile__(_env) do
strings =
__MODULE__.descriptions()
|> __MODULE__.extract_strings()
quote do
def placeholder do
unquote do
Enum.map(
strings,
fn {path, type, string} ->
ctxt = msgctxt_for(path, type)
quote do
Pleroma.Web.Gettext.dpgettext_noop(
"config_descriptions",
unquote(ctxt),
unquote(string)
)
end
end
)
end
end
end
end
def descriptions do
Pleroma.Web.ActivityPub.MRF.config_descriptions()
|> Enum.reduce(@raw_descriptions, fn description, acc -> [description | acc] end)
|> Pleroma.Docs.Generator.convert_to_strings()
end
def extract_strings(descriptions) do
descriptions
|> Enum.reduce(%{strings: [], path: []}, &process_item/2)
|> Map.get(:strings)
end
defp process_item(entity, acc) do
current_level =
acc
|> process_desc(entity)
|> process_label(entity)
process_children(entity, current_level)
end
defp process_desc(acc, %{description: desc} = item) do
%{
strings: [{acc.path ++ [key_for(item)], "description", desc} | acc.strings],
path: acc.path
}
end
defp process_desc(acc, _) do
acc
end
defp process_label(acc, %{label: label} = item) do
%{
strings: [{acc.path ++ [key_for(item)], "label", label} | acc.strings],
path: acc.path
}
end
defp process_label(acc, _) do
acc
end
defp process_children(%{children: children} = item, acc) do
current_level = Map.put(acc, :path, acc.path ++ [key_for(item)])
children
|> Enum.reduce(current_level, &process_item/2)
|> Map.put(:path, acc.path)
end
defp process_children(_, acc) do
acc
end
def msgctxt_for(path, type) do
"config #{type} at #{Enum.join(path, " > ")}"
end
defp convert_group({_, group}) do
group
end
defp convert_group(group) do
group
end
def key_for(%{group: group, key: key}) do
"#{convert_group(group)}-#{key}"
end
def key_for(%{group: group}) do
convert_group(group)
end
def key_for(%{key: key}) do
key
end
def key_for(_) do
nil
end
end


@ -27,3 +27,11 @@ defenum(Pleroma.DataMigration.State,
failed: 4,
manual: 5
)
defenum(Pleroma.User.Backup.State,
pending: 1,
running: 2,
complete: 3,
failed: 4,
invalid: 5
)


@ -0,0 +1,23 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.BareUri do
use Ecto.Type
def type, do: :string
def cast(uri) when is_binary(uri) do
case URI.parse(uri) do
%URI{scheme: nil} -> :error
%URI{} -> {:ok, uri}
_ -> :error
end
end
def cast(_), do: :error
def dump(data), do: {:ok, data}
def load(data), do: {:ok, data}
end
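A quick sketch of the cast behaviour of this type, using made-up input strings:
alias Pleroma.EctoType.ActivityPub.ObjectValidators.BareUri
# Any parseable URI that carries a scheme is accepted as-is:
BareUri.cast("https://example.com/objects/1")
# => {:ok, "https://example.com/objects/1"}
# Strings without a scheme (and non-binary input) are rejected:
BareUri.cast("not a uri")
# => :error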


@ -0,0 +1,25 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.MIME do
use Ecto.Type
require Pleroma.Constants
def type, do: :string
def cast(mime) when is_binary(mime) do
if mime =~ Pleroma.Constants.mime_regex() do
{:ok, mime}
else
{:ok, "application/octet-stream"}
end
end
def cast(_), do: :error
def dump(data), do: {:ok, data}
def load(data), do: {:ok, data}
end
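Similarly for the MIME type: anything that does not match the mime_regex shape from Pleroma.Constants falls back to application/octet-stream instead of failing the cast. A short sketch with made-up inputs:
alias Pleroma.EctoType.ActivityPub.ObjectValidators.MIME
MIME.cast("image/png")
# => {:ok, "image/png"}
MIME.cast("definitely not a mime type")
# => {:ok, "application/octet-stream"}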


@ -1,13 +1,13 @@
# emoji-test.txt
# Date: 2021-08-26, 17:22:23 GMT
# © 2021 Unicode®, Inc.
# Date: 2022-08-12, 20:24:39 GMT
# © 2022 Unicode®, Inc.
# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries.
# For terms of use, see http://www.unicode.org/terms_of_use.html
# For terms of use, see https://www.unicode.org/terms_of_use.html
#
# Emoji Keyboard/Display Test Data for UTS #51
# Version: 14.0
# Version: 15.0
#
# For documentation and usage, see http://www.unicode.org/reports/tr51
# For documentation and usage, see https://www.unicode.org/reports/tr51
#
# This file provides data for testing which emoji forms should be in keyboards and which should also be displayed/processed.
# Format: code points; status # emoji name
@ -92,6 +92,7 @@
1F62C ; fully-qualified # 😬 E1.0 grimacing face
1F62E 200D 1F4A8 ; fully-qualified # 😮‍💨 E13.1 face exhaling
1F925 ; fully-qualified # 🤥 E3.0 lying face
1FAE8 ; fully-qualified # 🫨 E15.0 shaking face
# subgroup: face-sleepy
1F60C ; fully-qualified # 😌 E0.6 relieved face
@ -155,7 +156,7 @@
# subgroup: face-negative
1F624 ; fully-qualified # 😤 E0.6 face with steam from nose
1F621 ; fully-qualified # 😡 E0.6 pouting face
1F621 ; fully-qualified # 😡 E0.6 enraged face
1F620 ; fully-qualified # 😠 E0.6 angry face
1F92C ; fully-qualified # 🤬 E5.0 face with symbols on mouth
1F608 ; fully-qualified # 😈 E1.0 smiling face with horns
@ -190,8 +191,7 @@
1F649 ; fully-qualified # 🙉 E0.6 hear-no-evil monkey
1F64A ; fully-qualified # 🙊 E0.6 speak-no-evil monkey
# subgroup: emotion
1F48B ; fully-qualified # 💋 E0.6 kiss mark
# subgroup: heart
1F48C ; fully-qualified # 💌 E0.6 love letter
1F498 ; fully-qualified # 💘 E0.6 heart with arrow
1F49D ; fully-qualified # 💝 E0.6 heart with ribbon
@ -210,14 +210,20 @@
2764 200D 1FA79 ; unqualified # ❤‍🩹 E13.1 mending heart
2764 FE0F ; fully-qualified # ❤️ E0.6 red heart
2764 ; unqualified # ❤ E0.6 red heart
1FA77 ; fully-qualified # 🩷 E15.0 pink heart
1F9E1 ; fully-qualified # 🧡 E5.0 orange heart
1F49B ; fully-qualified # 💛 E0.6 yellow heart
1F49A ; fully-qualified # 💚 E0.6 green heart
1F499 ; fully-qualified # 💙 E0.6 blue heart
1FA75 ; fully-qualified # 🩵 E15.0 light blue heart
1F49C ; fully-qualified # 💜 E0.6 purple heart
1F90E ; fully-qualified # 🤎 E12.0 brown heart
1F5A4 ; fully-qualified # 🖤 E3.0 black heart
1FA76 ; fully-qualified # 🩶 E15.0 grey heart
1F90D ; fully-qualified # 🤍 E12.0 white heart
# subgroup: emotion
1F48B ; fully-qualified # 💋 E0.6 kiss mark
1F4AF ; fully-qualified # 💯 E0.6 hundred points
1F4A2 ; fully-qualified # 💢 E0.6 anger symbol
1F4A5 ; fully-qualified # 💥 E0.6 collision
@ -226,21 +232,20 @@
1F4A8 ; fully-qualified # 💨 E0.6 dashing away
1F573 FE0F ; fully-qualified # 🕳️ E0.7 hole
1F573 ; unqualified # 🕳 E0.7 hole
1F4A3 ; fully-qualified # 💣 E0.6 bomb
1F4AC ; fully-qualified # 💬 E0.6 speech balloon
1F441 FE0F 200D 1F5E8 FE0F ; fully-qualified # 👁️‍🗨️ E2.0 eye in speech bubble
1F441 200D 1F5E8 FE0F ; unqualified # 👁‍🗨️ E2.0 eye in speech bubble
1F441 FE0F 200D 1F5E8 ; unqualified # 👁️‍🗨 E2.0 eye in speech bubble
1F441 FE0F 200D 1F5E8 ; minimally-qualified # 👁️‍🗨 E2.0 eye in speech bubble
1F441 200D 1F5E8 ; unqualified # 👁‍🗨 E2.0 eye in speech bubble
1F5E8 FE0F ; fully-qualified # 🗨️ E2.0 left speech bubble
1F5E8 ; unqualified # 🗨 E2.0 left speech bubble
1F5EF FE0F ; fully-qualified # 🗯️ E0.7 right anger bubble
1F5EF ; unqualified # 🗯 E0.7 right anger bubble
1F4AD ; fully-qualified # 💭 E1.0 thought balloon
1F4A4 ; fully-qualified # 💤 E0.6 zzz
1F4A4 ; fully-qualified # 💤 E0.6 ZZZ
# Smileys & Emotion subtotal: 177
# Smileys & Emotion subtotal: 177 w/o modifiers
# Smileys & Emotion subtotal: 180
# Smileys & Emotion subtotal: 180 w/o modifiers
# group: People & Body
@ -300,6 +305,18 @@
1FAF4 1F3FD ; fully-qualified # 🫴🏽 E14.0 palm up hand: medium skin tone
1FAF4 1F3FE ; fully-qualified # 🫴🏾 E14.0 palm up hand: medium-dark skin tone
1FAF4 1F3FF ; fully-qualified # 🫴🏿 E14.0 palm up hand: dark skin tone
1FAF7 ; fully-qualified # 🫷 E15.0 leftwards pushing hand
1FAF7 1F3FB ; fully-qualified # 🫷🏻 E15.0 leftwards pushing hand: light skin tone
1FAF7 1F3FC ; fully-qualified # 🫷🏼 E15.0 leftwards pushing hand: medium-light skin tone
1FAF7 1F3FD ; fully-qualified # 🫷🏽 E15.0 leftwards pushing hand: medium skin tone
1FAF7 1F3FE ; fully-qualified # 🫷🏾 E15.0 leftwards pushing hand: medium-dark skin tone
1FAF7 1F3FF ; fully-qualified # 🫷🏿 E15.0 leftwards pushing hand: dark skin tone
1FAF8 ; fully-qualified # 🫸 E15.0 rightwards pushing hand
1FAF8 1F3FB ; fully-qualified # 🫸🏻 E15.0 rightwards pushing hand: light skin tone
1FAF8 1F3FC ; fully-qualified # 🫸🏼 E15.0 rightwards pushing hand: medium-light skin tone
1FAF8 1F3FD ; fully-qualified # 🫸🏽 E15.0 rightwards pushing hand: medium skin tone
1FAF8 1F3FE ; fully-qualified # 🫸🏾 E15.0 rightwards pushing hand: medium-dark skin tone
1FAF8 1F3FF ; fully-qualified # 🫸🏿 E15.0 rightwards pushing hand: dark skin tone
# subgroup: hand-fingers-partial
1F44C ; fully-qualified # 👌 E0.6 OK hand
@ -473,11 +490,11 @@
1F932 1F3FE ; fully-qualified # 🤲🏾 E5.0 palms up together: medium-dark skin tone
1F932 1F3FF ; fully-qualified # 🤲🏿 E5.0 palms up together: dark skin tone
1F91D ; fully-qualified # 🤝 E3.0 handshake
1F91D 1F3FB ; fully-qualified # 🤝🏻 E3.0 handshake: light skin tone
1F91D 1F3FC ; fully-qualified # 🤝🏼 E3.0 handshake: medium-light skin tone
1F91D 1F3FD ; fully-qualified # 🤝🏽 E3.0 handshake: medium skin tone
1F91D 1F3FE ; fully-qualified # 🤝🏾 E3.0 handshake: medium-dark skin tone
1F91D 1F3FF ; fully-qualified # 🤝🏿 E3.0 handshake: dark skin tone
1F91D 1F3FB ; fully-qualified # 🤝🏻 E14.0 handshake: light skin tone
1F91D 1F3FC ; fully-qualified # 🤝🏼 E14.0 handshake: medium-light skin tone
1F91D 1F3FD ; fully-qualified # 🤝🏽 E14.0 handshake: medium skin tone
1F91D 1F3FE ; fully-qualified # 🤝🏾 E14.0 handshake: medium-dark skin tone
1F91D 1F3FF ; fully-qualified # 🤝🏿 E14.0 handshake: dark skin tone
1FAF1 1F3FB 200D 1FAF2 1F3FC ; fully-qualified # 🫱🏻‍🫲🏼 E14.0 handshake: light skin tone, medium-light skin tone
1FAF1 1F3FB 200D 1FAF2 1F3FD ; fully-qualified # 🫱🏻‍🫲🏽 E14.0 handshake: light skin tone, medium skin tone
1FAF1 1F3FB 200D 1FAF2 1F3FE ; fully-qualified # 🫱🏻‍🫲🏾 E14.0 handshake: light skin tone, medium-dark skin tone
@ -1455,7 +1472,7 @@
1F575 1F3FF ; fully-qualified # 🕵🏿 E2.0 detective: dark skin tone
1F575 FE0F 200D 2642 FE0F ; fully-qualified # 🕵️‍♂️ E4.0 man detective
1F575 200D 2642 FE0F ; unqualified # 🕵‍♂️ E4.0 man detective
1F575 FE0F 200D 2642 ; unqualified # 🕵️‍♂ E4.0 man detective
1F575 FE0F 200D 2642 ; minimally-qualified # 🕵️‍♂ E4.0 man detective
1F575 200D 2642 ; unqualified # 🕵‍♂ E4.0 man detective
1F575 1F3FB 200D 2642 FE0F ; fully-qualified # 🕵🏻‍♂️ E4.0 man detective: light skin tone
1F575 1F3FB 200D 2642 ; minimally-qualified # 🕵🏻‍♂ E4.0 man detective: light skin tone
@ -1469,7 +1486,7 @@
1F575 1F3FF 200D 2642 ; minimally-qualified # 🕵🏿‍♂ E4.0 man detective: dark skin tone
1F575 FE0F 200D 2640 FE0F ; fully-qualified # 🕵️‍♀️ E4.0 woman detective
1F575 200D 2640 FE0F ; unqualified # 🕵‍♀️ E4.0 woman detective
1F575 FE0F 200D 2640 ; unqualified # 🕵️‍♀ E4.0 woman detective
1F575 FE0F 200D 2640 ; minimally-qualified # 🕵️‍♀ E4.0 woman detective
1F575 200D 2640 ; unqualified # 🕵‍♀ E4.0 woman detective
1F575 1F3FB 200D 2640 FE0F ; fully-qualified # 🕵🏻‍♀️ E4.0 woman detective: light skin tone
1F575 1F3FB 200D 2640 ; minimally-qualified # 🕵🏻‍♀ E4.0 woman detective: light skin tone
@ -2302,7 +2319,7 @@
1F3CC 1F3FF ; fully-qualified # 🏌🏿 E4.0 person golfing: dark skin tone
1F3CC FE0F 200D 2642 FE0F ; fully-qualified # 🏌️‍♂️ E4.0 man golfing
1F3CC 200D 2642 FE0F ; unqualified # 🏌‍♂️ E4.0 man golfing
1F3CC FE0F 200D 2642 ; unqualified # 🏌️‍♂ E4.0 man golfing
1F3CC FE0F 200D 2642 ; minimally-qualified # 🏌️‍♂ E4.0 man golfing
1F3CC 200D 2642 ; unqualified # 🏌‍♂ E4.0 man golfing
1F3CC 1F3FB 200D 2642 FE0F ; fully-qualified # 🏌🏻‍♂️ E4.0 man golfing: light skin tone
1F3CC 1F3FB 200D 2642 ; minimally-qualified # 🏌🏻‍♂ E4.0 man golfing: light skin tone
@ -2316,7 +2333,7 @@
1F3CC 1F3FF 200D 2642 ; minimally-qualified # 🏌🏿‍♂ E4.0 man golfing: dark skin tone
1F3CC FE0F 200D 2640 FE0F ; fully-qualified # 🏌️‍♀️ E4.0 woman golfing
1F3CC 200D 2640 FE0F ; unqualified # 🏌‍♀️ E4.0 woman golfing
1F3CC FE0F 200D 2640 ; unqualified # 🏌️‍♀ E4.0 woman golfing
1F3CC FE0F 200D 2640 ; minimally-qualified # 🏌️‍♀ E4.0 woman golfing
1F3CC 200D 2640 ; unqualified # 🏌‍♀ E4.0 woman golfing
1F3CC 1F3FB 200D 2640 FE0F ; fully-qualified # 🏌🏻‍♀️ E4.0 woman golfing: light skin tone
1F3CC 1F3FB 200D 2640 ; minimally-qualified # 🏌🏻‍♀ E4.0 woman golfing: light skin tone
@ -2427,7 +2444,7 @@
26F9 1F3FF ; fully-qualified # ⛹🏿 E2.0 person bouncing ball: dark skin tone
26F9 FE0F 200D 2642 FE0F ; fully-qualified # ⛹️‍♂️ E4.0 man bouncing ball
26F9 200D 2642 FE0F ; unqualified # ⛹‍♂️ E4.0 man bouncing ball
26F9 FE0F 200D 2642 ; unqualified # ⛹️‍♂ E4.0 man bouncing ball
26F9 FE0F 200D 2642 ; minimally-qualified # ⛹️‍♂ E4.0 man bouncing ball
26F9 200D 2642 ; unqualified # ⛹‍♂ E4.0 man bouncing ball
26F9 1F3FB 200D 2642 FE0F ; fully-qualified # ⛹🏻‍♂️ E4.0 man bouncing ball: light skin tone
26F9 1F3FB 200D 2642 ; minimally-qualified # ⛹🏻‍♂ E4.0 man bouncing ball: light skin tone
@ -2441,7 +2458,7 @@
26F9 1F3FF 200D 2642 ; minimally-qualified # ⛹🏿‍♂ E4.0 man bouncing ball: dark skin tone
26F9 FE0F 200D 2640 FE0F ; fully-qualified # ⛹️‍♀️ E4.0 woman bouncing ball
26F9 200D 2640 FE0F ; unqualified # ⛹‍♀️ E4.0 woman bouncing ball
26F9 FE0F 200D 2640 ; unqualified # ⛹️‍♀ E4.0 woman bouncing ball
26F9 FE0F 200D 2640 ; minimally-qualified # ⛹️‍♀ E4.0 woman bouncing ball
26F9 200D 2640 ; unqualified # ⛹‍♀ E4.0 woman bouncing ball
26F9 1F3FB 200D 2640 FE0F ; fully-qualified # ⛹🏻‍♀️ E4.0 woman bouncing ball: light skin tone
26F9 1F3FB 200D 2640 ; minimally-qualified # ⛹🏻‍♀ E4.0 woman bouncing ball: light skin tone
@ -2462,7 +2479,7 @@
1F3CB 1F3FF ; fully-qualified # 🏋🏿 E2.0 person lifting weights: dark skin tone
1F3CB FE0F 200D 2642 FE0F ; fully-qualified # 🏋️‍♂️ E4.0 man lifting weights
1F3CB 200D 2642 FE0F ; unqualified # 🏋‍♂️ E4.0 man lifting weights
1F3CB FE0F 200D 2642 ; unqualified # 🏋️‍♂ E4.0 man lifting weights
1F3CB FE0F 200D 2642 ; minimally-qualified # 🏋️‍♂ E4.0 man lifting weights
1F3CB 200D 2642 ; unqualified # 🏋‍♂ E4.0 man lifting weights
1F3CB 1F3FB 200D 2642 FE0F ; fully-qualified # 🏋🏻‍♂️ E4.0 man lifting weights: light skin tone
1F3CB 1F3FB 200D 2642 ; minimally-qualified # 🏋🏻‍♂ E4.0 man lifting weights: light skin tone
@ -2476,7 +2493,7 @@
1F3CB 1F3FF 200D 2642 ; minimally-qualified # 🏋🏿‍♂ E4.0 man lifting weights: dark skin tone
1F3CB FE0F 200D 2640 FE0F ; fully-qualified # 🏋️‍♀️ E4.0 woman lifting weights
1F3CB 200D 2640 FE0F ; unqualified # 🏋‍♀️ E4.0 woman lifting weights
1F3CB FE0F 200D 2640 ; unqualified # 🏋️‍♀ E4.0 woman lifting weights
1F3CB FE0F 200D 2640 ; minimally-qualified # 🏋️‍♀ E4.0 woman lifting weights
1F3CB 200D 2640 ; unqualified # 🏋‍♀ E4.0 woman lifting weights
1F3CB 1F3FB 200D 2640 FE0F ; fully-qualified # 🏋🏻‍♀️ E4.0 woman lifting weights: light skin tone
1F3CB 1F3FB 200D 2640 ; minimally-qualified # 🏋🏻‍♀ E4.0 woman lifting weights: light skin tone
@ -3262,8 +3279,8 @@
1FAC2 ; fully-qualified # 🫂 E13.0 people hugging
1F463 ; fully-qualified # 👣 E0.6 footprints
# People & Body subtotal: 2986
# People & Body subtotal: 506 w/o modifiers
# People & Body subtotal: 2998
# People & Body subtotal: 508 w/o modifiers
# group: Component
@ -3306,6 +3323,8 @@
1F405 ; fully-qualified # 🐅 E1.0 tiger
1F406 ; fully-qualified # 🐆 E1.0 leopard
1F434 ; fully-qualified # 🐴 E0.6 horse face
1FACE ; fully-qualified # 🫎 E15.0 moose
1FACF ; fully-qualified # 🫏 E15.0 donkey
1F40E ; fully-qualified # 🐎 E0.6 horse
1F984 ; fully-qualified # 🦄 E1.0 unicorn
1F993 ; fully-qualified # 🦓 E5.0 zebra
@ -3373,6 +3392,9 @@
1F9A9 ; fully-qualified # 🦩 E12.0 flamingo
1F99A ; fully-qualified # 🦚 E11.0 peacock
1F99C ; fully-qualified # 🦜 E11.0 parrot
1FABD ; fully-qualified # 🪽 E15.0 wing
1F426 200D 2B1B ; fully-qualified # 🐦‍⬛ E15.0 black bird
1FABF ; fully-qualified # 🪿 E15.0 goose
# subgroup: animal-amphibian
1F438 ; fully-qualified # 🐸 E0.6 frog
@ -3399,6 +3421,7 @@
1F419 ; fully-qualified # 🐙 E0.6 octopus
1F41A ; fully-qualified # 🐚 E0.6 spiral shell
1FAB8 ; fully-qualified # 🪸 E14.0 coral
1FABC ; fully-qualified # 🪼 E15.0 jellyfish
# subgroup: animal-bug
1F40C ; fully-qualified # 🐌 E0.6 snail
@ -3433,6 +3456,7 @@
1F33B ; fully-qualified # 🌻 E0.6 sunflower
1F33C ; fully-qualified # 🌼 E0.6 blossom
1F337 ; fully-qualified # 🌷 E0.6 tulip
1FABB ; fully-qualified # 🪻 E15.0 hyacinth
# subgroup: plant-other
1F331 ; fully-qualified # 🌱 E0.6 seedling
@ -3451,9 +3475,10 @@
1F343 ; fully-qualified # 🍃 E0.6 leaf fluttering in wind
1FAB9 ; fully-qualified # 🪹 E14.0 empty nest
1FABA ; fully-qualified # 🪺 E14.0 nest with eggs
1F344 ; fully-qualified # 🍄 E0.6 mushroom
# Animals & Nature subtotal: 151
# Animals & Nature subtotal: 151 w/o modifiers
# Animals & Nature subtotal: 159
# Animals & Nature subtotal: 159 w/o modifiers
# group: Food & Drink
@ -3492,10 +3517,11 @@
1F966 ; fully-qualified # 🥦 E5.0 broccoli
1F9C4 ; fully-qualified # 🧄 E12.0 garlic
1F9C5 ; fully-qualified # 🧅 E12.0 onion
1F344 ; fully-qualified # 🍄 E0.6 mushroom
1F95C ; fully-qualified # 🥜 E3.0 peanuts
1FAD8 ; fully-qualified # 🫘 E14.0 beans
1F330 ; fully-qualified # 🌰 E0.6 chestnut
1FADA ; fully-qualified # 🫚 E15.0 ginger root
1FADB ; fully-qualified # 🫛 E15.0 pea pod
# subgroup: food-prepared
1F35E ; fully-qualified # 🍞 E0.6 bread
@ -3607,8 +3633,8 @@
1FAD9 ; fully-qualified # 🫙 E14.0 jar
1F3FA ; fully-qualified # 🏺 E1.0 amphora
# Food & Drink subtotal: 134
# Food & Drink subtotal: 134 w/o modifiers
# Food & Drink subtotal: 135
# Food & Drink subtotal: 135 w/o modifiers
# group: Travel & Places
@ -3974,11 +4000,10 @@
1F3AF ; fully-qualified # 🎯 E0.6 bullseye
1FA80 ; fully-qualified # 🪀 E12.0 yo-yo
1FA81 ; fully-qualified # 🪁 E12.0 kite
1F52B ; fully-qualified # 🔫 E0.6 water pistol
1F3B1 ; fully-qualified # 🎱 E0.6 pool 8 ball
1F52E ; fully-qualified # 🔮 E0.6 crystal ball
1FA84 ; fully-qualified # 🪄 E13.0 magic wand
1F9FF ; fully-qualified # 🧿 E11.0 nazar amulet
1FAAC ; fully-qualified # 🪬 E14.0 hamsa
1F3AE ; fully-qualified # 🎮 E0.6 video game
1F579 FE0F ; fully-qualified # 🕹️ E0.7 joystick
1F579 ; unqualified # 🕹 E0.7 joystick
@ -4013,8 +4038,8 @@
1F9F6 ; fully-qualified # 🧶 E11.0 yarn
1FAA2 ; fully-qualified # 🪢 E13.0 knot
# Activities subtotal: 97
# Activities subtotal: 97 w/o modifiers
# Activities subtotal: 96
# Activities subtotal: 96 w/o modifiers
# group: Objects
@ -4040,6 +4065,7 @@
1FA73 ; fully-qualified # 🩳 E12.0 shorts
1F459 ; fully-qualified # 👙 E0.6 bikini
1F45A ; fully-qualified # 👚 E0.6 woman's clothes
1FAAD ; fully-qualified # 🪭 E15.0 folding hand fan
1F45B ; fully-qualified # 👛 E0.6 purse
1F45C ; fully-qualified # 👜 E0.6 handbag
1F45D ; fully-qualified # 👝 E0.6 clutch bag
@ -4055,6 +4081,7 @@
1F461 ; fully-qualified # 👡 E0.6 woman's sandal
1FA70 ; fully-qualified # 🩰 E12.0 ballet shoes
1F462 ; fully-qualified # 👢 E0.6 woman's boot
1FAAE ; fully-qualified # 🪮 E15.0 hair pick
1F451 ; fully-qualified # 👑 E0.6 crown
1F452 ; fully-qualified # 👒 E0.6 woman's hat
1F3A9 ; fully-qualified # 🎩 E0.6 top hat
@ -4103,6 +4130,8 @@
1FA95 ; fully-qualified # 🪕 E12.0 banjo
1F941 ; fully-qualified # 🥁 E3.0 drum
1FA98 ; fully-qualified # 🪘 E13.0 long drum
1FA87 ; fully-qualified # 🪇 E15.0 maracas
1FA88 ; fully-qualified # 🪈 E15.0 flute
# subgroup: phone
1F4F1 ; fully-qualified # 📱 E0.6 mobile phone
@ -4275,7 +4304,7 @@
1F5E1 ; unqualified # 🗡 E0.7 dagger
2694 FE0F ; fully-qualified # ⚔️ E1.0 crossed swords
2694 ; unqualified # ⚔ E1.0 crossed swords
1F52B ; fully-qualified # 🔫 E0.6 water pistol
1F4A3 ; fully-qualified # 💣 E0.6 bomb
1FA83 ; fully-qualified # 🪃 E13.0 boomerang
1F3F9 ; fully-qualified # 🏹 E1.0 bow and arrow
1F6E1 FE0F ; fully-qualified # 🛡️ E0.7 shield
@ -4354,12 +4383,14 @@
1FAA6 ; fully-qualified # 🪦 E13.0 headstone
26B1 FE0F ; fully-qualified # ⚱️ E1.0 funeral urn
26B1 ; unqualified # ⚱ E1.0 funeral urn
1F9FF ; fully-qualified # 🧿 E11.0 nazar amulet
1FAAC ; fully-qualified # 🪬 E14.0 hamsa
1F5FF ; fully-qualified # 🗿 E0.6 moai
1FAA7 ; fully-qualified # 🪧 E13.0 placard
1FAAA ; fully-qualified # 🪪 E14.0 identification card
# Objects subtotal: 304
# Objects subtotal: 304 w/o modifiers
# Objects subtotal: 310
# Objects subtotal: 310 w/o modifiers
# group: Symbols
@ -4455,6 +4486,7 @@
262E ; unqualified # ☮ E1.0 peace symbol
1F54E ; fully-qualified # 🕎 E1.0 menorah
1F52F ; fully-qualified # 🔯 E0.6 dotted six-pointed star
1FAAF ; fully-qualified # 🪯 E15.0 khanda
# subgroup: zodiac
2648 ; fully-qualified # ♈ E0.6 Aries
@ -4503,6 +4535,7 @@
1F505 ; fully-qualified # 🔅 E1.0 dim button
1F506 ; fully-qualified # 🔆 E1.0 bright button
1F4F6 ; fully-qualified # 📶 E0.6 antenna bars
1F6DC ; fully-qualified # 🛜 E15.0 wireless
1F4F3 ; fully-qualified # 📳 E0.6 vibration mode
1F4F4 ; fully-qualified # 📴 E0.6 mobile phone off
@ -4693,8 +4726,8 @@
1F533 ; fully-qualified # 🔳 E0.6 white square button
1F532 ; fully-qualified # 🔲 E0.6 black square button
# Symbols subtotal: 302
# Symbols subtotal: 302 w/o modifiers
# Symbols subtotal: 304
# Symbols subtotal: 304 w/o modifiers
# group: Flags
@ -4709,7 +4742,7 @@
1F3F3 200D 1F308 ; unqualified # 🏳‍🌈 E4.0 rainbow flag
1F3F3 FE0F 200D 26A7 FE0F ; fully-qualified # 🏳️‍⚧️ E13.0 transgender flag
1F3F3 200D 26A7 FE0F ; unqualified # 🏳‍⚧️ E13.0 transgender flag
1F3F3 FE0F 200D 26A7 ; unqualified # 🏳️‍⚧ E13.0 transgender flag
1F3F3 FE0F 200D 26A7 ; minimally-qualified # 🏳️‍⚧ E13.0 transgender flag
1F3F3 200D 26A7 ; unqualified # 🏳‍⚧ E13.0 transgender flag
1F3F4 200D 2620 FE0F ; fully-qualified # 🏴‍☠️ E11.0 pirate flag
1F3F4 200D 2620 ; minimally-qualified # 🏴‍☠ E11.0 pirate flag
@ -4983,9 +5016,9 @@
# Flags subtotal: 275 w/o modifiers
# Status Counts
# fully-qualified : 3624
# minimally-qualified : 817
# unqualified : 252
# fully-qualified : 3655
# minimally-qualified : 827
# unqualified : 242
# component : 9
#EOF


@ -9,6 +9,7 @@ defmodule Pleroma.Emoji do
"""
use GenServer
alias Pleroma.Emoji.Combinations
alias Pleroma.Emoji.Loader
require Logger
@ -50,6 +51,8 @@ defmodule Pleroma.Emoji do
@doc "Returns the path of the emoji `name`."
@spec get(String.t()) :: String.t() | nil
def get(name) do
name = maybe_strip_name(name)
case :ets.lookup(@ets, name) do
[{_, path}] -> path
_ -> nil
@ -137,4 +140,68 @@ defmodule Pleroma.Emoji do
end
def is_unicode_emoji?(_), do: false
@emoji_regex ~r/:[A-Za-z0-9_-]+(@.+)?:/
def is_custom_emoji?(s) when is_binary(s), do: Regex.match?(@emoji_regex, s)
def is_custom_emoji?(_), do: false
def maybe_strip_name(name) when is_binary(name), do: String.trim(name, ":")
def maybe_strip_name(name), do: name
def maybe_quote(name) when is_binary(name) do
if is_unicode_emoji?(name) do
name
else
if String.starts_with?(name, ":") do
name
else
":#{name}:"
end
end
end
def maybe_quote(name), do: name
def emoji_url(%{"type" => "EmojiReact", "content" => _, "tag" => []}), do: nil
def emoji_url(%{"type" => "EmojiReact", "content" => emoji, "tag" => tags}) do
emoji = maybe_strip_name(emoji)
tag =
tags
|> Enum.find(fn tag ->
tag["type"] == "Emoji" && !is_nil(tag["name"]) && tag["name"] == emoji
end)
if is_nil(tag) do
nil
else
tag
|> Map.get("icon")
|> Map.get("url")
end
end
def emoji_url(_), do: nil
def emoji_name_with_instance(name, url) do
url = url |> URI.parse() |> Map.get(:host)
"#{name}@#{url}"
end
emoji_qualification_map =
emojis
|> Enum.filter(&String.contains?(&1, "\uFE0F"))
|> Combinations.variate_emoji_qualification()
for {qualified, unqualified_list} <- emoji_qualification_map do
for unqualified <- unqualified_list do
def fully_qualify_emoji(unquote(unqualified)), do: unquote(qualified)
end
end
def fully_qualify_emoji(emoji), do: emoji
end
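For illustration, a hedged sketch (the emoji name, tag shape, and URL are invented, not taken from this commit) of how the new emoji_url/1 helper resolves a custom-emoji reaction to its icon, and why plain Unicode reactions yield nil:

Pleroma.Emoji.emoji_url(%{
  "type" => "EmojiReact",
  "content" => ":blobcat:",
  "tag" => [
    %{
      "type" => "Emoji",
      "name" => "blobcat",
      "icon" => %{"url" => "https://example.com/emoji/blobcat.png"}
    }
  ]
})
# => "https://example.com/emoji/blobcat.png"

# Unicode reactions carry an empty tag list, so the lookup short-circuits:
Pleroma.Emoji.emoji_url(%{"type" => "EmojiReact", "content" => "🍵", "tag" => []})
# => nil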


@ -0,0 +1,45 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Emoji.Combinations do
# FE0F is the emoji variation selector (VS16). It is used for fully-qualifying
# emoji, and that includes emoji combinations.
# This code generates combinations per emoji: for each FE0F, all possible
# combinations of the character being removed or staying will be generated.
# This is made as an attempt to find all partially-qualified and unqualified
# versions of a fully-qualified emoji.
# I have found *no cases* for which this would be a problem, after browsing
# the entire emoji list in emoji-test.txt. This is safe, and, sadly, most
# likely sane too.
defp qualification_combinations(codepoints) do
qualification_combinations([[]], codepoints)
end
defp qualification_combinations(acc, []), do: acc
defp qualification_combinations(acc, ["\uFE0F" | tail]) do
acc
|> Enum.flat_map(fn x -> [x, x ++ ["\uFE0F"]] end)
|> qualification_combinations(tail)
end
defp qualification_combinations(acc, [codepoint | tail]) do
acc
|> Enum.map(&Kernel.++(&1, [codepoint]))
|> qualification_combinations(tail)
end
def variate_emoji_qualification(emoji) when is_binary(emoji) do
emoji
|> String.codepoints()
|> qualification_combinations()
|> Enum.map(&List.to_string/1)
end
def variate_emoji_qualification(emoji) when is_list(emoji) do
emoji
|> Enum.map(fn emoji -> {emoji, variate_emoji_qualification(emoji)} end)
end
end
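A minimal worked example (not part of the commit) of what this expansion produces for the fully-qualified "man detective" sequence from the emoji data above; each FE0F is independently dropped or kept, so two selectors yield four variants, in the order the flat_map generates them:

Pleroma.Emoji.Combinations.variate_emoji_qualification("\u{1F575}\u{FE0F}\u{200D}\u{2642}\u{FE0F}")
# => [
#      "\u{1F575}\u{200D}\u{2642}",                 # unqualified
#      "\u{1F575}\u{200D}\u{2642}\u{FE0F}",         # unqualified
#      "\u{1F575}\u{FE0F}\u{200D}\u{2642}",         # minimally-qualified
#      "\u{1F575}\u{FE0F}\u{200D}\u{2642}\u{FE0F}"  # fully-qualified
#    ]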


@ -59,7 +59,7 @@ defmodule Pleroma.Emoji.Loader do
Logger.info("Found emoji packs: #{Enum.join(packs, ", ")}")
if not Enum.empty?(files) do
Logger.warn(
Logger.warning(
"Found files in the emoji folder. These will be ignored, please move them to a subdirectory\nFound files: #{Enum.join(files, ", ")}"
)
end


@ -285,6 +285,7 @@ defmodule Pleroma.Emoji.Pack do
@spec load_pack(String.t()) :: {:ok, t()} | {:error, :file.posix()}
def load_pack(name) do
name = Path.basename(name)
pack_file = Path.join([emoji_path(), name, "pack.json"])
with {:ok, _} <- File.stat(pack_file),


@ -194,12 +194,13 @@ defmodule Pleroma.FollowingRelationship do
|> join(:inner, [r], f in assoc(r, :follower))
|> where(following_id: ^origin.id)
|> where([r, f], f.allow_following_move == true)
|> where([r, f], f.local == true)
|> limit(50)
|> preload([:follower])
|> Repo.all()
|> Enum.map(fn following_relationship ->
Repo.delete(following_relationship)
Pleroma.Web.CommonAPI.follow(following_relationship.follower, target)
Pleroma.Web.CommonAPI.unfollow(following_relationship.follower, origin)
end)
|> case do
[] ->


@ -124,7 +124,7 @@ defmodule Pleroma.Formatter do
end
def markdown_to_html(text) do
Earmark.as_html!(text, %Earmark.Options{compact_output: true})
Earmark.as_html!(text, %Earmark.Options{compact_output: true, smartypants: false})
end
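# Note (editor sketch, not part of the diff): Earmark's smartypants pass
# rewrites straight quotes and dashes into typographic equivalents, so text a
# user typed verbatim would come back altered; disabling it preserves the
# original characters in rendered posts.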
def html_escape({text, mentions, hashtags}, type) do


@ -56,7 +56,7 @@ defmodule Pleroma.Gun.Conn do
{:ok, conn, protocol}
else
error ->
Logger.warn(
Logger.warning(
"Opening proxied connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
)
@ -90,7 +90,7 @@ defmodule Pleroma.Gun.Conn do
{:ok, conn, protocol}
else
error ->
Logger.warn(
Logger.warning(
"Opening socks proxied connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
)
@ -106,7 +106,7 @@ defmodule Pleroma.Gun.Conn do
{:ok, conn, protocol}
else
error ->
Logger.warn(
Logger.warning(
"Opening connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
)


@ -8,11 +8,12 @@ defmodule Pleroma.Helpers.MediaHelper do
"""
alias Pleroma.HTTP
alias Vix.Vips.Operation
require Logger
def missing_dependencies do
Enum.reduce([imagemagick: "convert", ffmpeg: "ffmpeg"], [], fn {sym, executable}, acc ->
Enum.reduce([ffmpeg: "ffmpeg"], [], fn {sym, executable}, acc ->
if Pleroma.Utils.command_available?(executable) do
acc
else
@ -22,54 +23,22 @@ defmodule Pleroma.Helpers.MediaHelper do
end
def image_resize(url, options) do
with executable when is_binary(executable) <- System.find_executable("convert"),
{:ok, args} <- prepare_image_resize_args(options),
{:ok, env} <- HTTP.get(url, [], pool: :media),
{:ok, fifo_path} <- mkfifo() do
args = List.flatten([fifo_path, args])
run_fifo(fifo_path, env, executable, args)
with {:ok, env} <- HTTP.get(url, [], pool: :media),
{:ok, resized} <-
Operation.thumbnail_buffer(env.body, options.max_width,
height: options.max_height,
size: :VIPS_SIZE_DOWN
) do
if options[:format] == "png" do
Operation.pngsave_buffer(resized, Q: options[:quality])
else
Operation.jpegsave_buffer(resized, Q: options[:quality], interlace: true)
end
else
nil -> {:error, {:convert, :command_not_found}}
{:error, _} = error -> error
end
end
defp prepare_image_resize_args(
%{max_width: max_width, max_height: max_height, format: "png"} = options
) do
quality = options[:quality] || 85
resize = Enum.join([max_width, "x", max_height, ">"])
args = [
"-resize",
resize,
"-quality",
to_string(quality),
"png:-"
]
{:ok, args}
end
defp prepare_image_resize_args(%{max_width: max_width, max_height: max_height} = options) do
quality = options[:quality] || 85
resize = Enum.join([max_width, "x", max_height, ">"])
args = [
"-interlace",
"Plane",
"-resize",
resize,
"-quality",
to_string(quality),
"jpg:-"
]
{:ok, args}
end
defp prepare_image_resize_args(_), do: {:error, :missing_options}
# Note: video thumbnail is intentionally not resized (always has original dimensions)
def video_framegrab(url) do
with executable when is_binary(executable) <- System.find_executable("ffmpeg"),


@ -106,5 +106,12 @@ defmodule Pleroma.HTTP do
[Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool]
end
defp adapter_middlewares(_), do: []
defp adapter_middlewares(_) do
if Pleroma.Config.get(:env) == :test do
# Emulate redirects in test env, which are handled by adapters in other environments
[Tesla.Middleware.FollowRedirects]
else
[]
end
end
end


@ -70,15 +70,15 @@ defmodule Pleroma.HTTP.AdapterHelper do
{:ok, parse_host(host), port}
else
{_, _} ->
Logger.warn("Parsing port failed #{inspect(proxy)}")
Logger.warning("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
:error ->
Logger.warn("Parsing port failed #{inspect(proxy)}")
Logger.warning("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
_ ->
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
Logger.warning("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end
@ -88,7 +88,7 @@ defmodule Pleroma.HTTP.AdapterHelper do
{:ok, type, parse_host(host), port}
else
_ ->
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
Logger.warning("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end


@ -24,10 +24,6 @@ defmodule Pleroma.HTTP.AdapterHelper.Hackney do
|> Pleroma.HTTP.AdapterHelper.maybe_add_proxy(proxy)
end
defp add_scheme_opts(opts, %URI{scheme: "https"}) do
Keyword.put(opts, :ssl_options, versions: [:"tlsv1.2", :"tlsv1.1", :tlsv1])
end
defp add_scheme_opts(opts, _), do: opts
defp maybe_add_with_body(opts) do


@ -6,7 +6,11 @@ defmodule Pleroma.HTTP.WebPush do
@moduledoc false
def post(url, payload, headers, options \\ []) do
list_headers = Map.to_list(headers)
list_headers =
headers
|> Map.to_list()
|> Kernel.++([{"content-type", "octet-stream"}])
Pleroma.HTTP.post(url, payload, list_headers, options)
end
end


@ -7,6 +7,7 @@ defmodule Pleroma.Instances.Instance do
alias Pleroma.Instances
alias Pleroma.Instances.Instance
alias Pleroma.Maps
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Workers.BackgroundWorker
@ -24,6 +25,14 @@ defmodule Pleroma.Instances.Instance do
field(:favicon, :string)
field(:favicon_updated_at, :naive_datetime)
embeds_one :metadata, Pleroma.Instances.Metadata, primary_key: false do
field(:software_name, :string)
field(:software_version, :string)
field(:software_repository, :string)
end
field(:metadata_updated_at, :utc_datetime)
timestamps()
end
@ -31,11 +40,17 @@ defmodule Pleroma.Instances.Instance do
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:host, :unreachable_since, :favicon, :favicon_updated_at])
|> cast(params, __schema__(:fields) -- [:metadata])
|> cast_embed(:metadata, with: &metadata_changeset/2)
|> validate_required([:host])
|> unique_constraint(:host)
end
def metadata_changeset(struct, params \\ %{}) do
struct
|> cast(params, [:software_name, :software_version, :software_repository])
end
def filter_reachable([]), do: %{}
def filter_reachable(urls_or_hosts) when is_list(urls_or_hosts) do
@ -82,13 +97,9 @@ defmodule Pleroma.Instances.Instance do
def reachable?(url_or_host) when is_binary(url_or_host), do: true
def set_reachable(url_or_host) when is_binary(url_or_host) do
with host <- host(url_or_host),
%Instance{} = existing_record <- Repo.get_by(Instance, %{host: host}) do
{:ok, _instance} =
existing_record
|> changeset(%{unreachable_since: nil})
|> Repo.update()
end
%Instance{host: host(url_or_host)}
|> changeset(%{unreachable_since: nil})
|> Repo.insert(on_conflict: {:replace, [:unreachable_since]}, conflict_target: :host)
end
def set_reachable(_), do: {:error, nil}
@ -162,7 +173,7 @@ defmodule Pleroma.Instances.Instance do
end
rescue
e ->
Logger.warn("Instance.get_or_update_favicon(\"#{host}\") error: #{inspect(e)}")
Logger.warning("Instance.get_or_update_favicon(\"#{host}\") error: #{inspect(e)}")
nil
end
@ -190,7 +201,7 @@ defmodule Pleroma.Instances.Instance do
end
rescue
e ->
Logger.warn(
Logger.warning(
"Instance.scrape_favicon(\"#{to_string(instance_uri)}\") error: #{inspect(e)}"
)
@ -198,6 +209,89 @@ defmodule Pleroma.Instances.Instance do
end
end
def get_or_update_metadata(%URI{host: host} = instance_uri) do
existing_record = Repo.get_by(Instance, %{host: host})
now = NaiveDateTime.utc_now()
if existing_record && existing_record.metadata_updated_at &&
NaiveDateTime.diff(now, existing_record.metadata_updated_at) < 86_400 do
existing_record.metadata
else
metadata = scrape_metadata(instance_uri)
if existing_record do
existing_record
|> changeset(%{metadata: metadata, metadata_updated_at: now})
|> Repo.update()
else
%Instance{}
|> changeset(%{host: host, metadata: metadata, metadata_updated_at: now})
|> Repo.insert()
end
metadata
end
end
defp get_nodeinfo_uri(well_known) do
links = Map.get(well_known, "links", [])
nodeinfo21 =
Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.1"))["href"]
nodeinfo20 =
Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.0"))["href"]
cond do
is_binary(nodeinfo21) -> {:ok, nodeinfo21}
is_binary(nodeinfo20) -> {:ok, nodeinfo20}
true -> {:error, :no_links}
end
end
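# For reference (shape per the NodeInfo discovery convention; host and href
# are illustrative, not from this commit), the well-known document parsed
# above looks like:
#
#   %{
#     "links" => [
#       %{
#         "rel" => "http://nodeinfo.diaspora.software/ns/schema/2.0",
#         "href" => "https://example.com/nodeinfo/2.0"
#       }
#     ]
#   }
#
# get_nodeinfo_uri/1 prefers the 2.1 href when both schema versions are advertised.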
defp scrape_metadata(%URI{} = instance_uri) do
try do
with {_, true} <- {:reachable, reachable?(instance_uri.host)},
{:ok, %Tesla.Env{body: well_known_body}} <-
instance_uri
|> URI.merge("/.well-known/nodeinfo")
|> to_string()
|> Pleroma.HTTP.get([{"accept", "application/json"}]),
{:ok, well_known_json} <- Jason.decode(well_known_body),
{:ok, nodeinfo_uri} <- get_nodeinfo_uri(well_known_json),
{:ok, %Tesla.Env{body: nodeinfo_body}} <-
Pleroma.HTTP.get(nodeinfo_uri, [{"accept", "application/json"}]),
{:ok, nodeinfo} <- Jason.decode(nodeinfo_body) do
# Can extract more metadata from NodeInfo but need to be careful about its size,
# can't just dump the entire thing
software = Map.get(nodeinfo, "software", %{})
%{
software_name: software["name"],
software_version: software["version"]
}
|> Maps.put_if_present(:software_repository, software["repository"])
else
{:reachable, false} ->
Logger.debug(
"Instance.scrape_metadata(\"#{to_string(instance_uri)}\") ignored unreachable host"
)
nil
_ ->
nil
end
rescue
e ->
Logger.warning(
"Instance.scrape_metadata(\"#{to_string(instance_uri)}\") error: #{inspect(e)}"
)
nil
end
end
@doc """
Deletes all users from an instance in a background task, thus also deleting
all of those users' activities and notifications.


@ -20,7 +20,7 @@ defmodule Pleroma.Maintenance do
"full" ->
Logger.info("Running VACUUM FULL.")
Logger.warn(
Logger.warning(
"Re-packing your entire database may take a while and will consume extra disk space during the process."
)


@ -0,0 +1,139 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Migrators.ContextObjectsDeletionMigrator do
defmodule State do
use Pleroma.Migrators.Support.BaseMigratorState
@impl Pleroma.Migrators.Support.BaseMigratorState
defdelegate data_migration(), to: Pleroma.DataMigration, as: :delete_context_objects
end
use Pleroma.Migrators.Support.BaseMigrator
alias Pleroma.Migrators.Support.BaseMigrator
alias Pleroma.Object
@doc "This migration removes objects created exclusively for contexts, containing only an `id` field."
@impl BaseMigrator
def feature_config_path, do: [:features, :delete_context_objects]
@impl BaseMigrator
def fault_rate_allowance, do: Config.get([:delete_context_objects, :fault_rate_allowance], 0)
@impl BaseMigrator
def perform do
data_migration_id = data_migration_id()
max_processed_id = get_stat(:max_processed_id, 0)
Logger.info("Deleting context objects from `objects` (from oid: #{max_processed_id})...")
query()
|> where([object], object.id > ^max_processed_id)
|> Repo.chunk_stream(100, :batches, timeout: :infinity)
|> Stream.each(fn objects ->
object_ids = Enum.map(objects, & &1.id)
results = Enum.map(object_ids, &delete_context_object(&1))
failed_ids =
results
|> Enum.filter(&(elem(&1, 0) == :error))
|> Enum.map(&elem(&1, 1))
chunk_affected_count =
results
|> Enum.filter(&(elem(&1, 0) == :ok))
|> length()
for failed_id <- failed_ids do
_ =
Repo.query(
"INSERT INTO data_migration_failed_ids(data_migration_id, record_id) " <>
"VALUES ($1, $2) ON CONFLICT DO NOTHING;",
[data_migration_id, failed_id]
)
end
_ =
Repo.query(
"DELETE FROM data_migration_failed_ids " <>
"WHERE data_migration_id = $1 AND record_id = ANY($2)",
[data_migration_id, object_ids -- failed_ids]
)
max_object_id = Enum.at(object_ids, -1)
put_stat(:max_processed_id, max_object_id)
increment_stat(:iteration_processed_count, length(object_ids))
increment_stat(:processed_count, length(object_ids))
increment_stat(:failed_count, length(failed_ids))
increment_stat(:affected_count, chunk_affected_count)
put_stat(:records_per_second, records_per_second())
persist_state()
# A quick and dirty approach to controlling the load this background migration imposes
sleep_interval = Config.get([:delete_context_objects, :sleep_interval_ms], 0)
Process.sleep(sleep_interval)
end)
|> Stream.run()
end
@impl BaseMigrator
def query do
# Context objects have no `type` and carry only an `id` field.
# They are the only objects stored without a type, so matching on a missing type is enough.
from(
object in Object,
where: fragment("(?)->'type' IS NULL", object.data),
select: %{
id: object.id
}
)
end
@spec delete_context_object(integer()) :: {:ok | :error, integer()}
defp delete_context_object(id) do
result =
%Object{id: id}
|> Repo.delete()
|> elem(0)
{result, id}
end
@impl BaseMigrator
def retry_failed do
data_migration_id = data_migration_id()
failed_objects_query()
|> Repo.chunk_stream(100, :one)
|> Stream.each(fn object ->
with {res, _} when res != :error <- delete_context_object(object.id) do
_ =
Repo.query(
"DELETE FROM data_migration_failed_ids " <>
"WHERE data_migration_id = $1 AND record_id = $2",
[data_migration_id, object.id]
)
end
end)
|> Stream.run()
put_stat(:failed_count, failures_count())
persist_state()
force_continue()
end
defp failed_objects_query do
from(o in Object)
|> join(:inner, [o], dmf in fragment("SELECT * FROM data_migration_failed_ids"),
on: dmf.record_id == o.id
)
|> where([_o, dmf], dmf.data_migration_id == ^data_migration_id())
|> order_by([o], asc: o.id)
end
end
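For context, a hedged sketch of the kind of row this migrator removes; the id value is invented:

# A context-only object: its data has no "type" key and nothing besides "id",
# which is exactly what the fragment in query/0 above matches.
%Pleroma.Object{
  id: 123,
  data: %{"id" => "https://example.com/contexts/4b7c0c6e"}
}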


@ -183,7 +183,7 @@ defmodule Pleroma.Migrators.HashtagsTableMigrator do
DELETE FROM hashtags_objects WHERE object_id IN
(SELECT DISTINCT objects.id FROM objects
JOIN hashtags_objects ON hashtags_objects.object_id = objects.id LEFT JOIN activities
ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') =
ON associated_object_id(activities) =
(objects.data->>'id')
AND activities.data->>'type' = 'Create'
WHERE activities.id IS NULL);


@ -73,7 +73,7 @@ defmodule Pleroma.Migrators.Support.BaseMigrator do
data_migration.state == :manual or data_migration.name in manual_migrations ->
message = "Data migration is in manual execution or manual fix mode."
update_status(:manual, message)
Logger.warn("#{__MODULE__}: #{message}")
Logger.warning("#{__MODULE__}: #{message}")
data_migration.state == :complete ->
on_complete(data_migration)
@ -109,7 +109,7 @@ defmodule Pleroma.Migrators.Support.BaseMigrator do
Putting data migration to manual fix mode. Try running `#{__MODULE__}.retry_failed/0`.
"""
Logger.warn("#{__MODULE__}: #{message}")
Logger.warning("#{__MODULE__}: #{message}")
update_status(:manual, message)
on_complete(data_migration())
@ -125,7 +125,7 @@ defmodule Pleroma.Migrators.Support.BaseMigrator do
defp on_complete(data_migration) do
if data_migration.feature_lock || feature_state() == :disabled do
Logger.warn(
Logger.warning(
"#{__MODULE__}: migration complete but feature is locked; consider enabling."
)


@ -117,9 +117,8 @@ defmodule Pleroma.Notification do
|> join(:left, [n, a], object in Object,
on:
fragment(
"(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
"(?->>'id') = associated_object_id(?)",
object.data,
a.data,
a.data
)
)
@ -179,6 +178,7 @@ defmodule Pleroma.Notification do
from([_n, a, o] in query,
where:
fragment("not(?->>'content' ~* ?)", o.data, ^regex) or
fragment("?->>'content' is null", o.data) or
fragment("?->>'actor' = ?", o.data, ^user.ap_id)
)
end
@ -193,13 +193,11 @@ defmodule Pleroma.Notification do
|> join(:left, [n, a], mutated_activity in Pleroma.Activity,
on:
fragment(
"COALESCE((?->'object')->>'id', ?->>'object')",
a.data,
"associated_object_id(?)",
a.data
) ==
fragment(
"COALESCE((?->'object')->>'id', ?->>'object')",
mutated_activity.data,
"associated_object_id(?)",
mutated_activity.data
) and
fragment("(?->>'type' = 'Like' or ?->>'type' = 'Announce')", a.data, a.data) and
@ -341,14 +339,6 @@ defmodule Pleroma.Notification do
|> Repo.delete_all()
end
def destroy_multiple_from_types(%{id: user_id}, types) do
from(n in Notification,
where: n.user_id == ^user_id,
where: n.type in ^types
)
|> Repo.delete_all()
end
def dismiss(%Pleroma.Activity{} = activity) do
Notification
|> where([n], n.activity_id == ^activity.id)
@ -385,7 +375,7 @@ defmodule Pleroma.Notification do
end
def create_notifications(%Activity{data: %{"type" => type}} = activity, options)
when type in ["Follow", "Like", "Announce", "Move", "EmojiReact", "Flag"] do
when type in ["Follow", "Like", "Announce", "Move", "EmojiReact", "Flag", "Update"] do
do_create_notifications(activity, options)
end
@ -439,6 +429,9 @@ defmodule Pleroma.Notification do
activity
|> type_from_activity_object()
"Update" ->
"update"
t ->
raise "No notification type for activity type #{t}"
end
@ -513,7 +506,16 @@ defmodule Pleroma.Notification do
def get_notified_from_activity(activity, local_only \\ true)
def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, local_only)
when type in ["Create", "Like", "Announce", "Follow", "Move", "EmojiReact", "Flag"] do
when type in [
"Create",
"Like",
"Announce",
"Follow",
"Move",
"EmojiReact",
"Flag",
"Update"
] do
potential_receiver_ap_ids = get_potential_receiver_ap_ids(activity)
potential_receivers =
@ -550,7 +552,24 @@ defmodule Pleroma.Notification do
end
def get_potential_receiver_ap_ids(%{data: %{"type" => "Flag", "actor" => actor}}) do
(User.all_superusers() |> Enum.map(fn user -> user.ap_id end)) -- [actor]
(User.all_users_with_privilege(:reports_manage_reports)
|> Enum.map(fn user -> user.ap_id end)) --
[actor]
end
# Update activity: notify all who repeated this
def get_potential_receiver_ap_ids(%{data: %{"type" => "Update", "actor" => actor}} = activity) do
with %Object{data: %{"id" => object_id}} <- Object.normalize(activity, fetch: false) do
repeaters =
Activity.Queries.by_type("Announce")
|> Activity.Queries.by_object_id(object_id)
|> Activity.with_joined_user_actor()
|> where([a, u], u.local)
|> select([a, u], u.ap_id)
|> Repo.all()
repeaters -- [actor]
end
end
def get_potential_receiver_ap_ids(activity) do
@ -661,7 +680,7 @@ defmodule Pleroma.Notification do
cond do
opts[:type] == "poll" -> false
user.ap_id == actor -> false
!User.following?(follower, user) -> true
!User.following?(user, follower) -> true
true -> false
end
end


@ -40,8 +40,7 @@ defmodule Pleroma.Object do
join(query, join_type, [{object, object_position}], a in Activity,
on:
fragment(
"COALESCE(?->'object'->>'id', ?->>'object') = (? ->> 'id') AND (?->>'type' = ?) ",
a.data,
"associated_object_id(?) = (? ->> 'id') AND (?->>'type' = ?) ",
a.data,
object.data,
a.data,
@ -145,7 +144,7 @@ defmodule Pleroma.Object do
Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
end
def normalize(_, options \\ [fetch: false])
def normalize(_, options \\ [fetch: false, id_only: false])
# If we pass an Activity to Object.normalize(), we can try to use the preloaded object.
# Use this whenever possible, especially when walking graphs in an O(N) loop!
@ -173,10 +172,15 @@ defmodule Pleroma.Object do
def normalize(%{"id" => ap_id}, options), do: normalize(ap_id, options)
def normalize(ap_id, options) when is_binary(ap_id) do
if Keyword.get(options, :fetch) do
Fetcher.fetch_object_from_id!(ap_id, options)
else
get_cached_by_ap_id(ap_id)
cond do
Keyword.get(options, :id_only) ->
ap_id
Keyword.get(options, :fetch) ->
Fetcher.fetch_object_from_id!(ap_id, options)
true ->
get_cached_by_ap_id(ap_id)
end
end
@ -208,10 +212,6 @@ defmodule Pleroma.Object do
end
end
def context_mapping(context) do
Object.change(%Object{}, %{data: %{"id" => context}})
end
def make_tombstone(%Object{data: %{"id" => id, "type" => type}}, deleted \\ DateTime.utc_now()) do
%ObjectTombstone{
id: id,
@ -328,6 +328,52 @@ defmodule Pleroma.Object do
end
end
def increase_quotes_count(ap_id) do
Object
|> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id)))
|> update([o],
set: [
data:
fragment(
"""
safe_jsonb_set(?, '{quotesCount}',
(coalesce((?->>'quotesCount')::int, 0) + 1)::varchar::jsonb, true)
""",
o.data,
o.data
)
]
)
|> Repo.update_all([])
|> case do
{1, [object]} -> set_cache(object)
_ -> {:error, "Not found"}
end
end
def decrease_quotes_count(ap_id) do
Object
|> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id)))
|> update([o],
set: [
data:
fragment(
"""
safe_jsonb_set(?, '{quotesCount}',
(greatest(0, (?->>'quotesCount')::int - 1))::varchar::jsonb, true)
""",
o.data,
o.data
)
]
)
|> Repo.update_all([])
|> case do
{1, [object]} -> set_cache(object)
_ -> {:error, "Not found"}
end
end
def increase_vote_count(ap_id, name, actor) do
with %Object{} = object <- Object.normalize(ap_id, fetch: false),
"Question" <- object.data["type"] do
@ -425,4 +471,30 @@ defmodule Pleroma.Object do
end
def object_data_hashtags(_), do: []
def get_emoji_reactions(object) do
reactions = object.data["reactions"]
if is_list(reactions) or is_map(reactions) do
reactions
|> Enum.map(fn
[_emoji, users, _maybe_url] = item when is_list(users) ->
item
[emoji, users] when is_list(users) ->
[emoji, users, nil]
# This clause handles reactions stored as a map, which only happens
# with the legacy two-value format.
{emoji, users} when is_list(users) ->
[emoji, users, nil]
_ ->
nil
end)
|> Enum.reject(&is_nil/1)
else
[]
end
end
end
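A small worked example (not from the commit; actor and emoji URLs are invented) of the normalization done by get_emoji_reactions/1, which pads legacy two-element reactions to the newer three-element form:

object = %Pleroma.Object{
  data: %{
    "reactions" => [
      ["☕", ["https://example.com/users/alice"]],
      ["blobcat", ["https://example.com/users/bob"], "https://example.com/emoji/blobcat.png"]
    ]
  }
}

Pleroma.Object.get_emoji_reactions(object)
# => [
#      ["☕", ["https://example.com/users/alice"], nil],
#      ["blobcat", ["https://example.com/users/bob"], "https://example.com/emoji/blobcat.png"]
#    ]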


@ -4,46 +4,34 @@
defmodule Pleroma.Object.Fetcher do
alias Pleroma.HTTP
alias Pleroma.Instances
alias Pleroma.Maps
alias Pleroma.Object
alias Pleroma.Object.Containment
alias Pleroma.Repo
alias Pleroma.Signature
alias Pleroma.Web.ActivityPub.InternalFetchActor
alias Pleroma.Web.ActivityPub.MRF
alias Pleroma.Web.ActivityPub.ObjectValidator
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.Federator
require Logger
require Pleroma.Constants
defp touch_changeset(changeset) do
updated_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.truncate(:second)
Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
end
defp maybe_reinject_internal_fields(%{data: %{} = old_data}, new_data) do
internal_fields = Map.take(old_data, Pleroma.Constants.object_internal_fields())
Map.merge(new_data, internal_fields)
end
defp maybe_reinject_internal_fields(_, new_data), do: new_data
@spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()}
defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data) do
defp reinject_object(%Object{data: %{}} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
with data <- maybe_reinject_internal_fields(object, new_data),
{:ok, data, _} <- ObjectValidator.validate(data, %{}),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
{:ok, object} <- Repo.insert_or_update(changeset),
{:ok, object} <- Object.set_cache(object) do
{:ok, object}
with {:ok, new_data, _} <- ObjectValidator.validate(new_data, %{}),
{:ok, new_data} <- MRF.filter(new_data),
{:ok, new_object, _} <-
Object.Updater.do_update_and_invalidate_cache(
object,
new_data,
_touch_changeset? = true
) do
{:ok, new_object}
else
e ->
Logger.error("Error while processing object: #{inspect(e)}")
@ -51,20 +39,11 @@ defmodule Pleroma.Object.Fetcher do
end
end
defp reinject_object(%Object{} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
with new_data <- Transmogrifier.fix_object(new_data),
data <- maybe_reinject_internal_fields(object, new_data),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
{:ok, object} <- Repo.insert_or_update(changeset),
{:ok, object} <- Object.set_cache(object) do
defp reinject_object(_, new_data) do
with {:ok, object, _} <- Pipeline.common_pipeline(new_data, local: false) do
{:ok, object}
else
e ->
Logger.error("Error while processing object: #{inspect(e)}")
{:error, e}
e -> e
end
end
@ -200,6 +179,10 @@ defmodule Pleroma.Object.Fetcher do
{:ok, body} <- get_object(id),
{:ok, data} <- safe_json_decode(body),
:ok <- Containment.contain_origin_from_id(id, data) do
if not Instances.reachable?(id) do
Instances.set_reachable(id)
end
{:ok, data}
else
{:scheme, _} ->


@ -0,0 +1,290 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Object.Updater do
require Pleroma.Constants
alias Pleroma.Object
alias Pleroma.Repo
def update_content_fields(orig_object_data, updated_object) do
Pleroma.Constants.status_updatable_fields()
|> Enum.reduce(
%{data: orig_object_data, updated: false},
fn field, %{data: data, updated: updated} ->
updated =
updated or
(field != "updated" and
Map.get(updated_object, field) != Map.get(orig_object_data, field))
data =
if Map.has_key?(updated_object, field) do
Map.put(data, field, updated_object[field])
else
Map.drop(data, [field])
end
%{data: data, updated: updated}
end
)
end
def maybe_history(object) do
with history <- Map.get(object, "formerRepresentations"),
true <- is_map(history),
"OrderedCollection" <- Map.get(history, "type"),
true <- is_list(Map.get(history, "orderedItems")),
true <- is_integer(Map.get(history, "totalItems")) do
history
else
_ -> nil
end
end
def history_for(object) do
with history when not is_nil(history) <- maybe_history(object) do
history
else
_ -> history_skeleton()
end
end
defp history_skeleton do
%{
"type" => "OrderedCollection",
"totalItems" => 0,
"orderedItems" => []
}
end
def maybe_update_history(
updated_object,
orig_object_data,
opts
) do
updated = opts[:updated]
use_history_in_new_object? = opts[:use_history_in_new_object?]
if not updated do
%{updated_object: updated_object, used_history_in_new_object?: false}
else
# Put edit history
# Note that we may have got the edit history by first fetching the object
{new_history, used_history_in_new_object?} =
with true <- use_history_in_new_object?,
updated_history when not is_nil(updated_history) <- maybe_history(opts[:new_data]) do
{updated_history, true}
else
_ ->
history = history_for(orig_object_data)
latest_history_item =
orig_object_data
|> Map.drop(["id", "formerRepresentations"])
updated_history =
history
|> Map.put("orderedItems", [latest_history_item | history["orderedItems"]])
|> Map.put("totalItems", history["totalItems"] + 1)
{updated_history, false}
end
updated_object =
updated_object
|> Map.put("formerRepresentations", new_history)
%{updated_object: updated_object, used_history_in_new_object?: used_history_in_new_object?}
end
end
defp maybe_update_poll(to_be_updated, updated_object) do
choice_key = fn
%{"anyOf" => [_ | _]} -> "anyOf"
%{"oneOf" => [_ | _]} -> "oneOf"
_ -> nil
end
with true <- to_be_updated["type"] == "Question",
key when not is_nil(key) <- choice_key.(updated_object),
true <- key == choice_key.(to_be_updated),
orig_choices <- to_be_updated[key] |> Enum.map(&Map.drop(&1, ["replies"])),
new_choices <- updated_object[key] |> Enum.map(&Map.drop(&1, ["replies"])),
true <- orig_choices == new_choices do
# Choices are the same, but counts are different
to_be_updated
|> Map.put(key, updated_object[key])
else
# Choices (or vote type) have changed, do not allow this
_ -> to_be_updated
end
end
# This calculates the data to be sent as the object of an Update.
# new_data's formerRepresentations is not considered.
# formerRepresentations is added to the returned data.
def make_update_object_data(original_data, new_data, date) do
%{data: updated_data, updated: updated} =
original_data
|> update_content_fields(new_data)
if not updated do
updated_data
else
%{updated_object: updated_data} =
updated_data
|> maybe_update_history(original_data, updated: updated, use_history_in_new_object?: false)
updated_data
|> Map.put("updated", date)
end
end
# This calculates the data of the new Object from an Update.
# new_data's formerRepresentations is considered.
def make_new_object_data_from_update_object(original_data, new_data) do
update_is_reasonable =
with {_, updated} when not is_nil(updated) <- {:cur_updated, new_data["updated"]},
{_, {:ok, updated_time, _}} <- {:cur_updated, DateTime.from_iso8601(updated)},
{_, last_updated} when not is_nil(last_updated) <-
{:last_updated, original_data["updated"] || original_data["published"]},
{_, {:ok, last_updated_time, _}} <-
{:last_updated, DateTime.from_iso8601(last_updated)},
:gt <- DateTime.compare(updated_time, last_updated_time) do
:update_everything
else
# only allow poll updates
{:cur_updated, _} -> :no_content_update
:eq -> :no_content_update
# allow all updates
{:last_updated, _} -> :update_everything
# allow no updates
_ -> false
end
%{
updated_object: updated_data,
used_history_in_new_object?: used_history_in_new_object?,
updated: updated
} =
if update_is_reasonable == :update_everything do
%{data: updated_data, updated: updated} =
original_data
|> update_content_fields(new_data)
updated_data
|> maybe_update_history(original_data,
updated: updated,
use_history_in_new_object?: true,
new_data: new_data
)
|> Map.put(:updated, updated)
else
%{
updated_object: original_data,
used_history_in_new_object?: false,
updated: false
}
end
updated_data =
if update_is_reasonable != false do
updated_data
|> maybe_update_poll(new_data)
else
updated_data
end
%{
updated_data: updated_data,
updated: updated,
used_history_in_new_object?: used_history_in_new_object?
}
end
def for_each_history_item(%{"orderedItems" => items} = history, _object, fun) do
new_items =
Enum.map(items, fun)
|> Enum.reduce_while(
{:ok, []},
fn
{:ok, item}, {:ok, acc} -> {:cont, {:ok, acc ++ [item]}}
e, _acc -> {:halt, e}
end
)
case new_items do
{:ok, items} -> {:ok, Map.put(history, "orderedItems", items)}
e -> e
end
end
def for_each_history_item(history, _, _) do
{:ok, history}
end
def do_with_history(object, fun) do
with history <- object["formerRepresentations"],
object <- Map.drop(object, ["formerRepresentations"]),
{_, {:ok, object}} <- {:main_body, fun.(object)},
{_, {:ok, history}} <- {:history_items, for_each_history_item(history, object, fun)} do
object =
if history do
Map.put(object, "formerRepresentations", history)
else
object
end
{:ok, object}
else
{:main_body, e} -> e
{:history_items, e} -> e
end
end
defp maybe_touch_changeset(changeset, true) do
updated_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.truncate(:second)
Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
end
defp maybe_touch_changeset(changeset, _), do: changeset
def do_update_and_invalidate_cache(orig_object, updated_object, touch_changeset? \\ false) do
orig_object_ap_id = updated_object["id"]
orig_object_data = orig_object.data
%{
updated_data: updated_object_data,
updated: updated,
used_history_in_new_object?: used_history_in_new_object?
} = make_new_object_data_from_update_object(orig_object_data, updated_object)
changeset =
orig_object
|> Repo.preload(:hashtags)
|> Object.change(%{data: updated_object_data})
|> maybe_touch_changeset(touch_changeset?)
with {:ok, new_object} <- Repo.update(changeset),
{:ok, _} <- Object.invalid_object_cache(new_object),
{:ok, _} <- Object.set_cache(new_object),
# The metadata/utils.ex uses the object id for the cache.
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(new_object.id) do
if used_history_in_new_object? do
with create_activity when not is_nil(create_activity) <-
Pleroma.Activity.get_create_by_object_ap_id(orig_object_ap_id),
{:ok, _} <- Pleroma.Activity.HTML.invalidate_cache_for(create_activity.id) do
nil
else
_ -> nil
end
end
{:ok, new_object, updated}
end
end
end
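A hedged sketch of the content-diffing step above, assuming "content" and "summary" are among Pleroma.Constants.status_updatable_fields/0 (the object ids are invented):

orig = %{
  "id" => "https://example.com/objects/1",
  "content" => "old text",
  "summary" => "cw"
}

new = %{
  "id" => "https://example.com/objects/1",
  "content" => "new text",
  "summary" => "cw"
}

Pleroma.Object.Updater.update_content_fields(orig, new)
# => %{
#      data: %{"id" => "https://example.com/objects/1", "content" => "new text", "summary" => "cw"},
#      updated: true
#    }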

lib/pleroma/prom_ex.ex (new file, 49 lines)

@ -0,0 +1,49 @@
defmodule Pleroma.PromEx do
use PromEx, otp_app: :pleroma
alias PromEx.Plugins
@impl true
def plugins do
[
# PromEx built in plugins
Plugins.Application,
Plugins.Beam,
{Plugins.Phoenix, router: Pleroma.Web.Router, endpoint: Pleroma.Web.Endpoint},
Plugins.Ecto,
Plugins.Oban
# Plugins.PhoenixLiveView,
# Plugins.Absinthe,
# Plugins.Broadway,
# Add your own PromEx metrics plugins
# Pleroma.Users.PromExPlugin
]
end
@impl true
def dashboard_assigns do
[
datasource_id: Pleroma.Config.get([Pleroma.PromEx, :datasource]),
default_selected_interval: "30s"
]
end
@impl true
def dashboards do
[
# PromEx built in Grafana dashboards
{:prom_ex, "application.json"},
{:prom_ex, "beam.json"},
{:prom_ex, "phoenix.json"},
{:prom_ex, "ecto.json"},
{:prom_ex, "oban.json"}
# {:prom_ex, "phoenix_live_view.json"},
# {:prom_ex, "absinthe.json"},
# {:prom_ex, "broadway.json"},
# Add your dashboard definitions here with the format: {:otp_app, "path_in_priv"}
# {:pleroma, "/grafana_dashboards/user_metrics.json"}
]
end
end
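A hedged configuration sketch matching the [Pleroma.PromEx, :datasource] key read in dashboard_assigns/0; the datasource name and config location are assumptions, not part of the commit:

# In the instance's runtime config (file name illustrative):
config :pleroma, Pleroma.PromEx,
  datasource: "Prometheus"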


@ -11,8 +11,6 @@ defmodule Pleroma.Repo do
import Ecto.Query
require Logger
defmodule Instrumenter, do: use(Prometheus.EctoInstrumenter)
@doc """
Dynamically loads the repository url from the
DATABASE_URL environment variable.


@ -192,7 +192,7 @@ defmodule Pleroma.ReverseProxy do
halt(conn)
{:error, error, conn} ->
Logger.warn(
Logger.warning(
"#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}"
)


@ -7,7 +7,6 @@ defmodule Pleroma.ReverseProxy.Client.Hackney do
@impl true
def request(method, url, headers, body, opts \\ []) do
opts = Keyword.put(opts, :ssl_options, versions: [:"tlsv1.2", :"tlsv1.1", :tlsv1])
:hackney.request(method, url, headers, body, opts)
end


@ -6,7 +6,6 @@ defmodule Pleroma.ScheduledActivity do
use Ecto.Schema
alias Ecto.Multi
alias Pleroma.Config
alias Pleroma.Repo
alias Pleroma.ScheduledActivity
alias Pleroma.User
@ -20,6 +19,8 @@ defmodule Pleroma.ScheduledActivity do
@min_offset :timer.minutes(5)
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
schema "scheduled_activities" do
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
field(:scheduled_at, :naive_datetime)
@ -40,7 +41,11 @@ defmodule Pleroma.ScheduledActivity do
%{changes: %{params: %{"media_ids" => media_ids} = params}} = changeset
)
when is_list(media_ids) do
media_attachments = Utils.attachments_from_ids(%{media_ids: media_ids})
media_attachments =
Utils.attachments_from_ids(
%{media_ids: media_ids},
User.get_cached_by_id(changeset.data.user_id)
)
params =
params
@ -83,7 +88,7 @@ defmodule Pleroma.ScheduledActivity do
|> where([sa], type(sa.scheduled_at, :date) == type(^scheduled_at, :date))
|> select([sa], count(sa.id))
|> Repo.one()
|> Kernel.>=(Config.get([ScheduledActivity, :daily_user_limit]))
|> Kernel.>=(@config_impl.get([ScheduledActivity, :daily_user_limit]))
end
def exceeds_total_user_limit?(user_id) do
@ -91,7 +96,7 @@ defmodule Pleroma.ScheduledActivity do
|> where(user_id: ^user_id)
|> select([sa], count(sa.id))
|> Repo.one()
|> Kernel.>=(Config.get([ScheduledActivity, :total_user_limit]))
|> Kernel.>=(@config_impl.get([ScheduledActivity, :total_user_limit]))
end
def far_enough?(scheduled_at) when is_binary(scheduled_at) do
@ -119,7 +124,7 @@ defmodule Pleroma.ScheduledActivity do
def create(%User{} = user, attrs) do
Multi.new()
|> Multi.insert(:scheduled_activity, new(user, attrs))
|> maybe_add_jobs(Config.get([ScheduledActivity, :enabled]))
|> maybe_add_jobs(@config_impl.get([ScheduledActivity, :enabled]))
|> Repo.transaction()
|> transaction_response
end

lib/pleroma/search.ex (new file, 17 lines)

@ -0,0 +1,17 @@
defmodule Pleroma.Search do
alias Pleroma.Workers.SearchIndexingWorker
def add_to_index(%Pleroma.Activity{id: activity_id}) do
SearchIndexingWorker.enqueue("add_to_index", %{"activity" => activity_id})
end
def remove_from_index(%Pleroma.Object{id: object_id}) do
SearchIndexingWorker.enqueue("remove_from_index", %{"object" => object_id})
end
def search(query, options) do
search_module = Pleroma.Config.get([Pleroma.Search, :module], Pleroma.Activity)
search_module.search(options[:for_user], query, options)
end
end
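A hedged sketch of how a backend is picked; the config shape mirrors the Pleroma.Config.get([Pleroma.Search, :module], Pleroma.Activity) call above, and Meilisearch is just one possible value:

# Route search and indexing through the Meilisearch backend instead of the
# default (Pleroma.Activity, i.e. plain database search):
config :pleroma, Pleroma.Search, module: Pleroma.Search.Meilisearch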


@ -1,9 +1,10 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Activity.Search do
defmodule Pleroma.Search.DatabaseSearch do
alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.Object.Fetcher
alias Pleroma.Pagination
alias Pleroma.User
@ -13,8 +14,11 @@ defmodule Pleroma.Activity.Search do
import Ecto.Query
@behaviour Pleroma.Search.SearchBackend
@impl true
def search(user, search_query, options \\ []) do
index_type = if Pleroma.Config.get([:database, :rum_enabled]), do: :rum, else: :gin
index_type = if Config.get([:database, :rum_enabled]), do: :rum, else: :gin
limit = Enum.min([Keyword.get(options, :limit), 40])
offset = Keyword.get(options, :offset, 0)
author = Keyword.get(options, :author)
@ -30,7 +34,7 @@ defmodule Pleroma.Activity.Search do
Activity
|> Activity.with_preloaded_object()
|> Activity.restrict_deactivated_users()
|> restrict_public()
|> restrict_public(user)
|> query_with(index_type, search_query, search_function)
|> maybe_restrict_local(user)
|> maybe_restrict_author(author)
@ -45,6 +49,12 @@ defmodule Pleroma.Activity.Search do
end
end
@impl true
def add_to_index(_activity), do: :ok
@impl true
def remove_from_index(_object), do: :ok
def maybe_restrict_author(query, %User{} = author) do
Activity.Queries.by_author(query, author)
end
@ -57,7 +67,19 @@ defmodule Pleroma.Activity.Search do
def maybe_restrict_blocked(query, _), do: query
defp restrict_public(q) do
defp restrict_public(q, user) when not is_nil(user) do
intended_recipients = [
Pleroma.Constants.as_public(),
Pleroma.Web.ActivityPub.Utils.as_local_public()
]
from([a, o] in q,
where: fragment("?->>'type' = 'Create'", a.data),
where: fragment("? && ?", ^intended_recipients, a.recipients)
)
end
defp restrict_public(q, _user) do
from([a, o] in q,
where: fragment("?->>'type' = 'Create'", a.data),
where: ^Pleroma.Constants.as_public() in a.recipients
@ -124,8 +146,8 @@ defmodule Pleroma.Activity.Search do
)
end
defp maybe_restrict_local(q, user) do
limit = Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
def maybe_restrict_local(q, user) do
limit = Config.get([:instance, :limit_to_local_content], :unauthenticated)
case {limit, user} do
{:all, _} -> restrict_local(q)
@ -137,7 +159,7 @@ defmodule Pleroma.Activity.Search do
defp restrict_local(q), do: where(q, local: true)
defp maybe_fetch(activities, user, search_query) do
def maybe_fetch(activities, user, search_query) do
with true <- Regex.match?(~r/https?:/, search_query),
{:ok, object} <- Fetcher.fetch_object_from_id(search_query),
%Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),


@ -0,0 +1,181 @@
defmodule Pleroma.Search.Meilisearch do
require Logger
require Pleroma.Constants
alias Pleroma.Activity
alias Pleroma.Config.Getting, as: Config
import Pleroma.Search.DatabaseSearch
import Ecto.Query
@behaviour Pleroma.Search.SearchBackend
defp meili_headers do
private_key = Config.get([Pleroma.Search.Meilisearch, :private_key])
[{"Content-Type", "application/json"}] ++
if is_nil(private_key), do: [], else: [{"Authorization", "Bearer #{private_key}"}]
end
def meili_get(path) do
endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
result =
Pleroma.HTTP.get(
Path.join(endpoint, path),
meili_headers()
)
with {:ok, res} <- result do
{:ok, Jason.decode!(res.body)}
end
end
def meili_post(path, params) do
endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
result =
Pleroma.HTTP.post(
Path.join(endpoint, path),
Jason.encode!(params),
meili_headers()
)
with {:ok, res} <- result do
{:ok, Jason.decode!(res.body)}
end
end
def meili_put(path, params) do
endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
result =
Pleroma.HTTP.request(
:put,
Path.join(endpoint, path),
Jason.encode!(params),
meili_headers(),
[]
)
with {:ok, res} <- result do
{:ok, Jason.decode!(res.body)}
end
end
def meili_delete(path) do
endpoint = Config.get([Pleroma.Search.Meilisearch, :url])
with {:ok, _} <-
Pleroma.HTTP.request(
:delete,
Path.join(endpoint, path),
"",
meili_headers(),
[]
) do
:ok
else
_ -> {:error, "Could not remove from index"}
end
end
@impl true
def search(user, query, options \\ []) do
limit = Enum.min([Keyword.get(options, :limit), 40])
offset = Keyword.get(options, :offset, 0)
author = Keyword.get(options, :author)
res =
meili_post(
"/indexes/objects/search",
%{q: query, offset: offset, limit: limit}
)
with {:ok, result} <- res do
hits = result["hits"] |> Enum.map(& &1["ap"])
try do
hits
|> Activity.create_by_object_ap_id()
|> Activity.with_preloaded_object()
|> Activity.restrict_deactivated_users()
|> maybe_restrict_local(user)
|> maybe_restrict_author(author)
|> maybe_restrict_blocked(user)
|> maybe_fetch(user, query)
|> order_by([object: obj], desc: obj.data["published"])
|> Pleroma.Repo.all()
rescue
_ -> maybe_fetch([], user, query)
end
end
end
def object_to_search_data(object) do
# Only index public or unlisted Notes
if not is_nil(object) and object.data["type"] == "Note" and
not is_nil(object.data["content"]) and
(Pleroma.Constants.as_public() in object.data["to"] or
Pleroma.Constants.as_public() in object.data["cc"]) and
object.data["content"] not in ["", "."] do
data = object.data
content_str =
case data["content"] do
[nil | rest] -> to_string(rest)
str -> str
end
content =
with {:ok, scrubbed} <-
FastSanitize.Sanitizer.scrub(content_str, Pleroma.HTML.Scrubber.SearchIndexing),
trimmed <- String.trim(scrubbed) do
trimmed
end
# Make sure we have a non-empty string
if content != "" do
{:ok, published, _} = DateTime.from_iso8601(data["published"])
%{
id: object.id,
content: content,
ap: data["id"],
published: published |> DateTime.to_unix()
}
end
end
end
@impl true
def add_to_index(activity) do
maybe_search_data = object_to_search_data(activity.object)
if activity.data["type"] == "Create" and maybe_search_data do
result =
meili_put(
"/indexes/objects/documents",
[maybe_search_data]
)
with {:ok, %{"status" => "enqueued"}} <- result do
# Added successfully
:ok
else
_ ->
# There was an error, report it
Logger.error("Failed to add activity #{activity.id} to index: #{inspect(result)}")
{:error, result}
end
else
# The post isn't something we can search, that's ok
:ok
end
end
@impl true
def remove_from_index(object) do
meili_delete("/indexes/objects/documents/#{object.id}")
end
end
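For illustration, the document shape that object_to_search_data/1 builds and meili_put/2 ships to the objects index; the values are invented:

%{
  id: 9001,
  content: "cofe time",
  ap: "https://example.com/objects/9001",
  published: 1_671_717_270
}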


@ -0,0 +1,24 @@
defmodule Pleroma.Search.SearchBackend do
@doc """
Search statuses with a query, restricting to only those the user should have access to.
"""
@callback search(user :: Pleroma.User.t(), query :: String.t(), options :: [any()]) :: [
Pleroma.Activity.t()
]
@doc """
Add the object associated with the activity to the search index.
The whole activity is passed, to allow filtering on things such as scope.
"""
@callback add_to_index(activity :: Pleroma.Activity.t()) :: :ok | {:error, any()}
@doc """
Remove the object from the index.
Just the object, as opposed to the whole activity, is passed, since the object
is what contains the actual content and there is no need for filtering when removing
from index.
"""
@callback remove_from_index(object :: Pleroma.Object.t()) :: {:ok, any()} | {:error, any()}
end
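A minimal sketch of a backend satisfying this behaviour; the module name is invented, and it simply delegates queries to the database backend while skipping indexing:

defmodule MyApp.Search.NullBackend do
  @behaviour Pleroma.Search.SearchBackend

  @impl true
  def search(user, query, options \\ []) do
    # Reuse the built-in database query path for reads.
    Pleroma.Search.DatabaseSearch.search(user, query, options)
  end

  @impl true
  def add_to_index(_activity), do: :ok

  @impl true
  def remove_from_index(_object), do: {:ok, nil}
end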


@ -10,17 +10,14 @@ defmodule Pleroma.Signature do
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
@known_suffixes ["/publickey", "/main-key"]
def key_id_to_actor_id(key_id) do
uri =
URI.parse(key_id)
key_id
|> URI.parse()
|> Map.put(:fragment, nil)
uri =
if not is_nil(uri.path) and String.ends_with?(uri.path, "/publickey") do
Map.put(uri, :path, String.replace(uri.path, "/publickey", ""))
else
uri
end
|> remove_suffix(@known_suffixes)
maybe_ap_id = URI.to_string(uri)
@ -36,6 +33,16 @@ defmodule Pleroma.Signature do
end
end
defp remove_suffix(uri, [test | rest]) do
if not is_nil(uri.path) and String.ends_with?(uri.path, test) do
Map.put(uri, :path, String.replace(uri.path, test, ""))
else
remove_suffix(uri, rest)
end
end
defp remove_suffix(uri, []), do: uri
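# Illustration (hosts invented): the fragment strip plus suffix removal reduce
# the common key id shapes to a bare actor id, e.g.
#   "https://example.com/users/alice#main-key"  -> "https://example.com/users/alice"
#   "https://example.com/users/alice/main-key"  -> "https://example.com/users/alice"
#   "https://remote.example/actor/publickey"    -> "https://remote.example/actor"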
def fetch_public_key(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid),
@ -59,9 +66,8 @@ defmodule Pleroma.Signature do
end
end
def sign(%User{} = user, headers) do
with {:ok, %{keys: keys}} <- User.ensure_keys_present(user),
{:ok, private_key, _} <- Keys.keys_from_pem(keys) do
def sign(%User{keys: keys} = user, headers) do
with {:ok, private_key, _} <- Keys.keys_from_pem(keys) do
HTTPSignatures.sign(private_key, user.ap_id <> "#main-key", headers)
end
end


@ -70,7 +70,7 @@ defmodule Pleroma.Telemetry.Logger do
%{key: key},
_
) do
Logger.warn(fn ->
Logger.warning(fn ->
"Pool worker for #{key}: Client #{inspect(client_pid)} died before releasing the connection with #{inspect(reason)}"
end)
end


@ -34,8 +34,8 @@ defmodule Pleroma.Upload do
"""
alias Ecto.UUID
alias Pleroma.Config
alias Pleroma.Maps
alias Pleroma.Web.ActivityPub.Utils
require Logger
@type source ::
@ -60,12 +60,25 @@ defmodule Pleroma.Upload do
width: integer(),
height: integer(),
blurhash: String.t(),
description: String.t(),
path: String.t()
}
defstruct [:id, :name, :tempfile, :content_type, :width, :height, :blurhash, :path]
defstruct [
:id,
:name,
:tempfile,
:content_type,
:width,
:height,
:blurhash,
:description,
:path
]
defp get_description(opts, upload) do
case {opts[:description], Pleroma.Config.get([Pleroma.Upload, :default_description])} do
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
defp get_description(upload) do
case {upload.description, Pleroma.Config.get([Pleroma.Upload, :default_description])} do
{description, _} when is_binary(description) -> description
{_, :filename} -> upload.name
{_, str} when is_binary(str) -> str
@ -81,13 +94,14 @@ defmodule Pleroma.Upload do
with {:ok, upload} <- prepare_upload(upload, opts),
upload = %__MODULE__{upload | path: upload.path || "#{upload.id}/#{upload.name}"},
{:ok, upload} <- Pleroma.Upload.Filter.filter(opts.filters, upload),
description = get_description(opts, upload),
description = get_description(upload),
{_, true} <-
{:description_limit,
String.length(description) <= Pleroma.Config.get([:instance, :description_limit])},
{:ok, url_spec} <- Pleroma.Uploaders.Uploader.put_file(opts.uploader, upload) do
{:ok,
%{
"id" => Utils.generate_object_id(),
"type" => opts.activity_type,
"mediaType" => upload.content_type,
"url" => [
@ -152,7 +166,8 @@ defmodule Pleroma.Upload do
id: UUID.generate(),
name: file.filename,
tempfile: file.path,
content_type: file.content_type
content_type: file.content_type,
description: opts.description
}}
end
end
@ -172,7 +187,8 @@ defmodule Pleroma.Upload do
id: UUID.generate(),
name: hash <> "." <> ext,
tempfile: tmp_path,
content_type: content_type
content_type: content_type,
description: opts.description
}}
end
end
@ -229,18 +245,18 @@ defmodule Pleroma.Upload do
defp url_from_spec(_upload, _base_url, {:url, url}), do: url
def base_url do
uploader = Config.get([Pleroma.Upload, :uploader])
upload_base_url = Config.get([Pleroma.Upload, :base_url])
public_endpoint = Config.get([uploader, :public_endpoint])
uploader = @config_impl.get([Pleroma.Upload, :uploader])
upload_base_url = @config_impl.get([Pleroma.Upload, :base_url])
public_endpoint = @config_impl.get([uploader, :public_endpoint])
case uploader do
Pleroma.Uploaders.Local ->
upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
Pleroma.Uploaders.S3 ->
bucket = Config.get([Pleroma.Uploaders.S3, :bucket])
truncated_namespace = Config.get([Pleroma.Uploaders.S3, :truncated_namespace])
namespace = Config.get([Pleroma.Uploaders.S3, :bucket_namespace])
bucket = @config_impl.get([Pleroma.Uploaders.S3, :bucket])
truncated_namespace = @config_impl.get([Pleroma.Uploaders.S3, :truncated_namespace])
namespace = @config_impl.get([Pleroma.Uploaders.S3, :bucket_namespace])
bucket_with_namespace =
cond do

View file

@ -38,9 +38,9 @@ defmodule Pleroma.Upload.Filter do
{:ok, :noop} ->
filter(rest, upload)
error ->
Logger.error("#{__MODULE__}: Filter #{filter} failed: #{inspect(error)}")
error
{:error, e} ->
Logger.error("#{__MODULE__}: Filter #{filter} failed: #{inspect(e)}")
{:error, e}
end
end
end

View file

@ -8,27 +8,28 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
"""
require Logger
alias Vix.Vips.Image
alias Vix.Vips.Operation
@behaviour Pleroma.Upload.Filter
@spec filter(Pleroma.Upload.t()) ::
{:ok, :filtered, Pleroma.Upload.t()} | {:ok, :noop} | {:error, String.t()}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _} = upload) do
try do
image =
file
|> Mogrify.open()
|> Mogrify.verbose()
{:ok, image} = Image.new_from_file(file)
{width, height} = {Image.width(image), Image.height(image)}
upload =
upload
|> Map.put(:width, image.width)
|> Map.put(:height, image.height)
|> Map.put(:blurhash, get_blurhash(file))
|> Map.put(:width, width)
|> Map.put(:height, height)
|> Map.put(:blurhash, get_blurhash(image))
{:ok, :filtered, upload}
rescue
e in ErlangError ->
Logger.warn("#{__MODULE__}: #{inspect(e)}")
Logger.warning("#{__MODULE__}: #{inspect(e)}")
{:ok, :noop}
end
end
@ -45,7 +46,7 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
{:ok, :filtered, upload}
rescue
e in ErlangError ->
Logger.warn("#{__MODULE__}: #{inspect(e)}")
Logger.warning("#{__MODULE__}: #{inspect(e)}")
{:ok, :noop}
end
end
@ -53,7 +54,7 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
def filter(_), do: {:ok, :noop}
defp get_blurhash(file) do
with {:ok, blurhash} <- :eblurhash.magick(file) do
with {:ok, blurhash} <- vips_blurhash(file) do
blurhash
else
_ -> nil
@ -77,7 +78,28 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
%{width: width, height: height}
else
nil -> {:error, {:ffprobe, :command_not_found}}
{:error, _} = error -> error
error -> {:error, error}
end
end
defp vips_blurhash(%Vix.Vips.Image{} = image) do
with {:ok, resized_image} <- Operation.thumbnail_image(image, 100),
{height, width} <- {Image.height(resized_image), Image.width(resized_image)},
max <- max(height, width),
{x, y} <- {max(round(width * 5 / max), 1), max(round(height * 5 / max), 1)} do
{:ok, rgb} =
if Image.has_alpha?(resized_image) do
# remove alpha channel
resized_image
|> Operation.extract_band!(0, n: 3)
|> Image.write_to_binary()
else
Image.write_to_binary(resized_image)
end
Blurhash.encode(rgb, width, height, x, y)
else
_ -> nil
end
end
end
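
A quick sanity check of the blurhash component calculation above, assuming a hypothetical 1920x1080 upload (thumbnail_image/2 scales the width to 100 and keeps the aspect ratio):

# thumbnail_image(image, 100)       -> roughly a 100x56 thumbnail
# max(height, width)                -> 100
# x = max(round(100 * 5 / 100), 1)  -> 5
# y = max(round(56 * 5 / 100), 1)   -> 3
# Blurhash.encode(rgb, 100, 56, 5, 3) then produces the stored blurhash string.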

View file

@ -0,0 +1,50 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Upload.Filter.Exiftool.ReadDescription do
@moduledoc """
Gets a valid description from the related EXIF tags and provides it in the response if no description is provided yet.
It will first check ImageDescription; when that doesn't provide a valid description, it will check iptc:Caption-Abstract.
A valid description means the fields are filled in and not too long (see `:instance, :description_limit`).
"""
@behaviour Pleroma.Upload.Filter
def filter(%Pleroma.Upload{description: description})
when is_binary(description),
do: {:ok, :noop}
def filter(%Pleroma.Upload{tempfile: file} = upload),
do: {:ok, :filtered, upload |> Map.put(:description, read_description_from_exif_data(file))}
def filter(_, _), do: {:ok, :noop}
defp read_description_from_exif_data(file) do
nil
|> read_when_empty(file, "-ImageDescription")
|> read_when_empty(file, "-iptc:Caption-Abstract")
end
defp read_when_empty(current_description, _, _) when is_binary(current_description),
do: current_description
defp read_when_empty(_, file, tag) do
try do
{tag_content, 0} =
System.cmd("exiftool", ["-b", "-s3", tag, file],
stderr_to_stdout: false,
parallelism: true
)
tag_content = String.trim(tag_content)
if tag_content != "" and
String.length(tag_content) <=
Pleroma.Config.get([:instance, :description_limit]),
do: tag_content,
else: nil
rescue
_ in ErlangError -> nil
end
end
end
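
The filter shells out to exiftool, so the lookups above are roughly equivalent to the calls below (file name hypothetical). The first non-empty tag that fits within [:instance, :description_limit] wins, and a client-supplied description always takes precedence via the first filter/1 clause.

# System.cmd("exiftool", ["-b", "-s3", "-ImageDescription", "photo.jpg"])
# System.cmd("exiftool", ["-b", "-s3", "-iptc:Caption-Abstract", "photo.jpg"])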

View file

@ -2,7 +2,7 @@
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Upload.Filter.Exiftool do
defmodule Pleroma.Upload.Filter.Exiftool.StripLocation do
@moduledoc """
Strips GPS related EXIF tags and overwrites the file in place.
Also strips or replaces filesystem metadata e.g., timestamps.
@ -14,6 +14,7 @@ defmodule Pleroma.Upload.Filter.Exiftool do
# Formats not compatible with exiftool at this time
def filter(%Pleroma.Upload{content_type: "image/heic"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/svg" <> _}), do: {:ok, :noop}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
try do

View file

@ -0,0 +1,20 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Upload.Filter.OnlyMedia do
@behaviour Pleroma.Upload.Filter
alias Pleroma.Upload
def filter(%Upload{content_type: content_type}) do
[type, _subtype] = String.split(content_type, "/")
if type in ["image", "video", "audio"] do
{:ok, :noop}
else
{:error, "Disallowed content-type: #{content_type}"}
end
end
def filter(_), do: {:ok, :noop}
end
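
To actually reject non-media uploads, the filter has to be enabled in the upload filter chain. A hedged configuration sketch (the other entries shown are optional and depend on the instance):

config :pleroma, Pleroma.Upload,
  filters: [
    Pleroma.Upload.Filter.OnlyMedia,
    Pleroma.Upload.Filter.Exiftool.StripLocation,
    Pleroma.Upload.Filter.Exiftool.ReadDescription
  ]

# With this in place an "application/pdf" upload is rejected with
# {:error, "Disallowed content-type: application/pdf"}, while "image/png",
# "video/mp4" and "audio/ogg" pass through as {:ok, :noop}.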

View file

@ -6,7 +6,8 @@ defmodule Pleroma.Uploaders.S3 do
@behaviour Pleroma.Uploaders.Uploader
require Logger
alias Pleroma.Config
@ex_aws_impl Application.compile_env(:pleroma, [__MODULE__, :ex_aws_impl], ExAws)
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
# The file name is re-encoded with S3's constraints here to comply with previous
# links with less strict filenames
@ -22,7 +23,7 @@ defmodule Pleroma.Uploaders.S3 do
@impl true
def put_file(%Pleroma.Upload{} = upload) do
config = Config.get([__MODULE__])
config = @config_impl.get([__MODULE__])
bucket = Keyword.get(config, :bucket)
streaming = Keyword.get(config, :streaming_enabled)
@ -56,7 +57,7 @@ defmodule Pleroma.Uploaders.S3 do
])
end
case ExAws.request(op) do
case @ex_aws_impl.request(op) do
{:ok, _} ->
{:ok, {:file, s3_name}}
@ -69,9 +70,9 @@ defmodule Pleroma.Uploaders.S3 do
@impl true
def delete_file(file) do
[__MODULE__, :bucket]
|> Config.get()
|> @config_impl.get()
|> ExAws.S3.delete_object(file)
|> ExAws.request()
|> @ex_aws_impl.request()
|> case do
{:ok, %{status_code: 204}} -> :ok
error -> {:error, inspect(error)}
@ -83,3 +84,7 @@ defmodule Pleroma.Uploaders.S3 do
String.replace(name, @regex, "-")
end
end
defmodule Pleroma.Uploaders.S3.ExAwsAPI do
@callback request(op :: ExAws.Operation.t()) :: {:ok, ExAws.Operation.t()} | {:error, term()}
end
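
The ex_aws_impl/config_impl module attributes are resolved with Application.compile_env/3, so alternative implementations (typically Mox mocks satisfying the ExAwsAPI callback above) can be swapped in at compile time. A sketch for a test environment; the mock module names are hypothetical:

# config/test.exs
config :pleroma, Pleroma.Uploaders.S3,
  ex_aws_impl: MyInstance.ExAwsMock,
  config_impl: MyInstance.ConfigMock

# test/support setup
Mox.defmock(MyInstance.ExAwsMock, for: Pleroma.Uploaders.S3.ExAwsAPI)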

View file

@ -124,7 +124,6 @@ defmodule Pleroma.User do
field(:domain_blocks, {:array, :string}, default: [])
field(:is_active, :boolean, default: true)
field(:no_rich_text, :boolean, default: false)
field(:ap_enabled, :boolean, default: false)
field(:is_moderator, :boolean, default: false)
field(:is_admin, :boolean, default: false)
field(:show_role, :boolean, default: true)
@ -326,7 +325,7 @@ defmodule Pleroma.User do
end
def visible_for(%User{} = user, for_user) do
if superuser?(for_user) do
if privileged?(for_user, :users_manage_activation_state) do
:visible
else
visible_account_status(user)
@ -353,10 +352,45 @@ defmodule Pleroma.User do
end
end
@spec superuser?(User.t()) :: boolean()
def superuser?(%User{local: true, is_admin: true}), do: true
def superuser?(%User{local: true, is_moderator: true}), do: true
def superuser?(_), do: false
@spec privileged?(User.t(), atom()) :: boolean()
def privileged?(%User{is_admin: false, is_moderator: false}, _), do: false
def privileged?(
%User{local: true, is_admin: is_admin, is_moderator: is_moderator},
privilege_tag
),
do:
privileged_for?(privilege_tag, is_admin, :admin_privileges) or
privileged_for?(privilege_tag, is_moderator, :moderator_privileges)
def privileged?(_, _), do: false
defp privileged_for?(privilege_tag, true, config_role_key),
do: privilege_tag in Config.get([:instance, config_role_key])
defp privileged_for?(_, _, _), do: false
@spec privileges(User.t()) :: [atom()]
def privileges(%User{local: false}) do
[]
end
def privileges(%User{is_moderator: false, is_admin: false}) do
[]
end
def privileges(%User{local: true, is_moderator: true, is_admin: true}) do
(Config.get([:instance, :moderator_privileges]) ++ Config.get([:instance, :admin_privileges]))
|> Enum.uniq()
end
def privileges(%User{local: true, is_moderator: true, is_admin: false}) do
Config.get([:instance, :moderator_privileges])
end
def privileges(%User{local: true, is_moderator: false, is_admin: true}) do
Config.get([:instance, :admin_privileges])
end
@spec invisible?(User.t()) :: boolean()
def invisible?(%User{invisible: true}), do: true
@ -453,7 +487,6 @@ defmodule Pleroma.User do
:nickname,
:public_key,
:avatar,
:ap_enabled,
:banner,
:is_locked,
:last_refreshed_at,
@ -611,7 +644,13 @@ defmodule Pleroma.User do
{:ok, new_value} <- value_function.(value) do
put_change(changeset, map_field, new_value)
else
_ -> changeset
{:error, :file_too_large} ->
Ecto.Changeset.validate_change(changeset, map_field, fn map_field, _value ->
[{map_field, "file is too large"}]
end)
_ ->
changeset
end
end
@ -706,11 +745,12 @@ defmodule Pleroma.User do
])
|> validate_required([:name, :nickname])
|> unique_constraint(:nickname)
|> validate_exclusion(:nickname, Config.get([User, :restricted_nicknames]))
|> validate_not_restricted_nickname(:nickname)
|> validate_format(:nickname, local_nickname_regex())
|> put_ap_id()
|> unique_constraint(:ap_id)
|> put_following_and_follower_and_featured_address()
|> put_private_key()
end
def register_changeset(struct, params \\ %{}, opts \\ []) do
@ -754,17 +794,9 @@ defmodule Pleroma.User do
|> validate_confirmation(:password)
|> unique_constraint(:email)
|> validate_format(:email, @email_regex)
|> validate_change(:email, fn :email, email ->
valid? =
Config.get([User, :email_blacklist])
|> Enum.all?(fn blacklisted_domain ->
!String.ends_with?(email, ["@" <> blacklisted_domain, "." <> blacklisted_domain])
end)
if valid?, do: [], else: [email: "Invalid email"]
end)
|> validate_email_not_in_blacklisted_domain(:email)
|> unique_constraint(:nickname)
|> validate_exclusion(:nickname, Config.get([User, :restricted_nicknames]))
|> validate_not_restricted_nickname(:nickname)
|> validate_format(:nickname, local_nickname_regex())
|> validate_length(:bio, max: bio_limit)
|> validate_length(:name, min: 1, max: name_limit)
@ -776,6 +808,36 @@ defmodule Pleroma.User do
|> put_ap_id()
|> unique_constraint(:ap_id)
|> put_following_and_follower_and_featured_address()
|> put_private_key()
end
def validate_not_restricted_nickname(changeset, field) do
validate_change(changeset, field, fn _, value ->
valid? =
Config.get([User, :restricted_nicknames])
|> Enum.all?(fn restricted_nickname ->
String.downcase(value) != String.downcase(restricted_nickname)
end)
if valid?, do: [], else: [nickname: "Invalid nickname"]
end)
end
def validate_email_not_in_blacklisted_domain(changeset, field) do
validate_change(changeset, field, fn _, value ->
valid? =
Config.get([User, :email_blacklist])
|> Enum.all?(fn blacklisted_domain ->
blacklisted_domain_downcase = String.downcase(blacklisted_domain)
!String.ends_with?(String.downcase(value), [
"@" <> blacklisted_domain_downcase,
"." <> blacklisted_domain_downcase
])
end)
if valid?, do: [], else: [email: "Invalid email"]
end)
end
def maybe_validate_required_email(changeset, true), do: changeset
@ -825,6 +887,11 @@ defmodule Pleroma.User do
|> put_change(:featured_address, featured)
end
defp put_private_key(changeset) do
{:ok, pem} = Keys.generate_rsa_pem()
put_change(changeset, :keys, pem)
end
defp autofollow_users(user) do
candidates = Config.get([:instance, :autofollowed_nicknames])
@ -877,7 +944,7 @@ defmodule Pleroma.User do
end
end
defp send_user_approval_email(user) do
defp send_user_approval_email(%User{email: email} = user) when is_binary(email) do
user
|> Pleroma.Emails.UserEmail.approval_pending_email()
|> Pleroma.Emails.Mailer.deliver_async()
@ -885,6 +952,10 @@ defmodule Pleroma.User do
{:ok, :enqueued}
end
defp send_user_approval_email(_user) do
{:ok, :skipped}
end
defp send_admin_approval_emails(user) do
all_superusers()
|> Enum.filter(fn user -> not is_nil(user.email) end)
@ -988,11 +1059,7 @@ defmodule Pleroma.User do
end
def maybe_direct_follow(%User{} = follower, %User{} = followed) do
if not ap_enabled?(followed) do
follow(follower, followed)
else
{:ok, follower, followed}
end
{:ok, follower, followed}
end
@doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
@ -1129,24 +1196,10 @@ defmodule Pleroma.User do
|> update_and_set_cache()
end
def update_and_set_cache(%{data: %Pleroma.User{} = user} = changeset) do
was_superuser_before_update = User.superuser?(user)
def update_and_set_cache(changeset) do
with {:ok, user} <- Repo.update(changeset, stale_error_field: :id) do
set_cache(user)
end
|> maybe_remove_report_notifications(was_superuser_before_update)
end
defp maybe_remove_report_notifications({:ok, %Pleroma.User{} = user} = result, true) do
if not User.superuser?(user),
do: user |> Notification.destroy_multiple_from_types(["pleroma:report"])
result
end
defp maybe_remove_report_notifications(result, _) do
result
end
def get_user_friends_ap_ids(user) do
@ -1459,17 +1512,30 @@ defmodule Pleroma.User do
{:ok, list(UserRelationship.t())} | {:error, String.t()}
def mute(%User{} = muter, %User{} = mutee, params \\ %{}) do
notifications? = Map.get(params, :notifications, true)
expires_in = Map.get(params, :expires_in, 0)
duration = Map.get(params, :duration, 0)
with {:ok, user_mute} <- UserRelationship.create_mute(muter, mutee),
expires_at =
if duration > 0 do
DateTime.utc_now()
|> DateTime.add(duration)
else
nil
end
with {:ok, user_mute} <- UserRelationship.create_mute(muter, mutee, expires_at),
{:ok, user_notification_mute} <-
(notifications? && UserRelationship.create_notification_mute(muter, mutee)) ||
(notifications? &&
UserRelationship.create_notification_mute(
muter,
mutee,
expires_at
)) ||
{:ok, nil} do
if expires_in > 0 do
if duration > 0 do
Pleroma.Workers.MuteExpireWorker.enqueue(
"unmute_user",
%{"muter_id" => muter.id, "mutee_id" => mutee.id},
schedule_in: expires_in
scheduled_at: expires_at
)
end
@ -1494,7 +1560,7 @@ defmodule Pleroma.User do
unmute(muter, mutee)
else
{who, result} = error ->
Logger.warn(
Logger.warning(
"User.unmute/2 failed. #{who}: #{result}, muter_id: #{muter_id}, mutee_id: #{mutee_id}"
)
@ -1540,13 +1606,19 @@ defmodule Pleroma.User do
blocker
end
# clear any requested follows as well
# clear any requested follows from both sides as well
blocked =
case CommonAPI.reject_follow_request(blocked, blocker) do
{:ok, %User{} = updated_blocked} -> updated_blocked
nil -> blocked
end
blocker =
case CommonAPI.reject_follow_request(blocker, blocked) do
{:ok, %User{} = updated_blocker} -> updated_blocker
nil -> blocker
end
unsubscribe(blocked, blocker)
unfollowing_blocked = Config.get([:activitypub, :unfollow_blocked], true)
@ -1820,7 +1892,6 @@ defmodule Pleroma.User do
confirmation_token: nil,
domain_blocks: [],
is_active: false,
ap_enabled: false,
is_moderator: false,
is_admin: false,
mascot: nil,
@ -2046,6 +2117,7 @@ defmodule Pleroma.User do
follower_address: uri <> "/followers"
}
|> change
|> put_private_key()
|> unique_constraint(:nickname)
|> Repo.insert()
|> set_cache()
@ -2064,7 +2136,7 @@ defmodule Pleroma.User do
def public_key(_), do: {:error, "key not found"}
def get_public_key_for_ap_id(ap_id) do
with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id),
with %User{} = user <- get_cached_by_ap_id(ap_id),
{:ok, public_key} <- public_key(user) do
{:ok, public_key}
else
@ -2072,13 +2144,10 @@ defmodule Pleroma.User do
end
end
def ap_enabled?(%User{local: true}), do: true
def ap_enabled?(%User{ap_enabled: ap_enabled}), do: ap_enabled
def ap_enabled?(_), do: false
@doc "Gets or fetch a user by uri or nickname."
@spec get_or_fetch(String.t()) :: {:ok, User.t()} | {:error, String.t()}
def get_or_fetch("http" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
def get_or_fetch("http://" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
def get_or_fetch("https://" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
def get_or_fetch(nickname), do: get_or_fetch_by_nickname(nickname)
# wait a period of time and return newest version of the User structs
@ -2206,6 +2275,11 @@ defmodule Pleroma.User do
|> Repo.all()
end
@spec all_users_with_privilege(atom()) :: [User.t()]
def all_users_with_privilege(privilege) do
User.Query.build(%{is_privileged: privilege}) |> Repo.all()
end
def muting_reblogs?(%User{} = user, %User{} = target) do
UserRelationship.reblog_mute_exists?(user, target)
end
@ -2311,17 +2385,6 @@ defmodule Pleroma.User do
}
end
def ensure_keys_present(%{keys: keys} = user) when not is_nil(keys), do: {:ok, user}
def ensure_keys_present(%User{} = user) do
with {:ok, pem} <- Keys.generate_rsa_pem() do
user
|> cast(%{keys: pem}, [:keys])
|> validate_required([:keys])
|> update_and_set_cache()
end
end
def get_ap_ids_by_nicknames(nicknames) do
from(u in User,
where: u.nickname in ^nicknames,
@ -2364,6 +2427,38 @@ defmodule Pleroma.User do
|> update_and_set_cache()
end
def alias_users(user) do
user.also_known_as
|> Enum.map(&User.get_cached_by_ap_id/1)
|> Enum.filter(fn user -> user != nil end)
end
def add_alias(user, new_alias_user) do
current_aliases = user.also_known_as || []
new_alias_ap_id = new_alias_user.ap_id
if new_alias_ap_id in current_aliases do
{:ok, user}
else
user
|> cast(%{also_known_as: current_aliases ++ [new_alias_ap_id]}, [:also_known_as])
|> update_and_set_cache()
end
end
def delete_alias(user, alias_user) do
current_aliases = user.also_known_as || []
alias_ap_id = alias_user.ap_id
if alias_ap_id in current_aliases do
user
|> cast(%{also_known_as: current_aliases -- [alias_ap_id]}, [:also_known_as])
|> update_and_set_cache()
else
{:error, :no_such_alias}
end
end
# Internal function; public one is `deactivate/2`
defp set_activation_status(user, status) do
user
@ -2586,6 +2681,8 @@ defmodule Pleroma.User do
|> update_and_set_cache()
end
def update_last_active_at(user), do: user
def active_user_count(days \\ 30) do
active_after = Timex.shift(NaiveDateTime.utc_now(), days: -days)
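
The privilege helpers added to Pleroma.User above (privileged?/2, privileges/1, all_users_with_privilege/1) are driven by two instance-level lists. A hedged sketch, using only privilege atoms that appear in this changeset; the complete list depends on the release:

config :pleroma, :instance,
  moderator_privileges: [:reports_manage_reports],
  admin_privileges: [:reports_manage_reports, :users_manage_activation_state]

# With the values above, for local users:
# User.privileged?(moderator, :reports_manage_reports)        #=> true
# User.privileged?(moderator, :users_manage_activation_state) #=> false
# User.all_users_with_privilege(:reports_manage_reports)      #=> local admins and moderators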

View file

@ -9,12 +9,14 @@ defmodule Pleroma.User.Backup do
import Ecto.Query
import Pleroma.Web.Gettext
require Logger
require Pleroma.Constants
alias Pleroma.Activity
alias Pleroma.Bookmark
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.User.Backup.State
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.ActivityPub.UserView
@ -25,16 +27,18 @@ defmodule Pleroma.User.Backup do
field(:file_name, :string)
field(:file_size, :integer, default: 0)
field(:processed, :boolean, default: false)
field(:state, State, default: :invalid)
field(:processed_number, :integer, default: 0)
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
timestamps()
end
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
def create(user, admin_id \\ nil) do
with :ok <- validate_email_enabled(),
:ok <- validate_user_email(user),
:ok <- validate_limit(user, admin_id),
with :ok <- validate_limit(user, admin_id),
{:ok, backup} <- user |> new() |> Repo.insert() do
BackupWorker.process(backup, admin_id)
end
@ -48,7 +52,8 @@ defmodule Pleroma.User.Backup do
%__MODULE__{
user_id: user.id,
content_type: "application/zip",
file_name: name
file_name: name,
state: :pending
}
end
@ -86,20 +91,6 @@ defmodule Pleroma.User.Backup do
end
end
defp validate_email_enabled do
if Pleroma.Config.get([Pleroma.Emails.Mailer, :enabled]) do
:ok
else
{:error, dgettext("errors", "Backups require enabled email")}
end
end
defp validate_user_email(%User{email: nil}) do
{:error, dgettext("errors", "Email is required")}
end
defp validate_user_email(%User{email: email}) when is_binary(email), do: :ok
def get_last(user_id) do
__MODULE__
|> where(user_id: ^user_id)
@ -125,27 +116,94 @@ defmodule Pleroma.User.Backup do
def get(id), do: Repo.get(__MODULE__, id)
def process(%__MODULE__{} = backup) do
with {:ok, zip_file} <- export(backup),
{:ok, %{size: size}} <- File.stat(zip_file),
{:ok, _upload} <- upload(backup, zip_file) do
backup
|> cast(%{file_size: size, processed: true}, [:file_size, :processed])
|> Repo.update()
defp set_state(backup, state, processed_number \\ nil) do
struct =
%{state: state}
|> Pleroma.Maps.put_if_present(:processed_number, processed_number)
backup
|> cast(struct, [:state, :processed_number])
|> Repo.update()
end
def process(
%__MODULE__{} = backup,
processor_module \\ __MODULE__.Processor
) do
set_state(backup, :running, 0)
current_pid = self()
task =
Task.Supervisor.async_nolink(
Pleroma.TaskSupervisor,
processor_module,
:do_process,
[backup, current_pid]
)
wait_backup(backup, backup.processed_number, task)
end
defp wait_backup(backup, current_processed, task) do
wait_time = @config_impl.get([__MODULE__, :process_wait_time])
receive do
{:progress, new_processed} ->
total_processed = current_processed + new_processed
set_state(backup, :running, total_processed)
wait_backup(backup, total_processed, task)
{:DOWN, _ref, _proc, _pid, reason} ->
backup = get(backup.id)
if reason != :normal do
Logger.error("Backup #{backup.id} process ended abnormally: #{inspect(reason)}")
{:ok, backup} = set_state(backup, :failed)
cleanup(backup)
{:error,
%{
backup: backup,
reason: :exit,
details: reason
}}
else
{:ok, backup}
end
after
wait_time ->
Logger.error(
"Backup #{backup.id} timed out after no response for #{wait_time}ms, terminating"
)
Task.Supervisor.terminate_child(Pleroma.TaskSupervisor, task.pid)
{:ok, backup} = set_state(backup, :failed)
cleanup(backup)
{:error,
%{
backup: backup,
reason: :timeout
}}
end
end
@files ['actor.json', 'outbox.json', 'likes.json', 'bookmarks.json']
def export(%__MODULE__{} = backup) do
def export(%__MODULE__{} = backup, caller_pid) do
backup = Repo.preload(backup, :user)
name = String.trim_trailing(backup.file_name, ".zip")
dir = dir(name)
dir = backup_tempdir(backup)
with :ok <- File.mkdir(dir),
:ok <- actor(dir, backup.user),
:ok <- statuses(dir, backup.user),
:ok <- likes(dir, backup.user),
:ok <- bookmarks(dir, backup.user),
:ok <- actor(dir, backup.user, caller_pid),
:ok <- statuses(dir, backup.user, caller_pid),
:ok <- likes(dir, backup.user, caller_pid),
:ok <- bookmarks(dir, backup.user, caller_pid),
{:ok, zip_path} <- :zip.create(String.to_charlist(dir <> ".zip"), @files, cwd: dir),
{:ok, _} <- File.rm_rf(dir) do
{:ok, to_string(zip_path)}
@ -173,11 +231,12 @@ defmodule Pleroma.User.Backup do
end
end
defp actor(dir, user) do
defp actor(dir, user, caller_pid) do
with {:ok, json} <-
UserView.render("user.json", %{user: user})
|> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"})
|> Jason.encode() do
send(caller_pid, {:progress, 1})
File.write(Path.join(dir, "actor.json"), json)
end
end
@ -196,47 +255,80 @@ defmodule Pleroma.User.Backup do
)
end
defp write(query, dir, name, fun) do
defp should_report?(num, chunk_size), do: rem(num, chunk_size) == 0
defp backup_tempdir(backup) do
name = String.trim_trailing(backup.file_name, ".zip")
dir(name)
end
defp cleanup(backup) do
dir = backup_tempdir(backup)
File.rm_rf(dir)
end
defp write(query, dir, name, fun, caller_pid) do
path = Path.join(dir, "#{name}.json")
chunk_size = Pleroma.Config.get([__MODULE__, :process_chunk_size])
with {:ok, file} <- File.open(path, [:write, :utf8]),
:ok <- write_header(file, name) do
total =
query
|> Pleroma.Repo.chunk_stream(100)
|> Pleroma.Repo.chunk_stream(chunk_size, _returns_as = :one, timeout: :infinity)
|> Enum.reduce(0, fn i, acc ->
with {:ok, data} <- fun.(i),
with {:ok, data} <-
(try do
fun.(i)
rescue
e -> {:error, e}
end),
{:ok, str} <- Jason.encode(data),
:ok <- IO.write(file, str <> ",\n") do
if should_report?(acc + 1, chunk_size) do
send(caller_pid, {:progress, chunk_size})
end
acc + 1
else
_ -> acc
{:error, e} ->
Logger.warning(
"Error processing backup item: #{inspect(e)}\n The item is: #{inspect(i)}"
)
acc
_ ->
acc
end
end)
send(caller_pid, {:progress, rem(total, chunk_size)})
with :ok <- :file.pwrite(file, {:eof, -2}, "\n],\n \"totalItems\": #{total}}") do
File.close(file)
end
end
end
defp bookmarks(dir, %{id: user_id} = _user) do
defp bookmarks(dir, %{id: user_id} = _user, caller_pid) do
Bookmark
|> where(user_id: ^user_id)
|> join(:inner, [b], activity in assoc(b, :activity))
|> select([b, a], %{id: b.id, object: fragment("(?)->>'object'", a.data)})
|> write(dir, "bookmarks", fn a -> {:ok, a.object} end)
|> write(dir, "bookmarks", fn a -> {:ok, a.object} end, caller_pid)
end
defp likes(dir, user) do
defp likes(dir, user, caller_pid) do
user.ap_id
|> Activity.Queries.by_actor()
|> Activity.Queries.by_type("Like")
|> select([like], %{id: like.id, object: fragment("(?)->>'object'", like.data)})
|> write(dir, "likes", fn a -> {:ok, a.object} end)
|> write(dir, "likes", fn a -> {:ok, a.object} end, caller_pid)
end
defp statuses(dir, user) do
defp statuses(dir, user, caller_pid) do
opts =
%{}
|> Map.put(:type, ["Create", "Announce"])
@ -249,10 +341,47 @@ defmodule Pleroma.User.Backup do
]
|> Enum.concat()
|> ActivityPub.fetch_activities_query(opts)
|> write(dir, "outbox", fn a ->
with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do
{:ok, Map.delete(activity, "@context")}
end
end)
|> write(
dir,
"outbox",
fn a ->
with {:ok, activity} <- Transmogrifier.prepare_outgoing(a.data) do
{:ok, Map.delete(activity, "@context")}
end
end,
caller_pid
)
end
end
defmodule Pleroma.User.Backup.ProcessorAPI do
@callback do_process(%Pleroma.User.Backup{}, pid()) ::
{:ok, %Pleroma.User.Backup{}} | {:error, any()}
end
defmodule Pleroma.User.Backup.Processor do
@behaviour Pleroma.User.Backup.ProcessorAPI
alias Pleroma.Repo
alias Pleroma.User.Backup
import Ecto.Changeset
@impl true
def do_process(backup, current_pid) do
with {:ok, zip_file} <- Backup.export(backup, current_pid),
{:ok, %{size: size}} <- File.stat(zip_file),
{:ok, _upload} <- Backup.upload(backup, zip_file) do
backup
|> cast(
%{
file_size: size,
processed: true,
state: :complete
},
[:file_size, :processed, :state]
)
|> Repo.update()
end
end
end
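
Backup processing now runs in a supervised task that reports back to the waiting process with {:progress, n} messages; wait_backup/3 otherwise relies on the task monitor's :DOWN message and a configurable timeout. A hedged sketch of the relevant settings (values invented) and of a stub module satisfying the ProcessorAPI behaviour:

config :pleroma, Pleroma.User.Backup,
  # fail the backup after this many ms without a :progress message
  process_wait_time: 30_000,
  # items per chunk, which is also the granularity of progress reports
  process_chunk_size: 100

defmodule MyInstance.NoopBackupProcessor do
  @behaviour Pleroma.User.Backup.ProcessorAPI

  # Report a single processed item and finish immediately.
  @impl true
  def do_process(backup, caller_pid) do
    send(caller_pid, {:progress, 1})
    {:ok, backup}
  end
end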

View file

@ -29,6 +29,7 @@ defmodule Pleroma.User.Query do
import Ecto.Query
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
alias Pleroma.Config
alias Pleroma.FollowingRelationship
alias Pleroma.User
@ -49,6 +50,7 @@ defmodule Pleroma.User.Query do
is_suggested: boolean(),
is_discoverable: boolean(),
super_users: boolean(),
is_privileged: atom(),
invisible: boolean(),
internal: boolean(),
followers: User.t(),
@ -136,6 +138,43 @@ defmodule Pleroma.User.Query do
)
end
defp compose_query({:is_privileged, privilege}, query) do
moderator_privileged = privilege in Config.get([:instance, :moderator_privileges])
admin_privileged = privilege in Config.get([:instance, :admin_privileges])
query = compose_query({:active, true}, query)
query = compose_query({:local, true}, query)
case {admin_privileged, moderator_privileged} do
{false, false} ->
where(
query,
false
)
{true, true} ->
where(
query,
[u],
u.is_admin or u.is_moderator
)
{true, false} ->
where(
query,
[u],
u.is_admin
)
{false, true} ->
where(
query,
[u],
u.is_moderator
)
end
end
defp compose_query({:local, _}, query), do: location_query(query, true)
defp compose_query({:external, _}, query), do: location_query(query, false)

View file

@ -94,6 +94,7 @@ defmodule Pleroma.User.Search do
|> subquery()
|> order_by(desc: :search_rank)
|> maybe_restrict_local(for_user)
|> filter_deactivated_users()
end
defp select_top_users(query, top_user_ids) do
@ -166,6 +167,10 @@ defmodule Pleroma.User.Search do
from(q in query, where: q.actor_type != "Application")
end
defp filter_deactivated_users(query) do
from(q in query, where: q.is_active == true)
end
defp filter_blocked_user(query, %User{} = blocker) do
query
|> join(:left, [u], b in Pleroma.UserRelationship,

View file

@ -18,16 +18,17 @@ defmodule Pleroma.UserRelationship do
belongs_to(:source, User, type: FlakeId.Ecto.CompatType)
belongs_to(:target, User, type: FlakeId.Ecto.CompatType)
field(:relationship_type, Pleroma.UserRelationship.Type)
field(:expires_at, :utc_datetime)
timestamps(updated_at: false)
end
for relationship_type <- Keyword.keys(Pleroma.UserRelationship.Type.__enum_map__()) do
# `def create_block/2`, `def create_mute/2`, `def create_reblog_mute/2`,
# `def create_notification_mute/2`, `def create_inverse_subscription/2`,
# `def endorsement/2`
def unquote(:"create_#{relationship_type}")(source, target),
do: create(unquote(relationship_type), source, target)
# `def create_block/3`, `def create_mute/3`, `def create_reblog_mute/3`,
# `def create_notification_mute/3`, `def create_inverse_subscription/3`,
# `def endorsement/3`
def unquote(:"create_#{relationship_type}")(source, target, expires_at \\ nil),
do: create(unquote(relationship_type), source, target, expires_at)
# `def delete_block/2`, `def delete_mute/2`, `def delete_reblog_mute/2`,
# `def delete_notification_mute/2`, `def delete_inverse_subscription/2`,
@ -37,9 +38,15 @@ defmodule Pleroma.UserRelationship do
# `def block_exists?/2`, `def mute_exists?/2`, `def reblog_mute_exists?/2`,
# `def notification_mute_exists?/2`, `def inverse_subscription_exists?/2`,
# `def inverse_endorsement?/2`
# `def inverse_endorsement_exists?/2`
def unquote(:"#{relationship_type}_exists?")(source, target),
do: exists?(unquote(relationship_type), source, target)
# `def get_block_expire_date/2`, `def get_mute_expire_date/2`,
# `def get_reblog_mute_expire_date/2`, `def get_notification_mute_expire_date/2`,
# `def get_inverse_subscription_expire_date/2`, `def get_inverse_endorsement_expire_date/2`
def unquote(:"get_#{relationship_type}_expire_date")(source, target),
do: get_expire_date(unquote(relationship_type), source, target)
end
def user_relationship_types, do: Keyword.keys(user_relationship_mappings())
@ -48,7 +55,7 @@ defmodule Pleroma.UserRelationship do
def changeset(%UserRelationship{} = user_relationship, params \\ %{}) do
user_relationship
|> cast(params, [:relationship_type, :source_id, :target_id])
|> cast(params, [:relationship_type, :source_id, :target_id, :expires_at])
|> validate_required([:relationship_type, :source_id, :target_id])
|> unique_constraint(:relationship_type,
name: :user_relationships_source_id_relationship_type_target_id_index
@ -62,16 +69,31 @@ defmodule Pleroma.UserRelationship do
|> Repo.exists?()
end
def create(relationship_type, %User{} = source, %User{} = target) do
def get_expire_date(relationship_type, %User{} = source, %User{} = target) do
%UserRelationship{expires_at: expires_at} =
UserRelationship
|> where(
relationship_type: ^relationship_type,
source_id: ^source.id,
target_id: ^target.id
)
|> Repo.one!()
expires_at
end
def create(relationship_type, %User{} = source, %User{} = target, expires_at \\ nil) do
%UserRelationship{}
|> changeset(%{
relationship_type: relationship_type,
source_id: source.id,
target_id: target.id
target_id: target.id,
expires_at: expires_at
})
|> Repo.insert(
on_conflict: {:replace_all_except, [:id]},
conflict_target: [:source_id, :relationship_type, :target_id]
on_conflict: {:replace_all_except, [:id, :inserted_at]},
conflict_target: [:source_id, :relationship_type, :target_id],
returning: true
)
end
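
Together with the User.mute/3 change earlier in this diff, the new expires_at column makes a time-limited relationship a single row plus a scheduled worker. A usage sketch (duration is in seconds):

# Mute for one hour:
{:ok, _relationships} = Pleroma.User.mute(muter, mutee, %{duration: 3600})

# The expiry can be read back through the generated helper:
Pleroma.UserRelationship.get_mute_expire_date(muter, mutee)
#=> a DateTime roughly one hour from now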

View file

@ -136,7 +136,7 @@ defmodule Pleroma.Web do
namespace: Pleroma.Web
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
import Phoenix.Controller, only: [get_csrf_token: 0, view_module: 1]
import Pleroma.Web.ErrorHelpers
import Pleroma.Web.Gettext

View file

@ -96,7 +96,18 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
defp increase_replies_count_if_reply(_create_data), do: :noop
@object_types ~w[ChatMessage Question Answer Audio Video Event Article Note Page]
defp increase_quotes_count_if_quote(%{
"object" => %{"quoteUrl" => quote_ap_id} = object,
"type" => "Create"
}) do
if is_public?(object) do
Object.increase_quotes_count(quote_ap_id)
end
end
defp increase_quotes_count_if_quote(_create_data), do: :noop
@object_types ~w[ChatMessage Question Answer Audio Video Image Event Article Note Page]
@impl true
def persist(%{"type" => type} = object, meta) when type in @object_types do
with {:ok, object} <- Object.create(object) do
@ -140,6 +151,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
end)
# Add local posts to search index
if local, do: Pleroma.Search.add_to_index(activity)
{:ok, activity}
else
%Activity{} = activity ->
@ -190,7 +204,16 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
def notify_and_stream(activity) do
Notification.create_notifications(activity)
conversation = create_or_bump_conversation(activity, activity.actor)
original_activity =
case activity do
%{data: %{"type" => "Update"}, object: %{data: %{"id" => id}}} ->
Activity.get_create_by_object_ap_id_with_object(id)
_ ->
activity
end
conversation = create_or_bump_conversation(original_activity, original_activity.actor)
participations = get_participations(conversation)
stream_out(activity)
stream_out_participations(participations)
@ -256,7 +279,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
@impl true
def stream_out(%Activity{data: %{"type" => data_type}} = activity)
when data_type in ["Create", "Announce", "Delete"] do
when data_type in ["Create", "Announce", "Delete", "Update"] do
activity
|> Topics.get_activity_topics()
|> Streamer.stream(activity)
@ -290,6 +313,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
with {:ok, activity} <- insert(create_data, local, fake),
{:fake, false, activity} <- {:fake, fake, activity},
_ <- increase_replies_count_if_reply(create_data),
_ <- increase_quotes_count_if_quote(create_data),
{:quick_insert, false, activity} <- {:quick_insert, quick_insert?, activity},
{:ok, _actor} <- increase_note_count_if_public(actor, activity),
{:ok, _actor} <- update_last_status_at_if_public(actor, activity),
@ -392,11 +416,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
_ <- notify_and_stream(activity),
:ok <-
maybe_federate(stripped_activity) do
User.all_superusers()
User.all_users_with_privilege(:reports_manage_reports)
|> Enum.filter(fn user -> user.ap_id != actor end)
|> Enum.filter(fn user -> not is_nil(user.email) end)
|> Enum.each(fn superuser ->
superuser
|> Enum.each(fn privileged_user ->
privileged_user
|> Pleroma.Emails.AdminEmail.report(actor, account, statuses, content)
|> Pleroma.Emails.Mailer.deliver_async()
end)
@ -413,7 +437,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
"type" => "Move",
"actor" => origin.ap_id,
"object" => origin.ap_id,
"target" => target.ap_id
"target" => target.ap_id,
"to" => [origin.follower_address]
}
with true <- origin.ap_id in target.also_known_as,
@ -445,6 +470,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|> maybe_preload_objects(opts)
|> maybe_preload_bookmarks(opts)
|> maybe_set_thread_muted_field(opts)
|> restrict_unauthenticated(opts[:user])
|> restrict_blocked(opts)
|> restrict_blockers_visibility(opts)
|> restrict_recipients(recipients, opts[:user])
@ -501,9 +527,18 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
@spec fetch_public_or_unlisted_activities(map(), Pagination.type()) :: [Activity.t()]
def fetch_public_or_unlisted_activities(opts \\ %{}, pagination \\ :keyset) do
includes_local_public = Map.get(opts, :includes_local_public, false)
opts = Map.delete(opts, :user)
[Constants.as_public()]
intended_recipients =
if includes_local_public do
[Constants.as_public(), as_local_public()]
else
[Constants.as_public()]
end
intended_recipients
|> fetch_activities_query(opts)
|> restrict_unlisted(opts)
|> fetch_paginated_optimized(opts, pagination)
@ -603,9 +638,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
do: query
defp restrict_thread_visibility(query, %{user: %User{ap_id: ap_id}}, _) do
local_public = as_local_public()
from(
a in query,
where: fragment("thread_visibility(?, (?)->>'id') = true", ^ap_id, a.data)
where: fragment("thread_visibility(?, (?)->>'id', ?) = true", ^ap_id, a.data, ^local_public)
)
end
@ -692,8 +729,12 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
defp user_activities_recipients(%{godmode: true}), do: []
defp user_activities_recipients(%{reading_user: reading_user}) do
if reading_user do
[Constants.as_public(), reading_user.ap_id | User.following(reading_user)]
if not is_nil(reading_user) and reading_user.local do
[
Constants.as_public(),
as_local_public(),
reading_user.ap_id | User.following(reading_user)
]
else
[Constants.as_public()]
end
@ -1134,8 +1175,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
[activity, object: o] in query,
where:
fragment(
"(?)->>'type' = 'Create' and coalesce((?)->'object'->>'id', (?)->>'object') = any (?)",
activity.data,
"(?)->>'type' = 'Create' and associated_object_id((?)) = any (?)",
activity.data,
activity.data,
^ids
@ -1191,6 +1231,35 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
defp restrict_filtered(query, _), do: query
defp restrict_unauthenticated(query, nil) do
local = Config.restrict_unauthenticated_access?(:activities, :local)
remote = Config.restrict_unauthenticated_access?(:activities, :remote)
cond do
local and remote ->
from(activity in query, where: false)
local ->
from(activity in query, where: activity.local == false)
remote ->
from(activity in query, where: activity.local == true)
true ->
query
end
end
defp restrict_unauthenticated(query, _), do: query
defp restrict_quote_url(query, %{quote_url: quote_url}) do
from([_activity, object] in query,
where: fragment("(?)->'quoteUrl' = ?", object.data, ^quote_url)
)
end
defp restrict_quote_url(query, _), do: query
defp restrict_rule(query, %{rule_id: rule_id}) do
from(
activity in query,
@ -1224,15 +1293,15 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
end
end
defp exclude_invisible_actors(query, %{type: "Flag"}), do: query
defp exclude_invisible_actors(query, %{invisible_actors: true}), do: query
defp exclude_invisible_actors(query, _opts) do
invisible_ap_ids =
User.Query.build(%{invisible: true, select: [:ap_id]})
|> Repo.all()
|> Enum.map(fn %{ap_id: ap_id} -> ap_id end)
from([activity] in query, where: activity.actor not in ^invisible_ap_ids)
query
|> join(:inner, [activity], u in User,
as: :u,
on: activity.actor == u.ap_id and u.invisible == false
)
end
defp exclude_id(query, %{exclude_id: id}) when is_binary(id) do
@ -1363,7 +1432,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|> restrict_announce_object_actor(opts)
|> restrict_filtered(opts)
|> restrict_rule(opts)
|> Activity.restrict_deactivated_users()
|> restrict_quote_url(opts)
|> maybe_restrict_deactivated_users(opts)
|> exclude_poll_votes(opts)
|> exclude_chat_messages(opts)
|> exclude_invisible_actors(opts)
@ -1439,13 +1509,22 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
@spec upload(Upload.source(), keyword()) :: {:ok, Object.t()} | {:error, any()}
def upload(file, opts \\ []) do
with {:ok, data} <- Upload.store(file, opts) do
with {:ok, data} <- Upload.store(sanitize_upload_file(file), opts) do
obj_data = Maps.put_if_present(data, "actor", opts[:actor])
Repo.insert(%Object{data: obj_data})
end
end
defp sanitize_upload_file(%Plug.Upload{filename: filename} = upload) when is_binary(filename) do
%Plug.Upload{
upload
| filename: Path.basename(filename)
}
end
defp sanitize_upload_file(upload), do: upload
@spec get_actor_url(any()) :: binary() | nil
defp get_actor_url(url) when is_binary(url), do: url
defp get_actor_url(%{"href" => href}) when is_binary(href), do: href
@ -1468,7 +1547,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image()
defp normalize_image(_), do: nil
defp object_to_user_data(data) do
defp object_to_user_data(data, additional) do
fields =
data
|> Map.get("attachment", [])
@ -1500,15 +1579,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
public_key =
if is_map(data["publicKey"]) && is_binary(data["publicKey"]["publicKeyPem"]) do
data["publicKey"]["publicKeyPem"]
else
nil
end
shared_inbox =
if is_map(data["endpoints"]) && is_binary(data["endpoints"]["sharedInbox"]) do
data["endpoints"]["sharedInbox"]
else
nil
end
birthday =
@ -1517,16 +1592,17 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
{:ok, date} -> date
{:error, _} -> nil
end
else
nil
end
show_birthday = !!birthday
user_data = %{
# if WebFinger request was already done, we probably have acct, otherwise
# we request WebFinger here
nickname = additional[:nickname_from_acct] || generate_nickname(data)
%{
ap_id: data["id"],
uri: get_actor_url(data["url"]),
ap_enabled: true,
banner: normalize_image(data["image"]),
fields: fields,
emoji: emojis,
@ -1545,23 +1621,29 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
inbox: data["inbox"],
shared_inbox: shared_inbox,
accepts_chat_messages: accepts_chat_messages,
pinned_objects: pinned_objects,
birthday: birthday,
show_birthday: show_birthday
show_birthday: show_birthday,
pinned_objects: pinned_objects,
nickname: nickname
}
end
# nickname can be nil because of virtual actors
if data["preferredUsername"] do
Map.put(
user_data,
:nickname,
"#{data["preferredUsername"]}@#{URI.parse(data["id"]).host}"
)
defp generate_nickname(%{"preferredUsername" => username} = data) when is_binary(username) do
generated = "#{username}@#{URI.parse(data["id"]).host}"
if Config.get([WebFinger, :update_nickname_on_user_fetch]) do
case WebFinger.finger(generated) do
{:ok, %{"subject" => "acct:" <> acct}} -> acct
_ -> generated
end
else
Map.put(user_data, :nickname, nil)
generated
end
end
# nickname can be nil because of virtual actors
defp generate_nickname(_), do: nil
def fetch_follow_information_for_user(user) do
with {:ok, following_data} <-
Fetcher.fetch_and_contain_remote_object_from_id(user.following_address),
@ -1633,17 +1715,17 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
defp collection_private(_data), do: {:ok, true}
def user_data_from_user_object(data) do
def user_data_from_user_object(data, additional \\ []) do
with {:ok, data} <- MRF.filter(data) do
{:ok, object_to_user_data(data)}
{:ok, object_to_user_data(data, additional)}
else
e -> {:error, e}
end
end
def fetch_and_prepare_user_from_ap_id(ap_id) do
defp fetch_and_prepare_user_from_ap_id(ap_id, additional) do
with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id),
{:ok, data} <- user_data_from_user_object(data) do
{:ok, data} <- user_data_from_user_object(data, additional) do
{:ok, maybe_update_follow_information(data)}
else
# If this has been deleted, only log a debug and not an error
@ -1694,6 +1776,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
end)
end
def pin_data_from_featured_collection(obj) do
Logger.error("Could not parse featured collection #{inspect(obj)}")
%{}
end
def fetch_and_prepare_featured_from_ap_id(nil) do
{:ok, %{}}
end
@ -1721,34 +1808,31 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
end
end
def make_user_from_ap_id(ap_id) do
def make_user_from_ap_id(ap_id, additional \\ []) do
user = User.get_cached_by_ap_id(ap_id)
if user && !User.ap_enabled?(user) do
Transmogrifier.upgrade_user_from_ap_id(ap_id)
else
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do
{:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
{:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
if user do
user
|> User.remote_user_changeset(data)
|> User.update_and_set_cache()
else
maybe_handle_clashing_nickname(data)
if user do
user
|> User.remote_user_changeset(data)
|> User.update_and_set_cache()
else
maybe_handle_clashing_nickname(data)
data
|> User.remote_user_changeset()
|> Repo.insert()
|> User.set_cache()
end
data
|> User.remote_user_changeset()
|> Repo.insert()
|> User.set_cache()
end
end
end
def make_user_from_nickname(nickname) do
with {:ok, %{"ap_id" => ap_id}} when not is_nil(ap_id) <- WebFinger.finger(nickname) do
make_user_from_ap_id(ap_id)
with {:ok, %{"ap_id" => ap_id, "subject" => "acct:" <> acct}} when not is_nil(ap_id) <-
WebFinger.finger(nickname) do
make_user_from_ap_id(ap_id, nickname_from_acct: acct)
else
_e -> {:error, "No AP id in WebFinger"}
end
@ -1770,4 +1854,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|> restrict_visibility(%{visibility: "direct"})
|> order_by([activity], asc: activity.id)
end
defp maybe_restrict_deactivated_users(activity, %{type: "Flag"}), do: activity
defp maybe_restrict_deactivated_users(activity, _opts),
do: Activity.restrict_deactivated_users(activity)
end
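
restrict_unauthenticated/2 above filters local and/or remote activities out of unauthenticated queries based on the :restrict_unauthenticated config group. A hedged sketch (this group also carries other sections such as timelines and profiles; exact keys depend on the release):

# Hide remote statuses from logged-out clients while keeping local ones visible:
config :pleroma, :restrict_unauthenticated,
  activities: %{local: false, remote: true}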

View file

@ -66,8 +66,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
end
def user(conn, %{"nickname" => nickname}) do
with %User{local: true} = user <- User.get_cached_by_nickname(nickname),
{:ok, user} <- User.ensure_keys_present(user) do
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
@ -174,7 +173,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
def following(%{assigns: %{user: for_user}} = conn, %{"nickname" => nickname, "page" => page}) do
with %User{} = user <- User.get_cached_by_nickname(nickname),
{user, for_user} <- ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user),
{:show_follows, true} <-
{:show_follows, (for_user && for_user == user) || !user.hide_follows} do
{page, _} = Integer.parse(page)
@ -192,8 +190,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
end
def following(%{assigns: %{user: for_user}} = conn, %{"nickname" => nickname}) do
with %User{} = user <- User.get_cached_by_nickname(nickname),
{user, for_user} <- ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user) do
with %User{} = user <- User.get_cached_by_nickname(nickname) do
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
@ -213,7 +210,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
def followers(%{assigns: %{user: for_user}} = conn, %{"nickname" => nickname, "page" => page}) do
with %User{} = user <- User.get_cached_by_nickname(nickname),
{user, for_user} <- ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user),
{:show_followers, true} <-
{:show_followers, (for_user && for_user == user) || !user.hide_followers} do
{page, _} = Integer.parse(page)
@ -231,8 +227,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
end
def followers(%{assigns: %{user: for_user}} = conn, %{"nickname" => nickname}) do
with %User{} = user <- User.get_cached_by_nickname(nickname),
{user, for_user} <- ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user) do
with %User{} = user <- User.get_cached_by_nickname(nickname) do
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
@ -245,8 +240,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
%{"nickname" => nickname, "page" => page?} = params
)
when page? in [true, "true"] do
with %User{} = user <- User.get_cached_by_nickname(nickname),
{:ok, user} <- User.ensure_keys_present(user) do
with %User{} = user <- User.get_cached_by_nickname(nickname) do
# "include_poll_votes" is a hack because postgres generates inefficient
# queries when filtering by 'Answer', poll votes will be hidden by the
# visibility filter in this case anyway
@ -270,8 +264,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
end
def outbox(conn, %{"nickname" => nickname}) do
with %User{} = user <- User.get_cached_by_nickname(nickname),
{:ok, user} <- User.ensure_keys_present(user) do
with %User{} = user <- User.get_cached_by_nickname(nickname) do
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
@ -280,12 +273,17 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
end
def inbox(%{assigns: %{valid_signature: true}} = conn, %{"nickname" => nickname} = params) do
with %User{} = recipient <- User.get_cached_by_nickname(nickname),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(params["actor"]),
with %User{is_active: true} = recipient <- User.get_cached_by_nickname(nickname),
{:ok, %User{is_active: true} = actor} <- User.get_or_fetch_by_ap_id(params["actor"]),
true <- Utils.recipient_in_message(recipient, actor, params),
params <- Utils.maybe_splice_recipient(recipient.ap_id, params) do
Federator.incoming_ap_doc(params)
json(conn, "ok")
else
_ ->
conn
|> put_status(:bad_request)
|> json("Invalid request.")
end
end
@ -294,10 +292,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
json(conn, "ok")
end
def inbox(%{assigns: %{valid_signature: false}} = conn, _params) do
conn
|> put_status(:bad_request)
|> json("Invalid HTTP Signature")
def inbox(%{assigns: %{valid_signature: false}, req_headers: req_headers} = conn, params) do
Federator.incoming_ap_doc(%{req_headers: req_headers, params: params})
json(conn, "ok")
end
# POST /relay/inbox -or- POST /internal/fetch/inbox
@ -328,14 +325,10 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
end
defp represent_service_actor(%User{} = user, conn) do
with {:ok, user} <- User.ensure_keys_present(user) do
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
|> render("user.json", %{user: user})
else
nil -> {:error, :not_found}
end
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
|> render("user.json", %{user: user})
end
defp represent_service_actor(nil, _), do: {:error, :not_found}
@ -388,12 +381,10 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
def read_inbox(%{assigns: %{user: %User{nickname: nickname} = user}} = conn, %{
"nickname" => nickname
}) do
with {:ok, user} <- User.ensure_keys_present(user) do
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
|> render("activity_collection.json", %{iri: "#{user.ap_id}/inbox"})
end
conn
|> put_resp_content_type("application/activity+json")
|> put_view(UserView)
|> render("activity_collection.json", %{iri: "#{user.ap_id}/inbox"})
end
def read_inbox(%{assigns: %{user: %User{nickname: as_nickname}}} = conn, %{
@ -489,7 +480,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
|> json(message)
e ->
Logger.warn(fn -> "AP C2S: #{inspect(e)}" end)
Logger.warning(fn -> "AP C2S: #{inspect(e)}" end)
conn
|> put_status(:bad_request)
@ -530,19 +521,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
conn
end
defp ensure_user_keys_present_and_maybe_refresh_for_user(user, for_user) do
{:ok, new_user} = User.ensure_keys_present(user)
for_user =
if new_user != user and match?(%User{}, for_user) do
User.get_cached_by_nickname(for_user.nickname)
else
for_user
end
{new_user, for_user}
end
def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} = data) do
with {:ok, object} <-
ActivityPub.upload(

View file

@ -16,6 +16,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.CommonAPI.ActivityDraft
alias Pleroma.Web.Endpoint
require Pleroma.Constants
@ -54,13 +55,87 @@ defmodule Pleroma.Web.ActivityPub.Builder do
{:ok, data, []}
end
defp unicode_emoji_react(_object, data, emoji) do
data
|> Map.put("content", emoji)
|> Map.put("type", "EmojiReact")
end
defp add_emoji_content(data, emoji, url) do
tag = [
%{
"id" => url,
"type" => "Emoji",
"name" => Emoji.maybe_quote(emoji),
"icon" => %{
"type" => "Image",
"url" => url
}
}
]
data
|> Map.put("content", Emoji.maybe_quote(emoji))
|> Map.put("type", "EmojiReact")
|> Map.put("tag", tag)
end
defp remote_custom_emoji_react(
%{data: %{"reactions" => existing_reactions}},
data,
emoji
) do
[emoji_code, instance] = String.split(Emoji.maybe_strip_name(emoji), "@")
matching_reaction =
Enum.find(
existing_reactions,
fn [name, _, url] ->
if url != nil do
url = URI.parse(url)
url.host == instance && name == emoji_code
end
end
)
if matching_reaction do
[name, _, url] = matching_reaction
add_emoji_content(data, name, url)
else
{:error, "Could not react"}
end
end
defp remote_custom_emoji_react(_object, _data, _emoji) do
{:error, "Could not react"}
end
defp local_custom_emoji_react(data, emoji) do
with %{file: path} = emojo <- Emoji.get(emoji) do
url = "#{Endpoint.url()}#{path}"
add_emoji_content(data, emojo.code, url)
else
_ -> {:error, "Emoji does not exist"}
end
end
defp custom_emoji_react(object, data, emoji) do
if String.contains?(emoji, "@") do
remote_custom_emoji_react(object, data, emoji)
else
local_custom_emoji_react(data, emoji)
end
end
@spec emoji_react(User.t(), Object.t(), String.t()) :: {:ok, map(), keyword()}
def emoji_react(actor, object, emoji) do
with {:ok, data, meta} <- object_action(actor, object) do
data =
data
|> Map.put("content", emoji)
|> Map.put("type", "EmojiReact")
if Emoji.is_unicode_emoji?(emoji) do
unicode_emoji_react(object, data, emoji)
else
custom_emoji_react(object, data, emoji)
end
{:ok, data, meta}
end
@ -142,6 +217,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
"tag" => Keyword.values(draft.tags) |> Enum.uniq()
}
|> add_in_reply_to(draft.in_reply_to)
|> add_quote(draft.quote_post)
|> Map.merge(draft.extra)
{:ok, data, []}
@ -157,6 +233,16 @@ defmodule Pleroma.Web.ActivityPub.Builder do
end
end
defp add_quote(object, nil), do: object
defp add_quote(object, quote_post) do
with %Object{} = quote_object <- Object.normalize(quote_post, fetch: false) do
Map.put(object, "quoteUrl", quote_object.data["id"])
else
_ -> object
end
end
def chat_message(actor, recipient, content, opts \\ []) do
basic = %{
"id" => Utils.generate_object_id(),
@ -218,10 +304,16 @@ defmodule Pleroma.Web.ActivityPub.Builder do
end
end
# Retricted to user updates for now, always public
@spec update(User.t(), Object.t()) :: {:ok, map(), keyword()}
def update(actor, object) do
to = [Pleroma.Constants.as_public(), actor.follower_address]
{to, cc} =
if object["type"] in Pleroma.Constants.actor_types() do
# User updates, always public
{[Pleroma.Constants.as_public(), actor.follower_address], []}
else
# Status updates, follow the recipients in the object
{object["to"] || [], object["cc"] || []}
end
{:ok,
%{
@ -229,7 +321,8 @@ defmodule Pleroma.Web.ActivityPub.Builder do
"type" => "Update",
"actor" => actor.ap_id,
"object" => object,
"to" => to
"to" => to,
"cc" => cc
}, []}
end

View file

@ -53,10 +53,55 @@ defmodule Pleroma.Web.ActivityPub.MRF do
@required_description_keys [:key, :related_policy]
def filter_one(policy, message) do
Code.ensure_loaded(policy)
should_plug_history? =
if function_exported?(policy, :history_awareness, 0) do
policy.history_awareness()
else
:manual
end
|> Kernel.==(:auto)
if not should_plug_history? do
policy.filter(message)
else
main_result = policy.filter(message)
with {_, {:ok, main_message}} <- {:main, main_result},
{_,
%{
"formerRepresentations" => %{
"orderedItems" => [_ | _]
}
}} = {_, object} <- {:object, message["object"]},
{_, {:ok, new_history}} <-
{:history,
Pleroma.Object.Updater.for_each_history_item(
object["formerRepresentations"],
object,
fn item ->
with {:ok, filtered} <- policy.filter(Map.put(message, "object", item)) do
{:ok, filtered["object"]}
else
e -> e
end
end
)} do
{:ok, put_in(main_message, ["object", "formerRepresentations"], new_history)}
else
{:main, _} -> main_result
{:object, _} -> main_result
{:history, e} -> e
end
end
end
def filter(policies, %{} = message) do
policies
|> Enum.reduce({:ok, message}, fn
policy, {:ok, message} -> policy.filter(message)
policy, {:ok, message} -> filter_one(policy, message)
_, error -> error
end)
end
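filter_one/2 above only re-runs a policy over past revisions when the policy opts into automatic history awareness and the object actually carries them; the shape it looks for is roughly the following (IDs and content are placeholders):

%{
  "type" => "Update",
  "object" => %{
    "id" => "https://remote.example/objects/1",        # hypothetical object id
    "content" => "current revision",
    "formerRepresentations" => %{
      "type" => "OrderedCollection",                   # assumed collection type
      "orderedItems" => [
        %{"content" => "previous revision"}            # each item is filtered like the object itself
      ]
    }
  }
}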
@ -145,6 +190,8 @@ defmodule Pleroma.Web.ActivityPub.MRF do
def config_descriptions(policies) do
Enum.reduce(policies, @mrf_config_descriptions, fn policy, acc ->
Code.ensure_loaded(policy)
if function_exported?(policy, :config_description, 0) do
description =
@default_description
@ -156,7 +203,7 @@ defmodule Pleroma.Web.ActivityPub.MRF do
if Enum.all?(@required_description_keys, &Map.has_key?(description, &1)) do
[description | acc]
else
Logger.warn(
Logger.warning(
"#{policy} config description doesn't have one or all required keys #{inspect(@required_description_keys)}"
)

View file

@ -9,6 +9,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy do
require Logger
@impl true
def history_awareness, do: :auto
# has the user successfully posted before?
defp old_user?(%User{} = u) do
u.note_count > 0 || u.follower_count > 0

View file

@ -0,0 +1,281 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.EmojiPolicy do
require Pleroma.Constants
alias Pleroma.Object.Updater
alias Pleroma.Web.ActivityPub.MRF.Utils
@moduledoc "Reject or force-unlisted emojis with certain URLs or names"
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
defp config_remove_url do
Pleroma.Config.get([:mrf_emoji, :remove_url], [])
end
defp config_remove_shortcode do
Pleroma.Config.get([:mrf_emoji, :remove_shortcode], [])
end
defp config_unlist_url do
Pleroma.Config.get([:mrf_emoji, :federated_timeline_removal_url], [])
end
defp config_unlist_shortcode do
Pleroma.Config.get([:mrf_emoji, :federated_timeline_removal_shortcode], [])
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def history_awareness, do: :manual
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(%{"type" => type, "object" => %{"type" => objtype} = object} = message)
when type in ["Create", "Update"] and objtype in Pleroma.Constants.status_object_types() do
with {:ok, object} <-
Updater.do_with_history(object, fn object ->
{:ok, process_remove(object, :url, config_remove_url())}
end),
{:ok, object} <-
Updater.do_with_history(object, fn object ->
{:ok, process_remove(object, :shortcode, config_remove_shortcode())}
end),
activity <- Map.put(message, "object", object),
activity <- maybe_delist(activity) do
{:ok, activity}
end
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(%{"type" => type} = object) when type in Pleroma.Constants.actor_types() do
with object <- process_remove(object, :url, config_remove_url()),
object <- process_remove(object, :shortcode, config_remove_shortcode()) do
{:ok, object}
end
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(%{"type" => "EmojiReact"} = object) do
with {:ok, _} <-
matched_emoji_checker(config_remove_url(), config_remove_shortcode()).(object) do
{:ok, object}
else
_ ->
{:reject, "[EmojiPolicy] Rejected for having disallowed emoji"}
end
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(message) do
{:ok, message}
end
defp match_string?(string, pattern) when is_binary(pattern) do
string == pattern
end
defp match_string?(string, %Regex{} = pattern) do
String.match?(string, pattern)
end
defp match_any?(string, patterns) do
Enum.any?(patterns, &match_string?(string, &1))
end
defp url_from_tag(%{"icon" => %{"url" => url}}), do: url
defp url_from_tag(_), do: nil
defp url_from_emoji({_name, url}), do: url
defp shortcode_from_tag(%{"name" => name}) when is_binary(name), do: String.trim(name, ":")
defp shortcode_from_tag(_), do: nil
defp shortcode_from_emoji({name, _url}), do: name
defp process_remove(object, :url, patterns) do
process_remove_impl(object, &url_from_tag/1, &url_from_emoji/1, patterns)
end
defp process_remove(object, :shortcode, patterns) do
process_remove_impl(object, &shortcode_from_tag/1, &shortcode_from_emoji/1, patterns)
end
defp process_remove_impl(object, extract_from_tag, extract_from_emoji, patterns) do
object =
if object["tag"] do
Map.put(
object,
"tag",
Enum.filter(
object["tag"],
fn
%{"type" => "Emoji"} = tag ->
str = extract_from_tag.(tag)
if is_binary(str) do
not match_any?(str, patterns)
else
true
end
_ ->
true
end
)
)
else
object
end
object =
if object["emoji"] do
Map.put(
object,
"emoji",
object["emoji"]
|> Enum.reduce(%{}, fn {name, url} = emoji, acc ->
if not match_any?(extract_from_emoji.(emoji), patterns) do
Map.put(acc, name, url)
else
acc
end
end)
)
else
object
end
object
end
defp matched_emoji_checker(urls, shortcodes) do
fn object ->
if any_emoji_match?(object, &url_from_tag/1, &url_from_emoji/1, urls) or
any_emoji_match?(
object,
&shortcode_from_tag/1,
&shortcode_from_emoji/1,
shortcodes
) do
{:matched, nil}
else
{:ok, %{}}
end
end
end
defp maybe_delist(%{"object" => object, "to" => to, "type" => "Create"} = activity) do
check = matched_emoji_checker(config_unlist_url(), config_unlist_shortcode())
should_delist? = fn object ->
with {:ok, _} <- Pleroma.Object.Updater.do_with_history(object, check) do
false
else
_ -> true
end
end
if Pleroma.Constants.as_public() in to and should_delist?.(object) do
to = List.delete(to, Pleroma.Constants.as_public())
cc = [Pleroma.Constants.as_public() | activity["cc"] || []]
activity
|> Map.put("to", to)
|> Map.put("cc", cc)
else
activity
end
end
defp maybe_delist(activity), do: activity
defp any_emoji_match?(object, extract_from_tag, extract_from_emoji, patterns) do
Kernel.||(
Enum.any?(
object["tag"] || [],
fn
%{"type" => "Emoji"} = tag ->
str = extract_from_tag.(tag)
if is_binary(str) do
match_any?(str, patterns)
else
false
end
_ ->
false
end
),
(object["emoji"] || [])
|> Enum.any?(fn emoji -> match_any?(extract_from_emoji.(emoji), patterns) end)
)
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def describe do
mrf_emoji =
Pleroma.Config.get(:mrf_emoji, [])
|> Enum.map(fn {key, value} ->
{key, Enum.map(value, &Utils.describe_regex_or_string/1)}
end)
|> Enum.into(%{})
{:ok, %{mrf_emoji: mrf_emoji}}
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def config_description do
%{
key: :mrf_emoji,
related_policy: "Pleroma.Web.ActivityPub.MRF.EmojiPolicy",
label: "MRF Emoji",
description:
"Reject or force-unlisted emojis whose URLs or names match a keyword or [Regex](https://hexdocs.pm/elixir/Regex.html).",
children: [
%{
key: :remove_url,
type: {:list, :string},
description: """
A list of patterns which result in emoji whose URL matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
""",
suggestions: ["https://example.org/foo.png", ~r/example.org\/foo/iu]
},
%{
key: :remove_shortcode,
type: {:list, :string},
description: """
A list of patterns which result in emoji whose shortcode matches being removed from the message. This will apply to statuses, emoji reactions, and user profiles.
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
""",
suggestions: ["foo", ~r/foo/iu]
},
%{
key: :federated_timeline_removal_url,
type: {:list, :string},
description: """
A list of patterns which result in messages with emojis whose URLs match being removed from federated timelines (a.k.a. unlisted). This will apply only to statuses.
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
""",
suggestions: ["https://example.org/foo.png", ~r/example.org\/foo/iu]
},
%{
key: :federated_timeline_removal_shortcode,
type: {:list, :string},
description: """
A list of patterns which result in messages with emojis whose shortcodes match being removed from federated timelines (a.k.a. unlisted). This will apply only to statuses.
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
""",
suggestions: ["foo", ~r/foo/iu]
}
]
}
end
end
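As a hedged illustration, enabling this policy might look like the following in config; the policy module goes into the usual MRF policy list, and the domains and shortcodes here are made-up examples:

config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.EmojiPolicy]   # alongside any policies already enabled

config :pleroma, :mrf_emoji,
  remove_url: [~r/malicious\.example/iu],
  remove_shortcode: ["annoying_emoji"],
  federated_timeline_removal_url: [],
  federated_timeline_removal_shortcode: [~r/^nsfw_/iu]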

View file

@ -10,6 +10,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.EnsureRePrepended do
@reply_prefix Regex.compile!("^re:[[:space:]]*", [:caseless])
def history_awareness, do: :auto
def filter_by_summary(
%{data: %{"summary" => parent_summary}} = _in_reply_to,
%{"summary" => child_summary} = child
@ -27,8 +29,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.EnsureRePrepended do
def filter_by_summary(_in_reply_to, child), do: child
def filter(%{"type" => "Create", "object" => child_object} = object)
when is_map(child_object) do
def filter(%{"type" => type, "object" => child_object} = object)
when type in ["Create", "Update"] and is_map(child_object) do
child =
child_object["inReplyTo"]
|> Object.normalize(fetch: false)

View file

@ -19,7 +19,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do
try_follow(follower, message)
else
nil ->
Logger.warn(
Logger.warning(
"#{__MODULE__} skipped because of missing `:mrf_follow_bot, :follower_nickname` configuration, the :follower_nickname
account does not exist, or the account is not correctly configured as a bot."
)

View file

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.ForceMentionsInContent do
@ -11,6 +11,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.ForceMentionsInContent do
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
@impl true
def history_awareness, do: :auto
defp do_extract({:a, attrs, _}, acc) do
if Enum.find(attrs, fn {name, value} ->
name == "class" && value in ["mention", "u-url mention", "mention u-url"]
@ -74,11 +77,11 @@ defmodule Pleroma.Web.ActivityPub.MRF.ForceMentionsInContent do
@impl true
def filter(
%{
"type" => "Create",
"type" => type,
"object" => %{"type" => "Note", "to" => to, "inReplyTo" => in_reply_to}
} = object
)
when is_list(to) and is_binary(in_reply_to) do
when type in ["Create", "Update"] and is_list(to) and is_binary(in_reply_to) do
# image-only posts from pleroma apparently reach this MRF without the content field
content = object["object"]["content"] || ""
@ -92,11 +95,13 @@ defmodule Pleroma.Web.ActivityPub.MRF.ForceMentionsInContent do
|> Enum.reject(&is_nil/1)
|> sort_replied_user(replied_to_user)
explicitly_mentioned_uris = extract_mention_uris_from_content(content)
explicitly_mentioned_uris =
extract_mention_uris_from_content(content)
|> MapSet.new()
added_mentions =
Enum.reduce(mention_users, "", fn %User{ap_id: uri} = user, acc ->
unless uri in explicitly_mentioned_uris do
Enum.reduce(mention_users, "", fn %User{ap_id: ap_id, uri: uri} = user, acc ->
if MapSet.disjoint?(MapSet.new([ap_id, uri]), explicitly_mentioned_uris) do
acc <> Formatter.mention_from_user(user, %{mentions_format: :compact}) <> " "
else
acc

View file

@ -16,6 +16,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicy do
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
@impl true
def history_awareness, do: :manual
defp check_reject(message, hashtags) do
if Enum.any?(Config.get([:mrf_hashtag, :reject]), fn match -> match in hashtags end) do
{:reject, "[HashtagPolicy] Matches with rejected keyword"}
@ -47,22 +50,46 @@ defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicy do
defp check_ftl_removal(message, _hashtags), do: {:ok, message}
defp check_sensitive(message, hashtags) do
if Enum.any?(Config.get([:mrf_hashtag, :sensitive]), fn match -> match in hashtags end) do
{:ok, Kernel.put_in(message, ["object", "sensitive"], true)}
else
{:ok, message}
end
defp check_sensitive(message) do
{:ok, new_object} =
Object.Updater.do_with_history(message["object"], fn object ->
hashtags = Object.hashtags(%Object{data: object})
if Enum.any?(Config.get([:mrf_hashtag, :sensitive]), fn match -> match in hashtags end) do
{:ok, Map.put(object, "sensitive", true)}
else
{:ok, object}
end
end)
{:ok, Map.put(message, "object", new_object)}
end
@impl true
def filter(%{"type" => "Create", "object" => object} = message) do
hashtags = Object.hashtags(%Object{data: object})
def filter(%{"type" => type, "object" => object} = message) when type in ["Create", "Update"] do
history_items =
with %{"formerRepresentations" => %{"orderedItems" => items}} <- object do
items
else
_ -> []
end
historical_hashtags =
Enum.reduce(history_items, [], fn item, acc ->
acc ++ Object.hashtags(%Object{data: item})
end)
hashtags = Object.hashtags(%Object{data: object}) ++ historical_hashtags
if hashtags != [] do
with {:ok, message} <- check_reject(message, hashtags),
{:ok, message} <- check_ftl_removal(message, hashtags),
{:ok, message} <- check_sensitive(message, hashtags) do
{:ok, message} <-
(if "type" == "Create" do
check_ftl_removal(message, hashtags)
else
{:ok, message}
end),
{:ok, message} <- check_sensitive(message) do
{:ok, message}
end
else

View file

@ -0,0 +1,78 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy do
@moduledoc "Force a quote line into the message content."
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
defp build_inline_quote(template, url) do
quote_line = String.replace(template, "{url}", "<a href=\"#{url}\">#{url}</a>")
"<span class=\"quote-inline\"><br/><br/>#{quote_line}</span>"
end
defp has_inline_quote?(content, quote_url) do
cond do
# Does the quote URL exist in the content?
content =~ quote_url -> true
# Does the content already have a .quote-inline span?
content =~ "<span class=\"quote-inline\">" -> true
# No inline quote found
true -> false
end
end
defp filter_object(%{"quoteUrl" => quote_url} = object) do
content = object["content"] || ""
if has_inline_quote?(content, quote_url) do
object
else
template = Pleroma.Config.get([:mrf_inline_quote, :template])
content =
if String.ends_with?(content, "</p>"),
do:
String.trim_trailing(content, "</p>") <>
build_inline_quote(template, quote_url) <> "</p>",
else: content <> build_inline_quote(template, quote_url)
Map.put(object, "content", content)
end
end
@impl true
def filter(%{"object" => %{"quoteUrl" => _} = object} = activity) do
{:ok, Map.put(activity, "object", filter_object(object))}
end
@impl true
def filter(object), do: {:ok, object}
@impl true
def describe, do: {:ok, %{}}
@impl Pleroma.Web.ActivityPub.MRF.Policy
def history_awareness, do: :auto
@impl true
def config_description do
%{
key: :mrf_inline_quote,
related_policy: "Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy",
label: "MRF Inline Quote Policy",
type: :group,
description: "Force quote url to appear in post content.",
children: [
%{
key: :template,
type: :string,
description:
"The template to append to the post. `{url}` will be replaced with the actual link to the quoted post.",
suggestions: ["<bdi>RT:</bdi> {url}"]
}
]
}
end
end
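A minimal configuration sketch for this policy, reusing the template format from the suggestion above; `{url}` is substituted with a link to the quoted post:

config :pleroma, :mrf_inline_quote,
  template: "<bdi>RT:</bdi> {url}"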

View file

@ -5,6 +5,8 @@
defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do
require Pleroma.Constants
alias Pleroma.Web.ActivityPub.MRF.Utils
@moduledoc "Reject or Word-Replace messages with a keyword or regex"
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
@ -27,24 +29,46 @@ defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do
end
defp check_reject(%{"object" => %{} = object} = message) do
payload = object_payload(object)
with {:ok, _new_object} <-
Pleroma.Object.Updater.do_with_history(object, fn object ->
payload = object_payload(object)
if Enum.any?(Pleroma.Config.get([:mrf_keyword, :reject]), fn pattern ->
string_matches?(payload, pattern)
end) do
{:reject, "[KeywordPolicy] Matches with rejected keyword"}
else
if Enum.any?(Pleroma.Config.get([:mrf_keyword, :reject]), fn pattern ->
string_matches?(payload, pattern)
end) do
{:reject, "[KeywordPolicy] Matches with rejected keyword"}
else
{:ok, message}
end
end) do
{:ok, message}
else
e -> e
end
end
defp check_ftl_removal(%{"to" => to, "object" => %{} = object} = message) do
payload = object_payload(object)
defp check_ftl_removal(%{"type" => "Create", "to" => to, "object" => %{} = object} = message) do
check_keyword = fn object ->
payload = object_payload(object)
if Pleroma.Constants.as_public() in to and
Enum.any?(Pleroma.Config.get([:mrf_keyword, :federated_timeline_removal]), fn pattern ->
if Enum.any?(Pleroma.Config.get([:mrf_keyword, :federated_timeline_removal]), fn pattern ->
string_matches?(payload, pattern)
end) do
{:should_delist, nil}
else
{:ok, %{}}
end
end
should_delist? = fn object ->
with {:ok, _} <- Pleroma.Object.Updater.do_with_history(object, check_keyword) do
false
else
_ -> true
end
end
if Pleroma.Constants.as_public() in to and should_delist?.(object) do
to = List.delete(to, Pleroma.Constants.as_public())
cc = [Pleroma.Constants.as_public() | message["cc"] || []]
@ -59,8 +83,12 @@ defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do
end
end
defp check_ftl_removal(message) do
{:ok, message}
end
defp check_replace(%{"object" => %{} = object} = message) do
object =
replace_kw = fn object ->
["content", "name", "summary"]
|> Enum.filter(fn field -> Map.has_key?(object, field) && object[field] end)
|> Enum.reduce(object, fn field, object ->
@ -73,6 +101,10 @@ defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do
Map.put(object, field, data)
end)
|> (fn object -> {:ok, object} end).()
end
{:ok, object} = Pleroma.Object.Updater.do_with_history(object, replace_kw)
message = Map.put(message, "object", object)
@ -80,7 +112,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do
end
@impl true
def filter(%{"type" => "Create", "object" => %{"content" => _content}} = message) do
def filter(%{"type" => type, "object" => %{"content" => _content}} = message)
when type in ["Create", "Update"] do
with {:ok, message} <- check_reject(message),
{:ok, message} <- check_ftl_removal(message),
{:ok, message} <- check_replace(message) do
@ -97,7 +130,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do
@impl true
def describe do
# This horror is needed to convert regex sigils to strings
mrf_keyword =
Pleroma.Config.get(:mrf_keyword, [])
|> Enum.map(fn {key, value} ->
@ -105,21 +137,12 @@ defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do
Enum.map(value, fn
{pattern, replacement} ->
%{
"pattern" =>
if not is_binary(pattern) do
inspect(pattern)
else
pattern
end,
"pattern" => Utils.describe_regex_or_string(pattern),
"replacement" => replacement
}
pattern ->
if not is_binary(pattern) do
inspect(pattern)
else
pattern
end
Utils.describe_regex_or_string(pattern)
end)}
end)
|> Enum.into(%{})

View file

@ -16,6 +16,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
recv_timeout: 10_000
]
@impl true
def history_awareness, do: :auto
defp prefetch(url) do
# Fetching only proxiable resources
if MediaProxy.enabled?() and MediaProxy.url_proxiable?(url) do
@ -54,10 +57,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
end
@impl true
def filter(
%{"type" => "Create", "object" => %{"attachment" => attachments} = _object} = message
)
when is_list(attachments) and length(attachments) > 0 do
def filter(%{"type" => type, "object" => %{"attachment" => attachments} = _object} = message)
when type in ["Create", "Update"] and is_list(attachments) and length(attachments) > 0 do
preload(message)
{:ok, message}

View file

@ -11,6 +11,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.NoEmptyPolicy do
@impl true
def filter(%{"actor" => actor} = object) do
with true <- is_local?(actor),
true <- is_eligible_type?(object),
true <- is_note?(object),
false <- has_attachment?(object),
true <- only_mentions?(object) do
@ -32,7 +33,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.NoEmptyPolicy do
end
defp has_attachment?(%{
"type" => "Create",
"object" => %{"type" => "Note", "attachment" => attachments}
})
when length(attachments) > 0,
@ -40,7 +40,13 @@ defmodule Pleroma.Web.ActivityPub.MRF.NoEmptyPolicy do
defp has_attachment?(_), do: false
defp only_mentions?(%{"type" => "Create", "object" => %{"type" => "Note", "source" => source}}) do
defp only_mentions?(%{"object" => %{"type" => "Note", "source" => source}}) do
source =
case source do
%{"content" => text} -> text
_ -> source
end
non_mentions =
source |> String.split() |> Enum.filter(&(not String.starts_with?(&1, "@"))) |> length
@ -53,9 +59,12 @@ defmodule Pleroma.Web.ActivityPub.MRF.NoEmptyPolicy do
defp only_mentions?(_), do: false
defp is_note?(%{"type" => "Create", "object" => %{"type" => "Note"}}), do: true
defp is_note?(%{"object" => %{"type" => "Note"}}), do: true
defp is_note?(_), do: false
defp is_eligible_type?(%{"type" => type}) when type in ["Create", "Update"], do: true
defp is_eligible_type?(_), do: false
@impl true
def describe, do: {:ok, %{}}
end
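To make the widened checks above concrete: with is_eligible_type?/1 in place, a local Update whose Note consists only of mentions and has no attachments is now caught as well. A minimal sketch of such a message, with a placeholder actor assumed to be local:

%{
  "type" => "Update",
  "actor" => "https://myinstance.example/users/alice",   # is_local?/1 checks this host
  "object" => %{
    "type" => "Note",
    "source" => %{"content" => "@bob @carol"}            # only mentions, so only_mentions?/1 returns true
  }
}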

View file

@ -6,14 +6,17 @@ defmodule Pleroma.Web.ActivityPub.MRF.NoPlaceholderTextPolicy do
@moduledoc "Ensure no content placeholder is present (such as the dot from mastodon)"
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
@impl true
def history_awareness, do: :auto
@impl true
def filter(
%{
"type" => "Create",
"type" => type,
"object" => %{"content" => content, "attachment" => _} = _child_object
} = object
)
when content in [".", "<p>.</p>"] do
when type in ["Create", "Update"] and content in [".", "<p>.</p>"] do
{:ok, put_in(object, ["object", "content"], "")}
end

View file

@ -9,7 +9,11 @@ defmodule Pleroma.Web.ActivityPub.MRF.NormalizeMarkup do
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
@impl true
def filter(%{"type" => "Create", "object" => child_object} = object) do
def history_awareness, do: :auto
@impl true
def filter(%{"type" => type, "object" => child_object} = object)
when type in ["Create", "Update"] do
scrub_policy = Pleroma.Config.get([:mrf_normalize_markup, :scrub_policy])
content =

View file

@ -131,7 +131,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy do
type: {:list, :atom},
description:
"A list of actions to apply to the post. `:delist` removes the post from public timelines; " <>
"`:strip_followers` removes followers from the ActivityPub recipient list ensuring they won't be delivered to home timelines; " <>
"`:strip_followers` removes followers from the ActivityPub recipient list ensuring they won't be delivered to home timelines, additionally for followers-only it degrades to a direct message; " <>
"`:reject` rejects the message entirely",
suggestions: [:delist, :strip_followers, :reject]
}

View file

@ -12,5 +12,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.Policy do
label: String.t(),
description: String.t()
}
@optional_callbacks config_description: 0
@callback history_awareness() :: :auto | :manual
@optional_callbacks config_description: 0, history_awareness: 0
end
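A minimal sketch of a policy using the new optional callback; the module name and no-op filter are purely illustrative:

defmodule MyInstance.MRF.NoopPolicy do
  @behaviour Pleroma.Web.ActivityPub.MRF.Policy

  @impl true
  def filter(message), do: {:ok, message}

  @impl true
  def describe, do: {:ok, %{}}

  # Optional: ask MRF.filter_one/2 to also run this filter over every
  # entry in "formerRepresentations" and write the results back.
  @impl true
  def history_awareness, do: :auto
end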

View file

@ -0,0 +1,49 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.QuoteToLinkTagPolicy do
@moduledoc "Force a Link tag for posts quoting another post. (may break outgoing federation of quote posts with older Pleroma versions)"
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
require Pleroma.Constants
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(%{"object" => %{"quoteUrl" => _} = object} = activity) do
{:ok, Map.put(activity, "object", filter_object(object))}
end
@impl Pleroma.Web.ActivityPub.MRF.Policy
def filter(object), do: {:ok, object}
@impl Pleroma.Web.ActivityPub.MRF.Policy
def describe, do: {:ok, %{}}
@impl Pleroma.Web.ActivityPub.MRF.Policy
def history_awareness, do: :auto
defp filter_object(%{"quoteUrl" => quote_url} = object) do
tags = object["tag"] || []
if Enum.any?(tags, fn tag ->
CommonFixes.is_object_link_tag(tag) and tag["href"] == quote_url
end) do
object
else
object
|> Map.put(
"tag",
tags ++
[
%{
"type" => "Link",
"mediaType" => Pleroma.Constants.activity_json_canonical_mime_type(),
"href" => quote_url
}
]
)
end
end
end
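For reference, an object passing through filter_object/1 above would gain a tag entry roughly like this; the quoted URL is a placeholder:

%{
  "type" => "Link",
  "mediaType" => Pleroma.Constants.activity_json_canonical_mime_type(),
  "href" => "https://remote.example/objects/123"
}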

View file

@ -40,9 +40,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicy do
defp check_media_removal(
%{host: actor_host} = _actor_info,
%{"type" => "Create", "object" => %{"attachment" => child_attachment}} = object
%{"type" => type, "object" => %{"attachment" => child_attachment}} = object
)
when length(child_attachment) > 0 do
when length(child_attachment) > 0 and type in ["Create", "Update"] do
media_removal =
instance_list(:media_removal)
|> MRF.subdomains_regex()
@ -63,10 +63,11 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicy do
defp check_media_nsfw(
%{host: actor_host} = _actor_info,
%{
"type" => "Create",
"type" => type,
"object" => %{} = _child_object
} = object
) do
)
when type in ["Create", "Update"] do
media_nsfw =
instance_list(:media_nsfw)
|> MRF.subdomains_regex()

View file

@ -12,6 +12,14 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do
defp accept_host?(host), do: host in Config.get([:mrf_steal_emoji, :hosts], [])
defp shortcode_matches?(shortcode, pattern) when is_binary(pattern) do
shortcode == pattern
end
defp shortcode_matches?(shortcode, pattern) do
String.match?(shortcode, pattern)
end
defp steal_emoji({shortcode, url}, emoji_dir_path) do
url = Pleroma.Web.MediaProxy.url(url)
@ -33,7 +41,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do
shortcode
e ->
Logger.warn("MRF.StealEmojiPolicy: Failed to write to #{file_path}: #{inspect(e)}")
Logger.warning("MRF.StealEmojiPolicy: Failed to write to #{file_path}: #{inspect(e)}")
nil
end
else
@ -45,7 +53,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do
end
else
e ->
Logger.warn("MRF.StealEmojiPolicy: Failed to fetch #{url}: #{inspect(e)}")
Logger.warning("MRF.StealEmojiPolicy: Failed to fetch #{url}: #{inspect(e)}")
nil
end
end
@ -72,7 +80,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do
reject_emoji? =
[:mrf_steal_emoji, :rejected_shortcodes]
|> Config.get([])
|> Enum.find(false, fn regex -> String.match?(shortcode, regex) end)
|> Enum.find(false, fn pattern -> shortcode_matches?(shortcode, pattern) end)
!reject_emoji?
end)
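With shortcode_matches?/2 above, rejected_shortcodes accepts plain shortcodes as well as regexes; a hedged configuration sketch, where the host, shortcodes, and size limit are placeholder values:

config :pleroma, :mrf_steal_emoji,
  hosts: ["friendly.example"],
  rejected_shortcodes: ["spam_emoji", ~r/^ad_/],
  size_limit: 50_000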
@ -122,8 +130,12 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do
%{
key: :rejected_shortcodes,
type: {:list, :string},
description: "Regex-list of shortcodes to reject",
suggestions: [""]
description: """
A list of patterns or matches to reject shortcodes with.
Each pattern can be a string or [Regex](https://hexdocs.pm/elixir/Regex.html) in the format of `~r/PATTERN/`.
""",
suggestions: ["foo", ~r/foo/]
},
%{
key: :size_limit,

Some files were not shown because too many files have changed in this diff.