Merge branch 'develop' into feature/gen-magic

This commit is contained in:
Mark Felder 2020-09-10 16:02:11 -05:00
commit 55562ca936
836 changed files with 22077 additions and 16442 deletions

View file

@ -3,15 +3,54 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Pleroma do
@apps [
:restarter,
:ecto,
:ecto_sql,
:postgrex,
:db_connection,
:cachex,
:flake_id,
:swoosh,
:timex
]
@cachex_children ["object", "user", "scrubber"]
@doc "Common functions to be reused in mix tasks"
def start_pleroma do
Pleroma.Config.Holder.save_default()
Pleroma.Config.Oban.warn()
Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)
if Pleroma.Config.get(:env) != :test do
Application.put_env(:logger, :console, level: :debug)
end
{:ok, _} = Application.ensure_all_started(:pleroma)
adapter = Application.get_env(:tesla, :adapter)
apps =
if adapter == Tesla.Adapter.Gun do
[:gun | @apps]
else
[:hackney | @apps]
end
Enum.each(apps, &Application.ensure_all_started/1)
children =
[
Pleroma.Repo,
{Pleroma.Config.TransferTask, false},
Pleroma.Web.Endpoint,
{Oban, Pleroma.Config.get(Oban)}
] ++
http_children(adapter)
cachex_children = Enum.map(@cachex_children, &Pleroma.Application.build_cachex(&1, []))
Supervisor.start_link(children ++ cachex_children,
strategy: :one_for_one,
name: Pleroma.Supervisor
)
if Pleroma.Config.get(:env) not in [:test, :benchmark] do
pleroma_rebooted?()
@ -82,4 +121,11 @@ defmodule Mix.Pleroma do
def escape_sh_path(path) do
~S(') <> String.replace(path, ~S('), ~S(\')) <> ~S(')
end
defp http_children(Tesla.Adapter.Gun) do
Pleroma.Gun.ConnectionPool.children() ++
[{Task, &Pleroma.HTTP.AdapterHelper.Gun.limiter_setup/0}]
end
defp http_children(_), do: []
end
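As a usage sketch (the task module below is hypothetical, not part of this diff), a mix task built on these helpers would call start_pleroma/0 before touching the database:

defmodule Mix.Tasks.Pleroma.Example do
  use Mix.Task
  import Mix.Pleroma

  def run(_args) do
    # boots the Repo, Endpoint, Oban, the Cachex caches and the HTTP adapter children listed above
    start_pleroma()
    shell_info("Pleroma started for this task")
  end
end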

View file

@ -91,20 +91,17 @@ defmodule Mix.Tasks.Pleroma.Benchmark do
"Without conn and without pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
adapter: [pool: :no_pool, receive_conn: false]
pool: :no_pool,
receive_conn: false
)
end,
"Without conn and with pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
adapter: [receive_conn: false]
)
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], receive_conn: false)
end,
"With reused conn and without pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
adapter: [pool: :no_pool]
)
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], pool: :no_pool)
end,
"With reused conn and with pool" => fn ->
{:ok, %Tesla.Env{}} = Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500")

View file

@ -52,6 +52,7 @@ defmodule Mix.Tasks.Pleroma.Config do
defp do_migrate_to_db(config_file) do
if File.exists?(config_file) do
shell_info("Migrating settings from file: #{Path.expand(config_file)}")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE config;")
Ecto.Adapters.SQL.query!(Repo, "ALTER SEQUENCE config_id_seq RESTART;")
@ -72,8 +73,7 @@ defmodule Mix.Tasks.Pleroma.Config do
group
|> Pleroma.Config.Loader.filter_group(settings)
|> Enum.each(fn {key, value} ->
key = inspect(key)
{:ok, _} = ConfigDB.update_or_create(%{group: inspect(group), key: key, value: value})
{:ok, _} = ConfigDB.update_or_create(%{group: group, key: key, value: value})
shell_info("Settings for key #{key} migrated.")
end)
@ -83,7 +83,7 @@ defmodule Mix.Tasks.Pleroma.Config do
defp migrate_from_db(opts) do
if Pleroma.Config.get([:configurable_from_database]) do
env = opts[:env] || "prod"
env = opts[:env] || Pleroma.Config.get(:env)
config_path =
if Pleroma.Config.get(:release) do
@ -105,6 +105,10 @@ defmodule Mix.Tasks.Pleroma.Config do
:ok = File.close(file)
System.cmd("mix", ["format", config_path])
shell_info(
"Database configuration settings have been exported to config/#{env}.exported_from_db.secret.exs"
)
else
migration_error()
end
@ -112,7 +116,7 @@ defmodule Mix.Tasks.Pleroma.Config do
defp migration_error do
shell_error(
"Migration is not allowed in config. You can change this behavior by setting `configurable_from_database` to true."
"Migration is not allowed in config. You can change this behavior by setting `config :pleroma, configurable_from_database: true`"
)
end
@ -131,12 +135,9 @@ defmodule Mix.Tasks.Pleroma.Config do
end
defp write(config, file) do
value =
config.value
|> ConfigDB.from_binary()
|> inspect(limit: :infinity)
value = inspect(config.value, limit: :infinity)
IO.write(file, "config #{config.group}, #{config.key}, #{value}\r\n\r\n")
IO.write(file, "config #{inspect(config.group)}, #{inspect(config.key)}, #{value}\r\n\r\n")
config
end

View file

@ -10,6 +10,7 @@ defmodule Mix.Tasks.Pleroma.Database do
alias Pleroma.User
require Logger
require Pleroma.Constants
import Ecto.Query
import Mix.Pleroma
use Mix.Task
@ -53,8 +54,6 @@ defmodule Mix.Tasks.Pleroma.Database do
end
def run(["prune_objects" | args]) do
import Ecto.Query
{options, [], []} =
OptionParser.parse(
args,
@ -94,8 +93,6 @@ defmodule Mix.Tasks.Pleroma.Database do
end
def run(["fix_likes_collections"]) do
import Ecto.Query
start_pleroma()
from(object in Object,
@ -130,4 +127,38 @@ defmodule Mix.Tasks.Pleroma.Database do
Maintenance.vacuum(args)
end
def run(["ensure_expiration"]) do
start_pleroma()
days = Pleroma.Config.get([:mrf_activity_expiration, :days], 365)
Pleroma.Activity
|> join(:inner, [a], o in Object,
on:
fragment(
"(?->>'id') = COALESCE((?)->'object'->> 'id', (?)->>'object')",
o.data,
a.data,
a.data
)
)
|> where(local: true)
|> where([a], fragment("(? ->> 'type'::text) = 'Create'", a.data))
|> where([_a, o], fragment("?->>'type' = 'Note'", o.data))
|> Pleroma.RepoStreamer.chunk_stream(100)
|> Stream.each(fn activities ->
Enum.each(activities, fn activity ->
expires_at =
activity.inserted_at
|> DateTime.from_naive!("Etc/UTC")
|> Timex.shift(days: days)
Pleroma.Workers.PurgeExpiredActivity.enqueue(%{
activity_id: activity.id,
expires_at: expires_at
})
end)
end)
|> Stream.run()
end
end
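A minimal way to exercise the new task; the mix invocation in the comment is the usual entry point, and calling run/1 directly is equivalent:

# equivalent to: mix pleroma.database ensure_expiration
Mix.Tasks.Pleroma.Database.run(["ensure_expiration"])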

View file

@ -41,6 +41,10 @@ defmodule Mix.Tasks.Pleroma.Ecto.Migrate do
load_pleroma()
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
if Application.get_env(:pleroma, Pleroma.Repo)[:ssl] do
Application.ensure_all_started(:ssl)
end
opts =
if opts[:to] || opts[:step] || opts[:all],
do: opts,

View file

@ -40,6 +40,10 @@ defmodule Mix.Tasks.Pleroma.Ecto.Rollback do
load_pleroma()
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
if Application.get_env(:pleroma, Pleroma.Repo)[:ssl] do
Application.ensure_all_started(:ssl)
end
opts =
if opts[:to] || opts[:step] || opts[:all],
do: opts,

View file

@ -15,7 +15,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
{options, [], []} = parse_global_opts(args)
url_or_path = options[:manifest] || default_manifest()
manifest = fetch_and_decode(url_or_path)
manifest = fetch_and_decode!(url_or_path)
Enum.each(manifest, fn {name, info} ->
to_print = [
@ -42,7 +42,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
url_or_path = options[:manifest] || default_manifest()
manifest = fetch_and_decode(url_or_path)
manifest = fetch_and_decode!(url_or_path)
for pack_name <- pack_names do
if Map.has_key?(manifest, pack_name) do
@ -92,7 +92,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
])
)
files = fetch_and_decode(files_loc)
files = fetch_and_decode!(files_loc)
IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))
@ -183,7 +183,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
IO.puts("Downloading the pack and generating SHA256")
binary_archive = Tesla.get!(client(), src).body
{:ok, %{body: binary_archive}} = Pleroma.HTTP.get(src)
archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()
IO.puts("SHA256 is #{archive_sha}")
@ -243,14 +243,16 @@ defmodule Mix.Tasks.Pleroma.Emoji do
IO.puts("Emoji packs have been reloaded.")
end
defp fetch_and_decode(from) do
defp fetch_and_decode!(from) do
with {:ok, json} <- fetch(from) do
Jason.decode!(json)
else
{:error, error} -> raise "#{from} cannot be fetched. Error: #{error} occurred."
end
end
defp fetch("http" <> _ = from) do
with {:ok, %{body: body}} <- Tesla.get(client(), from) do
with {:ok, %{body: body}} <- Pleroma.HTTP.get(from) do
{:ok, body}
end
end
@ -269,13 +271,5 @@ defmodule Mix.Tasks.Pleroma.Emoji do
)
end
defp client do
middleware = [
{Tesla.Middleware.FollowRedirects, [max_redirects: 3]}
]
Tesla.client(middleware)
end
defp default_manifest, do: Pleroma.Config.get!([:emoji, :default_manifest])
end

View file

@ -0,0 +1,141 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Frontend do
use Mix.Task
import Mix.Pleroma
@shortdoc "Manages bundled Pleroma frontends"
@moduledoc File.read!("docs/administration/CLI_tasks/frontend.md")
def run(["install", "none" | _args]) do
shell_info("Skipping frontend installation because none was requested")
"none"
end
def run(["install", frontend | args]) do
log_level = Logger.level()
Logger.configure(level: :warn)
start_pleroma()
{options, [], []} =
OptionParser.parse(
args,
strict: [
ref: :string,
static_dir: :string,
build_url: :string,
build_dir: :string,
file: :string
]
)
instance_static_dir =
with nil <- options[:static_dir] do
Pleroma.Config.get!([:instance, :static_dir])
end
cmd_frontend_info = %{
"name" => frontend,
"ref" => options[:ref],
"build_url" => options[:build_url],
"build_dir" => options[:build_dir]
}
config_frontend_info = Pleroma.Config.get([:frontends, :available, frontend], %{})
frontend_info =
Map.merge(config_frontend_info, cmd_frontend_info, fn _key, config, cmd ->
# This only overrides things that are actually set
cmd || config
end)
ref = frontend_info["ref"]
unless ref do
raise "No ref given or configured"
end
dest =
Path.join([
instance_static_dir,
"frontends",
frontend,
ref
])
fe_label = "#{frontend} (#{ref})"
tmp_dir = Path.join([instance_static_dir, "frontends", "tmp"])
with {_, :ok} <-
{:download_or_unzip, download_or_unzip(frontend_info, tmp_dir, options[:file])},
shell_info("Installing #{fe_label} to #{dest}"),
:ok <- install_frontend(frontend_info, tmp_dir, dest) do
File.rm_rf!(tmp_dir)
shell_info("Frontend #{fe_label} installed to #{dest}")
Logger.configure(level: log_level)
else
{:download_or_unzip, _} ->
shell_info("Could not download or unzip the frontend")
_e ->
shell_info("Could not install the frontend")
end
end
defp download_or_unzip(frontend_info, temp_dir, file) do
if file do
with {:ok, zip} <- File.read(Path.expand(file)) do
unzip(zip, temp_dir)
end
else
download_build(frontend_info, temp_dir)
end
end
def unzip(zip, dest) do
with {:ok, unzipped} <- :zip.unzip(zip, [:memory]) do
File.rm_rf!(dest)
File.mkdir_p!(dest)
Enum.each(unzipped, fn {filename, data} ->
path = filename
new_file_path = Path.join(dest, path)
new_file_path
|> Path.dirname()
|> File.mkdir_p!()
File.write!(new_file_path, data)
end)
:ok
end
end
defp download_build(frontend_info, dest) do
shell_info("Downloading pre-built bundle for #{frontend_info["name"]}")
url = String.replace(frontend_info["build_url"], "${ref}", frontend_info["ref"])
with {:ok, %{status: 200, body: zip_body}} <-
Pleroma.HTTP.get(url, [], pool: :media, recv_timeout: 120_000) do
unzip(zip_body, dest)
else
e -> {:error, e}
end
end
defp install_frontend(frontend_info, source, dest) do
from = frontend_info["build_dir"] || "dist"
File.rm_rf!(dest)
File.mkdir_p!(dest)
File.cp_r!(Path.join([source, from]), dest)
:ok
end
end
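A usage sketch for the new task; the frontend name and ref below are illustrative values, not taken from this diff:

# equivalent to: mix pleroma.frontend install pleroma-fe --ref develop
Mix.Tasks.Pleroma.Frontend.run(["install", "pleroma-fe", "--ref", "develop"])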

View file

@ -145,7 +145,7 @@ defmodule Mix.Tasks.Pleroma.Instance do
options,
:uploads_dir,
"What directory should media uploads go in (when using the local uploader)?",
Pleroma.Config.get([Pleroma.Uploaders.Local, :uploads])
Config.get([Pleroma.Uploaders.Local, :uploads])
)
|> Path.expand()
@ -154,7 +154,7 @@ defmodule Mix.Tasks.Pleroma.Instance do
options,
:static_dir,
"What directory should custom public files be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)?",
Pleroma.Config.get([:instance, :static_dir])
Config.get([:instance, :static_dir])
)
|> Path.expand()

View file

@ -3,8 +3,8 @@ defmodule Mix.Tasks.Pleroma.NotificationSettings do
@moduledoc """
Example:
> mix pleroma.notification_settings --privacy-option=false --nickname-users="parallel588" # set false only for parallel588 user
> mix pleroma.notification_settings --privacy-option=true # set true for all users
> mix pleroma.notification_settings --hide-notification-contents=false --nickname-users="parallel588" # set false only for parallel588 user
> mix pleroma.notification_settings --hide-notification-contents=true # set true for all users
"""
@ -19,16 +19,16 @@ defmodule Mix.Tasks.Pleroma.NotificationSettings do
OptionParser.parse(
args,
strict: [
privacy_option: :boolean,
hide_notification_contents: :boolean,
email_users: :string,
nickname_users: :string
]
)
privacy_option = Keyword.get(options, :privacy_option)
hide_notification_contents = Keyword.get(options, :hide_notification_contents)
if not is_nil(privacy_option) do
privacy_option
if not is_nil(hide_notification_contents) do
hide_notification_contents
|> build_query(options)
|> Pleroma.Repo.update_all([])
end
@ -36,15 +36,15 @@ defmodule Mix.Tasks.Pleroma.NotificationSettings do
shell_info("Done")
end
defp build_query(privacy_option, options) do
defp build_query(hide_notification_contents, options) do
query =
from(u in Pleroma.User,
update: [
set: [
notification_settings:
fragment(
"jsonb_set(notification_settings, '{privacy_option}', ?)",
^privacy_option
"jsonb_set(notification_settings, '{hide_notification_contents}', ?)",
^hide_notification_contents
)
]
]

View file

@ -17,30 +17,53 @@ defmodule Mix.Tasks.Pleroma.RefreshCounterCache do
def run([]) do
Mix.Pleroma.start_pleroma()
["public", "unlisted", "private", "direct"]
|> Enum.each(fn visibility ->
count = status_visibility_count_query(visibility)
name = "status_visibility_#{visibility}"
CounterCache.set(name, count)
Mix.Pleroma.shell_info("Set #{name} to #{count}")
instances =
Activity
|> distinct([a], true)
|> select([a], fragment("split_part(?, '/', 3)", a.actor))
|> Repo.all()
instances
|> Enum.with_index(1)
|> Enum.each(fn {instance, i} ->
counters = instance_counters(instance)
CounterCache.set(instance, counters)
Mix.Pleroma.shell_info(
"[#{i}/#{length(instances)}] Setting #{instance} counters: #{inspect(counters)}"
)
end)
Mix.Pleroma.shell_info("Done")
end
defp status_visibility_count_query(visibility) do
defp instance_counters(instance) do
counters = %{"public" => 0, "unlisted" => 0, "private" => 0, "direct" => 0}
Activity
|> where(
|> where([a], fragment("(? ->> 'type'::text) = 'Create'", a.data))
|> where([a], fragment("split_part(?, '/', 3) = ?", a.actor, ^instance))
|> select(
[a],
{fragment(
"activity_visibility(?, ?, ?)",
a.actor,
a.recipients,
a.data
), count(a.id)}
)
|> group_by(
[a],
fragment(
"activity_visibility(?, ?, ?) = ?",
"activity_visibility(?, ?, ?)",
a.actor,
a.recipients,
a.data,
^visibility
a.data
)
)
|> where([a], fragment("(? ->> 'type'::text) = 'Create'", a.data))
|> Repo.aggregate(:count, :id, timeout: :timer.minutes(30))
|> Repo.all(timeout: :timer.minutes(30))
|> Enum.reduce(counters, fn {visibility, count}, acc ->
Map.put(acc, visibility, count)
end)
end
end

View file

@ -35,10 +35,16 @@ defmodule Mix.Tasks.Pleroma.Relay do
def run(["list"]) do
start_pleroma()
with {:ok, list} <- Relay.list(true) do
list |> Enum.each(&shell_info(&1))
with {:ok, list} <- Relay.list() do
Enum.each(list, &print_relay_url/1)
else
{:error, e} -> shell_error("Error while fetching relay subscription list: #{inspect(e)}")
end
end
defp print_relay_url(%{followed_back: false} = relay) do
shell_info("#{relay.actor} - no Accept received (relay didn't follow back)")
end
defp print_relay_url(relay), do: shell_info(relay.actor)
end

View file

@ -232,7 +232,7 @@ defmodule Mix.Tasks.Pleroma.User do
with %User{} = user <- User.get_cached_by_nickname(nickname) do
user = user |> User.tag(tags)
shell_info("Tags of #{user.nickname}: #{inspect(tags)}")
shell_info("Tags of #{user.nickname}: #{inspect(user.tags)}")
else
_ ->
shell_error("Could not change user tags for #{nickname}")
@ -245,7 +245,7 @@ defmodule Mix.Tasks.Pleroma.User do
with %User{} = user <- User.get_cached_by_nickname(nickname) do
user = user |> User.untag(tags)
shell_info("Tags of #{user.nickname}: #{inspect(tags)}")
shell_info("Tags of #{user.nickname}: #{inspect(user.tags)}")
else
_ ->
shell_error("Could not change user tags for #{nickname}")

View file

@ -7,7 +7,6 @@ defmodule Pleroma.Activity do
alias Pleroma.Activity
alias Pleroma.Activity.Queries
alias Pleroma.ActivityExpiration
alias Pleroma.Bookmark
alias Pleroma.Notification
alias Pleroma.Object
@ -60,8 +59,6 @@ defmodule Pleroma.Activity do
# typical case.
has_one(:object, Object, on_delete: :nothing, foreign_key: :id)
has_one(:expiration, ActivityExpiration, on_delete: :delete_all)
timestamps()
end
@ -304,14 +301,14 @@ defmodule Pleroma.Activity do
|> Repo.all()
end
def follow_requests_for_actor(%Pleroma.User{ap_id: ap_id}) do
def follow_requests_for_actor(%User{ap_id: ap_id}) do
ap_id
|> Queries.by_object_id()
|> Queries.by_type("Follow")
|> where([a], fragment("? ->> 'state' = 'pending'", a.data))
end
def following_requests_for_actor(%Pleroma.User{ap_id: ap_id}) do
def following_requests_for_actor(%User{ap_id: ap_id}) do
Queries.by_type("Follow")
|> where([a], fragment("?->>'state' = 'pending'", a.data))
|> where([a], a.actor == ^ap_id)
@ -340,4 +337,10 @@ defmodule Pleroma.Activity do
_ -> nil
end
end
@spec pinned_by_actor?(Activity.t()) :: boolean()
def pinned_by_actor?(%Activity{} = activity) do
actor = user_actor(activity)
activity.id in actor.pinned_activities
end
end

View file

@ -1,67 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ActivityExpiration do
use Ecto.Schema
alias Pleroma.Activity
alias Pleroma.ActivityExpiration
alias Pleroma.Repo
import Ecto.Changeset
import Ecto.Query
@type t :: %__MODULE__{}
@min_activity_lifetime :timer.hours(1)
schema "activity_expirations" do
belongs_to(:activity, Activity, type: FlakeId.Ecto.CompatType)
field(:scheduled_at, :naive_datetime)
end
def changeset(%ActivityExpiration{} = expiration, attrs) do
expiration
|> cast(attrs, [:scheduled_at])
|> validate_required([:scheduled_at])
|> validate_scheduled_at()
end
def get_by_activity_id(activity_id) do
ActivityExpiration
|> where([exp], exp.activity_id == ^activity_id)
|> Repo.one()
end
def create(%Activity{} = activity, scheduled_at) do
%ActivityExpiration{activity_id: activity.id}
|> changeset(%{scheduled_at: scheduled_at})
|> Repo.insert()
end
def due_expirations(offset \\ 0) do
naive_datetime =
NaiveDateTime.utc_now()
|> NaiveDateTime.add(offset, :millisecond)
ActivityExpiration
|> where([exp], exp.scheduled_at < ^naive_datetime)
|> Repo.all()
end
def validate_scheduled_at(changeset) do
validate_change(changeset, :scheduled_at, fn _, scheduled_at ->
if not expires_late_enough?(scheduled_at) do
[scheduled_at: "an ephemeral activity must live for at least one hour"]
else
[]
end
end)
end
def expires_late_enough?(scheduled_at) do
now = NaiveDateTime.utc_now()
diff = NaiveDateTime.diff(scheduled_at, now, :millisecond)
diff > @min_activity_lifetime
end
end

View file

@ -22,26 +22,42 @@ defmodule Pleroma.Application do
def repository, do: @repository
def user_agent do
case Config.get([:http, :user_agent], :default) do
:default ->
info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
named_version() <> "; " <> info
if Process.whereis(Pleroma.Web.Endpoint) do
case Config.get([:http, :user_agent], :default) do
:default ->
info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
named_version() <> "; " <> info
custom ->
custom
custom ->
custom
end
else
# fallback, if endpoint is not started yet
"Pleroma Data Loader"
end
end
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
Pleroma.Config.Holder.save_default()
# Scrubbers are compiled at runtime and therefore will cause a conflict
# every time the application is restarted, so we disable module
# conflicts at runtime
Code.compiler_options(ignore_module_conflict: true)
# Disable warnings_as_errors at runtime, it breaks Phoenix live reload
# due to protocol consolidation warnings
Code.compiler_options(warnings_as_errors: false)
Pleroma.Telemetry.Logger.attach()
Config.Holder.save_default()
Pleroma.HTML.compile_scrubbers()
Pleroma.Config.Oban.warn()
Config.DeprecationWarnings.warn()
Pleroma.Plugs.HTTPSecurityPlug.warn_if_disabled()
Pleroma.Repo.check_migrations_applied!()
Pleroma.ApplicationRequirements.verify!()
setup_instrumenters()
load_custom_modules()
check_system_commands()
Pleroma.Docs.JSON.compile()
adapter = Application.get_env(:tesla, :adapter)
@ -149,7 +165,8 @@ defmodule Pleroma.Application do
build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
build_cachex("web_resp", limit: 2500),
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
build_cachex("failed_proxy_url", limit: 2500)
build_cachex("failed_proxy_url", limit: 2500),
build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000)
]
end
@ -162,7 +179,8 @@ defmodule Pleroma.Application do
defp seconds_valid_interval,
do: :timer.seconds(Config.get!([Pleroma.Captcha, :seconds_valid]))
defp build_cachex(type, opts),
@spec build_cachex(String.t(), keyword()) :: map()
def build_cachex(type, opts),
do: %{
id: String.to_atom("cachex_" <> type),
start: {Cachex, :start_link, [String.to_atom(type <> "_cache"), opts]},
@ -217,9 +235,7 @@ defmodule Pleroma.Application do
# start hackney and gun pools in tests
defp http_children(_, :test) do
hackney_options = Config.get([:hackney_pools, :federation])
hackney_pool = :hackney_pool.child_spec(:federation, hackney_options)
[hackney_pool, Pleroma.Pool.Supervisor]
http_children(Tesla.Adapter.Hackney, nil) ++ http_children(Tesla.Adapter.Gun, nil)
end
defp http_children(Tesla.Adapter.Hackney, _) do
@ -238,7 +254,27 @@ defmodule Pleroma.Application do
end
end
defp http_children(Tesla.Adapter.Gun, _), do: [Pleroma.Pool.Supervisor]
defp http_children(Tesla.Adapter.Gun, _) do
Pleroma.Gun.ConnectionPool.children() ++
[{Task, &Pleroma.HTTP.AdapterHelper.Gun.limiter_setup/0}]
end
defp http_children(_, _), do: []
defp check_system_commands do
filters = Config.get([Pleroma.Upload, :filters])
check_filter = fn filter, command_required ->
with true <- filter in filters,
false <- Pleroma.Utils.command_available?(command_required) do
Logger.error(
"#{filter} is specified in list of Pleroma.Upload filters, but the #{command_required} command is not found"
)
end
end
check_filter.(Pleroma.Upload.Filters.Exiftool, "exiftool")
check_filter.(Pleroma.Upload.Filters.Mogrify, "mogrify")
check_filter.(Pleroma.Upload.Filters.Mogrifun, "mogrify")
end
end

View file

@ -0,0 +1,143 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ApplicationRequirements do
@moduledoc """
This module represents the collection of validations to run before the server starts.
"""
defmodule VerifyError, do: defexception([:message])
import Ecto.Query
require Logger
@spec verify!() :: :ok | VerifyError.t()
def verify! do
:ok
|> check_confirmation_accounts!
|> check_migrations_applied!()
|> check_welcome_message_config!()
|> check_rum!()
|> handle_result()
end
defp handle_result(:ok), do: :ok
defp handle_result({:error, message}), do: raise(VerifyError, message: message)
defp check_welcome_message_config!(:ok) do
if Pleroma.Config.get([:welcome, :email, :enabled], false) and
not Pleroma.Emails.Mailer.enabled?() do
Logger.error("""
To send welcome emails, you need to enable the mailer.
\nconfig :pleroma, Pleroma.Emails.Mailer, enabled: true
""")
{:error, "The mail disabled."}
else
:ok
end
end
defp check_welcome_message_config!(result), do: result
# Checks account confirmation email
#
def check_confirmation_accounts!(:ok) do
if Pleroma.Config.get([:instance, :account_activation_required]) &&
not Pleroma.Config.get([Pleroma.Emails.Mailer, :enabled]) do
Logger.error(
"Account activation enabled, but no Mailer settings enabled.\nPlease set config :pleroma, :instance, account_activation_required: false\nOtherwise setup and enable Mailer."
)
{:error,
"Account activation enabled, but Mailer is disabled. Cannot send confirmation emails."}
else
:ok
end
end
def check_confirmation_accounts!(result), do: result
# Checks for pending migrations.
#
def check_migrations_applied!(:ok) do
unless Pleroma.Config.get(
[:i_am_aware_this_may_cause_data_loss, :disable_migration_check],
false
) do
{_, res, _} =
Ecto.Migrator.with_repo(Pleroma.Repo, fn repo ->
down_migrations =
Ecto.Migrator.migrations(repo)
|> Enum.reject(fn
{:up, _, _} -> true
{:down, _, _} -> false
end)
if length(down_migrations) > 0 do
down_migrations_text =
Enum.map(down_migrations, fn {:down, id, name} -> "- #{name} (#{id})\n" end)
Logger.error(
"The following migrations were not applied:\n#{down_migrations_text}If you want to start Pleroma anyway, set\nconfig :pleroma, :i_am_aware_this_may_cause_data_loss, disable_migration_check: true"
)
{:error, "Unapplied Migrations detected"}
else
:ok
end
end)
res
else
:ok
end
end
def check_migrations_applied!(result), do: result
# Checks for settings of RUM indexes.
#
defp check_rum!(:ok) do
{_, res, _} =
Ecto.Migrator.with_repo(Pleroma.Repo, fn repo ->
migrate =
from(o in "columns",
where: o.table_name == "objects",
where: o.column_name == "fts_content"
)
|> repo.exists?(prefix: "information_schema")
setting = Pleroma.Config.get([:database, :rum_enabled], false)
do_check_rum!(setting, migrate)
end)
res
end
defp check_rum!(result), do: result
defp do_check_rum!(setting, migrate) do
case {setting, migrate} do
{true, false} ->
Logger.error(
"Use `RUM` index is enabled, but were not applied migrations for it.\nIf you want to start Pleroma anyway, set\nconfig :pleroma, :database, rum_enabled: false\nOtherwise apply the following migrations:\n`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`"
)
{:error, "Unapplied RUM Migrations detected"}
{false, true} ->
Logger.error(
"Detected applied migrations to use `RUM` index, but `RUM` isn't enable in settings.\nIf you want to use `RUM`, set\nconfig :pleroma, :database, rum_enabled: true\nOtherwise roll `RUM` migrations back.\n`mix ecto.rollback --migrations-path priv/repo/optional_migrations/rum_indexing/`"
)
{:error, "RUM Migrations detected"}
_ ->
:ok
end
end
end

View file

@ -21,7 +21,8 @@ defmodule Pleroma.Captcha.Kocaptcha do
type: :kocaptcha,
token: json_resp["token"],
url: endpoint <> json_resp["url"],
answer_data: json_resp["md5"]
answer_data: json_resp["md5"],
seconds_valid: Pleroma.Config.get([Pleroma.Captcha, :seconds_valid])
}
end
end

View file

@ -17,7 +17,8 @@ defmodule Pleroma.Captcha.Native do
type: :native,
token: token(),
url: "data:image/png;base64," <> Base.encode64(img_binary),
answer_data: answer_data
answer_data: answer_data,
seconds_valid: Pleroma.Config.get([Pleroma.Captcha, :seconds_valid])
}
end
end

View file

@ -11,12 +11,10 @@ defmodule Pleroma.Config do
def get([key], default), do: get(key, default)
def get([parent_key | keys], default) do
case :pleroma
|> Application.get_env(parent_key)
|> get_in(keys) do
nil -> default
any -> any
def get([_ | _] = path, default) do
case fetch(path) do
{:ok, value} -> value
:error -> default
end
end
@ -34,6 +32,24 @@ defmodule Pleroma.Config do
end
end
def fetch(key) when is_atom(key), do: fetch([key])
def fetch([root_key | keys]) do
Enum.reduce_while(keys, Application.fetch_env(:pleroma, root_key), fn
key, {:ok, config} when is_map(config) or is_list(config) ->
case Access.fetch(config, key) do
:error ->
{:halt, :error}
value ->
{:cont, value}
end
_key, _config ->
{:halt, :error}
end)
end
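# Illustrative sketch (not part of the change) of how the new fetch/1 relates to get/2;
# the missing key below is a placeholder.
{:ok, _name} = Pleroma.Config.fetch([:instance, :name])
:error = Pleroma.Config.fetch([:instance, :no_such_key])
"fallback" = Pleroma.Config.get([:instance, :no_such_key], "fallback")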
def put([key], value), do: put(key, value)
def put([parent_key | keys], value) do
@ -50,18 +66,31 @@ defmodule Pleroma.Config do
def delete([key]), do: delete(key)
def delete([parent_key | keys]) do
{_, parent} =
Application.get_env(:pleroma, parent_key)
|> get_and_update_in(keys, fn _ -> :pop end)
def delete([parent_key | keys] = path) do
with {:ok, _} <- fetch(path) do
{_, parent} =
parent_key
|> get()
|> get_and_update_in(keys, fn _ -> :pop end)
Application.put_env(:pleroma, parent_key, parent)
Application.put_env(:pleroma, parent_key, parent)
end
end
def delete(key) do
Application.delete_env(:pleroma, key)
end
def restrict_unauthenticated_access?(resource, kind) do
setting = get([:restrict_unauthenticated, resource, kind])
if setting in [nil, :if_instance_is_private] do
!get!([:instance, :public])
else
setting
end
end
def oauth_consumer_strategies, do: get([:auth, :oauth_consumer_strategies], [])
def oauth_consumer_enabled?, do: oauth_consumer_strategies() != []

View file

@ -6,7 +6,7 @@ defmodule Pleroma.ConfigDB do
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query
import Ecto.Query, only: [select: 3]
import Pleroma.Web.Gettext
alias __MODULE__
@ -14,16 +14,6 @@ defmodule Pleroma.ConfigDB do
@type t :: %__MODULE__{}
@full_key_update [
{:pleroma, :ecto_repos},
{:quack, :meta},
{:mime, :types},
{:cors_plug, [:max_age, :methods, :expose, :headers]},
{:auto_linker, :opts},
{:swarm, :node_blacklist},
{:logger, :backends}
]
@full_subkey_update [
{:pleroma, :assets, :mascots},
{:pleroma, :emoji, :groups},
@ -32,14 +22,10 @@ defmodule Pleroma.ConfigDB do
{:pleroma, :mrf_keyword, :replace}
]
@regex ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
@delimiters ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
schema "config" do
field(:key, :string)
field(:group, :string)
field(:value, :binary)
field(:key, Pleroma.EctoType.Config.Atom)
field(:group, Pleroma.EctoType.Config.Atom)
field(:value, Pleroma.EctoType.Config.BinaryValue)
field(:db, {:array, :string}, virtual: true, default: [])
timestamps()
@ -51,10 +37,6 @@ defmodule Pleroma.ConfigDB do
|> select([c], {c.group, c.key, c.value})
|> Repo.all()
|> Enum.reduce([], fn {group, key, value}, acc ->
group = ConfigDB.from_string(group)
key = ConfigDB.from_string(key)
value = from_binary(value)
Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}]))
end)
end
@ -64,50 +46,41 @@ defmodule Pleroma.ConfigDB do
@spec changeset(ConfigDB.t(), map()) :: Changeset.t()
def changeset(config, params \\ %{}) do
params = Map.put(params, :value, transform(params[:value]))
config
|> cast(params, [:key, :group, :value])
|> validate_required([:key, :group, :value])
|> unique_constraint(:key, name: :config_group_key_index)
end
@spec create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
def create(params) do
defp create(params) do
%ConfigDB{}
|> changeset(params)
|> Repo.insert()
end
@spec update(ConfigDB.t(), map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
def update(%ConfigDB{} = config, %{value: value}) do
defp update(%ConfigDB{} = config, %{value: value}) do
config
|> changeset(%{value: value})
|> Repo.update()
end
@spec get_db_keys(ConfigDB.t()) :: [String.t()]
def get_db_keys(%ConfigDB{} = config) do
config.value
|> ConfigDB.from_binary()
|> get_db_keys(config.key)
end
@spec get_db_keys(keyword(), any()) :: [String.t()]
def get_db_keys(value, key) do
if Keyword.keyword?(value) do
value |> Keyword.keys() |> Enum.map(&convert(&1))
else
[convert(key)]
end
keys =
if Keyword.keyword?(value) do
Keyword.keys(value)
else
[key]
end
Enum.map(keys, &to_json_types(&1))
end
@spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword()
def merge_group(group, key, old_value, new_value) do
new_keys = to_map_set(new_value)
new_keys = to_mapset(new_value)
intersect_keys =
old_value |> to_map_set() |> MapSet.intersection(new_keys) |> MapSet.to_list()
intersect_keys = old_value |> to_mapset() |> MapSet.intersection(new_keys) |> MapSet.to_list()
merged_value = ConfigDB.merge(old_value, new_value)
@ -120,12 +93,10 @@ defmodule Pleroma.ConfigDB do
[]
end)
|> List.flatten()
|> Enum.reduce(merged_value, fn subkey, acc ->
Keyword.put(acc, subkey, new_value[subkey])
end)
|> Enum.reduce(merged_value, &Keyword.put(&2, &1, new_value[&1]))
end
defp to_map_set(keyword) do
defp to_mapset(keyword) do
keyword
|> Keyword.keys()
|> MapSet.new()
@ -159,57 +130,54 @@ defmodule Pleroma.ConfigDB do
@spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
def update_or_create(params) do
params = Map.put(params, :value, to_elixir_types(params[:value]))
search_opts = Map.take(params, [:group, :key])
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
{:partial_update, true, config} <-
{:partial_update, can_be_partially_updated?(config), config},
old_value <- from_binary(config.value),
transformed_value <- do_transform(params[:value]),
{:can_be_merged, true, config} <- {:can_be_merged, is_list(transformed_value), config},
new_value <-
merge_group(
ConfigDB.from_string(config.group),
ConfigDB.from_string(config.key),
old_value,
transformed_value
) do
ConfigDB.update(config, %{value: new_value})
{_, true, config} <- {:partial_update, can_be_partially_updated?(config), config},
{_, true, config} <-
{:can_be_merged, is_list(params[:value]) and is_list(config.value), config} do
new_value = merge_group(config.group, config.key, config.value, params[:value])
update(config, %{value: new_value})
else
{reason, false, config} when reason in [:partial_update, :can_be_merged] ->
ConfigDB.update(config, params)
update(config, params)
nil ->
ConfigDB.create(params)
create(params)
end
end
defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config)
defp only_full_update?(%ConfigDB{} = config) do
config_group = ConfigDB.from_string(config.group)
config_key = ConfigDB.from_string(config.key)
defp only_full_update?(%ConfigDB{group: group, key: key}) do
full_key_update = [
{:pleroma, :ecto_repos},
{:quack, :meta},
{:mime, :types},
{:cors_plug, [:max_age, :methods, :expose, :headers]},
{:swarm, :node_blacklist},
{:logger, :backends}
]
Enum.any?(@full_key_update, fn
{group, key} when is_list(key) ->
config_group == group and config_key in key
{group, key} ->
config_group == group and config_key == key
Enum.any?(full_key_update, fn
{s_group, s_key} ->
group == s_group and ((is_list(s_key) and key in s_key) or key == s_key)
end)
end
@spec delete(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
@spec delete(ConfigDB.t() | map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
def delete(%ConfigDB{} = config), do: Repo.delete(config)
def delete(params) do
search_opts = Map.delete(params, :subkeys)
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
{config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]},
old_value <- from_binary(config.value),
keys <- Enum.map(sub_keys, &do_transform_string(&1)),
{:partial_remove, config, new_value} when new_value != [] <-
{:partial_remove, config, Keyword.drop(old_value, keys)} do
ConfigDB.update(config, %{value: new_value})
keys <- Enum.map(sub_keys, &string_to_elixir_types(&1)),
{_, config, new_value} when new_value != [] <-
{:partial_remove, config, Keyword.drop(config.value, keys)} do
update(config, %{value: new_value})
else
{:partial_remove, config, []} ->
Repo.delete(config)
@ -225,37 +193,32 @@ defmodule Pleroma.ConfigDB do
end
end
@spec from_binary(binary()) :: term()
def from_binary(binary), do: :erlang.binary_to_term(binary)
@spec from_binary_with_convert(binary()) :: any()
def from_binary_with_convert(binary) do
binary
|> from_binary()
|> do_convert()
@spec to_json_types(term()) :: map() | list() | boolean() | String.t()
def to_json_types(entity) when is_list(entity) do
Enum.map(entity, &to_json_types/1)
end
@spec from_string(String.t()) :: atom() | no_return()
def from_string(string), do: do_transform_string(string)
def to_json_types(%Regex{} = entity), do: inspect(entity)
@spec convert(any()) :: any()
def convert(entity), do: do_convert(entity)
defp do_convert(entity) when is_list(entity) do
for v <- entity, into: [], do: do_convert(v)
def to_json_types(entity) when is_map(entity) do
Map.new(entity, fn {k, v} -> {to_json_types(k), to_json_types(v)} end)
end
defp do_convert(%Regex{} = entity), do: inspect(entity)
def to_json_types({:args, args}) when is_list(args) do
arguments =
Enum.map(args, fn
arg when is_tuple(arg) -> inspect(arg)
arg -> to_json_types(arg)
end)
defp do_convert(entity) when is_map(entity) do
for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)}
%{"tuple" => [":args", arguments]}
end
defp do_convert({:proxy_url, {type, :localhost, port}}) do
%{"tuple" => [":proxy_url", %{"tuple" => [do_convert(type), "localhost", port]}]}
def to_json_types({:proxy_url, {type, :localhost, port}}) do
%{"tuple" => [":proxy_url", %{"tuple" => [to_json_types(type), "localhost", port]}]}
end
defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do
def to_json_types({:proxy_url, {type, host, port}}) when is_tuple(host) do
ip =
host
|> :inet_parse.ntoa()
@ -264,66 +227,64 @@ defmodule Pleroma.ConfigDB do
%{
"tuple" => [
":proxy_url",
%{"tuple" => [do_convert(type), ip, port]}
%{"tuple" => [to_json_types(type), ip, port]}
]
}
end
defp do_convert({:proxy_url, {type, host, port}}) do
def to_json_types({:proxy_url, {type, host, port}}) do
%{
"tuple" => [
":proxy_url",
%{"tuple" => [do_convert(type), to_string(host), port]}
%{"tuple" => [to_json_types(type), to_string(host), port]}
]
}
end
defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}
def to_json_types({:partial_chain, entity}),
do: %{"tuple" => [":partial_chain", inspect(entity)]}
defp do_convert(entity) when is_tuple(entity) do
def to_json_types(entity) when is_tuple(entity) do
value =
entity
|> Tuple.to_list()
|> do_convert()
|> to_json_types()
%{"tuple" => value}
end
defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
def to_json_types(entity) when is_binary(entity), do: entity
def to_json_types(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
entity
end
defp do_convert(entity)
when is_atom(entity) and entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
def to_json_types(entity) when entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
":#{entity}"
end
defp do_convert(entity) when is_atom(entity), do: inspect(entity)
def to_json_types(entity) when is_atom(entity), do: inspect(entity)
defp do_convert(entity) when is_binary(entity), do: entity
@spec to_elixir_types(boolean() | String.t() | map() | list()) :: term()
def to_elixir_types(%{"tuple" => [":args", args]}) when is_list(args) do
arguments =
Enum.map(args, fn arg ->
if String.contains?(arg, ["{", "}"]) do
{elem, []} = Code.eval_string(arg)
elem
else
to_elixir_types(arg)
end
end)
@spec transform(any()) :: binary() | no_return()
def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity) do
entity
|> do_transform()
|> to_binary()
{:args, arguments}
end
def transform(entity), do: to_binary(entity)
@spec transform_with_out_binary(any()) :: any()
def transform_with_out_binary(entity), do: do_transform(entity)
@spec to_binary(any()) :: binary()
def to_binary(entity), do: :erlang.term_to_binary(entity)
defp do_transform(%Regex{} = entity), do: entity
defp do_transform(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
{:proxy_url, {do_transform_string(type), parse_host(host), port}}
def to_elixir_types(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
{:proxy_url, {string_to_elixir_types(type), parse_host(host), port}}
end
defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
def to_elixir_types(%{"tuple" => [":partial_chain", entity]}) do
{partial_chain, []} =
entity
|> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
@ -332,25 +293,51 @@ defmodule Pleroma.ConfigDB do
{:partial_chain, partial_chain}
end
defp do_transform(%{"tuple" => entity}) do
Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
def to_elixir_types(%{"tuple" => entity}) do
Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1)))
end
defp do_transform(entity) when is_map(entity) do
for {k, v} <- entity, into: %{}, do: {do_transform(k), do_transform(v)}
def to_elixir_types(entity) when is_map(entity) do
Map.new(entity, fn {k, v} -> {to_elixir_types(k), to_elixir_types(v)} end)
end
defp do_transform(entity) when is_list(entity) do
for v <- entity, into: [], do: do_transform(v)
def to_elixir_types(entity) when is_list(entity) do
Enum.map(entity, &to_elixir_types/1)
end
defp do_transform(entity) when is_binary(entity) do
def to_elixir_types(entity) when is_binary(entity) do
entity
|> String.trim()
|> do_transform_string()
|> string_to_elixir_types()
end
defp do_transform(entity), do: entity
def to_elixir_types(entity), do: entity
@spec string_to_elixir_types(String.t()) ::
atom() | Regex.t() | module() | String.t() | no_return()
def string_to_elixir_types("~r" <> _pattern = regex) do
pattern =
~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
delimiters = ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
Regex.named_captures(pattern, regex),
{:ok, {leading, closing}} <- find_valid_delimiter(delimiters, pattern, regex_delimiter),
{result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
result
end
end
def string_to_elixir_types(":" <> atom), do: String.to_atom(atom)
def string_to_elixir_types(value) do
if module_name?(value) do
String.to_existing_atom("Elixir." <> value)
else
value
end
end
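# Illustrative examples of the conversions these clauses perform (input values are assumptions):
Pleroma.ConfigDB.string_to_elixir_types(":mrf_simple")  # => :mrf_simple
Pleroma.ConfigDB.string_to_elixir_types("Pleroma.Repo") # => Pleroma.Repo
Pleroma.ConfigDB.string_to_elixir_types("~r/hello/i")   # => ~r/hello/i
Pleroma.ConfigDB.string_to_elixir_types("plain value")  # => "plain value"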
defp parse_host("localhost"), do: :localhost
@ -387,27 +374,8 @@ defmodule Pleroma.ConfigDB do
end
end
defp do_transform_string("~r" <> _pattern = regex) do
with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
Regex.named_captures(@regex, regex),
{:ok, {leading, closing}} <- find_valid_delimiter(@delimiters, pattern, regex_delimiter),
{result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
result
end
end
defp do_transform_string(":" <> atom), do: String.to_atom(atom)
defp do_transform_string(value) do
if is_module_name?(value) do
String.to_existing_atom("Elixir." <> value)
else
value
end
end
@spec is_module_name?(String.t()) :: boolean()
def is_module_name?(string) do
@spec module_name?(String.t()) :: boolean()
def module_name?(string) do
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or
string in ["Oban", "Ueberauth", "ExSyslogger"]
end

View file

@ -3,9 +3,23 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Config.DeprecationWarnings do
alias Pleroma.Config
require Logger
alias Pleroma.Config
@type config_namespace() :: atom() | [atom()]
@type config_map() :: {config_namespace(), config_namespace(), String.t()}
@mrf_config_map [
{[:instance, :rewrite_policy], [:mrf, :policies],
"\n* `config :pleroma, :instance, rewrite_policy` is now `config :pleroma, :mrf, policies`"},
{[:instance, :mrf_transparency], [:mrf, :transparency],
"\n* `config :pleroma, :instance, mrf_transparency` is now `config :pleroma, :mrf, transparency`"},
{[:instance, :mrf_transparency_exclusions], [:mrf, :transparency_exclusions],
"\n* `config :pleroma, :instance, mrf_transparency_exclusions` is now `config :pleroma, :mrf, transparency_exclusions`"}
]
def check_hellthread_threshold do
if Config.get([:mrf_hellthread, :threshold]) do
Logger.warn("""
@ -39,5 +53,126 @@ defmodule Pleroma.Config.DeprecationWarnings do
def warn do
check_hellthread_threshold()
mrf_user_allowlist()
check_old_mrf_config()
check_media_proxy_whitelist_config()
check_welcome_message_config()
check_gun_pool_options()
check_activity_expiration_config()
end
def check_welcome_message_config do
instance_config = Pleroma.Config.get([:instance])
use_old_config =
Keyword.has_key?(instance_config, :welcome_user_nickname) or
Keyword.has_key?(instance_config, :welcome_message)
if use_old_config do
Logger.error("""
!!!DEPRECATION WARNING!!!
Your config is using the old namespace for Welcome messages configuration. You need to change to the new namespace:
\n* `config :pleroma, :instance, welcome_user_nickname` is now `config :pleroma, :welcome, :direct_message, :sender_nickname`
\n* `config :pleroma, :instance, welcome_message` is now `config :pleroma, :welcome, :direct_message, :message`
""")
end
end
def check_old_mrf_config do
warning_preface = """
!!!DEPRECATION WARNING!!!
Your config is using old namespaces for MRF configuration. They should work for now, but you are advised to change to new namespaces to prevent possible issues later:
"""
move_namespace_and_warn(@mrf_config_map, warning_preface)
end
@spec move_namespace_and_warn([config_map()], String.t()) :: :ok | nil
def move_namespace_and_warn(config_map, warning_preface) do
warning =
Enum.reduce(config_map, "", fn
{old, new, err_msg}, acc ->
old_config = Config.get(old)
if old_config do
Config.put(new, old_config)
acc <> err_msg
else
acc
end
end)
if warning != "" do
Logger.warn(warning_preface <> warning)
end
end
@spec check_media_proxy_whitelist_config() :: :ok | nil
def check_media_proxy_whitelist_config do
whitelist = Config.get([:media_proxy, :whitelist])
if Enum.any?(whitelist, &(not String.starts_with?(&1, "http"))) do
Logger.warn("""
!!!DEPRECATION WARNING!!!
Your config is using old format (only domain) for MediaProxy whitelist option. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
""")
end
end
def check_gun_pool_options do
pool_config = Config.get(:connections_pool)
if timeout = pool_config[:await_up_timeout] do
Logger.warn("""
!!!DEPRECATION WARNING!!!
Your config is using the old setting name `await_up_timeout` instead of `connect_timeout`. It should work for now, but you are advised to rename it to prevent possible issues later.
""")
Config.put(:connections_pool, Keyword.put_new(pool_config, :connect_timeout, timeout))
end
pools_configs = Config.get(:pools)
warning_preface = """
!!!DEPRECATION WARNING!!!
Your config is using the old setting name `timeout` instead of `recv_timeout` in pool settings. It should work for now, but you are advised to rename it to prevent possible issues later.
"""
updated_config =
Enum.reduce(pools_configs, [], fn {pool_name, config}, acc ->
if timeout = config[:timeout] do
Keyword.put(acc, pool_name, Keyword.put_new(config, :recv_timeout, timeout))
else
acc
end
end)
if updated_config != [] do
pool_warnings =
updated_config
|> Keyword.keys()
|> Enum.map(fn pool_name ->
"\n* `:timeout` options in #{pool_name} pool is now `:recv_timeout`"
end)
Logger.warn(Enum.join([warning_preface | pool_warnings]))
Config.put(:pools, updated_config)
end
end
@spec check_activity_expiration_config() :: :ok | nil
def check_activity_expiration_config do
warning_preface = """
!!!DEPRECATION WARNING!!!
Your config is using old namespace for activity expiration configuration. Setting should work for now, but you are advised to change to new namespace to prevent possible issues later:
"""
move_namespace_and_warn(
[
{Pleroma.ActivityExpiration, Pleroma.Workers.PurgeExpiredActivity,
"\n* `config :pleroma, Pleroma.ActivityExpiration` is now `config :pleroma, Pleroma.Workers.PurgeExpiredActivity`"}
],
warning_preface
)
end
end

View file

@ -0,0 +1,17 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Config.Helpers do
alias Pleroma.Config
def instance_name, do: Config.get([:instance, :name])
defp instance_notify_email do
Config.get([:instance, :notify_email]) || Config.get([:instance, :email])
end
def sender do
{instance_name(), instance_notify_email()}
end
end

View file

@ -12,6 +12,11 @@ defmodule Pleroma.Config.Loader do
:swarm
]
@reject_groups [
:postgrex,
:tesla
]
if Code.ensure_loaded?(Config.Reader) do
@reader Config.Reader
@ -47,7 +52,8 @@ defmodule Pleroma.Config.Loader do
@spec filter_group(atom(), keyword()) :: keyword()
def filter_group(group, configs) do
Enum.reject(configs[group], fn {key, _v} ->
key in @reject_keys or (group == :phoenix and key == :serve_endpoints) or group == :postgrex
key in @reject_keys or group in @reject_groups or
(group == :phoenix and key == :serve_endpoints)
end)
end
end

View file

@ -0,0 +1,34 @@
defmodule Pleroma.Config.Oban do
require Logger
def warn do
oban_config = Pleroma.Config.get(Oban)
crontab =
[
Pleroma.Workers.Cron.StatsWorker,
Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker,
Pleroma.Workers.Cron.ClearOauthTokenWorker
]
|> Enum.reduce(oban_config[:crontab], fn removed_worker, acc ->
with acc when is_list(acc) <- acc,
setting when is_tuple(setting) <-
Enum.find(acc, fn {_, worker} -> worker == removed_worker end) do
"""
!!!OBAN CONFIG WARNING!!!
Your Oban crontab settings reference workers that have been removed.
Please remove this setting from the crontab in your config file (prod.secret.exs): #{
inspect(setting)
}
"""
|> Logger.warn()
List.delete(acc, setting)
else
_ -> acc
end
end)
Pleroma.Config.put(Oban, Keyword.put(oban_config, :crontab, crontab))
end
end

View file

@ -28,15 +28,11 @@ defmodule Pleroma.Config.TransferTask do
{:pleroma, Pleroma.Captcha, [:seconds_valid]},
{:pleroma, Pleroma.Upload, [:proxy_remote]},
{:pleroma, :instance, [:upload_limit]},
{:pleroma, :email_notifications, [:digest]},
{:pleroma, :oauth2, [:clean_expired_tokens]},
{:pleroma, Pleroma.ActivityExpiration, [:enabled]},
{:pleroma, Pleroma.ScheduledActivity, [:enabled]},
{:pleroma, :gopher, [:enabled]}
]
def start_link(_) do
load_and_update_env()
def start_link(restart_pleroma? \\ true) do
load_and_update_env([], restart_pleroma?)
if Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Repo)
:ignore
end
@ -48,7 +44,7 @@ defmodule Pleroma.Config.TransferTask do
{logger, other} =
(Repo.all(ConfigDB) ++ deleted_settings)
|> Enum.map(&transform_and_merge/1)
|> Enum.map(&merge_with_default/1)
|> Enum.split_with(fn {group, _, _, _} -> group in [:logger, :quack] end)
logger
@ -92,11 +88,7 @@ defmodule Pleroma.Config.TransferTask do
end
end
defp transform_and_merge(%{group: group, key: key, value: value} = setting) do
group = ConfigDB.from_string(group)
key = ConfigDB.from_string(key)
value = ConfigDB.from_binary(value)
defp merge_with_default(%{group: group, key: key, value: value} = setting) do
default = Config.Holder.default_config(group, key)
merged =

View file

@ -162,10 +162,13 @@ defmodule Pleroma.Conversation.Participation do
for_user(user, params)
|> Enum.map(fn participation ->
activity_id =
ActivityPub.fetch_latest_activity_id_for_context(participation.conversation.ap_id, %{
user: user,
blocking_user: user
})
ActivityPub.fetch_latest_direct_activity_id_for_context(
participation.conversation.ap_id,
%{
user: user,
blocking_user: user
}
)
%{
participation

View file

@ -10,32 +10,70 @@ defmodule Pleroma.CounterCache do
import Ecto.Query
schema "counter_cache" do
field(:name, :string)
field(:count, :integer)
field(:instance, :string)
field(:public, :integer)
field(:unlisted, :integer)
field(:private, :integer)
field(:direct, :integer)
end
def changeset(struct, params) do
struct
|> cast(params, [:name, :count])
|> validate_required([:name])
|> unique_constraint(:name)
|> cast(params, [:instance, :public, :unlisted, :private, :direct])
|> validate_required([:instance])
|> unique_constraint(:instance)
end
def get_as_map(names) when is_list(names) do
def get_by_instance(instance) do
CounterCache
|> where([cc], cc.name in ^names)
|> Repo.all()
|> Enum.group_by(& &1.name, & &1.count)
|> Map.new(fn {k, v} -> {k, hd(v)} end)
|> select([c], %{
"public" => c.public,
"unlisted" => c.unlisted,
"private" => c.private,
"direct" => c.direct
})
|> where([c], c.instance == ^instance)
|> Repo.one()
|> case do
nil -> %{"public" => 0, "unlisted" => 0, "private" => 0, "direct" => 0}
val -> val
end
end
def set(name, count) do
def get_sum do
CounterCache
|> select([c], %{
"public" => type(sum(c.public), :integer),
"unlisted" => type(sum(c.unlisted), :integer),
"private" => type(sum(c.private), :integer),
"direct" => type(sum(c.direct), :integer)
})
|> Repo.one()
end
def set(instance, values) do
params =
Enum.reduce(
["public", "private", "unlisted", "direct"],
%{"instance" => instance},
fn param, acc ->
Map.put_new(acc, param, Map.get(values, param, 0))
end
)
%CounterCache{}
|> changeset(%{"name" => name, "count" => count})
|> changeset(params)
|> Repo.insert(
on_conflict: [set: [count: count]],
on_conflict: [
set: [
public: params["public"],
private: params["private"],
unlisted: params["unlisted"],
direct: params["direct"]
]
],
returning: true,
conflict_target: :name
conflict_target: :instance
)
end
end
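A sketch of the reworked per-instance API; it assumes the application (and Repo) is running, and the instance host is illustrative:

Pleroma.CounterCache.set("example.com", %{"public" => 5, "direct" => 1})
Pleroma.CounterCache.get_by_instance("example.com")
# => %{"public" => 5, "unlisted" => 0, "private" => 0, "direct" => 1}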

View file

@ -6,16 +6,21 @@ defmodule Pleroma.Docs.Generator do
implementation.process(descriptions)
end
@spec list_modules_in_dir(String.t(), String.t()) :: [module()]
def list_modules_in_dir(dir, start) do
with {:ok, files} <- File.ls(dir) do
files
|> Enum.filter(&String.ends_with?(&1, ".ex"))
|> Enum.map(fn filename ->
module = filename |> String.trim_trailing(".ex") |> Macro.camelize()
String.to_atom(start <> module)
end)
end
@spec list_behaviour_implementations(behaviour :: module()) :: [module()]
def list_behaviour_implementations(behaviour) do
:code.all_loaded()
|> Enum.filter(fn {module, _} ->
# This shouldn't be needed as all modules are expected to have module_info/1,
# but in test environments some transient modules `:elixir_compiler_XX`
# are loaded for some reason (where XX is a random integer).
if function_exported?(module, :module_info, 1) do
module.module_info(:attributes)
|> Keyword.get_values(:behaviour)
|> List.flatten()
|> Enum.member?(behaviour)
end
end)
|> Enum.map(fn {module, _} -> module end)
end
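# Illustrative call (not part of the change); Ecto.Type is used here only because
# the Pleroma.EctoType.* modules added elsewhere in this commit implement that behaviour,
# and only already-loaded modules are returned:
Pleroma.Docs.Generator.list_behaviour_implementations(Ecto.Type)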
@doc """
@ -87,6 +92,12 @@ defmodule Pleroma.Docs.Generator do
else: string
end
defp format_suggestions({:list_behaviour_implementations, behaviour}) do
behaviour
|> list_behaviour_implementations()
|> format_suggestions()
end
defp format_suggestions([]), do: []
defp format_suggestions([suggestion | tail]) do

View file

@ -1,5 +1,19 @@
defmodule Pleroma.Docs.JSON do
@behaviour Pleroma.Docs.Generator
@external_resource "config/description.exs"
@raw_config Pleroma.Config.Loader.read("config/description.exs")
@raw_descriptions @raw_config[:pleroma][:config_description]
@term __MODULE__.Compiled
@spec compile :: :ok
def compile do
:persistent_term.put(@term, Pleroma.Docs.Generator.convert_to_strings(@raw_descriptions))
end
@spec compiled_descriptions :: Map.t()
def compiled_descriptions do
:persistent_term.get(@term)
end
@spec process(keyword()) :: {:ok, String.t()}
def process(descriptions) do
@ -13,11 +27,4 @@ defmodule Pleroma.Docs.JSON do
{:ok, path}
end
end
def compile do
with config <- Pleroma.Config.Loader.read("config/description.exs") do
config[:pleroma][:config_description]
|> Pleroma.Docs.Generator.convert_to_strings()
end
end
end

View file

@ -68,6 +68,11 @@ defmodule Pleroma.Docs.Markdown do
IO.write(file, " #{list_mark}`#{inspect(suggestion)}`\n")
end
defp print_suggestions(file, {:list_behaviour_implementations, behaviour}) do
suggestions = Pleroma.Docs.Generator.list_behaviour_implementations(behaviour)
print_suggestions(file, suggestions)
end
defp print_suggestions(_file, nil), do: nil
defp print_suggestions(_file, ""), do: nil

View file

@ -1,4 +1,8 @@
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.DateTime do
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime do
@moduledoc """
The AP standard defines the date fields in AP as xsd:DateTime. Elixir's
DateTime can't parse this, but it can parse the related iso8601. This

View file

@ -0,0 +1,34 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.Emoji do
use Ecto.Type
def type, do: :map
def cast(data) when is_map(data) do
has_invalid_emoji? =
Enum.find(data, fn
{name, uri} when is_binary(name) and is_binary(uri) ->
# based on ObjectValidators.Uri.cast()
case URI.parse(uri) do
%URI{host: nil} -> true
%URI{host: ""} -> true
%URI{scheme: scheme} when scheme in ["https", "http"] -> false
_ -> true
end
{_name, _uri} ->
true
end)
if has_invalid_emoji?, do: :error, else: {:ok, data}
end
def cast(_data), do: :error
def dump(data), do: {:ok, data}
def load(data), do: {:ok, data}
end
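A sketch of the cast behaviour; the emoji name and URLs are made up for illustration:

alias Pleroma.EctoType.ActivityPub.ObjectValidators.Emoji

{:ok, _} = Emoji.cast(%{"firefox" => "https://example.org/emoji/firefox.png"})
:error = Emoji.cast(%{"broken" => "not a url"})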

View file

@ -1,4 +1,8 @@
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.ObjectID do
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ObjectID do
use Ecto.Type
def type, do: :string

View file

@ -1,7 +1,11 @@
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.Recipients do
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.Recipients do
use Ecto.Type
alias Pleroma.Web.ActivityPub.ObjectValidators.Types.ObjectID
alias Pleroma.EctoType.ActivityPub.ObjectValidators.ObjectID
def type, do: {:array, ObjectID}

View file

@ -2,7 +2,7 @@
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.SafeText do
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.SafeText do
use Ecto.Type
alias Pleroma.HTML

View file

@ -1,4 +1,8 @@
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.Uri do
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.Uri do
use Ecto.Type
def type, do: :string

View file

@ -0,0 +1,26 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.Config.Atom do
use Ecto.Type
def type, do: :atom
def cast(key) when is_atom(key) do
{:ok, key}
end
def cast(key) when is_binary(key) do
{:ok, Pleroma.ConfigDB.string_to_elixir_types(key)}
end
def cast(_), do: :error
def load(key) do
{:ok, Pleroma.ConfigDB.string_to_elixir_types(key)}
end
def dump(key) when is_atom(key), do: {:ok, inspect(key)}
def dump(_), do: :error
end

View file

@ -0,0 +1,27 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.Config.BinaryValue do
use Ecto.Type
def type, do: :term
def cast(value) when is_binary(value) do
if String.valid?(value) do
{:ok, value}
else
{:ok, :erlang.binary_to_term(value)}
end
end
def cast(value), do: {:ok, value}
def load(value) when is_binary(value) do
{:ok, :erlang.binary_to_term(value)}
end
def dump(value) do
{:ok, :erlang.term_to_binary(value)}
end
end

View file

@ -8,9 +8,10 @@ defmodule Pleroma.Emails.AdminEmail do
import Swoosh.Email
alias Pleroma.Config
alias Pleroma.HTML
alias Pleroma.Web.Router.Helpers
defp instance_config, do: Pleroma.Config.get(:instance)
defp instance_config, do: Config.get(:instance)
defp instance_name, do: instance_config()[:name]
defp instance_notify_email do
@ -72,6 +73,8 @@ defmodule Pleroma.Emails.AdminEmail do
<p>Reported Account: <a href="#{user_url(account)}">#{account.nickname}</a></p>
#{comment_html}
#{statuses_html}
<p>
<a href="#{Pleroma.Web.base_url()}/pleroma/admin/#/reports/index">View Reports in AdminFE</a>
"""
new()
@ -80,4 +83,18 @@ defmodule Pleroma.Emails.AdminEmail do
|> subject("#{instance_name()} Report")
|> html_body(html_body)
end
def new_unapproved_registration(to, account) do
html_body = """
<p>New account for review: <a href="#{user_url(account)}">@#{account.nickname}</a></p>
<blockquote>#{HTML.strip_tags(account.registration_reason)}</blockquote>
<a href="#{Pleroma.Web.base_url()}/pleroma/admin">Visit AdminFE</a>
"""
new()
|> to({to.name, to.email})
|> from({instance_name(), instance_notify_email()})
|> subject("New account up for review on #{instance_name()} (@#{account.nickname})")
|> html_body(html_body)
end
end

View file

@ -12,17 +12,22 @@ defmodule Pleroma.Emails.UserEmail do
alias Pleroma.Web.Endpoint
alias Pleroma.Web.Router
defp instance_name, do: Config.get([:instance, :name])
defp sender do
email = Config.get([:instance, :notify_email]) || Config.get([:instance, :email])
{instance_name(), email}
end
import Pleroma.Config.Helpers, only: [instance_name: 0, sender: 0]
defp recipient(email, nil), do: email
defp recipient(email, name), do: {name, email}
defp recipient(%User{} = user), do: recipient(user.email, user.name)
@spec welcome(User.t(), map()) :: Swoosh.Email.t()
def welcome(user, opts \\ %{}) do
new()
|> to(recipient(user))
|> from(Map.get(opts, :sender, sender()))
|> subject(Map.get(opts, :subject, "Welcome to #{instance_name()}!"))
|> html_body(Map.get(opts, :html, "Welcome to #{instance_name()}!"))
|> text_body(Map.get(opts, :text, "Welcome to #{instance_name()}!"))
end
def password_reset_email(user, token) when is_binary(token) do
password_reset_url = Router.Helpers.reset_password_url(Endpoint, :reset, token)
@ -102,25 +107,34 @@ defmodule Pleroma.Emails.UserEmail do
|> Enum.filter(&(&1.activity.data["type"] == "Create"))
|> Enum.map(fn notification ->
object = Pleroma.Object.normalize(notification.activity)
object = update_in(object.data["content"], &format_links/1)
%{
data: notification,
object: object,
from: User.get_by_ap_id(notification.activity.actor)
}
if not is_nil(object) do
object = update_in(object.data["content"], &format_links/1)
%{
data: notification,
object: object,
from: User.get_by_ap_id(notification.activity.actor)
}
end
end)
|> Enum.filter(& &1)
followers =
notifications
|> Enum.filter(&(&1.activity.data["type"] == "Follow"))
|> Enum.map(fn notification ->
%{
data: notification,
object: Pleroma.Object.normalize(notification.activity),
from: User.get_by_ap_id(notification.activity.actor)
}
from = User.get_by_ap_id(notification.activity.actor)
if not is_nil(from) do
%{
data: notification,
object: Pleroma.Object.normalize(notification.activity),
from: User.get_by_ap_id(notification.activity.actor)
}
end
end)
|> Enum.filter(& &1)
unless Enum.empty?(mentions) do
styling = Config.get([__MODULE__, :styling])

View file

@ -108,7 +108,7 @@ defmodule Pleroma.Emoji.Loader do
if File.exists?(emoji_txt) do
load_from_file(emoji_txt, emoji_groups)
else
extensions = Pleroma.Config.get([:emoji, :pack_extensions])
extensions = Config.get([:emoji, :pack_extensions])
Logger.info(
"No emoji.txt found for pack \"#{pack_name}\", assuming all #{

View file

@ -1,6 +1,7 @@
defmodule Pleroma.Emoji.Pack do
@derive {Jason.Encoder, only: [:files, :pack]}
@derive {Jason.Encoder, only: [:files, :pack, :files_count]}
defstruct files: %{},
files_count: 0,
pack_file: nil,
path: nil,
pack: %{},
@ -8,6 +9,7 @@ defmodule Pleroma.Emoji.Pack do
@type t() :: %__MODULE__{
files: %{String.t() => Path.t()},
files_count: non_neg_integer(),
pack_file: Path.t(),
path: Path.t(),
pack: map(),
@ -16,7 +18,7 @@ defmodule Pleroma.Emoji.Pack do
alias Pleroma.Emoji
@spec create(String.t()) :: :ok | {:error, File.posix()} | {:error, :empty_values}
@spec create(String.t()) :: {:ok, t()} | {:error, File.posix()} | {:error, :empty_values}
def create(name) do
with :ok <- validate_not_empty([name]),
dir <- Path.join(emoji_path(), name),
@ -26,10 +28,28 @@ defmodule Pleroma.Emoji.Pack do
end
end
@spec show(String.t()) :: {:ok, t()} | {:error, atom()}
def show(name) do
defp paginate(entities, 1, page_size), do: Enum.take(entities, page_size)
defp paginate(entities, page, page_size) do
entities
|> Enum.chunk_every(page_size)
|> Enum.at(page - 1)
end
@spec show(keyword()) :: {:ok, t()} | {:error, atom()}
def show(opts) do
name = opts[:name]
with :ok <- validate_not_empty([name]),
{:ok, pack} <- load_pack(name) do
shortcodes =
pack.files
|> Map.keys()
|> Enum.sort()
|> paginate(opts[:page], opts[:page_size])
pack = Map.put(pack, :files, Map.take(pack.files, shortcodes))
{:ok, validate_pack(pack)}
end
end
@ -120,10 +140,10 @@ defmodule Pleroma.Emoji.Pack do
end
end
@spec list_local() :: {:ok, map()}
def list_local do
@spec list_local(keyword()) :: {:ok, map(), non_neg_integer()}
def list_local(opts) do
with {:ok, results} <- list_packs_dir() do
packs =
all_packs =
results
|> Enum.map(fn name ->
case load_pack(name) do
@ -132,9 +152,13 @@ defmodule Pleroma.Emoji.Pack do
end
end)
|> Enum.reject(&is_nil/1)
packs =
all_packs
|> paginate(opts[:page], opts[:page_size])
|> Map.new(fn pack -> {pack.name, validate_pack(pack)} end)
{:ok, packs}
{:ok, packs, length(all_packs)}
end
end
@ -146,7 +170,7 @@ defmodule Pleroma.Emoji.Pack do
end
end
@spec download(String.t(), String.t(), String.t()) :: :ok | {:error, atom()}
@spec download(String.t(), String.t(), String.t()) :: {:ok, t()} | {:error, atom()}
def download(name, url, as) do
uri = url |> String.trim() |> URI.parse()
@ -197,7 +221,12 @@ defmodule Pleroma.Emoji.Pack do
|> Map.put(:path, Path.dirname(pack_file))
|> Map.put(:name, name)
{:ok, pack}
files_count =
pack.files
|> Map.keys()
|> length()
{:ok, Map.put(pack, :files_count, files_count)}
else
{:error, :not_found}
end
@ -296,7 +325,9 @@ defmodule Pleroma.Emoji.Pack do
# Otherwise, they'd have to download it from external-src
pack.pack["share-files"] &&
Enum.all?(pack.files, fn {_, file} ->
File.exists?(Path.join(pack.path, file))
pack.path
|> Path.join(file)
|> File.exists?()
end)
end
@ -440,7 +471,7 @@ defmodule Pleroma.Emoji.Pack do
# with the API so it should be sufficient
with {:create_dir, :ok} <- {:create_dir, File.mkdir_p(emoji_path)},
{:ls, {:ok, results}} <- {:ls, File.ls(emoji_path)} do
{:ok, results}
{:ok, Enum.sort(results)}
else
{:create_dir, {:error, e}} -> {:error, :create_dir, e}
{:ls, {:error, e}} -> {:error, :ls, e}

View file

@ -34,10 +34,18 @@ defmodule Pleroma.Filter do
Repo.one(query)
end
def get_filters(%User{id: user_id} = _user) do
def get_active(query) do
from(f in query, where: is_nil(f.expires_at) or f.expires_at > ^NaiveDateTime.utc_now())
end
def get_irreversible(query) do
from(f in query, where: f.hide)
end
def get_filters(query \\ __MODULE__, %User{id: user_id}) do
query =
from(
f in Pleroma.Filter,
f in query,
where: f.user_id == ^user_id,
order_by: [desc: :id]
)
@ -95,4 +103,34 @@ defmodule Pleroma.Filter do
|> validate_required([:phrase, :context])
|> Repo.update()
end
def compose_regex(user_or_filters, format \\ :postgres)
def compose_regex(%User{} = user, format) do
__MODULE__
|> get_active()
|> get_irreversible()
|> get_filters(user)
|> compose_regex(format)
end
def compose_regex([_ | _] = filters, format) do
phrases =
filters
|> Enum.map(& &1.phrase)
|> Enum.join("|")
case format do
:postgres ->
"\\y(#{phrases})\\y"
:re ->
~r/\b#{phrases}\b/i
_ ->
nil
end
end
def compose_regex(_, _), do: nil
end
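As an illustration, given two hypothetical filters whose phrases are "spam" and "ads", the two output formats come out as:

# Word-boundary regex for use inside a Postgres `~*` fragment:
Pleroma.Filter.compose_regex(filters, :postgres)
#=> "\\y(spam|ads)\\y"

# Elixir-side Regex, used when filtering in memory (e.g. notifications):
Pleroma.Filter.compose_regex(filters, :re)
#=> ~r/\bspam|ads\b/i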

View file

@ -95,7 +95,11 @@ defmodule Pleroma.FollowingRelationship do
|> where([r], r.state == ^:follow_accept)
end
def followers_ap_ids(%User{} = user, from_ap_ids \\ nil) do
def followers_ap_ids(user, from_ap_ids \\ nil)
def followers_ap_ids(_, []), do: []
def followers_ap_ids(%User{} = user, from_ap_ids) do
query =
user
|> followers_query()
@ -124,6 +128,7 @@ defmodule Pleroma.FollowingRelationship do
|> join(:inner, [r], f in assoc(r, :follower))
|> where([r], r.state == ^:follow_pending)
|> where([r], r.following_id == ^id)
|> where([r, f], f.deactivated != true)
|> select([r, f], f)
|> Repo.all()
end
@ -259,4 +264,12 @@ defmodule Pleroma.FollowingRelationship do
end
end)
end
@spec following_ap_ids(User.t()) :: [String.t()]
def following_ap_ids(%User{} = user) do
user
|> following_query()
|> select([r, u], u.ap_id)
|> Repo.all()
end
end

View file

@ -10,11 +10,15 @@ defmodule Pleroma.Formatter do
@link_regex ~r"((?:http(s)?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:/?#[\]@!\$&'\(\)\*\+,;=.]+)|[0-9a-z+\-\.]+:[0-9a-z$-_.+!*'(),]+"ui
@markdown_characters_regex ~r/(`|\*|_|{|}|[|]|\(|\)|#|\+|-|\.|!)/
@auto_linker_config hashtag: true,
hashtag_handler: &Pleroma.Formatter.hashtag_handler/4,
mention: true,
mention_handler: &Pleroma.Formatter.mention_handler/4,
scheme: true
defp linkify_opts do
Pleroma.Config.get(Pleroma.Formatter) ++
[
hashtag: true,
hashtag_handler: &Pleroma.Formatter.hashtag_handler/4,
mention: true,
mention_handler: &Pleroma.Formatter.mention_handler/4
]
end
def escape_mention_handler("@" <> nickname = mention, buffer, _, _) do
case User.get_cached_by_nickname(nickname) do
@ -80,19 +84,19 @@ defmodule Pleroma.Formatter do
@spec linkify(String.t(), keyword()) ::
{String.t(), [{String.t(), User.t()}], [{String.t(), String.t()}]}
def linkify(text, options \\ []) do
options = options ++ @auto_linker_config
options = linkify_opts() ++ options
if options[:safe_mention] && Regex.named_captures(@safe_mention_regex, text) do
%{"mentions" => mentions, "rest" => rest} = Regex.named_captures(@safe_mention_regex, text)
acc = %{mentions: MapSet.new(), tags: MapSet.new()}
{text_mentions, %{mentions: mentions}} = AutoLinker.link_map(mentions, acc, options)
{text_rest, %{tags: tags}} = AutoLinker.link_map(rest, acc, options)
{text_mentions, %{mentions: mentions}} = Linkify.link_map(mentions, acc, options)
{text_rest, %{tags: tags}} = Linkify.link_map(rest, acc, options)
{text_mentions <> text_rest, MapSet.to_list(mentions), MapSet.to_list(tags)}
else
acc = %{mentions: MapSet.new(), tags: MapSet.new()}
{text, %{mentions: mentions, tags: tags}} = AutoLinker.link_map(text, acc, options)
{text, %{mentions: mentions, tags: tags}} = Linkify.link_map(text, acc, options)
{text, MapSet.to_list(mentions), MapSet.to_list(tags)}
end
@ -111,9 +115,9 @@ defmodule Pleroma.Formatter do
if options[:safe_mention] && Regex.named_captures(@safe_mention_regex, text) do
%{"mentions" => mentions, "rest" => rest} = Regex.named_captures(@safe_mention_regex, text)
AutoLinker.link(mentions, options) <> AutoLinker.link(rest, options)
Linkify.link(mentions, options) <> Linkify.link(rest, options)
else
AutoLinker.link(text, options)
Linkify.link(text, options)
end
end

View file

@ -96,16 +96,18 @@ defmodule Pleroma.Gopher.Server.ProtocolHandler do
def response("/main/public") do
posts =
ActivityPub.fetch_public_activities(%{"type" => ["Create"], "local_only" => true})
|> render_activities
%{type: ["Create"], local_only: true}
|> ActivityPub.fetch_public_activities()
|> render_activities()
info("Welcome to the Public Timeline!") <> posts <> ".\r\n"
end
def response("/main/all") do
posts =
ActivityPub.fetch_public_activities(%{"type" => ["Create"]})
|> render_activities
%{type: ["Create"]}
|> ActivityPub.fetch_public_activities()
|> render_activities()
info("Welcome to the Federated Timeline!") <> posts <> ".\r\n"
end
@ -130,13 +132,14 @@ defmodule Pleroma.Gopher.Server.ProtocolHandler do
def response("/users/" <> nickname) do
with %User{} = user <- User.get_cached_by_nickname(nickname) do
params = %{
"type" => ["Create"],
"actor_id" => user.ap_id
type: ["Create"],
actor_id: user.ap_id
}
activities =
ActivityPub.fetch_public_activities(params)
|> render_activities
params
|> ActivityPub.fetch_public_activities()
|> render_activities()
info("Posts by #{user.nickname}") <> activities <> ".\r\n"
else

View file

@ -19,7 +19,8 @@ defmodule Pleroma.Gun.API do
:tls_opts,
:tcp_opts,
:socks_opts,
:ws_opts
:ws_opts,
:supervise
]
@impl Gun

View file

@ -3,85 +3,33 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun.Conn do
@moduledoc """
Struct for gun connection data
"""
alias Pleroma.Gun
alias Pleroma.Pool.Connections
require Logger
@type gun_state :: :up | :down
@type conn_state :: :active | :idle
@type t :: %__MODULE__{
conn: pid(),
gun_state: gun_state(),
conn_state: conn_state(),
used_by: [pid()],
last_reference: pos_integer(),
crf: float(),
retries: pos_integer()
}
defstruct conn: nil,
gun_state: :open,
conn_state: :init,
used_by: [],
last_reference: 0,
crf: 1,
retries: 0
@spec open(String.t() | URI.t(), atom(), keyword()) :: :ok | nil
def open(url, name, opts \\ [])
def open(url, name, opts) when is_binary(url), do: open(URI.parse(url), name, opts)
def open(%URI{} = uri, name, opts) do
def open(%URI{} = uri, opts) do
pool_opts = Pleroma.Config.get([:connections_pool], [])
opts =
opts
|> Enum.into(%{})
|> Map.put_new(:retry, pool_opts[:retry] || 1)
|> Map.put_new(:retry_timeout, pool_opts[:retry_timeout] || 1000)
|> Map.put_new(:await_up_timeout, pool_opts[:await_up_timeout] || 5_000)
|> Map.put_new(:connect_timeout, pool_opts[:connect_timeout] || 5_000)
|> Map.put_new(:supervise, false)
|> maybe_add_tls_opts(uri)
key = "#{uri.scheme}:#{uri.host}:#{uri.port}"
max_connections = pool_opts[:max_connections] || 250
conn_pid =
if Connections.count(name) < max_connections do
do_open(uri, opts)
else
close_least_used_and_do_open(name, uri, opts)
end
if is_pid(conn_pid) do
conn = %Pleroma.Gun.Conn{
conn: conn_pid,
gun_state: :up,
conn_state: :active,
last_reference: :os.system_time(:second)
}
:ok = Gun.set_owner(conn_pid, Process.whereis(name))
Connections.add_conn(name, key, conn)
end
do_open(uri, opts)
end
defp maybe_add_tls_opts(opts, %URI{scheme: "http"}), do: opts
defp maybe_add_tls_opts(opts, %URI{scheme: "https", host: host}) do
defp maybe_add_tls_opts(opts, %URI{scheme: "https"}) do
tls_opts = [
verify: :verify_peer,
cacertfile: CAStore.file_path(),
depth: 20,
reuse_sessions: false,
verify_fun:
{&:ssl_verify_hostname.verify_fun/3,
[check_hostname: Pleroma.HTTP.Connection.format_host(host)]}
log_level: :warning,
customize_hostname_check: [match_fun: :public_key.pkix_verify_hostname_match_fun(:https)]
]
tls_opts =
@ -102,10 +50,10 @@ defmodule Pleroma.Gun.Conn do
with open_opts <- Map.delete(opts, :tls_opts),
{:ok, conn} <- Gun.open(proxy_host, proxy_port, open_opts),
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]),
{:ok, _} <- Gun.await_up(conn, opts[:connect_timeout]),
stream <- Gun.connect(conn, connect_opts),
{:response, :fin, 200, _} <- Gun.await(conn, stream) do
conn
{:ok, conn}
else
error ->
Logger.warn(
@ -140,8 +88,8 @@ defmodule Pleroma.Gun.Conn do
|> Map.put(:socks_opts, socks_opts)
with {:ok, conn} <- Gun.open(proxy_host, proxy_port, opts),
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
conn
{:ok, _} <- Gun.await_up(conn, opts[:connect_timeout]) do
{:ok, conn}
else
error ->
Logger.warn(
@ -155,11 +103,11 @@ defmodule Pleroma.Gun.Conn do
end
defp do_open(%URI{host: host, port: port} = uri, opts) do
host = Pleroma.HTTP.Connection.parse_host(host)
host = Pleroma.HTTP.AdapterHelper.parse_host(host)
with {:ok, conn} <- Gun.open(host, port, opts),
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
conn
{:ok, _} <- Gun.await_up(conn, opts[:connect_timeout]) do
{:ok, conn}
else
error ->
Logger.warn(
@ -171,7 +119,7 @@ defmodule Pleroma.Gun.Conn do
end
defp destination_opts(%URI{host: host, port: port}) do
host = Pleroma.HTTP.Connection.parse_host(host)
host = Pleroma.HTTP.AdapterHelper.parse_host(host)
%{host: host, port: port}
end
@ -181,17 +129,6 @@ defmodule Pleroma.Gun.Conn do
defp add_http2_opts(opts, _, _), do: opts
defp close_least_used_and_do_open(name, uri, opts) do
with [{key, conn} | _conns] <- Connections.get_unused_conns(name),
:ok <- Gun.close(conn.conn) do
Connections.remove_conn(name, key)
do_open(uri, opts)
else
[] -> {:error, :pool_overflowed}
end
end
def compose_uri_log(%URI{scheme: scheme, host: host, path: path}) do
"#{scheme}://#{host}#{path}"
end

View file

@ -0,0 +1,82 @@
defmodule Pleroma.Gun.ConnectionPool do
@registry __MODULE__
alias Pleroma.Gun.ConnectionPool.WorkerSupervisor
def children do
[
{Registry, keys: :unique, name: @registry},
Pleroma.Gun.ConnectionPool.WorkerSupervisor
]
end
@spec get_conn(URI.t(), keyword()) :: {:ok, pid()} | {:error, term()}
def get_conn(uri, opts) do
key = "#{uri.scheme}:#{uri.host}:#{uri.port}"
case Registry.lookup(@registry, key) do
# The key has already been registered, but the connection is not up yet
[{worker_pid, nil}] ->
get_gun_pid_from_worker(worker_pid, true)
[{worker_pid, {gun_pid, _used_by, _crf, _last_reference}}] ->
GenServer.call(worker_pid, :add_client)
{:ok, gun_pid}
[] ->
# :gun.set_owner fails in :connected state for whatever reason,
# so we open the connection in the process directly and send its pid back
# We trust gun to handle timeouts by itself
case WorkerSupervisor.start_worker([key, uri, opts, self()]) do
{:ok, worker_pid} ->
get_gun_pid_from_worker(worker_pid, false)
{:error, {:already_started, worker_pid}} ->
get_gun_pid_from_worker(worker_pid, true)
err ->
err
end
end
end
defp get_gun_pid_from_worker(worker_pid, register) do
# GenServer.call would block the process for the full timeout length if
# the server crashes on startup (which will happen if gun fails to connect),
# so instead we use cast + monitor
ref = Process.monitor(worker_pid)
if register, do: GenServer.cast(worker_pid, {:add_client, self()})
receive do
{:conn_pid, pid} ->
Process.demonitor(ref)
{:ok, pid}
{:DOWN, ^ref, :process, ^worker_pid, reason} ->
case reason do
{:shutdown, {:error, _} = error} -> error
{:shutdown, error} -> {:error, error}
_ -> {:error, reason}
end
end
end
@spec release_conn(pid()) :: :ok
def release_conn(conn_pid) do
# :ets.fun2ms(fn {_, {worker_pid, {gun_pid, _, _, _}}} when gun_pid == conn_pid ->
# worker_pid end)
query_result =
Registry.select(@registry, [
{{:_, :"$1", {:"$2", :_, :_, :_}}, [{:==, :"$2", conn_pid}], [:"$1"]}
])
case query_result do
[worker_pid] ->
GenServer.call(worker_pid, :remove_client)
[] ->
:ok
end
end
end
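For reference, the registry value per key is the {gun_pid, used_by, crf, last_reference} tuple the worker maintains, and the match spec selects the worker pid whose gun pid equals the connection being released. A hand-rolled, slower equivalent, purely illustrative and assuming conn_pid is bound as in release_conn/1:

Registry.select(Pleroma.Gun.ConnectionPool, [{{:_, :"$1", :"$2"}, [], [{{:"$1", :"$2"}}]}])
|> Enum.flat_map(fn
  # the value is nil while the connection is still being established
  {worker_pid, {gun_pid, _used_by, _crf, _last_ref}} when gun_pid == conn_pid -> [worker_pid]
  _ -> []
end)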

View file

@ -0,0 +1,85 @@
defmodule Pleroma.Gun.ConnectionPool.Reclaimer do
use GenServer, restart: :temporary
@registry Pleroma.Gun.ConnectionPool
def start_monitor do
pid =
case :gen_server.start(__MODULE__, [], name: {:via, Registry, {@registry, "reclaimer"}}) do
{:ok, pid} ->
pid
{:error, {:already_registered, pid}} ->
pid
end
{pid, Process.monitor(pid)}
end
@impl true
def init(_) do
{:ok, nil, {:continue, :reclaim}}
end
@impl true
def handle_continue(:reclaim, _) do
max_connections = Pleroma.Config.get([:connections_pool, :max_connections])
reclaim_max =
[:connections_pool, :reclaim_multiplier]
|> Pleroma.Config.get()
|> Kernel.*(max_connections)
|> round
|> max(1)
:telemetry.execute([:pleroma, :connection_pool, :reclaim, :start], %{}, %{
max_connections: max_connections,
reclaim_max: reclaim_max
})
# :ets.fun2ms(
# fn {_, {worker_pid, {_, used_by, crf, last_reference}}} when used_by == [] ->
# {worker_pid, crf, last_reference} end)
unused_conns =
Registry.select(
@registry,
[
{{:_, :"$1", {:_, :"$2", :"$3", :"$4"}}, [{:==, :"$2", []}], [{{:"$1", :"$3", :"$4"}}]}
]
)
case unused_conns do
[] ->
:telemetry.execute(
[:pleroma, :connection_pool, :reclaim, :stop],
%{reclaimed_count: 0},
%{
max_connections: max_connections
}
)
{:stop, :no_unused_conns, nil}
unused_conns ->
reclaimed =
unused_conns
|> Enum.sort(fn {_pid1, crf1, last_reference1}, {_pid2, crf2, last_reference2} ->
crf1 <= crf2 and last_reference1 <= last_reference2
end)
|> Enum.take(reclaim_max)
reclaimed
|> Enum.each(fn {pid, _, _} ->
DynamicSupervisor.terminate_child(Pleroma.Gun.ConnectionPool.WorkerSupervisor, pid)
end)
:telemetry.execute(
[:pleroma, :connection_pool, :reclaim, :stop],
%{reclaimed_count: Enum.count(reclaimed)},
%{max_connections: max_connections}
)
{:stop, :normal, nil}
end
end
end

View file

@ -0,0 +1,146 @@
defmodule Pleroma.Gun.ConnectionPool.Worker do
alias Pleroma.Gun
use GenServer, restart: :temporary
@registry Pleroma.Gun.ConnectionPool
def start_link([key | _] = opts) do
GenServer.start_link(__MODULE__, opts, name: {:via, Registry, {@registry, key}})
end
@impl true
def init([_key, _uri, _opts, _client_pid] = opts) do
{:ok, nil, {:continue, {:connect, opts}}}
end
@impl true
def handle_continue({:connect, [key, uri, opts, client_pid]}, _) do
with {:ok, conn_pid} <- Gun.Conn.open(uri, opts),
Process.link(conn_pid) do
time = :erlang.monotonic_time(:millisecond)
{_, _} =
Registry.update_value(@registry, key, fn _ ->
{conn_pid, [client_pid], 1, time}
end)
send(client_pid, {:conn_pid, conn_pid})
{:noreply,
%{key: key, timer: nil, client_monitors: %{client_pid => Process.monitor(client_pid)}},
:hibernate}
else
err ->
{:stop, {:shutdown, err}, nil}
end
end
@impl true
def handle_cast({:add_client, client_pid}, state) do
case handle_call(:add_client, {client_pid, nil}, state) do
{:reply, conn_pid, state, :hibernate} ->
send(client_pid, {:conn_pid, conn_pid})
{:noreply, state, :hibernate}
end
end
@impl true
def handle_cast({:remove_client, client_pid}, state) do
case handle_call(:remove_client, {client_pid, nil}, state) do
{:reply, _, state, :hibernate} ->
{:noreply, state, :hibernate}
end
end
@impl true
def handle_call(:add_client, {client_pid, _}, %{key: key} = state) do
time = :erlang.monotonic_time(:millisecond)
{{conn_pid, _, _, _}, _} =
Registry.update_value(@registry, key, fn {conn_pid, used_by, crf, last_reference} ->
{conn_pid, [client_pid | used_by], crf(time - last_reference, crf), time}
end)
state =
if state.timer != nil do
Process.cancel_timer(state[:timer])
%{state | timer: nil}
else
state
end
ref = Process.monitor(client_pid)
state = put_in(state.client_monitors[client_pid], ref)
{:reply, conn_pid, state, :hibernate}
end
@impl true
def handle_call(:remove_client, {client_pid, _}, %{key: key} = state) do
{{_conn_pid, used_by, _crf, _last_reference}, _} =
Registry.update_value(@registry, key, fn {conn_pid, used_by, crf, last_reference} ->
{conn_pid, List.delete(used_by, client_pid), crf, last_reference}
end)
{ref, state} = pop_in(state.client_monitors[client_pid])
# A DOWN message can arrive right after the `remove_client` call and cause the worker to terminate
state =
if is_nil(ref) do
state
else
Process.demonitor(ref)
timer =
if used_by == [] do
max_idle = Pleroma.Config.get([:connections_pool, :max_idle_time], 30_000)
Process.send_after(self(), :idle_close, max_idle)
else
nil
end
%{state | timer: timer}
end
{:reply, :ok, state, :hibernate}
end
@impl true
def handle_info(:idle_close, state) do
# Gun monitors the owner process, and will close the connection automatically
# when it's terminated
{:stop, :normal, state}
end
@impl true
def handle_info({:gun_up, _pid, _protocol}, state) do
{:noreply, state, :hibernate}
end
# Gracefully shut down if the connection got closed without any streams left
@impl true
def handle_info({:gun_down, _pid, _protocol, _reason, []}, state) do
{:stop, :normal, state}
end
# Otherwise, wait for retry
@impl true
def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams}, state) do
{:noreply, state, :hibernate}
end
@impl true
def handle_info({:DOWN, _ref, :process, pid, reason}, state) do
:telemetry.execute(
[:pleroma, :connection_pool, :client_death],
%{client_pid: pid, reason: reason},
%{key: state.key}
)
handle_cast({:remove_client, pid}, state)
end
# LRFU policy: https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.55.1478
defp crf(time_delta, prev_crf) do
1 + :math.pow(0.5, 0.0001 * time_delta) * prev_crf
end
end
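A hedged worked example of the CRF update: the decay factor has a half-life of 10 seconds (0.0001 per millisecond), so quick reuse keeps most of the accumulated value while long idle periods let it decay back towards the bare +1:

crf = fn time_delta_ms, prev_crf -> 1 + :math.pow(0.5, 0.0001 * time_delta_ms) * prev_crf end

crf.(100, 1)       #=> ~1.99  (reused almost immediately)
crf.(10_000, 1)    #=> 1.5    (reused after 10 s: half the old value survives)
crf.(600_000, 5)   #=> ~1.0   (idle for 10 minutes: history has essentially decayed away)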

View file

@ -0,0 +1,45 @@
defmodule Pleroma.Gun.ConnectionPool.WorkerSupervisor do
@moduledoc "Supervisor for pool workers. Does not do anything except enforce max connection limit"
use DynamicSupervisor
def start_link(opts) do
DynamicSupervisor.start_link(__MODULE__, opts, name: __MODULE__)
end
def init(_opts) do
DynamicSupervisor.init(
strategy: :one_for_one,
max_children: Pleroma.Config.get([:connections_pool, :max_connections])
)
end
def start_worker(opts, retry \\ false) do
case DynamicSupervisor.start_child(__MODULE__, {Pleroma.Gun.ConnectionPool.Worker, opts}) do
{:error, :max_children} ->
if retry or free_pool() == :error do
:telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
{:error, :pool_full}
else
start_worker(opts, true)
end
res ->
res
end
end
defp free_pool do
wait_for_reclaimer_finish(Pleroma.Gun.ConnectionPool.Reclaimer.start_monitor())
end
defp wait_for_reclaimer_finish({pid, mon}) do
receive do
{:DOWN, ^mon, :process, ^pid, :no_unused_conns} ->
:error
{:DOWN, ^mon, :process, ^pid, :normal} ->
:ok
end
end
end

View file

@ -100,20 +100,27 @@ defmodule Pleroma.HTML do
end)
end
def extract_first_external_url(_, nil), do: {:error, "No content"}
def extract_first_external_url_from_object(%{data: %{"content" => content}} = object)
when is_binary(content) do
unless object.data["fake"] do
key = "URL|#{object.id}"
def extract_first_external_url(object, content) do
key = "URL|#{object.id}"
Cachex.fetch!(:scrubber_cache, key, fn _key ->
{:commit, {:ok, extract_first_external_url(content)}}
end)
else
{:ok, extract_first_external_url(content)}
end
end
Cachex.fetch!(:scrubber_cache, key, fn _key ->
result =
content
|> Floki.parse_fragment!()
|> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"]")
|> Floki.attribute("a", "href")
|> Enum.at(0)
def extract_first_external_url_from_object(_), do: {:error, :no_content}
{:commit, {:ok, result}}
end)
def extract_first_external_url(content) do
content
|> Floki.parse_fragment!()
|> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
|> Enum.take(1)
|> Floki.attribute("href")
|> Enum.at(0)
end
end
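A quick illustrative run of the new selector (content string invented for the example): mention/hashtag/attachment/tag links are skipped and the first remaining href wins:

content =
  ~s(<p><a class="mention" href="https://example.org/users/lain">@lain</a> see <a href="https://example.com/article">this</a></p>)

content
|> Floki.parse_fragment!()
|> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
|> Enum.take(1)
|> Floki.attribute("href")
|> Enum.at(0)
#=> "https://example.com/article"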

View file

@ -3,32 +3,28 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.AdapterHelper do
alias Pleroma.HTTP.Connection
@moduledoc """
Configure Tesla.Client with default and customized adapter options.
"""
@defaults [pool: :federation, connect_timeout: 5_000, recv_timeout: 5_000]
@type proxy_type() :: :socks4 | :socks5
@type host() :: charlist() | :inet.ip_address()
alias Pleroma.HTTP.AdapterHelper
require Logger
@type proxy ::
{Connection.host(), pos_integer()}
| {Connection.proxy_type(), Connection.host(), pos_integer()}
@callback options(keyword(), URI.t()) :: keyword()
@callback after_request(keyword()) :: :ok
@spec options(keyword(), URI.t()) :: keyword()
def options(opts, _uri) do
proxy = Pleroma.Config.get([:http, :proxy_url], nil)
maybe_add_proxy(opts, format_proxy(proxy))
end
@spec maybe_get_conn(URI.t(), keyword()) :: keyword()
def maybe_get_conn(_uri, opts), do: opts
@spec after_request(keyword()) :: :ok
def after_request(_opts), do: :ok
@spec format_proxy(String.t() | tuple() | nil) :: proxy() | nil
def format_proxy(nil), do: nil
def format_proxy(proxy_url) do
case Connection.parse_proxy(proxy_url) do
case parse_proxy(proxy_url) do
{:ok, host, port} -> {host, port}
{:ok, type, host, port} -> {type, host, port}
_ -> nil
@ -38,4 +34,88 @@ defmodule Pleroma.HTTP.AdapterHelper do
@spec maybe_add_proxy(keyword(), proxy() | nil) :: keyword()
def maybe_add_proxy(opts, nil), do: opts
def maybe_add_proxy(opts, proxy), do: Keyword.put_new(opts, :proxy, proxy)
@doc """
Merge default connection & adapter options with received ones.
"""
@spec options(URI.t(), keyword()) :: keyword()
def options(%URI{} = uri, opts \\ []) do
@defaults
|> Keyword.merge(opts)
|> adapter_helper().options(uri)
end
defp adapter, do: Application.get_env(:tesla, :adapter)
defp adapter_helper do
case adapter() do
Tesla.Adapter.Gun -> AdapterHelper.Gun
Tesla.Adapter.Hackney -> AdapterHelper.Hackney
_ -> AdapterHelper.Default
end
end
@spec parse_proxy(String.t() | tuple() | nil) ::
{:ok, host(), pos_integer()}
| {:ok, proxy_type(), host(), pos_integer()}
| {:error, atom()}
| nil
def parse_proxy(nil), do: nil
def parse_proxy(proxy) when is_binary(proxy) do
with [host, port] <- String.split(proxy, ":"),
{port, ""} <- Integer.parse(port) do
{:ok, parse_host(host), port}
else
{_, _} ->
Logger.warn("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
:error ->
Logger.warn("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
_ ->
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end
def parse_proxy(proxy) when is_tuple(proxy) do
with {type, host, port} <- proxy do
{:ok, type, parse_host(host), port}
else
_ ->
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end
@spec parse_host(String.t() | atom() | charlist()) :: charlist() | :inet.ip_address()
def parse_host(host) when is_list(host), do: host
def parse_host(host) when is_atom(host), do: to_charlist(host)
def parse_host(host) when is_binary(host) do
host = to_charlist(host)
case :inet.parse_address(host) do
{:error, :einval} -> host
{:ok, ip} -> ip
end
end
@spec format_host(String.t()) :: charlist()
def format_host(host) do
host_charlist = to_charlist(host)
case :inet.parse_address(host_charlist) do
{:error, :einval} ->
:idna.encode(host_charlist)
{:ok, _ip} ->
host_charlist
end
end
end
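Some illustrative inputs and outputs for the proxy parsing helpers (values invented):

Pleroma.HTTP.AdapterHelper.parse_proxy("127.0.0.1:8123")
#=> {:ok, {127, 0, 0, 1}, 8123}

Pleroma.HTTP.AdapterHelper.parse_proxy({:socks5, "localhost", 9050})
#=> {:ok, :socks5, 'localhost', 9050}

Pleroma.HTTP.AdapterHelper.parse_proxy("localhost:not_a_port")
#=> {:error, :invalid_proxy_port}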

View file

@ -0,0 +1,14 @@
defmodule Pleroma.HTTP.AdapterHelper.Default do
alias Pleroma.HTTP.AdapterHelper
@behaviour Pleroma.HTTP.AdapterHelper
@spec options(keyword(), URI.t()) :: keyword()
def options(opts, _uri) do
proxy = Pleroma.Config.get([:http, :proxy_url], nil)
AdapterHelper.maybe_add_proxy(opts, AdapterHelper.format_proxy(proxy))
end
@spec get_conn(URI.t(), keyword()) :: {:ok, keyword()}
def get_conn(_uri, opts), do: {:ok, opts}
end

View file

@ -5,73 +5,78 @@
defmodule Pleroma.HTTP.AdapterHelper.Gun do
@behaviour Pleroma.HTTP.AdapterHelper
alias Pleroma.Config
alias Pleroma.HTTP.AdapterHelper
alias Pleroma.Pool.Connections
require Logger
@defaults [
connect_timeout: 5_000,
domain_lookup_timeout: 5_000,
tls_handshake_timeout: 5_000,
retry: 1,
retry_timeout: 1000,
await_up_timeout: 5_000
retry_timeout: 1_000
]
@type pool() :: :federation | :upload | :media | :default
@spec options(keyword(), URI.t()) :: keyword()
def options(incoming_opts \\ [], %URI{} = uri) do
proxy =
Pleroma.Config.get([:http, :proxy_url])
[:http, :proxy_url]
|> Config.get()
|> AdapterHelper.format_proxy()
config_opts = Pleroma.Config.get([:http, :adapter], [])
config_opts = Config.get([:http, :adapter], [])
@defaults
|> Keyword.merge(config_opts)
|> add_scheme_opts(uri)
|> AdapterHelper.maybe_add_proxy(proxy)
|> maybe_get_conn(uri, incoming_opts)
end
@spec after_request(keyword()) :: :ok
def after_request(opts) do
if opts[:conn] && opts[:body_as] != :chunks do
Connections.checkout(opts[:conn], self(), :gun_connections)
end
:ok
|> Keyword.merge(incoming_opts)
|> put_timeout()
end
defp add_scheme_opts(opts, %{scheme: "http"}), do: opts
defp add_scheme_opts(opts, %{scheme: "https"}) do
opts
|> Keyword.put(:certificates_verification, true)
|> Keyword.put(:tls_opts, log_level: :warning)
Keyword.put(opts, :certificates_verification, true)
end
defp maybe_get_conn(adapter_opts, uri, incoming_opts) do
{receive_conn?, opts} =
adapter_opts
|> Keyword.merge(incoming_opts)
|> Keyword.pop(:receive_conn, true)
if Connections.alive?(:gun_connections) and receive_conn? do
checkin_conn(uri, opts)
else
opts
end
defp put_timeout(opts) do
{recv_timeout, opts} = Keyword.pop(opts, :recv_timeout, pool_timeout(opts[:pool]))
# this is the timeout to receive a message from Gun
# `:timeout` key is used in Tesla
Keyword.put(opts, :timeout, recv_timeout)
end
defp checkin_conn(uri, opts) do
case Connections.checkin(uri, :gun_connections) do
nil ->
Task.start(Pleroma.Gun.Conn, :open, [uri, :gun_connections, opts])
opts
@spec pool_timeout(pool()) :: non_neg_integer()
def pool_timeout(pool) do
default = Config.get([:pools, :default, :recv_timeout], 5_000)
conn when is_pid(conn) ->
Keyword.merge(opts, conn: conn, close_conn: false)
end
Config.get([:pools, pool, :recv_timeout], default)
end
@prefix Pleroma.Gun.ConnectionPool
def limiter_setup do
wait = Config.get([:connections_pool, :connection_acquisition_wait])
retries = Config.get([:connections_pool, :connection_acquisition_retries])
:pools
|> Config.get([])
|> Enum.each(fn {name, opts} ->
max_running = Keyword.get(opts, :size, 50)
max_waiting = Keyword.get(opts, :max_waiting, 10)
result =
ConcurrentLimiter.new(:"#{@prefix}.#{name}", max_running, max_waiting,
wait: wait,
max_retries: retries
)
case result do
:ok -> :ok
{:error, :existing} -> :ok
end
end)
:ok
end
end

View file

@ -2,11 +2,8 @@ defmodule Pleroma.HTTP.AdapterHelper.Hackney do
@behaviour Pleroma.HTTP.AdapterHelper
@defaults [
connect_timeout: 10_000,
recv_timeout: 20_000,
follow_redirect: true,
force_redirect: true,
pool: :federation
force_redirect: true
]
@spec options(keyword(), URI.t()) :: keyword()
@ -19,10 +16,21 @@ defmodule Pleroma.HTTP.AdapterHelper.Hackney do
|> Keyword.merge(config_opts)
|> Keyword.merge(connection_opts)
|> add_scheme_opts(uri)
|> maybe_add_with_body()
|> Pleroma.HTTP.AdapterHelper.maybe_add_proxy(proxy)
end
defp add_scheme_opts(opts, %URI{scheme: "https"}) do
Keyword.put(opts, :ssl_options, versions: [:"tlsv1.2", :"tlsv1.1", :tlsv1])
end
defp add_scheme_opts(opts, _), do: opts
def after_request(_), do: :ok
defp maybe_add_with_body(opts) do
if opts[:max_body] do
Keyword.put(opts, :with_body, true)
else
opts
end
end
end

View file

@ -1,124 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.Connection do
@moduledoc """
Configure Tesla.Client with default and customized adapter options.
"""
alias Pleroma.Config
alias Pleroma.HTTP.AdapterHelper
require Logger
@defaults [pool: :federation]
@type ip_address :: ipv4_address() | ipv6_address()
@type ipv4_address :: {0..255, 0..255, 0..255, 0..255}
@type ipv6_address ::
{0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535}
@type proxy_type() :: :socks4 | :socks5
@type host() :: charlist() | ip_address()
@doc """
Merge default connection & adapter options with received ones.
"""
@spec options(URI.t(), keyword()) :: keyword()
def options(%URI{} = uri, opts \\ []) do
@defaults
|> pool_timeout()
|> Keyword.merge(opts)
|> adapter_helper().options(uri)
end
defp pool_timeout(opts) do
{config_key, default} =
if adapter() == Tesla.Adapter.Gun do
{:pools, Config.get([:pools, :default, :timeout])}
else
{:hackney_pools, 10_000}
end
timeout = Config.get([config_key, opts[:pool], :timeout], default)
Keyword.merge(opts, timeout: timeout)
end
@spec after_request(keyword()) :: :ok
def after_request(opts), do: adapter_helper().after_request(opts)
defp adapter, do: Application.get_env(:tesla, :adapter)
defp adapter_helper do
case adapter() do
Tesla.Adapter.Gun -> AdapterHelper.Gun
Tesla.Adapter.Hackney -> AdapterHelper.Hackney
_ -> AdapterHelper
end
end
@spec parse_proxy(String.t() | tuple() | nil) ::
{:ok, host(), pos_integer()}
| {:ok, proxy_type(), host(), pos_integer()}
| {:error, atom()}
| nil
def parse_proxy(nil), do: nil
def parse_proxy(proxy) when is_binary(proxy) do
with [host, port] <- String.split(proxy, ":"),
{port, ""} <- Integer.parse(port) do
{:ok, parse_host(host), port}
else
{_, _} ->
Logger.warn("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
:error ->
Logger.warn("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
_ ->
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end
def parse_proxy(proxy) when is_tuple(proxy) do
with {type, host, port} <- proxy do
{:ok, type, parse_host(host), port}
else
_ ->
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end
@spec parse_host(String.t() | atom() | charlist()) :: charlist() | ip_address()
def parse_host(host) when is_list(host), do: host
def parse_host(host) when is_atom(host), do: to_charlist(host)
def parse_host(host) when is_binary(host) do
host = to_charlist(host)
case :inet.parse_address(host) do
{:error, :einval} -> host
{:ok, ip} -> ip
end
end
@spec format_host(String.t()) :: charlist()
def format_host(host) do
host_charlist = to_charlist(host)
case :inet.parse_address(host_charlist) do
{:error, :einval} ->
:idna.encode(host_charlist)
{:ok, _ip} ->
host_charlist
end
end
end

View file

@ -0,0 +1,24 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.ExAws do
@moduledoc false
@behaviour ExAws.Request.HttpClient
alias Pleroma.HTTP
@impl true
def request(method, url, body \\ "", headers \\ [], http_opts \\ []) do
http_opts = Keyword.put_new(http_opts, :pool, :upload)
case HTTP.request(method, url, body, headers, http_opts) do
{:ok, env} ->
{:ok, %{status_code: env.status, headers: env.headers, body: env.body}}
{:error, reason} ->
{:error, %{reason: reason}}
end
end
end
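For this client to be picked up, ExAws needs to be pointed at it in configuration; a minimal sketch (the exact config location may differ elsewhere in this changeset):

# config/config.exs
config :ex_aws, http_client: Pleroma.HTTP.ExAws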

View file

@ -7,7 +7,7 @@ defmodule Pleroma.HTTP do
Wrapper for `Tesla.request/2`.
"""
alias Pleroma.HTTP.Connection
alias Pleroma.HTTP.AdapterHelper
alias Pleroma.HTTP.Request
alias Pleroma.HTTP.RequestBuilder, as: Builder
alias Tesla.Client
@ -16,6 +16,7 @@ defmodule Pleroma.HTTP do
require Logger
@type t :: __MODULE__
@type method() :: :get | :post | :put | :delete | :head
@doc """
Performs GET request.
@ -28,6 +29,9 @@ defmodule Pleroma.HTTP do
def get(nil, _, _), do: nil
def get(url, headers, options), do: request(:get, url, "", headers, options)
@spec head(Request.url(), Request.headers(), keyword()) :: {:ok, Env.t()} | {:error, any()}
def head(url, headers \\ [], options \\ []), do: request(:head, url, "", headers, options)
@doc """
Performs POST request.
@ -42,7 +46,7 @@ defmodule Pleroma.HTTP do
Builds and performs http request.
# Arguments:
`method` - :get, :post, :put, :delete
`method` - :get, :post, :put, :delete, :head
`url` - full url
`body` - request body
`headers` - a keyword list of headers, e.g. `[{"content-type", "text/plain"}]`
@ -52,52 +56,26 @@ defmodule Pleroma.HTTP do
`{:ok, %Tesla.Env{}}` or `{:error, error}`
"""
@spec request(atom(), Request.url(), String.t(), Request.headers(), keyword()) ::
@spec request(method(), Request.url(), String.t(), Request.headers(), keyword()) ::
{:ok, Env.t()} | {:error, any()}
def request(method, url, body, headers, options) when is_binary(url) do
uri = URI.parse(url)
adapter_opts = Connection.options(uri, options[:adapter] || [])
adapter_opts = AdapterHelper.options(uri, options || [])
options = put_in(options[:adapter], adapter_opts)
params = options[:params] || []
request = build_request(method, headers, options, url, body, params)
adapter = Application.get_env(:tesla, :adapter)
client = Tesla.client([Tesla.Middleware.FollowRedirects], adapter)
pid = Process.whereis(adapter_opts[:pool])
client = Tesla.client(adapter_middlewares(adapter), adapter)
pool_alive? =
if adapter == Tesla.Adapter.Gun && pid do
Process.alive?(pid)
else
false
end
request_opts =
maybe_limit(
fn ->
request(client, request)
end,
adapter,
adapter_opts
|> Enum.into(%{})
|> Map.put(:env, Pleroma.Config.get([:env]))
|> Map.put(:pool_alive?, pool_alive?)
response = request(client, request, request_opts)
Connection.after_request(adapter_opts)
response
end
@spec request(Client.t(), keyword(), map()) :: {:ok, Env.t()} | {:error, any()}
def request(%Client{} = client, request, %{env: :test}), do: request(client, request)
def request(%Client{} = client, request, %{body_as: :chunks}), do: request(client, request)
def request(%Client{} = client, request, %{pool_alive?: false}), do: request(client, request)
def request(%Client{} = client, request, %{pool: pool, timeout: timeout}) do
:poolboy.transaction(
pool,
&Pleroma.Pool.Request.execute(&1, client, request, timeout),
timeout
)
end
@ -114,4 +92,19 @@ defmodule Pleroma.HTTP do
|> Builder.add_param(:query, :query, params)
|> Builder.convert_to_keyword()
end
@prefix Pleroma.Gun.ConnectionPool
defp maybe_limit(fun, Tesla.Adapter.Gun, opts) do
ConcurrentLimiter.limit(:"#{@prefix}.#{opts[:pool] || :default}", fun)
end
defp maybe_limit(fun, _, _) do
fun.()
end
defp adapter_middlewares(Tesla.Adapter.Gun) do
[Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool]
end
defp adapter_middlewares(_), do: []
end

View file

@ -34,10 +34,12 @@ defmodule Pleroma.HTTP.RequestBuilder do
@spec headers(Request.t(), Request.headers()) :: Request.t()
def headers(request, headers) do
headers_list =
if Pleroma.Config.get([:http, :send_user_agent]) do
with true <- Pleroma.Config.get([:http, :send_user_agent]),
nil <- Enum.find(headers, fn {key, _val} -> String.downcase(key) == "user-agent" end) do
[{"user-agent", Pleroma.Application.user_agent()} | headers]
else
headers
_ ->
headers
end
%{request | headers: headers_list}

View file

@ -0,0 +1,29 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.Tzdata do
@moduledoc false
@behaviour Tzdata.HTTPClient
alias Pleroma.HTTP
@impl true
def get(url, headers, options) do
options = Keyword.put_new(options, :pool, :default)
with {:ok, %Tesla.Env{} = env} <- HTTP.get(url, headers, options) do
{:ok, {env.status, env.headers, env.body}}
end
end
@impl true
def head(url, headers, options) do
options = Keyword.put_new(options, :pool, :default)
with {:ok, %Tesla.Env{} = env} <- HTTP.head(url, headers, options) do
{:ok, {env.status, env.headers}}
end
end
end
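Likewise, Tzdata only uses this module if it is configured as its HTTP client; a minimal sketch, assuming the stock Tzdata :http_client option:

# config/config.exs
config :tzdata, :http_client, Pleroma.HTTP.Tzdata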

View file

@ -14,9 +14,13 @@ defmodule Pleroma.Instances.Instance do
import Ecto.Query
import Ecto.Changeset
require Logger
schema "instances" do
field(:host, :string)
field(:unreachable_since, :naive_datetime_usec)
field(:favicon, :string)
field(:favicon_updated_at, :naive_datetime)
timestamps()
end
@ -25,7 +29,7 @@ defmodule Pleroma.Instances.Instance do
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:host, :unreachable_since])
|> cast(params, [:host, :unreachable_since, :favicon, :favicon_updated_at])
|> validate_required([:host])
|> unique_constraint(:host)
end
@ -120,4 +124,59 @@ defmodule Pleroma.Instances.Instance do
end
defp parse_datetime(datetime), do: datetime
def get_or_update_favicon(%URI{host: host} = instance_uri) do
existing_record = Repo.get_by(Instance, %{host: host})
now = NaiveDateTime.utc_now()
if existing_record && existing_record.favicon_updated_at &&
NaiveDateTime.diff(now, existing_record.favicon_updated_at) < 86_400 do
existing_record.favicon
else
favicon = scrape_favicon(instance_uri)
if existing_record do
existing_record
|> changeset(%{favicon: favicon, favicon_updated_at: now})
|> Repo.update()
else
%Instance{}
|> changeset(%{host: host, favicon: favicon, favicon_updated_at: now})
|> Repo.insert()
end
favicon
end
rescue
e ->
Logger.warn("Instance.get_or_update_favicon(\"#{host}\") error: #{inspect(e)}")
nil
end
defp scrape_favicon(%URI{} = instance_uri) do
try do
with {:ok, %Tesla.Env{body: html}} <-
Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}],
adapter: [pool: :media]
),
favicon_rel <-
html
|> Floki.parse_document!()
|> Floki.attribute("link[rel=icon]", "href")
|> List.first(),
favicon <- URI.merge(instance_uri, favicon_rel) |> to_string(),
true <- is_binary(favicon) do
favicon
else
_ -> nil
end
rescue
e ->
Logger.warn(
"Instance.scrape_favicon(\"#{to_string(instance_uri)}\") error: #{inspect(e)}"
)
nil
end
end
end

View file

@ -15,8 +15,8 @@ defmodule Pleroma.JobQueueMonitor do
@impl true
def init(state) do
:telemetry.attach("oban-monitor-failure", [:oban, :failure], &handle_event/4, nil)
:telemetry.attach("oban-monitor-success", [:oban, :success], &handle_event/4, nil)
:telemetry.attach("oban-monitor-failure", [:oban, :job, :exception], &handle_event/4, nil)
:telemetry.attach("oban-monitor-success", [:oban, :job, :stop], &handle_event/4, nil)
{:ok, state}
end
@ -25,8 +25,11 @@ defmodule Pleroma.JobQueueMonitor do
GenServer.call(__MODULE__, :stats)
end
def handle_event([:oban, status], %{duration: duration}, meta, _) do
GenServer.cast(__MODULE__, {:process_event, status, duration, meta})
def handle_event([:oban, :job, event], %{duration: duration}, meta, _) do
GenServer.cast(
__MODULE__,
{:process_event, mapping_status(event), duration, meta}
)
end
@impl true
@ -75,4 +78,7 @@ defmodule Pleroma.JobQueueMonitor do
|> Map.update!(:processed_jobs, &(&1 + 1))
|> Map.update!(status, &(&1 + 1))
end
defp mapping_status(:stop), do: :success
defp mapping_status(:exception), do: :failure
end

View file

@ -10,10 +10,11 @@ defmodule Pleroma.MFA.Token do
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.OAuth.Authorization
alias Pleroma.Web.OAuth.Token, as: OAuthToken
@expires 300
@type t() :: %__MODULE__{}
schema "mfa_tokens" do
field(:token, :string)
field(:valid_until, :naive_datetime_usec)
@ -24,6 +25,7 @@ defmodule Pleroma.MFA.Token do
timestamps()
end
@spec get_by_token(String.t()) :: {:ok, t()} | {:error, :not_found}
def get_by_token(token) do
from(
t in __MODULE__,
@ -33,33 +35,40 @@ defmodule Pleroma.MFA.Token do
|> Repo.find_resource()
end
def validate(token) do
with {:fetch_token, {:ok, token}} <- {:fetch_token, get_by_token(token)},
{:expired, false} <- {:expired, is_expired?(token)} do
@spec validate(String.t()) :: {:ok, t()} | {:error, :not_found} | {:error, :expired_token}
def validate(token_str) do
with {:ok, token} <- get_by_token(token_str),
false <- expired?(token) do
{:ok, token}
else
{:expired, _} -> {:error, :expired_token}
{:fetch_token, _} -> {:error, :not_found}
error -> {:error, error}
end
end
def create_token(%User{} = user) do
%__MODULE__{}
|> change
|> assign_user(user)
|> put_token
|> put_valid_until
|> Repo.insert()
defp expired?(%__MODULE__{valid_until: valid_until}) do
with true <- NaiveDateTime.diff(NaiveDateTime.utc_now(), valid_until) > 0 do
{:error, :expired_token}
end
end
def create_token(user, authorization) do
@spec create(User.t(), Authorization.t() | nil) :: {:ok, t()} | {:error, Ecto.Changeset.t()}
def create(user, authorization \\ nil) do
with {:ok, token} <- do_create(user, authorization) do
Pleroma.Workers.PurgeExpiredToken.enqueue(%{
token_id: token.id,
valid_until: DateTime.from_naive!(token.valid_until, "Etc/UTC"),
mod: __MODULE__
})
{:ok, token}
end
end
defp do_create(user, authorization) do
%__MODULE__{}
|> change
|> change()
|> assign_user(user)
|> assign_authorization(authorization)
|> put_token
|> put_valid_until
|> maybe_assign_authorization(authorization)
|> put_token()
|> put_valid_until()
|> Repo.insert()
end
@ -69,15 +78,19 @@ defmodule Pleroma.MFA.Token do
|> validate_required([:user])
end
defp assign_authorization(changeset, authorization) do
defp maybe_assign_authorization(changeset, %Authorization{} = authorization) do
changeset
|> put_assoc(:authorization, authorization)
|> validate_required([:authorization])
end
defp maybe_assign_authorization(changeset, _), do: changeset
defp put_token(changeset) do
token = Pleroma.Web.OAuth.Token.Utils.generate_token()
changeset
|> change(%{token: OAuthToken.Utils.generate_token()})
|> change(%{token: token})
|> validate_required([:token])
|> unique_constraint(:token)
end
@ -89,18 +102,4 @@ defmodule Pleroma.MFA.Token do
|> change(%{valid_until: expires_in})
|> validate_required([:valid_until])
end
def is_expired?(%__MODULE__{valid_until: valid_until}) do
NaiveDateTime.diff(NaiveDateTime.utc_now(), valid_until) > 0
end
def is_expired?(_), do: false
def delete_expired_tokens do
from(
q in __MODULE__,
where: fragment("?", q.valid_until) < ^Timex.now()
)
|> Repo.delete_all()
end
end

View file

@ -3,7 +3,6 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.MigrationHelper.NotificationBackfill do
alias Pleroma.Notification
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.User
@ -18,25 +17,34 @@ defmodule Pleroma.MigrationHelper.NotificationBackfill do
)
query
|> Repo.all()
|> Repo.chunk_stream(100)
|> Enum.each(fn notification ->
type =
notification.activity
|> type_from_activity()
notification
|> Notification.changeset(%{type: type})
|> Ecto.Changeset.change(%{type: type})
|> Repo.update()
end)
end
defp get_by_ap_id(ap_id) do
q =
from(u in User,
select: u.id
)
Repo.get_by(q, ap_id: ap_id)
end
# This is copied over from Notifications to keep this stable.
defp type_from_activity(%{data: %{"type" => type}} = activity) do
case type do
"Follow" ->
accepted_function = fn activity ->
with %User{} = follower <- User.get_by_ap_id(activity.data["actor"]),
%User{} = followed <- User.get_by_ap_id(activity.data["object"]) do
with %User{} = follower <- get_by_ap_id(activity.data["actor"]),
%User{} = followed <- get_by_ap_id(activity.data["object"]) do
Pleroma.FollowingRelationship.following?(follower, followed)
end
end

View file

@ -409,6 +409,17 @@ defmodule Pleroma.ModerationLog do
"@#{actor_nickname} deactivated users: #{users_to_nicknames_string(users)}"
end
@spec get_log_entry_message(ModerationLog) :: String.t()
def get_log_entry_message(%ModerationLog{
data: %{
"actor" => %{"nickname" => actor_nickname},
"action" => "approve",
"subject" => users
}
}) do
"@#{actor_nickname} approved users: #{users_to_nicknames_string(users)}"
end
@spec get_log_entry_message(ModerationLog) :: String.t()
def get_log_entry_message(%ModerationLog{
data: %{

View file

@ -15,6 +15,7 @@ defmodule Pleroma.Notification do
alias Pleroma.Repo
alias Pleroma.ThreadMute
alias Pleroma.User
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.CommonAPI.Utils
alias Pleroma.Web.Push
alias Pleroma.Web.Streamer
@ -130,6 +131,7 @@ defmodule Pleroma.Notification do
|> preload([n, a, o], activity: {a, object: o})
|> exclude_notification_muted(user, exclude_notification_muted_opts)
|> exclude_blocked(user, exclude_blocked_opts)
|> exclude_filtered(user)
|> exclude_visibility(opts)
end
@ -158,6 +160,20 @@ defmodule Pleroma.Notification do
|> where([n, a, o, tm], is_nil(tm.user_id))
end
defp exclude_filtered(query, user) do
case Pleroma.Filter.compose_regex(user) do
nil ->
query
regex ->
from([_n, a, o] in query,
where:
fragment("not(?->>'content' ~* ?)", o.data, ^regex) or
fragment("?->>'actor' = ?", o.data, ^user.ap_id)
)
end
end
@valid_visibilities ~w[direct unlisted public private]
defp exclude_visibility(query, %{exclude_visibilities: visibility})
@ -337,6 +353,7 @@ defmodule Pleroma.Notification do
end
end
@spec create_notifications(Activity.t(), keyword()) :: {:ok, [Notification.t()] | []}
def create_notifications(activity, options \\ [])
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity, options) do
@ -367,6 +384,7 @@ defmodule Pleroma.Notification do
do_send = do_send && user in enabled_receivers
create_notification(activity, user, do_send)
end)
|> Enum.reject(&is_nil/1)
{:ok, notifications}
end
@ -424,6 +442,7 @@ defmodule Pleroma.Notification do
|> Multi.insert(:notification, %Notification{
user_id: user.id,
activity: activity,
seen: mark_as_read?(activity, user),
type: type_from_activity(activity)
})
|> Marker.multi_set_last_read_id(user, "notifications")
@ -480,6 +499,10 @@ defmodule Pleroma.Notification do
end
end
def get_potential_receiver_ap_ids(%{data: %{"type" => "Follow", "object" => object_id}}) do
[object_id]
end
def get_potential_receiver_ap_ids(activity) do
[]
|> Utils.maybe_notify_to_recipients(activity)
@ -550,11 +573,9 @@ defmodule Pleroma.Notification do
[
:self,
:invisible,
:followers,
:follows,
:non_followers,
:non_follows,
:recently_followed
:block_from_strangers,
:recently_followed,
:filtered
]
|> Enum.find(&skip?(&1, activity, user))
end
@ -573,45 +594,15 @@ defmodule Pleroma.Notification do
end
def skip?(
:followers,
:block_from_strangers,
%Activity{} = activity,
%User{notification_settings: %{followers: false}} = user
) do
actor = activity.data["actor"]
follower = User.get_cached_by_ap_id(actor)
User.following?(follower, user)
end
def skip?(
:non_followers,
%Activity{} = activity,
%User{notification_settings: %{non_followers: false}} = user
%User{notification_settings: %{block_from_strangers: true}} = user
) do
actor = activity.data["actor"]
follower = User.get_cached_by_ap_id(actor)
!User.following?(follower, user)
end
def skip?(
:follows,
%Activity{} = activity,
%User{notification_settings: %{follows: false}} = user
) do
actor = activity.data["actor"]
followed = User.get_cached_by_ap_id(actor)
User.following?(user, followed)
end
def skip?(
:non_follows,
%Activity{} = activity,
%User{notification_settings: %{non_follows: false}} = user
) do
actor = activity.data["actor"]
followed = User.get_cached_by_ap_id(actor)
!User.following?(user, followed)
end
# TODO: consider defining recency in hours and checking FollowingRelationship with a single SQL query
def skip?(:recently_followed, %Activity{data: %{"type" => "Follow"}} = activity, %User{} = user) do
actor = activity.data["actor"]
@ -623,8 +614,33 @@ defmodule Pleroma.Notification do
end)
end
def skip?(:filtered, %{data: %{"type" => type}}, _) when type in ["Follow", "Move"], do: false
def skip?(:filtered, activity, user) do
object = Object.normalize(activity)
cond do
is_nil(object) ->
false
object.data["actor"] == user.ap_id ->
false
not is_nil(regex = Pleroma.Filter.compose_regex(user, :re)) ->
Regex.match?(regex, object.data["content"])
true ->
false
end
end
def skip?(_, _, _), do: false
def mark_as_read?(activity, target_user) do
user = Activity.user_actor(activity)
User.mutes_user?(target_user, user) || CommonAPI.thread_muted?(target_user, activity)
end
def for_user_and_activity(user, activity) do
from(n in __MODULE__,
where: n.user_id == ^user.id,
@ -632,4 +648,16 @@ defmodule Pleroma.Notification do
)
|> Repo.one()
end
@spec mark_context_as_read(User.t(), String.t()) :: {integer(), nil | [term()]}
def mark_context_as_read(%User{id: id}, context) do
from(
n in Notification,
join: a in assoc(n, :activity),
where: n.user_id == ^id,
where: n.seen == false,
where: fragment("?->>'context'", a.data) == ^context
)
|> Repo.update_all(set: [seen: true])
end
end
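A minimal usage sketch of the new context helper (not part of the diff; it assumes the activity's data carries a "context" field, and the variable names are illustrative):
# Mark every unread notification in a conversation as seen once the user opens the thread.
%Pleroma.Activity{data: %{"context" => context}} = activity
Pleroma.Notification.mark_context_as_read(user, context)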

View file

@ -255,6 +255,10 @@ defmodule Pleroma.Object do
end
end
defp poll_is_multiple?(%Object{data: %{"anyOf" => [_ | _]}}), do: true
defp poll_is_multiple?(_), do: false
def decrease_replies_count(ap_id) do
Object
|> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id)))
@ -281,10 +285,10 @@ defmodule Pleroma.Object do
def increase_vote_count(ap_id, name, actor) do
with %Object{} = object <- Object.normalize(ap_id),
"Question" <- object.data["type"] do
multiple = Map.has_key?(object.data, "anyOf")
key = if poll_is_multiple?(object), do: "anyOf", else: "oneOf"
options =
(object.data["anyOf"] || object.data["oneOf"] || [])
object.data[key]
|> Enum.map(fn
%{"name" => ^name} = option ->
Kernel.update_in(option["replies"]["totalItems"], &(&1 + 1))
@ -296,11 +300,8 @@ defmodule Pleroma.Object do
voters = [actor | object.data["voters"] || []] |> Enum.uniq()
data =
if multiple do
Map.put(object.data, "anyOf", options)
else
Map.put(object.data, "oneOf", options)
end
object.data
|> Map.put(key, options)
|> Map.put("voters", voters)
object

View file

@ -44,18 +44,11 @@ defmodule Pleroma.Object.Containment do
nil
end
# TODO: We explicitly allow 'tag' URIs through, due to references to legacy OStatus
# objects being present in the test suite environment. Once these objects are
# removed, please also remove this.
if Mix.env() == :test do
defp compare_uris(_, %URI{scheme: "tag"}), do: :ok
end
defp compare_uris(%URI{host: host} = _id_uri, %URI{host: host} = _other_uri), do: :ok
defp compare_uris(_id_uri, _other_uri), do: :error
@doc """
Checks that an imported AP object's actor matches the domain it came from.
Checks that an imported AP object's actor matches the host it came from.
"""
def contain_origin(_id, %{"actor" => nil}), do: :error
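An illustration of the simplified origin check (the helpers are private, so the calls below are only a sketch of the clause behaviour for the stated inputs):
# Same host -> :ok; any other host -> :error (the :test-only "tag" exception is gone).
compare_uris(URI.parse("https://example.com/objects/1"), URI.parse("https://example.com/users/a"))
#=> :ok
compare_uris(URI.parse("https://example.com/objects/1"), URI.parse("https://other.example/users/a"))
#=> :error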

View file

@ -9,6 +9,7 @@ defmodule Pleroma.Object.Fetcher do
alias Pleroma.Repo
alias Pleroma.Signature
alias Pleroma.Web.ActivityPub.InternalFetchActor
alias Pleroma.Web.ActivityPub.ObjectValidator
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.Federator
@ -23,21 +24,38 @@ defmodule Pleroma.Object.Fetcher do
Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
end
defp maybe_reinject_internal_fields(data, %{data: %{} = old_data}) do
defp maybe_reinject_internal_fields(%{data: %{} = old_data}, new_data) do
internal_fields = Map.take(old_data, Pleroma.Constants.object_internal_fields())
Map.merge(data, internal_fields)
Map.merge(new_data, internal_fields)
end
defp maybe_reinject_internal_fields(data, _), do: data
defp maybe_reinject_internal_fields(_, new_data), do: new_data
@spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()}
defp reinject_object(struct, data) do
Logger.debug("Reinjecting object #{data["id"]}")
defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
with data <- Transmogrifier.fix_object(data),
data <- maybe_reinject_internal_fields(data, struct),
changeset <- Object.change(struct, %{data: data}),
with data <- maybe_reinject_internal_fields(object, new_data),
{:ok, data, _} <- ObjectValidator.validate(data, %{}),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
{:ok, object} <- Repo.insert_or_update(changeset),
{:ok, object} <- Object.set_cache(object) do
{:ok, object}
else
e ->
Logger.error("Error while processing object: #{inspect(e)}")
{:error, e}
end
end
defp reinject_object(%Object{} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
with new_data <- Transmogrifier.fix_object(new_data),
data <- maybe_reinject_internal_fields(object, new_data),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
{:ok, object} <- Repo.insert_or_update(changeset),
{:ok, object} <- Object.set_cache(object) do
@ -51,8 +69,8 @@ defmodule Pleroma.Object.Fetcher do
def refetch_object(%Object{data: %{"id" => id}} = object) do
with {:local, false} <- {:local, Object.local?(object)},
{:ok, data} <- fetch_and_contain_remote_object_from_id(id),
{:ok, object} <- reinject_object(object, data) do
{:ok, new_data} <- fetch_and_contain_remote_object_from_id(id),
{:ok, object} <- reinject_object(object, new_data) do
{:ok, object}
else
{:local, true} -> {:ok, object}
@ -83,8 +101,8 @@ defmodule Pleroma.Object.Fetcher do
{:transmogrifier, {:error, {:reject, nil}}} ->
{:reject, nil}
{:transmogrifier, _} ->
{:error, "Transmogrifier failure."}
{:transmogrifier, _} = e ->
{:error, e}
{:object, data, nil} ->
reinject_object(%Object{}, data)
@ -106,8 +124,8 @@ defmodule Pleroma.Object.Fetcher do
defp prepare_activity_params(data) do
%{
"type" => "Create",
"to" => data["to"],
"cc" => data["cc"],
"to" => data["to"] || [],
"cc" => data["cc"] || [],
# Should we seriously keep this attributedTo thing?
"actor" => data["actor"] || data["attributedTo"],
"object" => data
@ -124,6 +142,10 @@ defmodule Pleroma.Object.Fetcher do
{:error, "Object has been deleted"} ->
nil
{:reject, reason} ->
Logger.info("Rejected #{id} while fetching: #{inspect(reason)}")
nil
e ->
Logger.error("Error while fetching #{id}: #{inspect(e)}")
nil
@ -141,12 +163,12 @@ defmodule Pleroma.Object.Fetcher do
date: date
})
[{"signature", signature}]
{"signature", signature}
end
defp sign_fetch(headers, id, date) do
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
headers ++ make_signature(id, date)
[make_signature(id, date) | headers]
else
headers
end
@ -154,7 +176,7 @@ defmodule Pleroma.Object.Fetcher do
defp maybe_date_fetch(headers, date) do
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
headers ++ [{"date", date}]
[{"date", date} | headers]
else
headers
end

View file

@ -64,6 +64,12 @@ defmodule Pleroma.Pagination do
@spec paginate(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
def paginate(query, options, method \\ :keyset, table_binding \\ nil)
def paginate(list, options, _method, _table_binding) when is_list(list) do
offset = options[:offset] || 0
limit = options[:limit] || 0
Enum.slice(list, offset, limit)
end
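A hedged usage sketch of the new list clause (note that :limit defaults to 0, so omitting it yields an empty list):
Pleroma.Pagination.paginate([1, 2, 3, 4, 5], %{offset: 1, limit: 2})
#=> [2, 3]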
def paginate(query, options, :keyset, table_binding) do
query
|> restrict(:min_id, options, table_binding)

View file

@ -4,6 +4,9 @@
defmodule Pleroma.Plugs.AdminSecretAuthenticationPlug do
import Plug.Conn
alias Pleroma.Plugs.OAuthScopesPlug
alias Pleroma.Plugs.RateLimiter
alias Pleroma.User
def init(options) do
@ -11,7 +14,10 @@ defmodule Pleroma.Plugs.AdminSecretAuthenticationPlug do
end
def secret_token do
Pleroma.Config.get(:admin_token)
case Pleroma.Config.get(:admin_token) do
blank when blank in [nil, ""] -> nil
token -> token
end
end
def call(%{assigns: %{user: %User{}}} = conn, _), do: conn
@ -26,9 +32,9 @@ defmodule Pleroma.Plugs.AdminSecretAuthenticationPlug do
def authenticate(%{params: %{"admin_token" => admin_token}} = conn) do
if admin_token == secret_token() do
assign(conn, :user, %User{is_admin: true})
assign_admin_user(conn)
else
conn
handle_bad_token(conn)
end
end
@ -36,8 +42,19 @@ defmodule Pleroma.Plugs.AdminSecretAuthenticationPlug do
token = secret_token()
case get_req_header(conn, "x-admin-token") do
[^token] -> assign(conn, :user, %User{is_admin: true})
_ -> conn
blank when blank in [[], [""]] -> conn
[^token] -> assign_admin_user(conn)
_ -> handle_bad_token(conn)
end
end
defp assign_admin_user(conn) do
conn
|> assign(:user, %User{is_admin: true})
|> OAuthScopesPlug.skip_plug()
end
defp handle_bad_token(conn) do
RateLimiter.call(conn, name: :authentication)
end
end
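A hedged config sketch for the token read by secret_token/0 (the value is illustrative; an empty string is now treated the same as an unset token):
config :pleroma, :admin_token, "some-long-random-token"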

View file

@ -0,0 +1,55 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Plugs.FrontendStatic do
require Pleroma.Constants
@moduledoc """
This is a shim to call `Plug.Static` but with runtime `from` configuration. It dispatches to the different frontends.
"""
@behaviour Plug
def file_path(path, frontend_type \\ :primary) do
if configuration = Pleroma.Config.get([:frontends, frontend_type]) do
instance_static_path = Pleroma.Config.get([:instance, :static_dir], "instance/static")
Path.join([
instance_static_path,
"frontends",
configuration["name"],
configuration["ref"],
path
])
else
nil
end
end
def init(opts) do
opts
|> Keyword.put(:from, "__unconfigured_frontend_static_plug")
|> Plug.Static.init()
|> Map.put(:frontend_type, opts[:frontend_type])
end
def call(conn, opts) do
frontend_type = Map.get(opts, :frontend_type, :primary)
path = file_path("", frontend_type)
if path do
conn
|> call_static(opts, path)
else
conn
end
end
defp call_static(conn, opts, from) do
opts =
opts
|> Map.put(:from, from)
Plug.Static.call(conn, opts)
end
end
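A hedged illustration of how file_path/2 resolves a frontend asset, assuming a primary frontend is configured and the default static directory is in use:
# config :pleroma, :frontends, primary: %{"name" => "pleroma-fe", "ref" => "develop"}
Pleroma.Plugs.FrontendStatic.file_path("index.html")
#=> "instance/static/frontends/pleroma-fe/develop/index.html"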

View file

@ -69,10 +69,11 @@ defmodule Pleroma.Plugs.HTTPSecurityPlug do
img_src = "img-src 'self' data: blob:"
media_src = "media-src 'self'"
# Strict multimedia CSP enforcement only when MediaProxy is enabled
{img_src, media_src} =
if Config.get([:media_proxy, :enabled]) &&
!Config.get([:media_proxy, :proxy_opts, :redirect_on_failure]) do
sources = get_proxy_and_attachment_sources()
sources = build_csp_multimedia_source_list()
{[img_src, sources], [media_src, sources]}
else
{[img_src, " https:"], [media_src, " https:"]}
@ -81,14 +82,14 @@ defmodule Pleroma.Plugs.HTTPSecurityPlug do
connect_src = ["connect-src 'self' blob: ", static_url, ?\s, websocket_url]
connect_src =
if Pleroma.Config.get(:env) == :dev do
if Config.get(:env) == :dev do
[connect_src, " http://localhost:3035/"]
else
connect_src
end
script_src =
if Pleroma.Config.get(:env) == :dev do
if Config.get(:env) == :dev do
"script-src 'self' 'unsafe-eval'"
else
"script-src 'self'"
@ -107,38 +108,64 @@ defmodule Pleroma.Plugs.HTTPSecurityPlug do
|> :erlang.iolist_to_binary()
end
defp get_proxy_and_attachment_sources do
defp build_csp_from_whitelist([], acc), do: acc
defp build_csp_from_whitelist([last], acc) do
[build_csp_param_from_whitelist(last) | acc]
end
defp build_csp_from_whitelist([head | tail], acc) do
build_csp_from_whitelist(tail, [[?\s, build_csp_param_from_whitelist(head)] | acc])
end
# TODO: use `build_csp_param/1` after removing support for bare domains in the media proxy whitelist
defp build_csp_param_from_whitelist("http" <> _ = url) do
build_csp_param(url)
end
defp build_csp_param_from_whitelist(url), do: url
defp build_csp_multimedia_source_list do
media_proxy_whitelist =
Enum.reduce(Config.get([:media_proxy, :whitelist]), [], fn host, acc ->
add_source(acc, host)
end)
[:media_proxy, :whitelist]
|> Config.get()
|> build_csp_from_whitelist([])
media_proxy_base_url =
if Config.get([:media_proxy, :base_url]),
do: URI.parse(Config.get([:media_proxy, :base_url])).host
captcha_method = Config.get([Pleroma.Captcha, :method])
captcha_endpoint = Config.get([captcha_method, :endpoint])
upload_base_url =
if Config.get([Pleroma.Upload, :base_url]),
do: URI.parse(Config.get([Pleroma.Upload, :base_url])).host
base_endpoints =
[
[:media_proxy, :base_url],
[Pleroma.Upload, :base_url],
[Pleroma.Uploaders.S3, :public_endpoint]
]
|> Enum.map(&Config.get/1)
s3_endpoint =
if Config.get([Pleroma.Upload, :uploader]) == Pleroma.Uploaders.S3,
do: URI.parse(Config.get([Pleroma.Uploaders.S3, :public_endpoint])).host
[]
|> add_source(media_proxy_base_url)
|> add_source(upload_base_url)
|> add_source(s3_endpoint)
[captcha_endpoint | base_endpoints]
|> Enum.map(&build_csp_param/1)
|> Enum.reduce([], &add_source(&2, &1))
|> add_source(media_proxy_whitelist)
end
defp add_source(iodata, nil), do: iodata
defp add_source(iodata, []), do: iodata
defp add_source(iodata, source), do: [[?\s, source] | iodata]
defp add_csp_param(csp_iodata, nil), do: csp_iodata
defp add_csp_param(csp_iodata, param), do: [[param, ?;] | csp_iodata]
defp build_csp_param(nil), do: nil
defp build_csp_param(url) when is_binary(url) do
%{host: host, scheme: scheme} = URI.parse(url)
if scheme do
[scheme, "://", host]
end
end
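A hedged config sketch of the whitelist consumed above (values illustrative; both full URLs and, for now, bare domains are accepted, per the TODO):
config :pleroma, :media_proxy,
  enabled: true,
  whitelist: ["https://cdn.example.com", "media.example.org"]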
def warn_if_disabled do
unless Config.get([:http_security, :enabled]) do
Logger.warn("

View file

@ -16,28 +16,24 @@ defmodule Pleroma.Plugs.InstanceStatic do
instance_path =
Path.join(Pleroma.Config.get([:instance, :static_dir], "instance/static/"), path)
if File.exists?(instance_path) do
instance_path
else
frontend_path = Pleroma.Plugs.FrontendStatic.file_path(path, :primary)
(File.exists?(instance_path) && instance_path) ||
(frontend_path && File.exists?(frontend_path) && frontend_path) ||
Path.join(Application.app_dir(:pleroma, "priv/static/"), path)
end
end
def init(opts) do
opts
|> Keyword.put(:from, "__unconfigured_instance_static_plug")
|> Keyword.put(:at, "/__unconfigured_instance_static_plug")
|> Plug.Static.init()
end
for only <- Pleroma.Constants.static_only_files() do
at = Plug.Router.Utils.split("/")
def call(%{request_path: "/" <> unquote(only) <> _} = conn, opts) do
call_static(
conn,
opts,
unquote(at),
Pleroma.Config.get([:instance, :static_dir], "instance/static")
)
end
@ -47,11 +43,10 @@ defmodule Pleroma.Plugs.InstanceStatic do
conn
end
defp call_static(conn, opts, at, from) do
defp call_static(conn, opts, from) do
opts =
opts
|> Map.put(:from, from)
|> Map.put(:at, at)
Plug.Static.call(conn, opts)
end

View file

@ -7,6 +7,8 @@ defmodule Pleroma.Plugs.RemoteIp do
This is a shim to call [`RemoteIp`](https://git.pleroma.social/pleroma/remote_ip) but with runtime configuration.
"""
import Plug.Conn
@behaviour Plug
@headers ~w[
@ -26,11 +28,12 @@ defmodule Pleroma.Plugs.RemoteIp do
def init(_), do: nil
def call(conn, _) do
def call(%{remote_ip: original_remote_ip} = conn, _) do
config = Pleroma.Config.get(__MODULE__, [])
if Keyword.get(config, :enabled, false) do
RemoteIp.call(conn, remote_ip_opts(config))
%{remote_ip: new_remote_ip} = conn = RemoteIp.call(conn, remote_ip_opts(config))
assign(conn, :remote_ip_found, original_remote_ip != new_remote_ip)
else
conn
end
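A hedged config sketch for the runtime-read settings (:enabled appears in the hunk above; the :headers key is an assumption based on the module's defaults):
config :pleroma, Pleroma.Plugs.RemoteIp,
  enabled: true,
  headers: ["x-forwarded-for"]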

View file

@ -9,7 +9,7 @@ defmodule Pleroma.Plugs.StaticFEPlug do
def init(options), do: options
def call(conn, _) do
if enabled?() and accepts_html?(conn) do
if enabled?() and requires_html?(conn) do
conn
|> StaticFEController.call(:show)
|> halt()
@ -20,10 +20,7 @@ defmodule Pleroma.Plugs.StaticFEPlug do
defp enabled?, do: Pleroma.Config.get([:static_fe, :enabled], false)
defp accepts_html?(conn) do
case get_req_header(conn, "accept") do
[accept | _] -> String.contains?(accept, "text/html")
_ -> false
end
defp requires_html?(conn) do
Phoenix.Controller.get_format(conn) == "html"
end
end

View file

@ -10,6 +10,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
import Pleroma.Web.Gettext
require Logger
alias Pleroma.Web.MediaProxy
@behaviour Plug
# no slashes
@path "media"
@ -35,8 +37,7 @@ defmodule Pleroma.Plugs.UploadedMedia do
%{query_params: %{"name" => name}} = conn ->
name = String.replace(name, "\"", "\\\"")
conn
|> put_resp_header("content-disposition", "filename=\"#{name}\"")
put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")
conn ->
conn
@ -47,7 +48,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
with uploader <- Keyword.fetch!(config, :uploader),
proxy_remote = Keyword.get(config, :proxy_remote, false),
{:ok, get_method} <- uploader.get_file(file) do
{:ok, get_method} <- uploader.get_file(file),
false <- media_is_banned(conn, get_method) do
get_media(conn, get_method, proxy_remote, opts)
else
_ ->
@ -59,6 +61,14 @@ defmodule Pleroma.Plugs.UploadedMedia do
def call(conn, _opts), do: conn
defp media_is_banned(%{request_path: path} = _conn, {:static_dir, _}) do
MediaProxy.in_banned_urls(Pleroma.Web.base_url() <> path)
end
defp media_is_banned(_, {:url, url}), do: MediaProxy.in_banned_urls(url)
defp media_is_banned(_, _), do: false
defp get_media(conn, {:static_dir, directory}, _, opts) do
static_opts =
Map.get(opts, :static_plug_opts)

View file

@ -7,37 +7,18 @@ defmodule Pleroma.Plugs.UserIsAdminPlug do
import Plug.Conn
alias Pleroma.User
alias Pleroma.Web.OAuth
def init(options) do
options
end
def call(%{assigns: %{user: %User{is_admin: true}} = assigns} = conn, _) do
token = assigns[:token]
cond do
not Pleroma.Config.enforce_oauth_admin_scope_usage?() ->
conn
token && OAuth.Scopes.contains_admin_scopes?(token.scopes) ->
# Note: checking for _any_ admin scope presence, not necessarily fitting requested action.
# Thus, controller must explicitly invoke OAuthScopesPlug to verify scope requirements.
# Admin might opt out of admin scope for some apps to block any admin actions from them.
conn
true ->
fail(conn)
end
def call(%{assigns: %{user: %User{is_admin: true}}} = conn, _) do
conn
end
def call(conn, _) do
fail(conn)
end
defp fail(conn) do
conn
|> render_error(:forbidden, "User is not an admin or OAuth admin scope is not granted.")
|> render_error(:forbidden, "User is not an admin.")
|> halt()
end
end

View file

@ -1,283 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool.Connections do
use GenServer
alias Pleroma.Config
alias Pleroma.Gun
require Logger
@type domain :: String.t()
@type conn :: Pleroma.Gun.Conn.t()
@type t :: %__MODULE__{
conns: %{domain() => conn()},
opts: keyword()
}
defstruct conns: %{}, opts: []
@spec start_link({atom(), keyword()}) :: {:ok, pid()}
def start_link({name, opts}) do
GenServer.start_link(__MODULE__, opts, name: name)
end
@impl true
def init(opts), do: {:ok, %__MODULE__{conns: %{}, opts: opts}}
@spec checkin(String.t() | URI.t(), atom()) :: pid() | nil
def checkin(url, name)
def checkin(url, name) when is_binary(url), do: checkin(URI.parse(url), name)
def checkin(%URI{} = uri, name) do
timeout = Config.get([:connections_pool, :checkin_timeout], 250)
GenServer.call(name, {:checkin, uri}, timeout)
end
@spec alive?(atom()) :: boolean()
def alive?(name) do
if pid = Process.whereis(name) do
Process.alive?(pid)
else
false
end
end
@spec get_state(atom()) :: t()
def get_state(name) do
GenServer.call(name, :state)
end
@spec count(atom()) :: pos_integer()
def count(name) do
GenServer.call(name, :count)
end
@spec get_unused_conns(atom()) :: [{domain(), conn()}]
def get_unused_conns(name) do
GenServer.call(name, :unused_conns)
end
@spec checkout(pid(), pid(), atom()) :: :ok
def checkout(conn, pid, name) do
GenServer.cast(name, {:checkout, conn, pid})
end
@spec add_conn(atom(), String.t(), Pleroma.Gun.Conn.t()) :: :ok
def add_conn(name, key, conn) do
GenServer.cast(name, {:add_conn, key, conn})
end
@spec remove_conn(atom(), String.t()) :: :ok
def remove_conn(name, key) do
GenServer.cast(name, {:remove_conn, key})
end
@impl true
def handle_cast({:add_conn, key, conn}, state) do
state = put_in(state.conns[key], conn)
Process.monitor(conn.conn)
{:noreply, state}
end
@impl true
def handle_cast({:checkout, conn_pid, pid}, state) do
state =
with true <- Process.alive?(conn_pid),
{key, conn} <- find_conn(state.conns, conn_pid),
used_by <- List.keydelete(conn.used_by, pid, 0) do
conn_state = if used_by == [], do: :idle, else: conn.conn_state
put_in(state.conns[key], %{conn | conn_state: conn_state, used_by: used_by})
else
false ->
Logger.debug("checkout for closed conn #{inspect(conn_pid)}")
state
nil ->
Logger.debug("checkout for alive conn #{inspect(conn_pid)}, but is not in state")
state
end
{:noreply, state}
end
@impl true
def handle_cast({:remove_conn, key}, state) do
state = put_in(state.conns, Map.delete(state.conns, key))
{:noreply, state}
end
@impl true
def handle_call({:checkin, uri}, from, state) do
key = "#{uri.scheme}:#{uri.host}:#{uri.port}"
case state.conns[key] do
%{conn: pid, gun_state: :up} = conn ->
time = :os.system_time(:second)
last_reference = time - conn.last_reference
crf = crf(last_reference, 100, conn.crf)
state =
put_in(state.conns[key], %{
conn
| last_reference: time,
crf: crf,
conn_state: :active,
used_by: [from | conn.used_by]
})
{:reply, pid, state}
%{gun_state: :down} ->
{:reply, nil, state}
nil ->
{:reply, nil, state}
end
end
@impl true
def handle_call(:state, _from, state), do: {:reply, state, state}
@impl true
def handle_call(:count, _from, state) do
{:reply, Enum.count(state.conns), state}
end
@impl true
def handle_call(:unused_conns, _from, state) do
unused_conns =
state.conns
|> Enum.filter(&filter_conns/1)
|> Enum.sort(&sort_conns/2)
{:reply, unused_conns, state}
end
defp filter_conns({_, %{conn_state: :idle, used_by: []}}), do: true
defp filter_conns(_), do: false
defp sort_conns({_, c1}, {_, c2}) do
c1.crf <= c2.crf and c1.last_reference <= c2.last_reference
end
@impl true
def handle_info({:gun_up, conn_pid, _protocol}, state) do
%{origin_host: host, origin_scheme: scheme, origin_port: port} = Gun.info(conn_pid)
host =
case :inet.ntoa(host) do
{:error, :einval} -> host
ip -> ip
end
key = "#{scheme}:#{host}:#{port}"
state =
with {key, conn} <- find_conn(state.conns, conn_pid, key),
{true, key} <- {Process.alive?(conn_pid), key} do
put_in(state.conns[key], %{
conn
| gun_state: :up,
conn_state: :active,
retries: 0
})
else
{false, key} ->
put_in(
state.conns,
Map.delete(state.conns, key)
)
nil ->
:ok = Gun.close(conn_pid)
state
end
{:noreply, state}
end
@impl true
def handle_info({:gun_down, conn_pid, _protocol, _reason, _killed}, state) do
retries = Config.get([:connections_pool, :retry], 1)
# we can't get info on this pid, because pid is dead
state =
with {key, conn} <- find_conn(state.conns, conn_pid),
{true, key} <- {Process.alive?(conn_pid), key} do
if conn.retries == retries do
:ok = Gun.close(conn.conn)
put_in(
state.conns,
Map.delete(state.conns, key)
)
else
put_in(state.conns[key], %{
conn
| gun_state: :down,
retries: conn.retries + 1
})
end
else
{false, key} ->
put_in(
state.conns,
Map.delete(state.conns, key)
)
nil ->
Logger.debug(":gun_down for conn which isn't found in state")
state
end
{:noreply, state}
end
@impl true
def handle_info({:DOWN, _ref, :process, conn_pid, reason}, state) do
Logger.debug("received DOWN message for #{inspect(conn_pid)} reason -> #{inspect(reason)}")
state =
with {key, conn} <- find_conn(state.conns, conn_pid) do
Enum.each(conn.used_by, fn {pid, _ref} ->
Process.exit(pid, reason)
end)
put_in(
state.conns,
Map.delete(state.conns, key)
)
else
nil ->
Logger.debug(":DOWN for conn which isn't found in state")
state
end
{:noreply, state}
end
defp find_conn(conns, conn_pid) do
Enum.find(conns, fn {_key, conn} ->
conn.conn == conn_pid
end)
end
defp find_conn(conns, conn_pid, conn_key) do
Enum.find(conns, fn {key, conn} ->
key == conn_key and conn.conn == conn_pid
end)
end
def crf(current, steps, crf) do
1 + :math.pow(0.5, current / steps) * crf
end
end

View file

@ -1,22 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool do
def child_spec(opts) do
poolboy_opts =
opts
|> Keyword.put(:worker_module, Pleroma.Pool.Request)
|> Keyword.put(:name, {:local, opts[:name]})
|> Keyword.put(:size, opts[:size])
|> Keyword.put(:max_overflow, opts[:max_overflow])
%{
id: opts[:id] || {__MODULE__, make_ref()},
start: {:poolboy, :start_link, [poolboy_opts, [name: opts[:name]]]},
restart: :permanent,
shutdown: 5000,
type: :worker
}
end
end

View file

@ -1,65 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool.Request do
use GenServer
require Logger
def start_link(args) do
GenServer.start_link(__MODULE__, args)
end
@impl true
def init(_), do: {:ok, []}
@spec execute(pid() | atom(), Tesla.Client.t(), keyword(), pos_integer()) ::
{:ok, Tesla.Env.t()} | {:error, any()}
def execute(pid, client, request, timeout) do
GenServer.call(pid, {:execute, client, request}, timeout)
end
@impl true
def handle_call({:execute, client, request}, _from, state) do
response = Pleroma.HTTP.request(client, request)
{:reply, response, state}
end
@impl true
def handle_info({:gun_data, _conn, _stream, _, _}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_up, _conn, _protocol}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_down, _conn, _protocol, _reason, _killed}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_error, _conn, _stream, _error}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_push, _conn, _stream, _new_stream, _method, _uri, _headers}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_response, _conn, _stream, _, _status, _headers}, state) do
{:noreply, state}
end
@impl true
def handle_info(msg, state) do
Logger.warn("Received unexpected message #{inspect(__MODULE__)} #{inspect(msg)}")
{:noreply, state}
end
end

View file

@ -1,42 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool.Supervisor do
use Supervisor
alias Pleroma.Config
alias Pleroma.Pool
def start_link(args) do
Supervisor.start_link(__MODULE__, args, name: __MODULE__)
end
def init(_) do
conns_child = %{
id: Pool.Connections,
start:
{Pool.Connections, :start_link, [{:gun_connections, Config.get([:connections_pool])}]}
}
Supervisor.init([conns_child | pools()], strategy: :one_for_one)
end
defp pools do
pools = Config.get(:pools)
pools =
if Config.get([Pleroma.Upload, :proxy_remote]) == false do
Keyword.delete(pools, :upload)
else
pools
end
for {pool_name, pool_opts} <- pools do
pool_opts
|> Keyword.put(:id, {Pool, pool_name})
|> Keyword.put(:name, pool_name)
|> Pool.child_spec()
end
end
end

View file

@ -8,11 +8,10 @@ defmodule Pleroma.Repo do
adapter: Ecto.Adapters.Postgres,
migration_timestamps: [type: :naive_datetime_usec]
import Ecto.Query
require Logger
defmodule Instrumenter do
use Prometheus.EctoInstrumenter
end
defmodule Instrumenter, do: use(Prometheus.EctoInstrumenter)
@doc """
Dynamically loads the repository url from the
@ -50,36 +49,30 @@ defmodule Pleroma.Repo do
end
end
def check_migrations_applied!() do
unless Pleroma.Config.get(
[:i_am_aware_this_may_cause_data_loss, :disable_migration_check],
false
) do
Ecto.Migrator.with_repo(__MODULE__, fn repo ->
down_migrations =
Ecto.Migrator.migrations(repo)
|> Enum.reject(fn
{:up, _, _} -> true
{:down, _, _} -> false
end)
def chunk_stream(query, chunk_size) do
# We don't actually need the start and end functions of resource streaming,
# but it seems to be the only way to fetch records in chunks while still
# having individual records be the elements of the stream, instead of
# lists of records.
Stream.resource(
fn -> 0 end,
fn
last_id ->
query
|> order_by(asc: :id)
|> where([r], r.id > ^last_id)
|> limit(^chunk_size)
|> all()
|> case do
[] ->
{:halt, last_id}
if length(down_migrations) > 0 do
down_migrations_text =
Enum.map(down_migrations, fn {:down, id, name} -> "- #{name} (#{id})\n" end)
Logger.error(
"The following migrations were not applied:\n#{down_migrations_text}If you want to start Pleroma anyway, set\nconfig :pleroma, :i_am_aware_this_may_cause_data_loss, disable_migration_check: true"
)
raise Pleroma.Repo.UnappliedMigrationsError
end
end)
else
:ok
end
records ->
last_id = List.last(records).id
{records, last_id}
end
end,
fn _ -> :ok end
)
end
end
defmodule Pleroma.Repo.UnappliedMigrationsError do
defexception message: "Unapplied Migrations detected"
end
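A hedged usage sketch of chunk_stream/2 (schema and query are illustrative; rows are fetched chunk_size at a time but stream out as individual records):
import Ecto.Query

from(u in Pleroma.User, where: u.local == true)
|> Pleroma.Repo.chunk_stream(500)
|> Enum.each(fn user -> IO.puts(user.nickname) end)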

View file

@ -7,6 +7,7 @@ defmodule Pleroma.ReverseProxy.Client.Hackney do
@impl true
def request(method, url, headers, body, opts \\ []) do
opts = Keyword.put(opts, :ssl_options, versions: [:"tlsv1.2", :"tlsv1.1", :tlsv1])
:hackney.request(method, url, headers, body, opts)
end

View file

@ -5,6 +5,8 @@
defmodule Pleroma.ReverseProxy.Client.Tesla do
@behaviour Pleroma.ReverseProxy.Client
alias Pleroma.Gun.ConnectionPool
@type headers() :: [{String.t(), String.t()}]
@type status() :: pos_integer()
@ -26,11 +28,13 @@ defmodule Pleroma.ReverseProxy.Client.Tesla do
url,
body,
headers,
Keyword.put(opts, :adapter, opts)
opts
) do
if is_map(response.body) and method != :head do
{:ok, response.status, response.headers, response.body}
else
conn_pid = response.opts[:adapter][:conn]
ConnectionPool.release_conn(conn_pid)
{:ok, response.status, response.headers}
end
else
@ -41,15 +45,8 @@ defmodule Pleroma.ReverseProxy.Client.Tesla do
@impl true
@spec stream_body(map()) ::
{:ok, binary(), map()} | {:error, atom() | String.t()} | :done | no_return()
def stream_body(%{pid: pid, opts: opts, fin: true}) do
# if connection was reused, but in tesla were redirects,
# tesla returns new opened connection, which must be closed manually
if opts[:old_conn], do: Tesla.Adapter.Gun.close(pid)
# if there were redirects we need to checkout old conn
conn = opts[:old_conn] || opts[:conn]
if conn, do: :ok = Pleroma.Pool.Connections.checkout(conn, self(), :gun_connections)
def stream_body(%{pid: pid, fin: true}) do
ConnectionPool.release_conn(pid)
:done
end
@ -74,8 +71,7 @@ defmodule Pleroma.ReverseProxy.Client.Tesla do
@impl true
@spec close(map) :: :ok | no_return()
def close(%{pid: pid}) do
adapter = check_adapter()
adapter.close(pid)
ConnectionPool.release_conn(pid)
end
defp check_adapter do

View file

@ -3,12 +3,13 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy do
@range_headers ~w(range if-range)
@keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++
~w(if-unmodified-since if-none-match if-range range)
~w(if-unmodified-since if-none-match) ++ @range_headers
@resp_cache_headers ~w(etag date last-modified)
@keep_resp_headers @resp_cache_headers ++
~w(content-type content-disposition content-encoding content-range) ++
~w(accept-ranges vary)
~w(content-length content-type content-disposition content-encoding) ++
~w(content-range accept-ranges vary)
@default_cache_control_header "public, max-age=1209600"
@valid_resp_codes [200, 206, 304]
@max_read_duration :timer.seconds(30)
@ -164,12 +165,17 @@ defmodule Pleroma.ReverseProxy do
{:ok, code, _, _} ->
{:error, {:invalid_http_response, code}}
{:ok, code, _} ->
{:error, {:invalid_http_response, code}}
{:error, error} ->
{:error, error}
end
end
defp response(conn, client, url, status, headers, opts) do
Logger.debug("#{__MODULE__} #{status} #{url} #{inspect(headers)}")
result =
conn
|> put_resp_headers(build_resp_headers(headers, opts))
@ -220,7 +226,9 @@ defmodule Pleroma.ReverseProxy do
end
end
defp head_response(conn, _url, code, headers, opts) do
defp head_response(conn, url, code, headers, opts) do
Logger.debug("#{__MODULE__} #{code} #{url} #{inspect(headers)}")
conn
|> put_resp_headers(build_resp_headers(headers, opts))
|> send_resp(code, "")
@ -262,20 +270,33 @@ defmodule Pleroma.ReverseProxy do
headers
|> downcase_headers()
|> Enum.filter(fn {k, _} -> k in @keep_req_headers end)
|> (fn headers ->
headers = headers ++ Keyword.get(opts, :req_headers, [])
|> build_req_range_or_encoding_header(opts)
|> build_req_user_agent_header(opts)
|> Keyword.merge(Keyword.get(opts, :req_headers, []))
end
if Keyword.get(opts, :keep_user_agent, false) do
List.keystore(
headers,
"user-agent",
0,
{"user-agent", Pleroma.Application.user_agent()}
)
else
headers
end
end).()
# Disable content-encoding if any @range_headers are requested (see #1823).
defp build_req_range_or_encoding_header(headers, _opts) do
range? = Enum.any?(headers, fn {header, _} -> Enum.member?(@range_headers, header) end)
if range? && List.keymember?(headers, "accept-encoding", 0) do
List.keydelete(headers, "accept-encoding", 0)
else
headers
end
end
defp build_req_user_agent_header(headers, opts) do
if Keyword.get(opts, :keep_user_agent, false) do
List.keystore(
headers,
"user-agent",
0,
{"user-agent", Pleroma.Application.user_agent()}
)
else
headers
end
end
defp build_resp_headers(headers, opts) do
@ -283,7 +304,7 @@ defmodule Pleroma.ReverseProxy do
|> Enum.filter(fn {k, _} -> k in @keep_resp_headers end)
|> build_resp_cache_headers(opts)
|> build_resp_content_disposition_header(opts)
|> (fn headers -> headers ++ Keyword.get(opts, :resp_headers, []) end).()
|> Keyword.merge(Keyword.get(opts, :resp_headers, []))
end
defp build_resp_cache_headers(headers, _opts) do

View file

@ -5,10 +5,10 @@
defmodule Pleroma.Signature do
@behaviour HTTPSignatures.Adapter
alias Pleroma.EctoType.ActivityPub.ObjectValidators
alias Pleroma.Keys
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.ObjectValidators.Types
def key_id_to_actor_id(key_id) do
uri =
@ -24,7 +24,7 @@ defmodule Pleroma.Signature do
maybe_ap_id = URI.to_string(uri)
case Types.ObjectID.cast(maybe_ap_id) do
case ObjectValidators.ObjectID.cast(maybe_ap_id) do
{:ok, ap_id} ->
{:ok, ap_id}

View file

@ -3,12 +3,15 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Stats do
use GenServer
import Ecto.Query
alias Pleroma.CounterCache
alias Pleroma.Repo
alias Pleroma.User
use GenServer
@interval :timer.seconds(60)
def start_link(_) do
GenServer.start_link(
@ -18,6 +21,12 @@ defmodule Pleroma.Stats do
)
end
@impl true
def init(_args) do
if Pleroma.Config.get(:env) == :test, do: :ok = Ecto.Adapters.SQL.Sandbox.checkout(Repo)
{:ok, nil, {:continue, :calculate_stats}}
end
@doc "Performs update stats"
def force_update do
GenServer.call(__MODULE__, :force_update)
@ -29,7 +38,11 @@ defmodule Pleroma.Stats do
end
@doc "Returns stats data"
@spec get_stats() :: %{domain_count: integer(), status_count: integer(), user_count: integer()}
@spec get_stats() :: %{
domain_count: non_neg_integer(),
status_count: non_neg_integer(),
user_count: non_neg_integer()
}
def get_stats do
%{stats: stats} = GenServer.call(__MODULE__, :get_state)
@ -44,25 +57,14 @@ defmodule Pleroma.Stats do
peers
end
def init(_args) do
{:ok, calculate_stat_data()}
end
def handle_call(:force_update, _from, _state) do
new_stats = calculate_stat_data()
{:reply, new_stats, new_stats}
end
def handle_call(:get_state, _from, state) do
{:reply, state, state}
end
def handle_cast(:run_update, _state) do
new_stats = calculate_stat_data()
{:noreply, new_stats}
end
@spec calculate_stat_data() :: %{
peers: list(),
stats: %{
domain_count: non_neg_integer(),
status_count: non_neg_integer(),
user_count: non_neg_integer()
}
}
def calculate_stat_data do
peers =
from(
@ -97,20 +99,44 @@ defmodule Pleroma.Stats do
}
end
def get_status_visibility_count do
counter_cache =
CounterCache.get_as_map([
"status_visibility_public",
"status_visibility_private",
"status_visibility_unlisted",
"status_visibility_direct"
])
@spec get_status_visibility_count(String.t() | nil) :: map()
def get_status_visibility_count(instance \\ nil) do
if is_nil(instance) do
CounterCache.get_sum()
else
CounterCache.get_by_instance(instance)
end
end
%{
public: counter_cache["status_visibility_public"] || 0,
unlisted: counter_cache["status_visibility_unlisted"] || 0,
private: counter_cache["status_visibility_private"] || 0,
direct: counter_cache["status_visibility_direct"] || 0
}
@impl true
def handle_continue(:calculate_stats, _) do
stats = calculate_stat_data()
Process.send_after(self(), :run_update, @interval)
{:noreply, stats}
end
@impl true
def handle_call(:force_update, _from, _state) do
new_stats = calculate_stat_data()
{:reply, new_stats, new_stats}
end
@impl true
def handle_call(:get_state, _from, state) do
{:reply, state, state}
end
@impl true
def handle_cast(:run_update, _state) do
new_stats = calculate_stat_data()
{:noreply, new_stats}
end
@impl true
def handle_info(:run_update, _) do
new_stats = calculate_stat_data()
Process.send_after(self(), :run_update, @interval)
{:noreply, new_stats}
end
end
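A hedged illustration of the map shape promised by the tightened get_stats/0 spec (numbers made up):
Pleroma.Stats.get_stats()
#=> %{domain_count: 12, status_count: 4200, user_count: 31}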

View file

@ -0,0 +1,76 @@
defmodule Pleroma.Telemetry.Logger do
@moduledoc "Transforms Pleroma telemetry events to logs"
require Logger
@events [
[:pleroma, :connection_pool, :reclaim, :start],
[:pleroma, :connection_pool, :reclaim, :stop],
[:pleroma, :connection_pool, :provision_failure],
[:pleroma, :connection_pool, :client_death]
]
def attach do
:telemetry.attach_many("pleroma-logger", @events, &handle_event/4, [])
end
# Passing anonymous functions instead of strings to the logger is intentional:
# that way the strings are not built at all if the message is going to be
# dropped anyway due to a higher configured log level.
def handle_event(
[:pleroma, :connection_pool, :reclaim, :start],
_,
%{max_connections: max_connections, reclaim_max: reclaim_max},
_
) do
Logger.debug(fn ->
"Connection pool is exhausted (reached #{max_connections} connections). Starting idle connection cleanup to reclaim as much as #{
reclaim_max
} connections"
end)
end
def handle_event(
[:pleroma, :connection_pool, :reclaim, :stop],
%{reclaimed_count: 0},
_,
_
) do
Logger.error(fn ->
"Connection pool failed to reclaim any connections due to all of them being in use. It will have to drop requests for opening connections to new hosts"
end)
end
def handle_event(
[:pleroma, :connection_pool, :reclaim, :stop],
%{reclaimed_count: reclaimed_count},
_,
_
) do
Logger.debug(fn -> "Connection pool cleaned up #{reclaimed_count} idle connections" end)
end
def handle_event(
[:pleroma, :connection_pool, :provision_failure],
%{opts: [key | _]},
_,
_
) do
Logger.error(fn ->
"Connection pool had to refuse opening a connection to #{key} due to connection limit exhaustion"
end)
end
def handle_event(
[:pleroma, :connection_pool, :client_death],
%{client_pid: client_pid, reason: reason},
%{key: key},
_
) do
Logger.warn(fn ->
"Pool worker for #{key}: Client #{inspect(client_pid)} died before releasing the connection with #{
inspect(reason)
}"
end)
end
end
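A hedged sketch of wiring and exercising the handler (the real events are emitted from the Gun connection pool; the manual :telemetry.execute/3 call is only for illustration):
Pleroma.Telemetry.Logger.attach()

:telemetry.execute(
  [:pleroma, :connection_pool, :reclaim, :stop],
  %{reclaimed_count: 3},
  %{}
)
# logs at debug level: "Connection pool cleaned up 3 idle connections"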

View file

@ -0,0 +1,50 @@
# Pleroma: A lightweight social networking server
# Copyright © 2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Tesla.Middleware.ConnectionPool do
@moduledoc """
Middleware to get/release connections from `Pleroma.Gun.ConnectionPool`
"""
@behaviour Tesla.Middleware
alias Pleroma.Gun.ConnectionPool
@impl Tesla.Middleware
def call(%Tesla.Env{url: url, opts: opts} = env, next, _) do
uri = URI.parse(url)
# Avoid leaking connections when the middleware is called twice
# with body_as: :chunks. We assume only the middleware can set
# opts[:adapter][:conn]
if opts[:adapter][:conn] do
ConnectionPool.release_conn(opts[:adapter][:conn])
end
case ConnectionPool.get_conn(uri, opts[:adapter]) do
{:ok, conn_pid} ->
adapter_opts = Keyword.merge(opts[:adapter], conn: conn_pid, close_conn: false)
opts = Keyword.put(opts, :adapter, adapter_opts)
env = %{env | opts: opts}
case Tesla.run(env, next) do
{:ok, env} ->
unless opts[:adapter][:body_as] == :chunks do
ConnectionPool.release_conn(conn_pid)
{_, res} = pop_in(env.opts[:adapter][:conn])
{:ok, res}
else
{:ok, env}
end
err ->
ConnectionPool.release_conn(conn_pid)
err
end
err ->
err
end
end
end
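A hedged sketch of where the middleware sits in a Tesla client (in Pleroma the client is assembled elsewhere; the adapter options passed via opts: [adapter: []] are an assumption, included so the middleware has something to extend):
client = Tesla.client([Pleroma.Tesla.Middleware.ConnectionPool], Tesla.Adapter.Gun)
Tesla.get(client, "https://example.com/objects/1", opts: [adapter: []])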

View file

@ -56,6 +56,15 @@ defmodule Pleroma.Upload do
}
defstruct [:id, :name, :tempfile, :content_type, :path]
defp get_description(opts, upload) do
case {opts[:description], Pleroma.Config.get([Pleroma.Upload, :default_description])} do
{description, _} when is_binary(description) -> description
{_, :filename} -> upload.name
{_, str} when is_binary(str) -> str
_ -> ""
end
end
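A hedged config sketch for the fallback read by get_description/2 (:filename falls back to the upload's filename; any binary sets a fixed default):
config :pleroma, Pleroma.Upload, default_description: :filename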
@spec store(source, options :: [option()]) :: {:ok, Map.t()} | {:error, any()}
@doc "Store a file. If using a `Plug.Upload{}` as the source, be sure to use `Majic.Plug` to ensure its content_type and filename is correct."
def store(upload, opts \\ []) do
@ -64,6 +73,10 @@ defmodule Pleroma.Upload do
with {:ok, upload} <- prepare_upload(upload, opts),
upload = %__MODULE__{upload | path: upload.path || "#{upload.id}/#{upload.name}"},
{:ok, upload} <- Pleroma.Upload.Filter.filter(opts.filters, upload),
description = get_description(opts, upload),
{_, true} <-
{:description_limit,
String.length(description) <= Pleroma.Config.get([:instance, :description_limit])},
{:ok, url_spec} <- Pleroma.Uploaders.Uploader.put_file(opts.uploader, upload) do
{:ok,
%{
@ -76,9 +89,12 @@ defmodule Pleroma.Upload do
"href" => url_from_spec(upload, opts.base_url, url_spec)
}
],
"name" => Map.get(opts, :description) || upload.name
"name" => description
}}
else
{:description_limit, _} ->
{:error, :description_too_long}
{:error, error} ->
Logger.error(
"#{__MODULE__} store (using #{inspect(opts.uploader)}) failed: #{inspect(error)}"

View file

@ -15,7 +15,11 @@ defmodule Pleroma.Upload.Filter do
require Logger
@callback filter(Pleroma.Upload.t()) :: :ok | {:ok, Pleroma.Upload.t()} | {:error, any()}
@callback filter(Pleroma.Upload.t()) ::
{:ok, :filtered}
| {:ok, :noop}
| {:ok, :filtered, Pleroma.Upload.t()}
| {:error, any()}
@spec filter([module()], Pleroma.Upload.t()) :: {:ok, Pleroma.Upload.t()} | {:error, any()}
@ -25,10 +29,13 @@ defmodule Pleroma.Upload.Filter do
def filter([filter | rest], upload) do
case filter.filter(upload) do
:ok ->
{:ok, :filtered} ->
filter(rest, upload)
{:ok, upload} ->
{:ok, :filtered, upload} ->
filter(rest, upload)
{:ok, :noop} ->
filter(rest, upload)
error ->

View file

@ -16,9 +16,11 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilename do
def filter(%Upload{name: name} = upload) do
extension = List.last(String.split(name, "."))
name = predefined_name(extension) || random(extension)
{:ok, %Upload{upload | name: name}}
{:ok, :filtered, %Upload{upload | name: name}}
end
def filter(_), do: {:ok, :noop}
@spec predefined_name(String.t()) :: String.t() | nil
defp predefined_name(extension) do
with name when not is_nil(name) <- Config.get([__MODULE__, :text]),

View file

@ -17,8 +17,8 @@ defmodule Pleroma.Upload.Filter.Dedupe do
|> Base.encode16(case: :lower)
filename = shasum <> "." <> extension
{:ok, %Upload{upload | id: shasum, path: filename}}
{:ok, :filtered, %Upload{upload | id: shasum, path: filename}}
end
def filter(_), do: :ok
def filter(_), do: {:ok, :noop}
end
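A minimal sketch of a custom filter conforming to the updated callback contract (module name hypothetical, not part of the diff):
defmodule MyInstance.Upload.Filter.Passthrough do
  @behaviour Pleroma.Upload.Filter

  # Nothing was changed, so report a no-op; return {:ok, :filtered} (or
  # {:ok, :filtered, upload} when the struct itself is modified) otherwise.
  @impl true
  def filter(%Pleroma.Upload{}), do: {:ok, :noop}
end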

View file

@ -0,0 +1,30 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Upload.Filter.Exiftool do
@moduledoc """
Strips GPS related EXIF tags and overwrites the file in place.
Also strips or replaces filesystem metadata, e.g. timestamps.
"""
@behaviour Pleroma.Upload.Filter
@spec filter(Pleroma.Upload.t()) :: {:ok, any()} | {:error, String.t()}
# webp is not compatible with exiftool at this time
def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
try do
case System.cmd("exiftool", ["-overwrite_original", "-gps:all=", file], parallelism: true) do
{_response, 0} -> {:ok, :filtered}
{error, 1} -> {:error, error}
end
rescue
_e in ErlangError ->
{:error, "exiftool command not found"}
end
end
def filter(_), do: {:ok, :noop}
end
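A hedged config sketch for enabling the new filter (the filter list is illustrative):
config :pleroma, Pleroma.Upload, filters: [Pleroma.Upload.Filter.Exiftool]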

View file

@ -6,6 +6,10 @@ defmodule Pleroma.Upload.Filter.Mogrifun do
@behaviour Pleroma.Upload.Filter
alias Pleroma.Upload.Filter
@moduledoc """
This module is just an example of an Upload filter. It's not supposed to be used in production.
"""
@filters [
{"implode", "1"},
{"-raise", "20"},
@ -34,11 +38,16 @@ defmodule Pleroma.Upload.Filter.Mogrifun do
[{"fill", "yellow"}, {"tint", "40"}]
]
@spec filter(Pleroma.Upload.t()) :: {:ok, atom()} | {:error, String.t()}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
Filter.Mogrify.do_filter(file, [Enum.random(@filters)])
:ok
try do
Filter.Mogrify.do_filter(file, [Enum.random(@filters)])
{:ok, :filtered}
rescue
_e in ErlangError ->
{:error, "mogrify command not found"}
end
end
def filter(_), do: :ok
def filter(_), do: {:ok, :noop}
end

View file

@ -8,14 +8,18 @@ defmodule Pleroma.Upload.Filter.Mogrify do
@type conversion :: action :: String.t() | {action :: String.t(), opts :: String.t()}
@type conversions :: conversion() | [conversion()]
@spec filter(Pleroma.Upload.t()) :: {:ok, :atom} | {:error, String.t()}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
filters = Pleroma.Config.get!([__MODULE__, :args])
do_filter(file, filters)
:ok
try do
do_filter(file, Pleroma.Config.get!([__MODULE__, :args]))
{:ok, :filtered}
rescue
_e in ErlangError ->
{:error, "mogrify command not found"}
end
end
def filter(_), do: :ok
def filter(_), do: {:ok, :noop}
def do_filter(file, filters) do
file

Some files were not shown because too many files have changed in this diff