Merge remote-tracking branch 'origin/develop' into shigusegubu

Commit d380bdd2f9: 76 changed files with 1961 additions and 577 deletions

.gitlab-ci.yml
@@ -14,9 +14,10 @@ variables: &global_variables

 workflow:
   rules:
     - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+    - if: $CI_COMMIT_BRANCH == "develop"
+    - if: $CI_COMMIT_BRANCH == "stable"
     - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
       when: never
-    - if: $CI_COMMIT_BRANCH

 cache: &global_cache_policy
   key: $CI_JOB_IMAGE-$CI_COMMIT_SHORT_SHA
@@ -131,10 +132,25 @@ unit-testing-1.14.5-otp-25:
     - name: postgres:13-alpine
       alias: postgres
       command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
+  before_script: &testing_before_script
+    - echo $MIX_ENV
+    - rm -rf _build/*/lib/pleroma
+    # Create a non-root user for running tests
+    - useradd -m -s /bin/bash testuser
+    # Install dependencies as root first
+    - mix deps.get
+    # Set proper ownership for everything
+    - chown -R testuser:testuser .
+    - chown -R testuser:testuser /root/.mix || true
+    - chown -R testuser:testuser /root/.hex || true
+    # Create user-specific directories
+    - su testuser -c "HOME=/home/testuser mix local.hex --force"
+    - su testuser -c "HOME=/home/testuser mix local.rebar --force"
   script: &testing_script
-    - mix ecto.create
-    - mix ecto.migrate
-    - mix pleroma.test_runner --cover --preload-modules
+    # Run tests as non-root user
+    - su testuser -c "HOME=/home/testuser mix ecto.create"
+    - su testuser -c "HOME=/home/testuser mix ecto.migrate"
+    - su testuser -c "HOME=/home/testuser mix pleroma.test_runner --cover --preload-modules"
   coverage: '/^Line total: ([^ ]*%)$/'
   artifacts:
     reports:
@@ -150,6 +166,7 @@ unit-testing-1.18.3-otp-27:
   image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.18.3-otp-27
   cache: *testing_cache_policy
   services: *testing_services
+  before_script: *testing_before_script
   script: *testing_script

 formatting-1.15:

New changelog entries (all new files under changelog.d/):

- db-restore-docs.change: Docs: Restore DB schema before data to avoid long restore times
- deactivated-404-inbox.change: Return 404 with a better error message instead of 400 when receiving an activity for a deactivated user
- emoji-pack-upload-zip.add: Added a way to upload new packs from a URL or ZIP file via Admin API
- gitlabci.skip: (empty file)
- gun.change: Update Cowboy, Gun, and Plug family of dependencies
- hashtag-search.change: Hashtag searches return real results based on words in your query
- noop-fixes.skip: (empty file)
- oban-notifier.change: Oban Notifier was changed to Oban.Notifiers.PG for performance and scalability benefits
- postgrex.change: Updated Postgrex library to 0.20.0
- reachability.change: Improved the logic of how we determine if a server is unreachable.
- smtp-docs.change: Change SMTP example to use the Mua adapter that works with OTP>25
- tesla.change: Updated Tesla to 1.15.3
- url-encoding.fix: Fix HTTP client making invalid requests due to no percent encoding processing or validation.

Default Pleroma configuration:
@@ -194,7 +194,6 @@ config :pleroma, :instance,
   account_approval_required: false,
   federating: true,
   federation_incoming_replies_max_depth: 100,
-  federation_reachability_timeout_days: 7,
   allow_relay: true,
   public: true,
   quarantined_instances: [{ "pleroma.rareome.ga", "leaks private posts or sumshit i dont rember" }],
@@ -603,6 +602,7 @@ config :pleroma, Pleroma.User,
 # value or it cannot enforce uniqueness.
 config :pleroma, Oban,
   repo: Pleroma.Repo,
+  notifier: Oban.Notifiers.PG,
   log: false,
   queues: [
     activity_expiration: 10,

Backup/restore documentation:
@@ -2,28 +2,60 @@

 ## Backup

-1. Stop the Pleroma service.
-2. Go to the working directory of Pleroma (default is `/opt/pleroma`)
-3. Run `sudo -Hu postgres pg_dump -d <pleroma_db> --format=custom -f </path/to/backup_location/pleroma.pgdump>` (make sure the postgres user has write access to the destination file)
+1. Stop the Pleroma service:
+    ```
+    # sudo systemctl stop pleroma
+    ```
+2. Go to the working directory of Pleroma (default is `/opt/pleroma`).
+3. Run (make sure the postgres user has write access to the destination file):
+    ```
+    # sudo -Hu postgres pg_dump -d <pleroma_db> -v --format=custom --compress=9 -f </path/to/backup_location/pleroma.pgdump>
+    ```
 4. Copy `pleroma.pgdump`, `config/prod.secret.exs`, `config/setup_db.psql` (if still available) and the `uploads` folder to your backup destination. If you have other modifications, copy those changes too.
-5. Restart the Pleroma service.
+5. Restart the Pleroma service:
+    ```
+    # sudo systemctl start pleroma
+    ```

 ## Restore/Move

 1. Optionally reinstall Pleroma (either on the same server or on another server if you want to move servers).
-2. Stop the Pleroma service.
-3. Go to the working directory of Pleroma (default is `/opt/pleroma`)
+2. Stop the Pleroma service:
+    ```
+    # sudo systemctl stop pleroma
+    ```
+3. Go to the working directory of Pleroma (default is `/opt/pleroma`).
 4. Copy the above mentioned files back to their original position.
-5. Drop the existing database and user if restoring in-place. `sudo -Hu postgres psql -c 'DROP DATABASE <pleroma_db>;';` `sudo -Hu postgres psql -c 'DROP USER <pleroma_db>;'`
-6. Restore the database schema and pleroma postgres role with the original `setup_db.psql` if you have it: `sudo -Hu postgres psql -f config/setup_db.psql`.
+5. Drop the existing database and user if restoring in-place:
+    ```
+    # sudo -Hu postgres dropdb <pleroma_db>
+    # sudo -Hu postgres dropuser <pleroma_user>
+    ```
+6. Restore the database schema and pleroma database user with the original `setup_db.psql` if you have it:
+    ```
+    # sudo -Hu postgres psql -f config/setup_db.psql
+    ```

-   Alternatively, run the `mix pleroma.instance gen` task again. You can ignore most of the questions, but make the database user, name, and password the same as found in your backup of `config/prod.secret.exs`. Then run the restoration of the pleroma role and schema with the generated `config/setup_db.psql` as instructed above. You may delete the `config/generated_config.exs` file as it is not needed.
+   Alternatively, run the `mix pleroma.instance gen` task again. You can ignore most of the questions, but make the database user, name, and password the same as found in your backup of `config/prod.secret.exs`. Then run the restoration of the pleroma user and schema with the generated `config/setup_db.psql` as instructed above. You may delete the `config/generated_config.exs` file as it is not needed.

-7. Now restore the Pleroma instance's data into the empty database schema: `sudo -Hu postgres pg_restore -d <pleroma_db> -v -1 </path/to/backup_location/pleroma.pgdump>`
-8. If you installed a newer Pleroma version, you should run `mix ecto.migrate`[^1]. This task performs database migrations, if there were any.
-9. Restart the Pleroma service.
-10. Run `sudo -Hu postgres vacuumdb --all --analyze-in-stages`. This will quickly generate the statistics so that postgres can properly plan queries.
-11. If setting up on a new server configure Nginx by using the `installation/pleroma.nginx` config sample or reference the Pleroma installation guide for your OS which contains the Nginx configuration instructions.
+7. Now restore the Pleroma instance's schema into the empty database schema:
+    ```
+    # sudo -Hu postgres pg_restore -d <pleroma_db> -v -s -1 </path/to/backup_location/pleroma.pgdump>
+    ```
+8. Now restore the Pleroma instance's data into the database:
+    ```
+    # sudo -Hu postgres pg_restore -d <pleroma_db> -v -a -1 --disable-triggers </path/to/backup_location/pleroma.pgdump>
+    ```
+9. If you installed a newer Pleroma version, you should run `mix ecto.migrate`[^1]. This task performs database migrations, if there were any.
+10. Generate the statistics so that PostgreSQL can properly plan queries:
+    ```
+    # sudo -Hu postgres vacuumdb -v --all --analyze-in-stages
+    ```
+11. Restart the Pleroma service:
+    ```
+    # sudo systemctl start pleroma
+    ```
+12. If setting up on a new server, configure Nginx by using your original configuration or by using the `installation/pleroma.nginx` config sample or reference the Pleroma installation guide for your OS which contains the Nginx configuration instructions.

 [^1]: Prefix with `MIX_ENV=prod` to run it using the production config file.

@@ -32,10 +64,26 @@
 1. Optionally you can remove the users of your instance. This will trigger delete requests for their accounts and posts. Note that this is 'best effort' and doesn't mean that all traces of your instance will be gone from the fediverse.
     * You can do this from the admin-FE where you can select all local users and delete the accounts using the *Moderate multiple users* dropdown.
     * You can also list local users and delete them individually using the CLI tasks for [Managing users](./CLI_tasks/user.md).
-2. Stop the Pleroma service `systemctl stop pleroma`
-3. Disable pleroma from systemd `systemctl disable pleroma`
+2. Stop the Pleroma service:
+    ```
+    # systemctl stop pleroma
+    ```
+3. Disable pleroma from systemd:
+    ```
+    # systemctl disable pleroma
+    ```
 4. Remove the files and folders you created during installation (see installation guide). This includes the pleroma, nginx and systemd files and folders.
-5. Reload nginx now that the configuration is removed `systemctl reload nginx`
-6. Remove the database and database user `sudo -Hu postgres psql -c 'DROP DATABASE <pleroma_db>;';` `sudo -Hu postgres psql -c 'DROP USER <pleroma_db>;'`
-7. Remove the system user `userdel pleroma`
-8. Remove the dependencies that you don't need anymore (see installation guide). Make sure you don't remove packages that are still needed for other software that you have running!
+5. Reload nginx now that the configuration is removed:
+    ```
+    # systemctl reload nginx
+    ```
+6. Remove the database and database user:
+    ```
+    # sudo -Hu postgres dropdb <pleroma_db>
+    # sudo -Hu postgres dropuser <pleroma_user>
+    ```
+7. Remove the system user:
+    ```
+    # userdel -r pleroma
+    ```
+8. Remove the dependencies that you don't need anymore (see installation guide). **Make sure you don't remove packages that are still needed for other software that you have running!**

Configuration cheatsheet docs (mailer example):
@@ -733,13 +733,11 @@ An example for SMTP adapter:
 ```elixir
 config :pleroma, Pleroma.Emails.Mailer,
   enabled: true,
-  adapter: Swoosh.Adapters.SMTP,
+  adapter: Swoosh.Adapters.Mua,
   relay: "smtp.gmail.com",
-  username: "YOUR_USERNAME@gmail.com",
-  password: "YOUR_SMTP_PASSWORD",
+  auth: [username: "YOUR_USERNAME@gmail.com", password: "YOUR_SMTP_PASSWORD"],
   port: 465,
-  ssl: true,
-  auth: :always
+  protocol: :ssl
 ```

 An example for Mua adapter:

@@ -26,7 +26,11 @@ defmodule Mix.Pleroma do
     Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)

     unless System.get_env("DEBUG") do
-      Logger.remove_backend(:console)
+      try do
+        Logger.remove_backend(:console)
+      catch
+        :exit, _ -> :ok
+      end
     end

     adapter = Application.get_env(:tesla, :adapter)

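The `catch :exit` clause above keeps the Mix helper from crashing if `Logger.remove_backend(:console)` exits (the diff does not say under which condition that happens; presumably when the console backend is not attached). A minimal standalone sketch of the same rescue pattern, with a made-up exit reason:

```elixir
# Illustrative only: trap an :exit raised by the wrapped call instead of
# letting it take the calling process down.
result =
  try do
    exit(:console_backend_missing)
  catch
    :exit, reason -> {:ignored, reason}
  end

# result == {:ignored, :console_backend_missing}
```
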
@@ -225,6 +225,97 @@ defmodule Pleroma.Emoji.Pack do
     end
   end

+  def download_zip(name, opts \\ %{}) do
+    with :ok <- validate_not_empty([name]),
+         :ok <- validate_new_pack(name),
+         {:ok, archive_data} <- fetch_archive_data(opts),
+         pack_path <- path_join_name_safe(emoji_path(), name),
+         :ok <- create_pack_dir(pack_path),
+         :ok <- safe_unzip(archive_data, pack_path) do
+      ensure_pack_json(pack_path, archive_data, opts)
+    else
+      {:error, :empty_values} -> {:error, "Pack name cannot be empty"}
+      {:error, reason} when is_binary(reason) -> {:error, reason}
+      _ -> {:error, "Could not process pack"}
+    end
+  end
+
+  defp create_pack_dir(pack_path) do
+    case File.mkdir_p(pack_path) do
+      :ok -> :ok
+      {:error, _} -> {:error, "Could not create the pack directory"}
+    end
+  end
+
+  defp safe_unzip(archive_data, pack_path) do
+    case SafeZip.unzip_data(archive_data, pack_path) do
+      {:ok, _} -> :ok
+      {:error, reason} when is_binary(reason) -> {:error, reason}
+      _ -> {:error, "Could not unzip pack"}
+    end
+  end
+
+  defp validate_new_pack(name) do
+    pack_path = path_join_name_safe(emoji_path(), name)
+
+    if File.exists?(pack_path) do
+      {:error, "Pack already exists, refusing to import #{name}"}
+    else
+      :ok
+    end
+  end
+
+  defp fetch_archive_data(%{url: url}) do
+    case Pleroma.HTTP.get(url) do
+      {:ok, %{status: 200, body: data}} -> {:ok, data}
+      _ -> {:error, "Could not download pack"}
+    end
+  end
+
+  defp fetch_archive_data(%{file: %Plug.Upload{path: path}}) do
+    case File.read(path) do
+      {:ok, data} -> {:ok, data}
+      _ -> {:error, "Could not read the uploaded pack file"}
+    end
+  end
+
+  defp fetch_archive_data(_) do
+    {:error, "Neither file nor URL was present in the request"}
+  end
+
+  defp ensure_pack_json(pack_path, archive_data, opts) do
+    pack_json_path = Path.join(pack_path, "pack.json")
+
+    if not File.exists?(pack_json_path) do
+      create_pack_json(pack_path, pack_json_path, archive_data, opts)
+    end
+
+    :ok
+  end
+
+  defp create_pack_json(pack_path, pack_json_path, archive_data, opts) do
+    emoji_map =
+      Pleroma.Emoji.Loader.make_shortcode_to_file_map(
+        pack_path,
+        Map.get(opts, :exts, [".png", ".gif", ".jpg"])
+      )
+
+    archive_sha = :crypto.hash(:sha256, archive_data) |> Base.encode16()
+
+    pack_json = %{
+      pack: %{
+        license: Map.get(opts, :license, ""),
+        homepage: Map.get(opts, :homepage, ""),
+        description: Map.get(opts, :description, ""),
+        src: Map.get(opts, :url),
+        src_sha256: archive_sha
+      },
+      files: emoji_map
+    }
+
+    File.write!(pack_json_path, Jason.encode!(pack_json, pretty: true))
+  end
+
   @spec download(String.t(), String.t(), String.t()) :: {:ok, t()} | {:error, atom()}
   def download(name, url, as) do
     uri = url |> String.trim() |> URI.parse()

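A sketch of how the new `download_zip/2` might be called, e.g. from IEx; the pack name and URL are placeholders, and the two option shapes mirror the `fetch_archive_data/1` clauses above:

```elixir
# Import a pack from a remote ZIP archive (placeholder URL).
case Pleroma.Emoji.Pack.download_zip("my_pack", %{url: "https://example.com/my_pack.zip"}) do
  :ok -> IO.puts("pack imported")
  {:error, reason} -> IO.puts("import failed: #{reason}")
end

# Or from an uploaded file instead of a URL:
# Pleroma.Emoji.Pack.download_zip("my_pack", %{file: %Plug.Upload{path: "/tmp/my_pack.zip"}})
```
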
@@ -22,14 +22,18 @@ defmodule Pleroma.Gopher.Server do
   def init([ip, port]) do
     Logger.info("Starting gopher server on #{port}")

-    :ranch.start_listener(
-      :gopher,
-      100,
-      :ranch_tcp,
-      [ip: ip, port: port],
-      __MODULE__.ProtocolHandler,
-      []
-    )
+    {:ok, _pid} =
+      :ranch.start_listener(
+        :gopher,
+        :ranch_tcp,
+        %{
+          num_acceptors: 100,
+          max_connections: 100,
+          socket_opts: [ip: ip, port: port]
+        },
+        __MODULE__.ProtocolHandler,
+        []
+      )

     {:ok, %{ip: ip, port: port}}
   end
@@ -43,13 +47,13 @@ defmodule Pleroma.Gopher.Server.ProtocolHandler do
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Visibility

-  def start_link(ref, socket, transport, opts) do
-    pid = spawn_link(__MODULE__, :init, [ref, socket, transport, opts])
+  def start_link(ref, transport, opts) do
+    pid = spawn_link(__MODULE__, :init, [ref, transport, opts])
     {:ok, pid}
   end

-  def init(ref, socket, transport, [] = _Opts) do
-    :ok = :ranch.accept_ack(ref)
+  def init(ref, transport, opts \\ []) do
+    {:ok, socket} = :ranch.handshake(ref, opts)
     loop(socket, transport)
   end

@@ -130,4 +130,66 @@ defmodule Pleroma.Hashtag do
   end

   def get_recipients_for_activity(_activity), do: []
+
+  def search(query, options \\ []) do
+    limit = Keyword.get(options, :limit, 20)
+    offset = Keyword.get(options, :offset, 0)
+
+    search_terms =
+      query
+      |> String.downcase()
+      |> String.trim()
+      |> String.split(~r/\s+/)
+      |> Enum.filter(&(&1 != ""))
+      |> Enum.map(&String.trim_leading(&1, "#"))
+      |> Enum.filter(&(&1 != ""))
+
+    if Enum.empty?(search_terms) do
+      []
+    else
+      # Use PostgreSQL's ANY operator with array for efficient multi-term search
+      # This is much more efficient than multiple OR clauses
+      search_patterns = Enum.map(search_terms, &"%#{&1}%")
+
+      # Create ranking query that prioritizes exact matches and closer matches
+      # Use a subquery to properly handle computed columns in ORDER BY
+      base_query =
+        from(ht in Hashtag,
+          where: fragment("LOWER(?) LIKE ANY(?)", ht.name, ^search_patterns),
+          select: %{
+            name: ht.name,
+            # Ranking: exact matches get highest priority (0)
+            # then prefix matches (1), then contains (2)
+            match_rank:
+              fragment(
+                """
+                CASE
+                  WHEN LOWER(?) = ANY(?) THEN 0
+                  WHEN LOWER(?) LIKE ANY(?) THEN 1
+                  ELSE 2
+                END
+                """,
+                ht.name,
+                ^search_terms,
+                ht.name,
+                ^Enum.map(search_terms, &"#{&1}%")
+              ),
+            # Secondary sort by name length (shorter names first)
+            name_length: fragment("LENGTH(?)", ht.name)
+          }
+        )
+
+      from(result in subquery(base_query),
+        order_by: [
+          asc: result.match_rank,
+          asc: result.name_length,
+          asc: result.name
+        ],
+        limit: ^limit,
+        offset: ^offset
+      )
+      |> Repo.all()
+      |> Enum.map(& &1.name)
+    end
+  end
 end

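A short usage sketch for the new `Pleroma.Hashtag.search/2` (tag names and output are illustrative): leading `#` characters and blank terms are stripped, every remaining word becomes a `LIKE` pattern, and results are ordered by match rank, then name length, then name.

```elixir
# Exact matches rank first, then prefix matches, then substring matches.
Pleroma.Hashtag.search("#cat art", limit: 10, offset: 0)
#=> ["art", "cat", "catart", "pixelart"]  # illustrative result
```
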
@@ -105,20 +105,57 @@ defmodule Pleroma.HTTP do
   end

   defp adapter_middlewares(Tesla.Adapter.Gun, extra_middleware) do
-    [Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool] ++
+    default_middleware() ++
+      [Pleroma.Tesla.Middleware.ConnectionPool] ++
       extra_middleware
   end

-  defp adapter_middlewares({Tesla.Adapter.Finch, _}, extra_middleware) do
-    [Tesla.Middleware.FollowRedirects] ++ extra_middleware
-  end
-
   defp adapter_middlewares(_, extra_middleware) do
-    if Pleroma.Config.get(:env) == :test do
-      # Emulate redirects in test env, which are handled by adapters in other environments
-      [Tesla.Middleware.FollowRedirects]
-    else
-      extra_middleware
+    # A lot of tests are written expecting unencoded URLs
+    # and the burden of fixing that is high. Also it makes
+    # them hard to read. Tests will opt-in when we want to validate
+    # the encoding is being done correctly.
+    cond do
+      Pleroma.Config.get(:env) == :test and Pleroma.Config.get(:test_url_encoding) ->
+        default_middleware()
+
+      Pleroma.Config.get(:env) == :test ->
+        # Emulate redirects in test env, which are handled by adapters in other environments
+        [Tesla.Middleware.FollowRedirects]
+
+      # Hackney and Finch
+      true ->
+        default_middleware() ++ extra_middleware
     end
   end
+
+  defp default_middleware,
+    do: [Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.EncodeUrl]
+
+  def encode_url(url) when is_binary(url) do
+    URI.parse(url)
+    |> then(fn parsed ->
+      path = encode_path(parsed.path)
+      query = encode_query(parsed.query)
+
+      %{parsed | path: path, query: query}
+    end)
+    |> URI.to_string()
+  end
+
+  defp encode_path(nil), do: nil
+
+  defp encode_path(path) when is_binary(path) do
+    path
+    |> URI.decode()
+    |> URI.encode()
+  end
+
+  defp encode_query(nil), do: nil
+
+  defp encode_query(query) when is_binary(query) do
+    query
+    |> URI.decode_query()
+    |> URI.encode_query()
+  end
 end

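The decode-then-encode round trip in `encode_url/1` is what keeps already-encoded input stable; a standalone illustration using only the standard-library `URI` functions:

```elixir
path = "/emoji/blob%20cat.png"

# Encoding an already percent-encoded string escapes the "%" itself:
URI.encode(path)
#=> "/emoji/blob%2520cat.png"

# Decoding first makes the operation idempotent, which is what encode_url/1 relies on:
path |> URI.decode() |> URI.encode()
#=> "/emoji/blob%20cat.png"
```
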
@@ -15,25 +15,7 @@ defmodule Pleroma.Instances do
   defdelegate set_unreachable(url_or_host, unreachable_since \\ nil), to: Instance

-  defdelegate get_consistently_unreachable, to: Instance
-
-  def set_consistently_unreachable(url_or_host),
-    do: set_unreachable(url_or_host, reachability_datetime_threshold())
-
-  def reachability_datetime_threshold do
-    federation_reachability_timeout_days =
-      Pleroma.Config.get([:instance, :federation_reachability_timeout_days], 0)
-
-    if federation_reachability_timeout_days > 0 do
-      NaiveDateTime.add(
-        NaiveDateTime.utc_now(),
-        -federation_reachability_timeout_days * 24 * 3600,
-        :second
-      )
-    else
-      ~N[0000-01-01 00:00:00]
-    end
-  end
+  defdelegate get_unreachable, to: Instance

   def host(url_or_host) when is_binary(url_or_host) do
     if url_or_host =~ ~r/^http/i do
@@ -42,4 +24,21 @@ defmodule Pleroma.Instances do
       url_or_host
     end
   end
+
+  @doc "Schedules reachability checks for all unreachable instances"
+  def check_all_unreachable do
+    get_unreachable()
+    |> Enum.each(fn {domain, _} ->
+      Pleroma.Workers.ReachabilityWorker.new(%{"domain" => domain})
+      |> Oban.insert()
+    end)
+  end
+
+  @doc "Deletes all users and activities for unreachable instances"
+  def delete_all_unreachable do
+    get_unreachable()
+    |> Enum.each(fn {domain, _} ->
+      Instance.delete(domain)
+    end)
+  end
 end

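With the time-threshold logic removed, `get_unreachable/0` returns plain `{host, unreachable_since}` pairs and the two helpers above fan Oban jobs out over them; a brief sketch of a caller (output is illustrative):

```elixir
# List hosts currently marked unreachable and since when.
Pleroma.Instances.get_unreachable()
#=> [{"dead.example", ~N[2025-01-01 12:00:00]}]  # illustrative

# Enqueue one ReachabilityWorker job per unreachable host.
Pleroma.Instances.check_all_unreachable()
```
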
@@ -50,7 +50,7 @@ defmodule Pleroma.Instances.Instance do
     |> cast(params, [:software_name, :software_version, :software_repository])
   end

-  def filter_reachable([]), do: %{}
+  def filter_reachable([]), do: []

   def filter_reachable(urls_or_hosts) when is_list(urls_or_hosts) do
     hosts =
@@ -67,19 +67,15 @@ defmodule Pleroma.Instances.Instance do
       )
       |> Map.new(& &1)

-    reachability_datetime_threshold = Instances.reachability_datetime_threshold()
-
     for entry <- Enum.filter(urls_or_hosts, &is_binary/1) do
       host = host(entry)
       unreachable_since = unreachable_since_by_host[host]

-      if !unreachable_since ||
-           NaiveDateTime.compare(unreachable_since, reachability_datetime_threshold) == :gt do
-        {entry, unreachable_since}
+      if is_nil(unreachable_since) do
+        entry
       end
     end
     |> Enum.filter(& &1)
-    |> Map.new(& &1)
   end

   def reachable?(url_or_host) when is_binary(url_or_host) do
@@ -87,7 +83,7 @@ defmodule Pleroma.Instances.Instance do
       from(i in Instance,
         where:
           i.host == ^host(url_or_host) and
-            i.unreachable_since <= ^Instances.reachability_datetime_threshold(),
+            not is_nil(i.unreachable_since),
         select: true
       )
     )
@@ -96,9 +92,16 @@ defmodule Pleroma.Instances.Instance do
   def reachable?(url_or_host) when is_binary(url_or_host), do: true

   def set_reachable(url_or_host) when is_binary(url_or_host) do
-    %Instance{host: host(url_or_host)}
-    |> changeset(%{unreachable_since: nil})
-    |> Repo.insert(on_conflict: {:replace, [:unreachable_since]}, conflict_target: :host)
+    host = host(url_or_host)
+
+    result =
+      %Instance{host: host}
+      |> changeset(%{unreachable_since: nil})
+      |> Repo.insert(on_conflict: {:replace, [:unreachable_since]}, conflict_target: :host)
+
+    Pleroma.Workers.ReachabilityWorker.delete_jobs_for_host(host)
+
+    result
   end

   def set_reachable(_), do: {:error, nil}
@@ -131,11 +134,9 @@ defmodule Pleroma.Instances.Instance do
   def set_unreachable(_, _), do: {:error, nil}

-  def get_consistently_unreachable do
-    reachability_datetime_threshold = Instances.reachability_datetime_threshold()
-
+  def get_unreachable do
     from(i in Instance,
-      where: ^reachability_datetime_threshold > i.unreachable_since,
+      where: not is_nil(i.unreachable_since),
       order_by: i.unreachable_since,
       select: {i.host, i.unreachable_since}
     )
@@ -295,8 +296,14 @@ defmodule Pleroma.Instances.Instance do
   Deletes all users from an instance in a background task, thus also deleting
   all of those users' activities and notifications.
   """
-  def delete_users_and_activities(host) when is_binary(host) do
+  def delete(host) when is_binary(host) do
     DeleteWorker.new(%{"op" => "delete_instance", "host" => host})
     |> Oban.insert()
   end
+
+  @doc "Schedules reachability check for instance"
+  def check_unreachable(domain) when is_binary(domain) do
+    Pleroma.Workers.ReachabilityWorker.new(%{"domain" => domain})
+    |> Oban.insert()
+  end
 end

@@ -25,7 +25,7 @@ defmodule Pleroma.Language.Translation.Provider do
   @callback supported_languages(type :: :string | :target) ::
               {:ok, [String.t()]} | {:error, atom()}

-  @callback languages_matrix() :: {:ok, Map.t()} | {:error, atom()}
+  @callback languages_matrix() :: {:ok, map()} | {:error, atom()}

   @callback name() :: String.t()

@@ -4,7 +4,6 @@
 defmodule Pleroma.Object.Fetcher do
   alias Pleroma.HTTP
-  alias Pleroma.Instances
   alias Pleroma.Maps
   alias Pleroma.Object
   alias Pleroma.Object.Containment
@@ -19,8 +18,6 @@ defmodule Pleroma.Object.Fetcher do
   require Logger
   require Pleroma.Constants

-  @mix_env Mix.env()
-
   @spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()}
   defp reinject_object(%Object{data: %{}} = object, new_data) do
     Logger.debug("Reinjecting object #{new_data["id"]}")
@@ -152,10 +149,6 @@ defmodule Pleroma.Object.Fetcher do
          {:ok, body} <- get_object(id),
          {:ok, data} <- safe_json_decode(body),
          :ok <- Containment.contain_origin_from_id(id, data) do
-      if not Instances.reachable?(id) do
-        Instances.set_reachable(id)
-      end
-
       {:ok, data}
     else
       {:scheme, _} ->
@@ -178,13 +171,8 @@ defmodule Pleroma.Object.Fetcher do
   def fetch_and_contain_remote_object_from_id(_id),
     do: {:error, "id must be a string"}

-  defp check_crossdomain_redirect(final_host, original_url)
-
-  # Handle the common case in tests where responses don't include URLs
-  if @mix_env == :test do
-    defp check_crossdomain_redirect(nil, _) do
-      {:cross_domain_redirect, false}
-    end
+  defp check_crossdomain_redirect(final_host, _original_url) when is_nil(final_host) do
+    {:cross_domain_redirect, false}
   end

   defp check_crossdomain_redirect(final_host, original_url) do

@@ -158,6 +158,8 @@ defmodule Pleroma.ReverseProxy do
     Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
     method = method |> String.downcase() |> String.to_existing_atom()

+    url = maybe_encode_url(url)
+
     case client().request(method, url, headers, "", opts) do
       {:ok, code, headers, client} when code in @valid_resp_codes ->
         {:ok, code, downcase_headers(headers), client}
@@ -449,4 +451,18 @@ defmodule Pleroma.ReverseProxy do
       _ -> delete_resp_header(conn, "content-length")
     end
   end
+
+  # Only when Tesla adapter is Hackney or Finch does the URL
+  # need encoding before Reverse Proxying as both end up
+  # using the raw Hackney client and cannot leverage our
+  # EncodeUrl Tesla middleware
+  # Also do it for test environment
+  defp maybe_encode_url(url) do
+    case Application.get_env(:tesla, :adapter) do
+      Tesla.Adapter.Hackney -> Pleroma.HTTP.encode_url(url)
+      {Tesla.Adapter.Finch, _} -> Pleroma.HTTP.encode_url(url)
+      Tesla.Mock -> Pleroma.HTTP.encode_url(url)
+      _ -> url
+    end
+  end
 end

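`maybe_encode_url/1` above branches on whatever is configured as the Tesla adapter; Finch is configured as a `{module, opts}` tuple while Hackney is a bare module, hence the two match shapes. A minimal sketch of the same dispatch pattern with hypothetical values:

```elixir
defmodule AdapterDispatchSketch do
  # Accept either a bare adapter module or a {module, opts} tuple, mirroring
  # the case clauses in maybe_encode_url/1.
  def needs_manual_encoding?(Tesla.Adapter.Hackney), do: true
  def needs_manual_encoding?({Tesla.Adapter.Finch, _opts}), do: true
  def needs_manual_encoding?(_other), do: false
end

AdapterDispatchSketch.needs_manual_encoding?({Tesla.Adapter.Finch, name: MyFinch})
#=> true
```
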
@@ -56,10 +56,6 @@ defmodule Pleroma.SafeZip do
            {_, true} <- {:safe_path, safe_path?(path)} do
         {:cont, {:ok, maybe_add_file(type, path, fl)}}
       else
-        {:get_type, e} ->
-          {:halt,
-           {:error, "Couldn't determine file type of ZIP entry at #{path} (#{inspect(e)})"}}
-
         {:type, _} ->
           {:halt, {:error, "Potentially unsafe file type in ZIP at: #{path}"}}

@@ -157,26 +157,55 @@ defmodule Pleroma.Search.QdrantSearch do
 end

 defmodule Pleroma.Search.QdrantSearch.OpenAIClient do
-  use Tesla
   alias Pleroma.Config.Getting, as: Config

-  plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :openai_url]))
-  plug(Tesla.Middleware.JSON)
+  def post(path, body) do
+    Tesla.post(client(), path, body)
+  end

-  plug(Tesla.Middleware.Headers, [
-    {"Authorization",
-     "Bearer #{Pleroma.Config.get([Pleroma.Search.QdrantSearch, :openai_api_key])}"}
-  ])
+  defp client do
+    Tesla.client(middleware())
+  end
+
+  defp middleware do
+    [
+      {Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :openai_url])},
+      Tesla.Middleware.JSON,
+      {Tesla.Middleware.Headers,
+       [
+         {"Authorization", "Bearer #{Config.get([Pleroma.Search.QdrantSearch, :openai_api_key])}"}
+       ]}
+    ]
+  end
 end

 defmodule Pleroma.Search.QdrantSearch.QdrantClient do
-  use Tesla
   alias Pleroma.Config.Getting, as: Config

-  plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :qdrant_url]))
-  plug(Tesla.Middleware.JSON)
-
-  plug(Tesla.Middleware.Headers, [
-    {"api-key", Pleroma.Config.get([Pleroma.Search.QdrantSearch, :qdrant_api_key])}
-  ])
+  def delete(path) do
+    Tesla.delete(client(), path)
+  end
+
+  def post(path, body) do
+    Tesla.post(client(), path, body)
+  end
+
+  def put(path, body) do
+    Tesla.put(client(), path, body)
+  end
+
+  defp client do
+    Tesla.client(middleware())
+  end
+
+  defp middleware do
+    [
+      {Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :qdrant_url])},
+      Tesla.Middleware.JSON,
+      {Tesla.Middleware.Headers,
+       [
+         {"api-key", Pleroma.Config.get([Pleroma.Search.QdrantSearch, :qdrant_api_key])}
+       ]}
+    ]
+  end
 end

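The rewrite above trades the compile-time `plug(...)` pipeline from `use Tesla` for clients built at call time, so the base URL and API keys are read from config on every request rather than baked in at compile time. A hedged sketch of that pattern outside Pleroma (app name and config keys are made up):

```elixir
defmodule RuntimeTeslaClientSketch do
  # Build the client per call so configuration changes (for example a rotated
  # API key) take effect without recompiling this module.
  def get(path), do: Tesla.get(client(), path)

  defp client do
    Tesla.client([
      {Tesla.Middleware.BaseUrl, Application.get_env(:my_app, :base_url)},
      Tesla.Middleware.JSON,
      {Tesla.Middleware.Headers,
       [{"authorization", "Bearer " <> Application.get_env(:my_app, :api_key)}]}
    ])
  end
end
```
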
lib/pleroma/tesla/middleware/encode_url.ex (new file, 29 lines)
@@ -0,0 +1,29 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2025 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Tesla.Middleware.EncodeUrl do
+  @moduledoc """
+  Middleware to encode URLs properly
+
+  We must decode and then re-encode to ensure correct encoding.
+  If we only encode it will re-encode each % as %25 causing a space
+  already encoded as %20 to be %2520.
+
+  Similar problem for query parameters which need spaces to be the + character
+  """
+
+  @behaviour Tesla.Middleware
+
+  @impl Tesla.Middleware
+  def call(%Tesla.Env{url: url} = env, next, _) do
+    url = Pleroma.HTTP.encode_url(url)
+
+    env = %{env | url: url}
+
+    case Tesla.run(env, next) do
+      {:ok, env} -> {:ok, env}
+      err -> err
+    end
+  end
+end

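Because `Pleroma.Tesla.Middleware.EncodeUrl` implements the standard `Tesla.Middleware` behaviour, it can be slotted into any middleware list; a small usage sketch (the request URL is a placeholder):

```elixir
client =
  Tesla.client([
    Pleroma.Tesla.Middleware.EncodeUrl,
    Tesla.Middleware.FollowRedirects,
    Tesla.Middleware.JSON
  ])

# The space in the path is normalised to %20 before the request is sent.
Tesla.get(client, "https://example.com/files/blob cat.png")
```
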
@@ -53,7 +53,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
   )

   plug(:log_inbox_metadata when action in [:inbox])
-  plug(:set_requester_reachable when action in [:inbox])
   plug(:relay_active? when action in [:relay])

   defp relay_active?(conn, _) do
@@ -274,13 +273,37 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
   end

   def inbox(%{assigns: %{valid_signature: true}} = conn, %{"nickname" => nickname} = params) do
-    with %User{is_active: true} = recipient <- User.get_cached_by_nickname(nickname),
-         {:ok, %User{is_active: true} = actor} <- User.get_or_fetch_by_ap_id(params["actor"]),
+    with {:recipient_exists, %User{} = recipient} <-
+           {:recipient_exists, User.get_cached_by_nickname(nickname)},
+         {:sender_exists, {:ok, %User{} = actor}} <-
+           {:sender_exists, User.get_or_fetch_by_ap_id(params["actor"])},
+         {:recipient_active, true} <- {:recipient_active, recipient.is_active},
+         {:sender_active, true} <- {:sender_active, actor.is_active},
          true <- Utils.recipient_in_message(recipient, actor, params),
          params <- Utils.maybe_splice_recipient(recipient.ap_id, params) do
       Federator.incoming_ap_doc(params)
       json(conn, "ok")
     else
+      {:recipient_exists, _} ->
+        conn
+        |> put_status(:not_found)
+        |> json("User does not exist")
+
+      {:sender_exists, _} ->
+        conn
+        |> put_status(:not_found)
+        |> json("Sender does not exist")
+
+      {:recipient_active, _} ->
+        conn
+        |> put_status(:not_found)
+        |> json("User deactivated")
+
+      {:sender_active, _} ->
+        conn
+        |> put_status(:not_found)
+        |> json("Sender deactivated")
+
       _ ->
         conn
         |> put_status(:bad_request)
@@ -520,15 +543,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
     |> json(dgettext("errors", "error"))
   end

-  defp set_requester_reachable(%Plug.Conn{} = conn, _) do
-    with actor <- conn.params["actor"],
-         true <- is_binary(actor) do
-      Pleroma.Instances.set_reachable(actor)
-    end
-
-    conn
-  end
-
   defp log_inbox_metadata(%{params: %{"actor" => actor, "type" => type}} = conn, _) do
     Logger.metadata(actor: actor, type: type)
     conn

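The tagged `with` clauses above are what let the controller tell missing and deactivated users (now 404) apart from a generic 400; a hypothetical summary of the mapping, not code from the controller (the 400 body is elided in the diff):

```elixir
# Each tagged failure from the `with` chain maps to a status and message.
error_to_response = fn
  {:recipient_exists, _} -> {404, "User does not exist"}
  {:sender_exists, _} -> {404, "Sender does not exist"}
  {:recipient_active, _} -> {404, "User deactivated"}
  {:sender_active, _} -> {404, "Sender deactivated"}
  _other -> {400, :bad_request}
end

error_to_response.({:recipient_active, false})
#=> {404, "User deactivated"}
```
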
@@ -15,7 +15,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.RemoteReportPolicy do
     else
       {:local, true} -> {:ok, object}
       {:reject, message} -> {:reject, message}
-      error -> {:reject, error}
     end
   end

@@ -161,17 +161,9 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
            {"digest", p.digest}
          ]
        ) do
-      if not is_nil(p.unreachable_since) do
-        Instances.set_reachable(p.inbox)
-      end
-
       result
     else
       {_post_result, %{status: code} = response} = e ->
-        if is_nil(p.unreachable_since) do
-          Instances.set_unreachable(p.inbox)
-        end
-
         Logger.metadata(activity: p.activity_id, inbox: p.inbox, status: code)
         Logger.error("Publisher failed to inbox #{p.inbox} with status #{code}")
@@ -192,10 +184,6 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
         connection_pool_snooze()

       e ->
-        if is_nil(p.unreachable_since) do
-          Instances.set_unreachable(p.inbox)
-        end
-
         Logger.metadata(activity: p.activity_id, inbox: p.inbox)
         Logger.error("Publisher failed to inbox #{p.inbox} #{inspect(e)}")
         {:error, e}
@@ -307,7 +295,7 @@ defmodule Pleroma.Web.ActivityPub.Publisher do

     [priority_recipients, recipients] = recipients(actor, activity)

-    inboxes =
+    [priority_inboxes, other_inboxes] =
       [priority_recipients, recipients]
       |> Enum.map(fn recipients ->
         recipients
@@ -320,8 +308,8 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
       end)

     Repo.checkout(fn ->
-      Enum.each(inboxes, fn inboxes ->
-        Enum.each(inboxes, fn {inbox, unreachable_since} ->
+      Enum.each([priority_inboxes, other_inboxes], fn inboxes ->
+        Enum.each(inboxes, fn inbox ->
           %User{ap_id: ap_id} = Enum.find(recipients, fn actor -> actor.inbox == inbox end)

           # Get all the recipients on the same host and add them to cc. Otherwise, a remote
@@ -331,8 +319,7 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
           __MODULE__.enqueue_one(%{
             inbox: inbox,
             cc: cc,
-            activity_id: activity.id,
-            unreachable_since: unreachable_since
+            activity_id: activity.id
           })
         end)
       end)
@@ -365,12 +352,11 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
     |> Enum.each(fn {inboxes, priority} ->
       inboxes
       |> Instances.filter_reachable()
-      |> Enum.each(fn {inbox, unreachable_since} ->
+      |> Enum.each(fn inbox ->
         __MODULE__.enqueue_one(
           %{
             inbox: inbox,
-            activity_id: activity.id,
-            unreachable_since: unreachable_since
+            activity_id: activity.id
           },
           priority: priority
         )

@@ -49,7 +49,7 @@ defmodule Pleroma.Web.AdminAPI.InstanceController do
   end

   def delete(conn, %{"instance" => instance}) do
-    with {:ok, _job} <- Instance.delete_users_and_activities(instance) do
+    with {:ok, _job} <- Instance.delete(instance) do
       json(conn, instance)
     end
   end

@@ -158,6 +158,6 @@ defmodule Pleroma.Web.ApiSpec do
       }
     }
     # discover request/response schemas from path specs
-    |> OpenApiSpex.resolve_schema_modules()
+    |> then(&OpenApiSpex.resolve_schema_modules/1)
   end
 end

@@ -127,6 +127,20 @@ defmodule Pleroma.Web.ApiSpec.PleromaEmojiPackOperation do
     }
   end

+  def download_zip_operation do
+    %Operation{
+      tags: ["Emoji pack administration"],
+      summary: "Download a pack from a URL or an uploaded file",
+      operationId: "PleromaAPI.EmojiPackController.download_zip",
+      security: [%{"oAuth" => ["admin:write"]}],
+      requestBody: request_body("Parameters", download_zip_request(), required: true),
+      responses: %{
+        200 => ok_response(),
+        400 => Operation.response("Bad Request", "application/json", ApiError)
+      }
+    }
+  end
+
   defp download_request do
     %Schema{
       type: :object,
@@ -143,6 +157,25 @@ defmodule Pleroma.Web.ApiSpec.PleromaEmojiPackOperation do
     }
   end

+  defp download_zip_request do
+    %Schema{
+      type: :object,
+      required: [:name],
+      properties: %{
+        url: %Schema{
+          type: :string,
+          format: :uri,
+          description: "URL of the file"
+        },
+        file: %Schema{
+          description: "The uploaded ZIP file",
+          type: :object
+        },
+        name: %Schema{type: :string, format: :uri, description: "Pack Name"}
+      }
+    }
+  end
+
   def create_operation do
     %Operation{
       tags: ["Emoji pack administration"],

@@ -26,7 +26,11 @@ defmodule Pleroma.Web.ApiSpec.Scopes.Compiler do
   end

   def extract_all_scopes do
-    extract_all_scopes_from(Pleroma.Web.ApiSpec.spec())
+    try do
+      extract_all_scopes_from(Pleroma.Web.ApiSpec.spec())
+    catch
+      _, _ -> []
+    end
   end

   def extract_all_scopes_from(specs) do

@@ -5,6 +5,7 @@
 defmodule Pleroma.Web.MastodonAPI.SearchController do
   use Pleroma.Web, :controller

+  alias Pleroma.Hashtag
   alias Pleroma.Repo
   alias Pleroma.User
   alias Pleroma.Web.ControllerHelper
@@ -120,69 +121,14 @@ defmodule Pleroma.Web.MastodonAPI.SearchController do
   defp resource_search(:v2, "hashtags", query, options) do
     tags_path = Endpoint.url() <> "/tag/"

-    query
-    |> prepare_tags(options)
+    Hashtag.search(query, options)
     |> Enum.map(fn tag ->
       %{name: tag, url: tags_path <> tag}
     end)
   end

   defp resource_search(:v1, "hashtags", query, options) do
-    prepare_tags(query, options)
-  end
-
-  defp prepare_tags(query, options) do
-    tags =
-      query
-      |> preprocess_uri_query()
-      |> String.split(~r/[^#\w]+/u, trim: true)
-      |> Enum.uniq_by(&String.downcase/1)
-
-    explicit_tags = Enum.filter(tags, fn tag -> String.starts_with?(tag, "#") end)
-
-    tags =
-      if Enum.any?(explicit_tags) do
-        explicit_tags
-      else
-        tags
-      end
-
-    tags = Enum.map(tags, fn tag -> String.trim_leading(tag, "#") end)
-
-    tags =
-      if Enum.empty?(explicit_tags) && !options[:skip_joined_tag] do
-        add_joined_tag(tags)
-      else
-        tags
-      end
-
-    Pleroma.Pagination.paginate_list(tags, options)
-  end
-
-  defp add_joined_tag(tags) do
-    tags
-    |> Kernel.++([joined_tag(tags)])
-    |> Enum.uniq_by(&String.downcase/1)
-  end
-
-  # If `query` is a URI, returns last component of its path, otherwise returns `query`
-  defp preprocess_uri_query(query) do
-    if query =~ ~r/https?:\/\// do
-      query
-      |> String.trim_trailing("/")
-      |> URI.parse()
-      |> Map.get(:path)
-      |> String.split("/")
-      |> Enum.at(-1)
-    else
-      query
-    end
-  end
-
-  defp joined_tag(tags) do
-    tags
-    |> Enum.map(fn tag -> String.capitalize(tag) end)
-    |> Enum.join()
+    Hashtag.search(query, options)
   end

   defp with_fallback(f, fallback \\ []) do

@@ -584,6 +584,9 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do

       {:error, error} when error in [:unexpected_response, :quota_exceeded, :too_many_requests] ->
         render_error(conn, :service_unavailable, "Translation service not available")
+
+      _ ->
+        render_error(conn, :internal_server_error, "Translation failed")
     end
   end

@ -16,6 +16,7 @@ defmodule Pleroma.Web.PleromaAPI.EmojiPackController do
|
||||||
:import_from_filesystem,
|
:import_from_filesystem,
|
||||||
:remote,
|
:remote,
|
||||||
:download,
|
:download,
|
||||||
|
:download_zip,
|
||||||
:create,
|
:create,
|
||||||
:update,
|
:update,
|
||||||
:delete
|
:delete
|
||||||
|
|
@@ -113,6 +114,27 @@ defmodule Pleroma.Web.PleromaAPI.EmojiPackController do
     end
   end

+  def download_zip(
+        %{private: %{open_api_spex: %{body_params: params}}} = conn,
+        _
+      ) do
+    name = Map.get(params, :name)
+
+    with :ok <- Pack.download_zip(name, params) do
+      json(conn, "ok")
+    else
+      {:error, error} when is_binary(error) ->
+        conn
+        |> put_status(:bad_request)
+        |> json(%{error: error})
+
+      {:error, _} ->
+        conn
+        |> put_status(:bad_request)
+        |> json(%{error: "Could not process pack"})
+    end
+  end
+
   def download(
         %{private: %{open_api_spex: %{body_params: %{url: url, name: name} = params}}} = conn,
         _
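
The new download_zip/2 action reuses the with/else error shape of the existing download/2: a binary error message is passed through to the client, anything else collapses into a generic message. A minimal sketch of that shape, with an anonymous function standing in for Pack.download_zip/2 and an illustrative error message:

    respond = fn download ->
      with :ok <- download.() do
        {:ok, "ok"}
      else
        {:error, message} when is_binary(message) -> {:bad_request, %{error: message}}
        {:error, _} -> {:bad_request, %{error: "Could not process pack"}}
      end
    end

    respond.(fn -> {:error, "pack does not exist"} end)
    # => {:bad_request, %{error: "pack does not exist"}}
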
@@ -13,7 +13,7 @@ defmodule Pleroma.Web.PleromaAPI.InstancesController do

   def show(conn, _params) do
     unreachable =
-      Instances.get_consistently_unreachable()
+      Instances.get_unreachable()
       |> Map.new(fn {host, date} -> {host, to_string(date)} end)

     json(conn, %{"unreachable" => unreachable})
@@ -466,6 +466,7 @@ defmodule Pleroma.Web.Router do
       get("/import", EmojiPackController, :import_from_filesystem)
       get("/remote", EmojiPackController, :remote)
       post("/download", EmojiPackController, :download)
+      post("/download_zip", EmojiPackController, :download_zip)

       post("/files", EmojiFileController, :create)
       patch("/files", EmojiFileController, :update)
@@ -14,6 +14,7 @@ defmodule Pleroma.Workers.DeleteWorker do
   end

   def perform(%Job{args: %{"op" => "delete_instance", "host" => host}}) do
+    # Schedule the per-user deletion jobs
     Pleroma.Repo.transaction(fn ->
       User.Query.build(%{nickname: "@#{host}"})
       |> Pleroma.Repo.all()
@@ -22,6 +23,17 @@ defmodule Pleroma.Workers.DeleteWorker do
         |> __MODULE__.new()
         |> Oban.insert()
       end)
+
+      # Delete the instance from the Instances table
+      case Pleroma.Repo.get_by(Pleroma.Instances.Instance, host: host) do
+        nil -> :ok
+        instance -> Pleroma.Repo.delete(instance)
+      end
+
+      # Delete any pending ReachabilityWorker jobs for this domain
+      Pleroma.Workers.ReachabilityWorker.delete_jobs_for_host(host)
+
+      :ok
     end)
   end

@@ -4,9 +4,10 @@

 defmodule Pleroma.Workers.PublisherWorker do
   alias Pleroma.Activity
+  alias Pleroma.Instances
   alias Pleroma.Web.Federator

-  use Oban.Worker, queue: :federator_outgoing, max_attempts: 5
+  use Oban.Worker, queue: :federator_outgoing, max_attempts: 13

   @impl true
   def perform(%Job{args: %{"op" => "publish", "activity_id" => activity_id}}) do
@@ -14,9 +15,30 @@ defmodule Pleroma.Workers.PublisherWorker do
     Federator.perform(:publish, activity)
   end

-  def perform(%Job{args: %{"op" => "publish_one", "params" => params}}) do
+  def perform(%Job{args: %{"op" => "publish_one", "params" => params}} = job) do
     params = Map.new(params, fn {k, v} -> {String.to_atom(k), v} end)
-    Federator.perform(:publish_one, params)
+
+    # Cancel / skip the job if this server believed to be unreachable now
+    if not Instances.reachable?(params.inbox) do
+      {:cancel, :unreachable}
+    else
+      case Federator.perform(:publish_one, params) do
+        {:ok, _} ->
+          :ok
+
+        {:error, _} = error ->
+          # Only mark as unreachable on final failure
+          if job.attempt == job.max_attempts do
+            Instances.set_unreachable(params.inbox)
+          end
+
+          error
+
+        error ->
+          # Unexpected error, may have been client side
+          error
+      end
+    end
   end

   @impl true
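
With this change publish_one no longer retries blindly: an inbox already marked unreachable cancels the job up front ({:cancel, reason} is one of the return values Oban accepts to stop retrying), an {:error, _} result keeps retrying, and only the final attempt marks the inbox unreachable. One small runnable detail from the same hunk is the key atomization of the job params; with illustrative values:

    params = %{"inbox" => "https://remote.example/inbox", "activity_id" => "abc123"}

    Map.new(params, fn {k, v} -> {String.to_atom(k), v} end)
    # => %{activity_id: "abc123", inbox: "https://remote.example/inbox"}
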
116 lib/pleroma/workers/reachability_worker.ex Normal file
@@ -0,0 +1,116 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.ReachabilityWorker do
+  use Oban.Worker,
+    queue: :background,
+    max_attempts: 1,
+    unique: [period: :infinity, states: [:available, :scheduled], keys: [:domain]]
+
+  alias Pleroma.HTTP
+  alias Pleroma.Instances
+
+  import Ecto.Query
+
+  @impl true
+  def perform(%Oban.Job{args: %{"domain" => domain, "phase" => phase, "attempt" => attempt}}) do
+    case check_reachability(domain) do
+      :ok ->
+        Instances.set_reachable("https://#{domain}")
+        :ok
+
+      {:error, _} = error ->
+        handle_failed_attempt(domain, phase, attempt)
+        error
+    end
+  end
+
+  # New jobs enter here and are immediately re-scheduled for the first phase
+  @impl true
+  def perform(%Oban.Job{args: %{"domain" => domain}}) do
+    scheduled_at = DateTime.add(DateTime.utc_now(), 60, :second)
+
+    %{
+      "domain" => domain,
+      "phase" => "phase_1min",
+      "attempt" => 1
+    }
+    |> new(scheduled_at: scheduled_at, replace: true)
+    |> Oban.insert()
+
+    :ok
+  end
+
+  @impl true
+  def timeout(_job), do: :timer.seconds(5)
+
+  @doc "Deletes scheduled jobs to check reachability for specified instance"
+  def delete_jobs_for_host(host) do
+    Oban.Job
+    |> where(worker: "Pleroma.Workers.ReachabilityWorker")
+    |> where([j], j.args["domain"] == ^host)
+    |> Oban.delete_all_jobs()
+  end
+
+  defp check_reachability(domain) do
+    case HTTP.get("https://#{domain}/") do
+      {:ok, %{status: status}} when status in 200..299 ->
+        :ok
+
+      {:ok, %{status: _status}} ->
+        {:error, :unreachable}
+
+      {:error, _} = error ->
+        error
    end
+  end
+
+  defp handle_failed_attempt(_domain, "final", _attempt), do: :ok
+
+  defp handle_failed_attempt(domain, phase, attempt) do
+    {interval_minutes, max_attempts, next_phase} = get_phase_config(phase)
+
+    if attempt >= max_attempts do
+      # Move to next phase
+      schedule_next_phase(domain, next_phase)
+    else
+      # Retry same phase with incremented attempt
+      schedule_retry(domain, phase, attempt + 1, interval_minutes)
+    end
+  end
+
+  defp get_phase_config("phase_1min"), do: {1, 4, "phase_15min"}
+  defp get_phase_config("phase_15min"), do: {15, 4, "phase_1hour"}
+  defp get_phase_config("phase_1hour"), do: {60, 4, "phase_8hour"}
+  defp get_phase_config("phase_8hour"), do: {480, 4, "phase_24hour"}
+  defp get_phase_config("phase_24hour"), do: {1440, 4, "final"}
+  defp get_phase_config("final"), do: {nil, 0, nil}
+
+  defp schedule_next_phase(_domain, "final"), do: :ok
+
+  defp schedule_next_phase(domain, next_phase) do
+    {interval_minutes, _max_attempts, _next_phase} = get_phase_config(next_phase)
+    scheduled_at = DateTime.add(DateTime.utc_now(), interval_minutes * 60, :second)
+
+    %{
+      "domain" => domain,
+      "phase" => next_phase,
+      "attempt" => 1
+    }
+    |> new(scheduled_at: scheduled_at, replace: true)
+    |> Oban.insert()
+  end
+
+  def schedule_retry(domain, phase, attempt, interval_minutes) do
+    scheduled_at = DateTime.add(DateTime.utc_now(), interval_minutes * 60, :second)
+
+    %{
+      "domain" => domain,
+      "phase" => phase,
+      "attempt" => attempt
+    }
+    |> new(scheduled_at: scheduled_at, replace: true)
+    |> Oban.insert()
+  end
+end
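
Taken together, get_phase_config/1 and handle_failed_attempt/3 give four attempts per phase before escalating, so a domain that never answers is probed at roughly 1-, 15-, 60-, 480- and 1440-minute intervals before reaching the terminal "final" phase. A back-of-the-envelope check of that schedule:

    intervals_in_minutes = [1, 15, 60, 480, 1440]
    attempts_per_phase = 4

    Enum.sum(Enum.map(intervals_in_minutes, &(&1 * attempts_per_phase)))
    # => 7984 minutes, i.e. about 5.5 days of failed checks before the worker gives up
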
@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Workers.ReceiverWorker do
+  alias Pleroma.Instances
   alias Pleroma.Signature
   alias Pleroma.User
   alias Pleroma.Web.Federator
@@ -37,6 +38,11 @@ defmodule Pleroma.Workers.ReceiverWorker do
          {:ok, _public_key} <- Signature.refetch_public_key(conn_data),
          {:signature, true} <- {:signature, Signature.validate_signature(conn_data)},
          {:ok, res} <- Federator.perform(:incoming_ap_doc, params) do
+      unless Instances.reachable?(params["actor"]) do
+        domain = URI.parse(params["actor"]).host
+        Oban.insert(Pleroma.Workers.ReachabilityWorker.new(%{"domain" => domain}))
+      end
+
       {:ok, res}
     else
       e -> process_errors(e)
@@ -45,6 +51,11 @@ defmodule Pleroma.Workers.ReceiverWorker do

   def perform(%Job{args: %{"op" => "incoming_ap_doc", "params" => params}}) do
     with {:ok, res} <- Federator.perform(:incoming_ap_doc, params) do
+      unless Instances.reachable?(params["actor"]) do
+        domain = URI.parse(params["actor"]).host
+        Oban.insert(Pleroma.Workers.ReachabilityWorker.new(%{"domain" => domain}))
+      end
+
       {:ok, res}
     else
       e -> process_errors(e)
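
Both perform/1 clauses now enqueue a reachability probe keyed on the sending actor's domain whenever a delivery succeeds from a host currently marked unreachable; URI.parse/1 is what reduces the actor URL to that domain. With an illustrative actor URL:

    URI.parse("https://friends.example/users/alice").host
    # => "friends.example"
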
@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Workers.RemoteFetcherWorker do
+  alias Pleroma.Instances
   alias Pleroma.Object.Fetcher

   use Oban.Worker, queue: :background, unique: [period: :infinity]
@@ -11,6 +12,11 @@ defmodule Pleroma.Workers.RemoteFetcherWorker do
   def perform(%Job{args: %{"op" => "fetch_remote", "id" => id} = args}) do
     case Fetcher.fetch_object_from_id(id, depth: args["depth"]) do
       {:ok, _object} ->
+        unless Instances.reachable?(id) do
+          # Mark the server as reachable since we successfully fetched an object
+          Instances.set_reachable(id)
+        end
+
         :ok

       {:allowed_depth, false} ->
12 mix.exs
@@ -128,15 +128,15 @@ defmodule Pleroma.Mixfile do
       {:phoenix_ecto, "~> 4.4"},
       {:ecto_sql, "~> 3.10"},
       {:ecto_enum, "~> 1.4"},
-      {:postgrex, ">= 0.0.0"},
+      {:postgrex, ">= 0.20.0"},
       {:phoenix_html, "~> 3.3"},
       {:phoenix_live_view, "~> 0.19.0"},
       {:phoenix_live_dashboard, "~> 0.8.0"},
       {:telemetry_metrics, "~> 0.6"},
       {:telemetry_poller, "~> 1.0"},
       {:tzdata, "~> 1.0.3"},
-      {:plug_cowboy, "~> 2.5"},
+      {:plug_cowboy, "~> 2.7"},
-      {:oban, "~> 2.18.0"},
+      {:oban, "~> 2.19.0"},
       {:gettext, "~> 0.20"},
       {:bcrypt_elixir, "~> 2.2"},
       {:trailing_format_plug, "~> 0.0.7"},
@@ -146,8 +146,8 @@ defmodule Pleroma.Mixfile do
       {:cachex, "~> 3.2"},
       {:tesla, "~> 1.11"},
       {:castore, "~> 1.0"},
-      {:cowlib, "~> 2.9", override: true},
+      {:cowlib, "~> 2.15"},
-      {:gun, "~> 2.0.0-rc.1", override: true},
+      {:gun, "~> 2.2"},
       {:finch, "~> 0.15"},
       {:jason, "~> 1.2"},
       {:mogrify, "~> 0.9.0", override: "true"},
@@ -188,7 +188,7 @@ defmodule Pleroma.Mixfile do
       {:restarter, path: "./restarter"},
       {:majic, "~> 1.0"},
       {:open_api_spex, "~> 3.16"},
-      {:ecto_psql_extras, "~> 0.6"},
+      {:ecto_psql_extras, "~> 0.8"},
       {:vix, "~> 0.26.0"},
       {:elixir_make, "~> 0.7.7", override: true},
       {:blurhash, "~> 0.1.0", hex: :rinpatch_blurhash},
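
For the bumped requirements above, the usual Mix semantics apply: "~> 2.19.0" allows only patch releases of 2.19, while ">= 0.20.0" accepts anything at or above that version. Version.match?/2 can be used to sanity-check a requirement against a concrete version:

    Version.match?("2.19.4", "~> 2.19.0")
    # => true
    Version.match?("2.20.0", "~> 2.19.0")
    # => false
    Version.match?("0.20.0", ">= 0.20.0")
    # => true
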
40 mix.lock
@@ -11,7 +11,7 @@
"cachex": {:hex, :cachex, "3.6.0", "14a1bfbeee060dd9bec25a5b6f4e4691e3670ebda28c8ba2884b12fe30b36bf8", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "ebf24e373883bc8e0c8d894a63bbe102ae13d918f790121f5cfe6e485cc8e2e2"},
"calendar": {:hex, :calendar, "1.0.0", "f52073a708528482ec33d0a171954ca610fe2bd28f1e871f247dc7f1565fa807", [:mix], [{:tzdata, "~> 0.5.20 or ~> 0.1.201603 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "990e9581920c82912a5ee50e62ff5ef96da6b15949a2ee4734f935fdef0f0a6f"},
"captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "e7b7cc34cc16b383461b966484c297e4ec9aeef6", [ref: "e7b7cc34cc16b383461b966484c297e4ec9aeef6"]},
-"castore": {:hex, :castore, "1.0.8", "dedcf20ea746694647f883590b82d9e96014057aff1d44d03ec90f36a5c0dc6e", [:mix], [], "hexpm", "0b2b66d2ee742cb1d9cb8c8be3b43c3a70ee8651f37b75a8b982e036752983f1"},
+"castore": {:hex, :castore, "1.0.14", "4582dd7d630b48cf5e1ca8d3d42494db51e406b7ba704e81fbd401866366896a", [:mix], [], "hexpm", "7bc1b65249d31701393edaaac18ec8398d8974d52c647b7904d01b964137b9f4"},
"cc_precompiler": {:hex, :cc_precompiler, "0.1.9", "e8d3364f310da6ce6463c3dd20cf90ae7bbecbf6c5203b98bf9b48035592649b", [:mix], [{:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "9dcab3d0f3038621f1601f13539e7a9ee99843862e66ad62827b0c42b2f58a54"},
"certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"},
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
@@ -20,23 +20,23 @@
"connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"},
"cors_plug": {:hex, :cors_plug, "2.0.3", "316f806d10316e6d10f09473f19052d20ba0a0ce2a1d910ddf57d663dac402ae", [:mix], [{:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "ee4ae1418e6ce117fc42c2ba3e6cbdca4e95ecd2fe59a05ec6884ca16d469aea"},
"covertool": {:hex, :covertool, "2.0.6", "4a291b4e3449025b0595d8f44c8d7635d4f48f033be2ce88d22a329f36f94a91", [:rebar3], [], "hexpm", "5db3fcd82180d8ea4ad857d4d1ab21a8d31b5aee0d60d2f6c0f9e25a411d1e21"},
-"cowboy": {:hex, :cowboy, "2.12.0", "f276d521a1ff88b2b9b4c54d0e753da6c66dd7be6c9fca3d9418b561828a3731", [:make, :rebar3], [{:cowlib, "2.13.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "8a7abe6d183372ceb21caa2709bec928ab2b72e18a3911aa1771639bef82651e"},
+"cowboy": {:hex, :cowboy, "2.13.0", "09d770dd5f6a22cc60c071f432cd7cb87776164527f205c5a6b0f24ff6b38990", [:make, :rebar3], [{:cowlib, ">= 2.14.0 and < 3.0.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, ">= 1.8.0 and < 3.0.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "e724d3a70995025d654c1992c7b11dbfea95205c047d86ff9bf1cda92ddc5614"},
"cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"},
-"cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"},
+"cowlib": {:hex, :cowlib, "2.15.0", "3c97a318a933962d1c12b96ab7c1d728267d2c523c25a5b57b0f93392b6e9e25", [:make, :rebar3], [], "hexpm", "4f00c879a64b4fe7c8fcb42a4281925e9ffdb928820b03c3ad325a617e857532"},
"credo": {:hex, :credo, "1.7.12", "9e3c20463de4b5f3f23721527fcaf16722ec815e70ff6c60b86412c695d426c1", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8493d45c656c5427d9c729235b99d498bd133421f3e0a683e5c1b561471291e5"},
"crontab": {:hex, :crontab, "1.1.8", "2ce0e74777dfcadb28a1debbea707e58b879e6aa0ffbf9c9bb540887bce43617", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
"custom_base": {:hex, :custom_base, "0.2.1", "4a832a42ea0552299d81652aa0b1f775d462175293e99dfbe4d7dbaab785a706", [:mix], [], "hexpm", "8df019facc5ec9603e94f7270f1ac73ddf339f56ade76a721eaa57c1493ba463"},
-"db_connection": {:hex, :db_connection, "2.7.0", "b99faa9291bb09892c7da373bb82cba59aefa9b36300f6145c5f201c7adf48ec", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dcf08f31b2701f857dfc787fbad78223d61a32204f217f15e881dd93e4bdd3ff"},
+"db_connection": {:hex, :db_connection, "2.8.0", "64fd82cfa6d8e25ec6660cea73e92a4cbc6a18b31343910427b702838c4b33b2", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "008399dae5eee1bf5caa6e86d204dcb44242c82b1ed5e22c881f2c34da201b15"},
-"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
+"decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"},
"deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"},
"dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"},
"earmark": {:hex, :earmark, "1.4.46", "8c7287bd3137e99d26ae4643e5b7ef2129a260e3dcf41f251750cb4563c8fb81", [:mix], [], "hexpm", "798d86db3d79964e759ddc0c077d5eb254968ed426399fbf5a62de2b5ff8910a"},
"earmark_parser": {:hex, :earmark_parser, "1.4.39", "424642f8335b05bb9eb611aa1564c148a8ee35c9c8a8bba6e129d51a3e3c6769", [:mix], [], "hexpm", "06553a88d1f1846da9ef066b87b57c6f605552cfbe40d20bd8d59cc6bde41944"},
"eblurhash": {:git, "https://github.com/zotonic/eblurhash.git", "bc37ceb426ef021ee9927fb249bb93f7059194ab", [ref: "bc37ceb426ef021ee9927fb249bb93f7059194ab"]},
-"ecto": {:hex, :ecto, "3.11.2", "e1d26be989db350a633667c5cda9c3d115ae779b66da567c68c80cfb26a8c9ee", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3c38bca2c6f8d8023f2145326cc8a80100c3ffe4dcbd9842ff867f7fc6156c65"},
+"ecto": {:hex, :ecto, "3.13.2", "7d0c0863f3fc8d71d17fc3ad3b9424beae13f02712ad84191a826c7169484f01", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "669d9291370513ff56e7b7e7081b7af3283d02e046cf3d403053c557894a0b3e"},
"ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"},
-"ecto_psql_extras": {:hex, :ecto_psql_extras, "0.7.15", "0fc29dbae0e444a29bd6abeee4cf3c4c037e692a272478a234a1cc765077dbb1", [:mix], [{:ecto_sql, "~> 3.7", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1 or ~> 4.0.0", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "b6127f3a5c6fc3d84895e4768cc7c199f22b48b67d6c99b13fbf4a374e73f039"},
+"ecto_psql_extras": {:hex, :ecto_psql_extras, "0.8.8", "aa02529c97f69aed5722899f5dc6360128735a92dd169f23c5d50b1f7fdede08", [:mix], [{:ecto_sql, "~> 3.7", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "> 0.16.0", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1 or ~> 4.0", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "04c63d92b141723ad6fed2e60a4b461ca00b3594d16df47bbc48f1f4534f2c49"},
-"ecto_sql": {:hex, :ecto_sql, "3.11.3", "4eb7348ff8101fbc4e6bbc5a4404a24fecbe73a3372d16569526b0cf34ebc195", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "e5f36e3d736b99c7fee3e631333b8394ade4bafe9d96d35669fca2d81c2be928"},
+"ecto_sql": {:hex, :ecto_sql, "3.13.2", "a07d2461d84107b3d037097c822ffdd36ed69d1cf7c0f70e12a3d1decf04e2e1", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.13.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "539274ab0ecf1a0078a6a72ef3465629e4d6018a3028095dc90f60a19c371717"},
"eimp": {:hex, :eimp, "1.0.14", "fc297f0c7e2700457a95a60c7010a5f1dcb768a083b6d53f49cd94ab95a28f22", [:rebar3], [{:p1_utils, "1.0.18", [hex: :p1_utils, repo: "hexpm", optional: false]}], "hexpm", "501133f3112079b92d9e22da8b88bf4f0e13d4d67ae9c15c42c30bd25ceb83b6"},
"elixir_make": {:hex, :elixir_make, "0.7.8", "505026f266552ee5aabca0b9f9c229cbb496c689537c9f922f3eb5431157efc7", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.0", [hex: :certifi, repo: "hexpm", optional: true]}], "hexpm", "7a71945b913d37ea89b06966e1342c85cfe549b15e6d6d081e8081c493062c07"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
@@ -58,7 +58,7 @@
"floki": {:hex, :floki, "0.35.2", "87f8c75ed8654b9635b311774308b2760b47e9a579dabf2e4d5f1e1d42c39e0b", [:mix], [], "hexpm", "6b05289a8e9eac475f644f09c2e4ba7e19201fd002b89c28c1293e7bd16773d9"},
"gen_smtp": {:hex, :gen_smtp, "0.15.0", "9f51960c17769b26833b50df0b96123605a8024738b62db747fece14eb2fbfcc", [:rebar3], [], "hexpm", "29bd14a88030980849c7ed2447b8db6d6c9278a28b11a44cafe41b791205440f"},
"gettext": {:hex, :gettext, "0.24.0", "6f4d90ac5f3111673cbefc4ebee96fe5f37a114861ab8c7b7d5b30a1108ce6d8", [:mix], [{:expo, "~> 0.5.1", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "bdf75cdfcbe9e4622dd18e034b227d77dd17f0f133853a1c73b97b3d6c770e8b"},
-"gun": {:hex, :gun, "2.0.1", "160a9a5394800fcba41bc7e6d421295cf9a7894c2252c0678244948e3336ad73", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}], "hexpm", "a10bc8d6096b9502205022334f719cc9a08d9adcfbfc0dbee9ef31b56274a20b"},
+"gun": {:hex, :gun, "2.2.0", "b8f6b7d417e277d4c2b0dc3c07dfdf892447b087f1cc1caff9c0f556b884e33d", [:make, :rebar3], [{:cowlib, ">= 2.15.0 and < 3.0.0", [hex: :cowlib, repo: "hexpm", optional: false]}], "hexpm", "76022700c64287feb4df93a1795cff6741b83fb37415c40c34c38d2a4645261a"},
"hackney": {:hex, :hackney, "1.18.2", "d7ff544ddae5e1cb49e9cf7fa4e356d7f41b283989a1c304bfc47a8cc1cf966f", [:rebar3], [{:certifi, "~>2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "af94d5c9f97857db257090a4a10e5426ecb6f4918aa5cc666798566ae14b65fd"},
"hpax": {:hex, :hpax, "0.2.0", "5a58219adcb75977b2edce5eb22051de9362f08236220c9e859a47111c194ff5", [:mix], [], "hexpm", "bea06558cdae85bed075e6c036993d43cd54d447f76d8190a8db0dc5893fa2f1"},
"html_entities": {:hex, :html_entities, "0.5.2", "9e47e70598da7de2a9ff6af8758399251db6dbb7eebe2b013f2bbd2515895c3c", [:mix], [], "hexpm", "c53ba390403485615623b9531e97696f076ed415e8d8058b1dbaa28181f4fdcc"},
@@ -80,8 +80,8 @@
"meck": {:hex, :meck, "0.9.2", "85ccbab053f1db86c7ca240e9fc718170ee5bda03810a6292b5306bf31bae5f5", [:rebar3], [], "hexpm", "81344f561357dc40a8344afa53767c32669153355b626ea9fcbc8da6b3045826"},
"metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
"mime": {:hex, :mime, "1.6.0", "dabde576a497cef4bbdd60aceee8160e02a6c89250d6c0b29e56c0dfb00db3d2", [:mix], [], "hexpm", "31a1a8613f8321143dde1dafc36006a17d28d02bdfecb9e95a880fa7aabd19a7"},
-"mimerl": {:hex, :mimerl, "1.3.0", "d0cd9fc04b9061f82490f6581e0128379830e78535e017f7780f37fea7545726", [:rebar3], [], "hexpm", "a1e15a50d1887217de95f0b9b0793e32853f7c258a5cd227650889b38839fe9d"},
+"mimerl": {:hex, :mimerl, "1.4.0", "3882a5ca67fbbe7117ba8947f27643557adec38fa2307490c4c4207624cb213b", [:rebar3], [], "hexpm", "13af15f9f68c65884ecca3a3891d50a7b57d82152792f3e19d88650aa126b144"},
-"mint": {:hex, :mint, "1.6.1", "065e8a5bc9bbd46a41099dfea3e0656436c5cbcb6e741c80bd2bad5cd872446f", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "4fc518dcc191d02f433393a72a7ba3f6f94b101d094cb6bf532ea54c89423780"},
+"mint": {:hex, :mint, "1.7.1", "113fdb2b2f3b59e47c7955971854641c61f378549d73e829e1768de90fc1abf1", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "fceba0a4d0f24301ddee3024ae116df1c3f4bb7a563a731f45fdfeb9d39a231b"},
"mochiweb": {:hex, :mochiweb, "2.18.0", "eb55f1db3e6e960fac4e6db4e2db9ec3602cc9f30b86cd1481d56545c3145d2e", [:rebar3], [], "hexpm"},
"mock": {:hex, :mock, "0.3.8", "7046a306b71db2488ef54395eeb74df0a7f335a7caca4a3d3875d1fc81c884dd", [:mix], [{:meck, "~> 0.9.2", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm", "7fa82364c97617d79bb7d15571193fc0c4fe5afd0c932cef09426b3ee6fe2022"},
"mogrify": {:hex, :mogrify, "0.9.3", "238c782f00271dace01369ad35ae2e9dd020feee3443b9299ea5ea6bed559841", [:mix], [], "hexpm", "0189b1e1de27455f2b9ae8cf88239cefd23d38de9276eb5add7159aea51731e6"},
@@ -92,7 +92,7 @@
"nimble_parsec": {:hex, :nimble_parsec, "0.6.0", "32111b3bf39137144abd7ba1cce0914533b2d16ef35e8abc5ec8be6122944263", [:mix], [], "hexpm", "27eac315a94909d4dc68bc07a4a83e06c8379237c5ea528a9acff4ca1c873c52"},
"nimble_pool": {:hex, :nimble_pool, "0.2.6", "91f2f4c357da4c4a0a548286c84a3a28004f68f05609b4534526871a22053cde", [:mix], [], "hexpm", "1c715055095d3f2705c4e236c18b618420a35490da94149ff8b580a2144f653f"},
"nodex": {:git, "https://git.pleroma.social/pleroma/nodex", "cb6730f943cfc6aad674c92161be23a8411f15d1", [ref: "cb6730f943cfc6aad674c92161be23a8411f15d1"]},
-"oban": {:hex, :oban, "2.18.3", "1608c04f8856c108555c379f2f56bc0759149d35fa9d3b825cb8a6769f8ae926", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "36ca6ca84ef6518f9c2c759ea88efd438a3c81d667ba23b02b062a0aa785475e"},
+"oban": {:hex, :oban, "2.19.4", "045adb10db1161dceb75c254782f97cdc6596e7044af456a59decb6d06da73c1", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:igniter, "~> 0.5", [hex: :igniter, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "5fcc6219e6464525b808d97add17896e724131f498444a292071bf8991c99f97"},
"oban_live_dashboard": {:hex, :oban_live_dashboard, "0.1.1", "8aa4ceaf381c818f7d5c8185cc59942b8ac82ef0cf559881aacf8d3f8ac7bdd3", [:mix], [{:oban, "~> 2.15", [hex: :oban, repo: "hexpm", optional: false]}, {:phoenix_live_dashboard, "~> 0.7", [hex: :phoenix_live_dashboard, repo: "hexpm", optional: false]}], "hexpm", "16dc4ce9c9a95aa2e655e35ed4e675652994a8def61731a18af85e230e1caa63"},
"octo_fetch": {:hex, :octo_fetch, "0.4.0", "074b5ecbc08be10b05b27e9db08bc20a3060142769436242702931c418695b19", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "cf8be6f40cd519d7000bb4e84adcf661c32e59369ca2827c4e20042eda7a7fc6"},
"open_api_spex": {:hex, :open_api_spex, "3.18.2", "8c855e83bfe8bf81603d919d6e892541eafece3720f34d1700b58024dadde247", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "aa3e6dcfc0ad6a02596b2172662da21c9dd848dac145ea9e603f54e3d81b8d2b"},
@@ -108,13 +108,13 @@
"phoenix_swoosh": {:hex, :phoenix_swoosh, "1.2.1", "b74ccaa8046fbc388a62134360ee7d9742d5a8ae74063f34eb050279de7a99e1", [:mix], [{:finch, "~> 0.8", [hex: :finch, repo: "hexpm", optional: true]}, {:hackney, "~> 1.10", [hex: :hackney, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_view, "~> 1.0 or ~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: false]}, {:swoosh, "~> 1.5", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm", "4000eeba3f9d7d1a6bf56d2bd56733d5cadf41a7f0d8ffe5bb67e7d667e204a2"},
"phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"},
"phoenix_view": {:hex, :phoenix_view, "2.0.4", "b45c9d9cf15b3a1af5fb555c674b525391b6a1fe975f040fb4d913397b31abf4", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "4e992022ce14f31fe57335db27a28154afcc94e9983266835bb3040243eb620b"},
-"plug": {:hex, :plug, "1.16.1", "40c74619c12f82736d2214557dedec2e9762029b2438d6d175c5074c933edc9d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a13ff6b9006b03d7e33874945b2755253841b238c34071ed85b0e86057f8cddc"},
+"plug": {:hex, :plug, "1.18.1", "5067f26f7745b7e31bc3368bc1a2b818b9779faa959b49c934c17730efc911cf", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "57a57db70df2b422b564437d2d33cf8d33cd16339c1edb190cd11b1a3a546cc2"},
-"plug_cowboy": {:hex, :plug_cowboy, "2.7.1", "87677ffe3b765bc96a89be7960f81703223fe2e21efa42c125fcd0127dd9d6b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "02dbd5f9ab571b864ae39418db7811618506256f6d13b4a45037e5fe78dc5de3"},
+"plug_cowboy": {:hex, :plug_cowboy, "2.7.4", "729c752d17cf364e2b8da5bdb34fb5804f56251e88bb602aff48ae0bd8673d11", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "9b85632bd7012615bae0a5d70084deb1b25d2bcbb32cab82d1e9a1e023168aa3"},
-"plug_crypto": {:hex, :plug_crypto, "2.1.0", "f44309c2b06d249c27c8d3f65cfe08158ade08418cf540fd4f72d4d6863abb7b", [:mix], [], "hexpm", "131216a4b030b8f8ce0f26038bc4421ae60e4bb95c5cf5395e1421437824c4fa"},
+"plug_crypto": {:hex, :plug_crypto, "2.1.1", "19bda8184399cb24afa10be734f84a16ea0a2bc65054e23a62bb10f06bc89491", [:mix], [], "hexpm", "6470bce6ffe41c8bd497612ffde1a7e4af67f36a15eea5f921af71cf3e11247c"},
"plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "79fd4fcf34d110605c26560cbae8f23c603ec4158c08298bd4360fdea90bb5cf"},
"poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm", "fec8660eb7733ee4117b85f55799fd3833eb769a6df71ccf8903e8dc5447cfce"},
"poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"},
-"postgrex": {:hex, :postgrex, "0.17.5", "0483d054938a8dc069b21bdd636bf56c487404c241ce6c319c1f43588246b281", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "50b8b11afbb2c4095a3ba675b4f055c416d0f3d7de6633a595fc131a828a67eb"},
+"postgrex": {:hex, :postgrex, "0.20.0", "363ed03ab4757f6bc47942eff7720640795eb557e1935951c1626f0d303a3aed", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "d36ef8b36f323d29505314f704e21a1a038e2dc387c6409ee0cd24144e187c0f"},
"pot": {:hex, :pot, "1.0.2", "13abb849139fdc04ab8154986abbcb63bdee5de6ed2ba7e1713527e33df923dd", [:rebar3], [], "hexpm", "78fe127f5a4f5f919d6ea5a2a671827bd53eb9d37e5b4128c0ad3df99856c2e0"},
"prom_ex": {:hex, :prom_ex, "1.9.0", "63e6dda6c05cdeec1f26c48443dcc38ffd2118b3665ae8d2bd0e5b79f2aea03e", [:mix], [{:absinthe, ">= 1.6.0", [hex: :absinthe, repo: "hexpm", optional: true]}, {:broadway, ">= 1.0.2", [hex: :broadway, repo: "hexpm", optional: true]}, {:ecto, ">= 3.5.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:finch, "~> 0.15", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: false]}, {:oban, ">= 2.4.0", [hex: :oban, repo: "hexpm", optional: true]}, {:octo_fetch, "~> 0.3", [hex: :octo_fetch, repo: "hexpm", optional: false]}, {:phoenix, ">= 1.5.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_live_view, ">= 0.14.0", [hex: :phoenix_live_view, repo: "hexpm", optional: true]}, {:plug, ">= 1.12.1", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, "~> 2.5 or ~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: false]}, {:telemetry, ">= 1.0.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}, {:telemetry_metrics_prometheus_core, "~> 1.0", [hex: :telemetry_metrics_prometheus_core, repo: "hexpm", optional: false]}, {:telemetry_poller, "~> 1.0", [hex: :telemetry_poller, repo: "hexpm", optional: false]}], "hexpm", "01f3d4f69ec93068219e686cc65e58a29c42bea5429a8ff4e2121f19db178ee6"},
"prometheus": {:hex, :prometheus, "4.10.0", "792adbf0130ff61b5fa8826f013772af24b6e57b984445c8d602c8a0355704a1", [:mix, :rebar3], [{:quantile_estimator, "~> 0.2.1", [hex: :quantile_estimator, repo: "hexpm", optional: false]}], "hexpm", "2a99bb6dce85e238c7236fde6b0064f9834dc420ddbd962aac4ea2a3c3d59384"},
@@ -124,7 +124,7 @@
"prometheus_phx": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/prometheus-phx.git", "9cd8f248c9381ffedc799905050abce194a97514", [branch: "no-logging"]},
"prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm", "0273a6483ccb936d79ca19b0ab629aef0dba958697c94782bb728b920dfc6a79"},
|
"prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm", "0273a6483ccb936d79ca19b0ab629aef0dba958697c94782bb728b920dfc6a79"},
|
||||||
"quantile_estimator": {:hex, :quantile_estimator, "0.2.1", "ef50a361f11b5f26b5f16d0696e46a9e4661756492c981f7b2229ef42ff1cd15", [:rebar3], [], "hexpm", "282a8a323ca2a845c9e6f787d166348f776c1d4a41ede63046d72d422e3da946"},
|
"quantile_estimator": {:hex, :quantile_estimator, "0.2.1", "ef50a361f11b5f26b5f16d0696e46a9e4661756492c981f7b2229ef42ff1cd15", [:rebar3], [], "hexpm", "282a8a323ca2a845c9e6f787d166348f776c1d4a41ede63046d72d422e3da946"},
|
||||||
"ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"},
|
"ranch": {:hex, :ranch, "2.2.0", "25528f82bc8d7c6152c57666ca99ec716510fe0925cb188172f41ce93117b1b0", [:make, :rebar3], [], "hexpm", "fa0b99a1780c80218a4197a59ea8d3bdae32fbff7e88527d7d8a4787eff4f8e7"},
|
||||||
"recon": {:hex, :recon, "2.5.4", "05dd52a119ee4059fa9daa1ab7ce81bc7a8161a2f12e9d42e9d551ffd2ba901c", [:mix, :rebar3], [], "hexpm", "e9ab01ac7fc8572e41eb59385efeb3fb0ff5bf02103816535bacaedf327d0263"},
|
"recon": {:hex, :recon, "2.5.4", "05dd52a119ee4059fa9daa1ab7ce81bc7a8161a2f12e9d42e9d551ffd2ba901c", [:mix, :rebar3], [], "hexpm", "e9ab01ac7fc8572e41eb59385efeb3fb0ff5bf02103816535bacaedf327d0263"},
|
||||||
"remote_ip": {:git, "https://git.pleroma.social/pleroma/remote_ip.git", "b647d0deecaa3acb140854fe4bda5b7e1dc6d1c8", [ref: "b647d0deecaa3acb140854fe4bda5b7e1dc6d1c8"]},
|
"remote_ip": {:git, "https://git.pleroma.social/pleroma/remote_ip.git", "b647d0deecaa3acb140854fe4bda5b7e1dc6d1c8", [ref: "b647d0deecaa3acb140854fe4bda5b7e1dc6d1c8"]},
|
||||||
"rustler": {:hex, :rustler, "0.30.0", "cefc49922132b072853fa9b0ca4dc2ffcb452f68fb73b779042b02d545e097fb", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:toml, "~> 0.6", [hex: :toml, repo: "hexpm", optional: false]}], "hexpm", "9ef1abb6a7dda35c47cfc649e6a5a61663af6cf842a55814a554a84607dee389"},
|
"rustler": {:hex, :rustler, "0.30.0", "cefc49922132b072853fa9b0ca4dc2ffcb452f68fb73b779042b02d545e097fb", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:toml, "~> 0.6", [hex: :toml, repo: "hexpm", optional: false]}], "hexpm", "9ef1abb6a7dda35c47cfc649e6a5a61663af6cf842a55814a554a84607dee389"},
|
||||||
|
|
@ -134,19 +134,19 @@
|
||||||
"sweet_xml": {:hex, :sweet_xml, "0.7.4", "a8b7e1ce7ecd775c7e8a65d501bc2cd933bff3a9c41ab763f5105688ef485d08", [:mix], [], "hexpm", "e7c4b0bdbf460c928234951def54fe87edf1a170f6896675443279e2dbeba167"},
|
"sweet_xml": {:hex, :sweet_xml, "0.7.4", "a8b7e1ce7ecd775c7e8a65d501bc2cd933bff3a9c41ab763f5105688ef485d08", [:mix], [], "hexpm", "e7c4b0bdbf460c928234951def54fe87edf1a170f6896675443279e2dbeba167"},
|
||||||
"swoosh": {:hex, :swoosh, "1.16.9", "20c6a32ea49136a4c19f538e27739bb5070558c0fa76b8a95f4d5d5ca7d319a1", [:mix], [{:bandit, ">= 1.0.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:cowboy, "~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:ex_aws, "~> 2.1", [hex: :ex_aws, repo: "hexpm", optional: true]}, {:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13 or ~> 1.0", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mua, "~> 0.2.0", [hex: :mua, repo: "hexpm", optional: true]}, {:multipart, "~> 0.4", [hex: :multipart, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:req, "~> 0.5 or ~> 1.0", [hex: :req, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "878b1a7a6c10ebbf725a3349363f48f79c5e3d792eb621643b0d276a38acc0a6"},
|
"swoosh": {:hex, :swoosh, "1.16.9", "20c6a32ea49136a4c19f538e27739bb5070558c0fa76b8a95f4d5d5ca7d319a1", [:mix], [{:bandit, ">= 1.0.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:cowboy, "~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:ex_aws, "~> 2.1", [hex: :ex_aws, repo: "hexpm", optional: true]}, {:finch, "~> 0.6", [hex: :finch, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13 or ~> 1.0", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mua, "~> 0.2.0", [hex: :mua, repo: "hexpm", optional: true]}, {:multipart, "~> 0.4", [hex: :multipart, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:req, "~> 0.5 or ~> 1.0", [hex: :req, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "878b1a7a6c10ebbf725a3349363f48f79c5e3d792eb621643b0d276a38acc0a6"},
|
||||||
"syslog": {:hex, :syslog, "1.1.0", "6419a232bea84f07b56dc575225007ffe34d9fdc91abe6f1b2f254fd71d8efc2", [:rebar3], [], "hexpm", "4c6a41373c7e20587be33ef841d3de6f3beba08519809329ecc4d27b15b659e1"},
|
"syslog": {:hex, :syslog, "1.1.0", "6419a232bea84f07b56dc575225007ffe34d9fdc91abe6f1b2f254fd71d8efc2", [:rebar3], [], "hexpm", "4c6a41373c7e20587be33ef841d3de6f3beba08519809329ecc4d27b15b659e1"},
|
||||||
"table_rex": {:hex, :table_rex, "4.0.0", "3c613a68ebdc6d4d1e731bc973c233500974ec3993c99fcdabb210407b90959b", [:mix], [], "hexpm", "c35c4d5612ca49ebb0344ea10387da4d2afe278387d4019e4d8111e815df8f55"},
|
"table_rex": {:hex, :table_rex, "4.1.0", "fbaa8b1ce154c9772012bf445bfb86b587430fb96f3b12022d3f35ee4a68c918", [:mix], [], "hexpm", "95932701df195d43bc2d1c6531178fc8338aa8f38c80f098504d529c43bc2601"},
|
||||||
"telemetry": {:hex, :telemetry, "1.0.0", "0f453a102cdf13d506b7c0ab158324c337c41f1cc7548f0bc0e130bbf0ae9452", [:rebar3], [], "hexpm", "73bc09fa59b4a0284efb4624335583c528e07ec9ae76aca96ea0673850aec57a"},
|
"telemetry": {:hex, :telemetry, "1.0.0", "0f453a102cdf13d506b7c0ab158324c337c41f1cc7548f0bc0e130bbf0ae9452", [:rebar3], [], "hexpm", "73bc09fa59b4a0284efb4624335583c528e07ec9ae76aca96ea0673850aec57a"},
|
||||||
"telemetry_metrics": {:hex, :telemetry_metrics, "0.6.2", "2caabe9344ec17eafe5403304771c3539f3b6e2f7fb6a6f602558c825d0d0bfb", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "9b43db0dc33863930b9ef9d27137e78974756f5f198cae18409970ed6fa5b561"},
|
"telemetry_metrics": {:hex, :telemetry_metrics, "0.6.2", "2caabe9344ec17eafe5403304771c3539f3b6e2f7fb6a6f602558c825d0d0bfb", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "9b43db0dc33863930b9ef9d27137e78974756f5f198cae18409970ed6fa5b561"},
|
||||||
"telemetry_metrics_prometheus_core": {:hex, :telemetry_metrics_prometheus_core, "1.2.0", "b583c3f18508f5c5561b674d16cf5d9afd2ea3c04505b7d92baaeac93c1b8260", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "9cba950e1c4733468efbe3f821841f34ac05d28e7af7798622f88ecdbbe63ea3"},
|
"telemetry_metrics_prometheus_core": {:hex, :telemetry_metrics_prometheus_core, "1.2.0", "b583c3f18508f5c5561b674d16cf5d9afd2ea3c04505b7d92baaeac93c1b8260", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "9cba950e1c4733468efbe3f821841f34ac05d28e7af7798622f88ecdbbe63ea3"},
|
||||||
"telemetry_poller": {:hex, :telemetry_poller, "1.0.0", "db91bb424e07f2bb6e73926fcafbfcbcb295f0193e0a00e825e589a0a47e8453", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "b3a24eafd66c3f42da30fc3ca7dda1e9d546c12250a2d60d7b81d264fbec4f6e"},
|
"telemetry_poller": {:hex, :telemetry_poller, "1.0.0", "db91bb424e07f2bb6e73926fcafbfcbcb295f0193e0a00e825e589a0a47e8453", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "b3a24eafd66c3f42da30fc3ca7dda1e9d546c12250a2d60d7b81d264fbec4f6e"},
|
||||||
"tesla": {:hex, :tesla, "1.11.0", "81b2b10213dddb27105ec6102d9eb0cc93d7097a918a0b1594f2dfd1a4601190", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, ">= 1.0.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.2", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "b83ab5d4c2d202e1ea2b7e17a49f788d49a699513d7c4f08f2aef2c281be69db"},
|
"tesla": {:hex, :tesla, "1.15.3", "3a2b5c37f09629b8dcf5d028fbafc9143c0099753559d7fe567eaabfbd9b8663", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.13", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, ">= 1.0.0", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.21", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "4.4.2", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:mox, "~> 1.0", [hex: :mox, repo: "hexpm", optional: true]}, {:msgpax, "~> 2.3", [hex: :msgpax, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "98bb3d4558abc67b92fb7be4cd31bb57ca8d80792de26870d362974b58caeda7"},
|
||||||
"thousand_island": {:hex, :thousand_island, "1.3.5", "6022b6338f1635b3d32406ff98d68b843ba73b3aa95cfc27154223244f3a6ca5", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "2be6954916fdfe4756af3239fb6b6d75d0b8063b5df03ba76fd8a4c87849e180"},
|
"thousand_island": {:hex, :thousand_island, "1.3.5", "6022b6338f1635b3d32406ff98d68b843ba73b3aa95cfc27154223244f3a6ca5", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "2be6954916fdfe4756af3239fb6b6d75d0b8063b5df03ba76fd8a4c87849e180"},
|
||||||
"timex": {:hex, :timex, "3.7.7", "3ed093cae596a410759104d878ad7b38e78b7c2151c6190340835515d4a46b8a", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "0ec4b09f25fe311321f9fc04144a7e3affe48eb29481d7a5583849b6c4dfa0a7"},
|
"timex": {:hex, :timex, "3.7.7", "3ed093cae596a410759104d878ad7b38e78b7c2151c6190340835515d4a46b8a", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "0ec4b09f25fe311321f9fc04144a7e3affe48eb29481d7a5583849b6c4dfa0a7"},
|
||||||
"toml": {:hex, :toml, "0.7.0", "fbcd773caa937d0c7a02c301a1feea25612720ac3fa1ccb8bfd9d30d822911de", [:mix], [], "hexpm", "0690246a2478c1defd100b0c9b89b4ea280a22be9a7b313a8a058a2408a2fa70"},
|
"toml": {:hex, :toml, "0.7.0", "fbcd773caa937d0c7a02c301a1feea25612720ac3fa1ccb8bfd9d30d822911de", [:mix], [], "hexpm", "0690246a2478c1defd100b0c9b89b4ea280a22be9a7b313a8a058a2408a2fa70"},
|
||||||
"trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bd4fde4c15f3e993a999e019d64347489b91b7a9096af68b2bdadd192afa693f"},
|
"trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bd4fde4c15f3e993a999e019d64347489b91b7a9096af68b2bdadd192afa693f"},
|
||||||
"tzdata": {:hex, :tzdata, "1.0.5", "69f1ee029a49afa04ad77801febaf69385f3d3e3d1e4b56b9469025677b89a28", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "55519aa2a99e5d2095c1e61cc74c9be69688f8ab75c27da724eb8279ff402a5a"},
|
"tzdata": {:hex, :tzdata, "1.0.5", "69f1ee029a49afa04ad77801febaf69385f3d3e3d1e4b56b9469025677b89a28", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "55519aa2a99e5d2095c1e61cc74c9be69688f8ab75c27da724eb8279ff402a5a"},
|
||||||
"ueberauth": {:hex, :ueberauth, "0.10.7", "5a31cbe11e7ce5c7484d745dc9e1f11948e89662f8510d03c616de03df581ebd", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "0bccf73e2ffd6337971340832947ba232877aa8122dba4c95be9f729c8987377"},
|
"ueberauth": {:hex, :ueberauth, "0.10.7", "5a31cbe11e7ce5c7484d745dc9e1f11948e89662f8510d03c616de03df581ebd", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "0bccf73e2ffd6337971340832947ba232877aa8122dba4c95be9f729c8987377"},
|
||||||
"unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
|
"unicode_util_compat": {:hex, :unicode_util_compat, "0.7.1", "a48703a25c170eedadca83b11e88985af08d35f37c6f664d6dcfb106a97782fc", [:rebar3], [], "hexpm", "b3a917854ce3ae233619744ad1e0102e05673136776fb2fa76234f3e03b23642"},
|
||||||
"unsafe": {:hex, :unsafe, "1.0.2", "23c6be12f6c1605364801f4b47007c0c159497d0446ad378b5cf05f1855c0581", [:mix], [], "hexpm", "b485231683c3ab01a9cd44cb4a79f152c6f3bb87358439c6f68791b85c2df675"},
|
"unsafe": {:hex, :unsafe, "1.0.2", "23c6be12f6c1605364801f4b47007c0c159497d0446ad378b5cf05f1855c0581", [:mix], [], "hexpm", "b485231683c3ab01a9cd44cb4a79f152c6f3bb87358439c6f68791b85c2df675"},
|
||||||
"vix": {:hex, :vix, "0.26.0", "027f10b6969b759318be84bd0bd8c88af877445e4e41cf96a0460392cea5399c", [:make, :mix], [{:castore, "~> 1.0 or ~> 0.1", [hex: :castore, repo: "hexpm", optional: false]}, {:cc_precompiler, "~> 0.2 or ~> 0.1.4", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.8 or ~> 0.7.3", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:kino, "~> 0.7", [hex: :kino, repo: "hexpm", optional: true]}], "hexpm", "71b0a79ae7f199cacfc8e679b0e4ba25ee47dc02e182c5b9097efb29fbe14efd"},
|
"vix": {:hex, :vix, "0.26.0", "027f10b6969b759318be84bd0bd8c88af877445e4e41cf96a0460392cea5399c", [:make, :mix], [{:castore, "~> 1.0 or ~> 0.1", [hex: :castore, repo: "hexpm", optional: false]}, {:cc_precompiler, "~> 0.2 or ~> 0.1.4", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.8 or ~> 0.7.3", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:kino, "~> 0.7", [hex: :kino, repo: "hexpm", optional: true]}], "hexpm", "71b0a79ae7f199cacfc8e679b0e4ba25ee47dc02e182c5b9097efb29fbe14efd"},
|
||||||
"web_push_encryption": {:hex, :web_push_encryption, "0.3.1", "76d0e7375142dfee67391e7690e89f92578889cbcf2879377900b5620ee4708d", [:mix], [{:httpoison, "~> 1.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jose, "~> 1.11.1", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "4f82b2e57622fb9337559058e8797cb0df7e7c9790793bdc4e40bc895f70e2a2"},
|
"web_push_encryption": {:hex, :web_push_encryption, "0.3.1", "76d0e7375142dfee67391e7690e89f92578889cbcf2879377900b5620ee4708d", [:mix], [{:httpoison, "~> 1.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jose, "~> 1.11.1", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "4f82b2e57622fb9337559058e8797cb0df7e7c9790793bdc4e40bc895f70e2a2"},
|
||||||
|
|
|
||||||
|
|
@ -42,9 +42,10 @@ defmodule Mix.Tasks.Pleroma.AppTest do
|
||||||
|
|
||||||
test "with errors" do
|
test "with errors" do
|
||||||
Mix.Tasks.Pleroma.App.run(["create"])
|
Mix.Tasks.Pleroma.App.run(["create"])
|
||||||
{:mix_shell, :error, ["Creating failed:"]}
|
|
||||||
{:mix_shell, :error, ["name: can't be blank"]}
|
assert_receive {:mix_shell, :error, ["Creating failed:"]}
|
||||||
{:mix_shell, :error, ["redirect_uris: can't be blank"]}
|
assert_receive {:mix_shell, :error, ["name: can't be blank"]}
|
||||||
|
assert_receive {:mix_shell, :error, ["redirect_uris: can't be blank"]}
|
||||||
end
|
end
|
||||||
|
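The assert_receive calls above work because Mix task tests run with the process shell: once the shell is set to Mix.Shell.Process (typically in the test case setup), every Mix.shell().error/1 call made by the task is delivered to the test process as a {:mix_shell, :error, [message]} tuple. A minimal, illustrative sketch of that mechanism, not part of this changeset:

    Mix.shell(Mix.Shell.Process)
    Mix.shell().error("Creating failed:")
    assert_receive {:mix_shell, :error, ["Creating failed:"]}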
|
||||||
defp assert_app(name, redirect, scopes) do
|
defp assert_app(name, redirect, scopes) do
|
||||||
|
|
|
||||||
|
|
@ -13,6 +13,9 @@ defmodule Pleroma.Emoji.PackTest do
|
||||||
)
|
)
|
||||||
|
|
||||||
setup do
|
setup do
|
||||||
|
# Reload emoji to ensure a clean state
|
||||||
|
Emoji.reload()
|
||||||
|
|
||||||
pack_path = Path.join(@emoji_path, "dump_pack")
|
pack_path = Path.join(@emoji_path, "dump_pack")
|
||||||
File.mkdir(pack_path)
|
File.mkdir(pack_path)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -14,4 +14,133 @@ defmodule Pleroma.HashtagTest do
|
||||||
assert {:name, {"can't be blank", [validation: :required]}} in changeset.errors
|
assert {:name, {"can't be blank", [validation: :required]}} in changeset.errors
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
describe "search_hashtags" do
|
||||||
|
test "searches hashtags by partial match" do
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("car")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("racecar")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("nascar")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("bicycle")
|
||||||
|
|
||||||
|
results = Hashtag.search("car")
|
||||||
|
assert "car" in results
|
||||||
|
assert "racecar" in results
|
||||||
|
assert "nascar" in results
|
||||||
|
refute "bicycle" in results
|
||||||
|
|
||||||
|
results = Hashtag.search("race")
|
||||||
|
assert "racecar" in results
|
||||||
|
refute "car" in results
|
||||||
|
refute "nascar" in results
|
||||||
|
refute "bicycle" in results
|
||||||
|
|
||||||
|
results = Hashtag.search("nonexistent")
|
||||||
|
assert results == []
|
||||||
|
end
|
||||||
|
|
||||||
|
test "searches hashtags by multiple words in query" do
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("computer")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("laptop")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("desktop")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("phone")
|
||||||
|
|
||||||
|
# Search for "new computer" - should return "computer"
|
||||||
|
results = Hashtag.search("new computer")
|
||||||
|
assert "computer" in results
|
||||||
|
refute "laptop" in results
|
||||||
|
refute "desktop" in results
|
||||||
|
refute "phone" in results
|
||||||
|
|
||||||
|
# Search for "computer laptop" - should return both
|
||||||
|
results = Hashtag.search("computer laptop")
|
||||||
|
assert "computer" in results
|
||||||
|
assert "laptop" in results
|
||||||
|
refute "desktop" in results
|
||||||
|
refute "phone" in results
|
||||||
|
|
||||||
|
# Search for "new phone" - should return "phone"
|
||||||
|
results = Hashtag.search("new phone")
|
||||||
|
assert "phone" in results
|
||||||
|
refute "computer" in results
|
||||||
|
refute "laptop" in results
|
||||||
|
refute "desktop" in results
|
||||||
|
end
|
||||||
|
|
||||||
|
test "supports pagination" do
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("alpha")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("beta")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("gamma")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("delta")
|
||||||
|
|
||||||
|
results = Hashtag.search("a", limit: 2)
|
||||||
|
assert length(results) == 2
|
||||||
|
|
||||||
|
results = Hashtag.search("a", limit: 2, offset: 1)
|
||||||
|
assert length(results) == 2
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles matching many search terms" do
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("computer")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("laptop")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("phone")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("tablet")
|
||||||
|
|
||||||
|
results = Hashtag.search("new fast computer laptop phone tablet device")
|
||||||
|
assert "computer" in results
|
||||||
|
assert "laptop" in results
|
||||||
|
assert "phone" in results
|
||||||
|
assert "tablet" in results
|
||||||
|
end
|
||||||
|
|
||||||
|
test "ranks results by match quality" do
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("my_computer")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("computer_science")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("computer")
|
||||||
|
|
||||||
|
results = Hashtag.search("computer")
|
||||||
|
|
||||||
|
# Exact match first
|
||||||
|
assert Enum.at(results, 0) == "computer"
|
||||||
|
|
||||||
|
# Prefix match would be next
|
||||||
|
assert Enum.at(results, 1) == "computer_science"
|
||||||
|
|
||||||
|
# worst match is last
|
||||||
|
assert Enum.at(results, 2) == "my_computer"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "prioritizes shorter names when ranking is equal" do
|
||||||
|
# Create hashtags with same ranking but different lengths
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("car")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("racecar")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("nascar")
|
||||||
|
|
||||||
|
# Search for "car" - shorter names should come first
|
||||||
|
results = Hashtag.search("car")
|
||||||
|
# Shortest exact match first
|
||||||
|
assert Enum.at(results, 0) == "car"
|
||||||
|
assert "racecar" in results
|
||||||
|
assert "nascar" in results
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles hashtag symbols in search query" do
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("computer")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("laptop")
|
||||||
|
{:ok, _} = Hashtag.get_or_create_by_name("phone")
|
||||||
|
|
||||||
|
results_with_hash = Hashtag.search("#computer #laptop")
|
||||||
|
results_without_hash = Hashtag.search("computer laptop")
|
||||||
|
|
||||||
|
assert results_with_hash == results_without_hash
|
||||||
|
|
||||||
|
results_mixed = Hashtag.search("#computer laptop #phone")
|
||||||
|
assert "computer" in results_mixed
|
||||||
|
assert "laptop" in results_mixed
|
||||||
|
assert "phone" in results_mixed
|
||||||
|
|
||||||
|
results_only_hash = Hashtag.search("#computer")
|
||||||
|
results_no_hash = Hashtag.search("computer")
|
||||||
|
assert results_only_hash == results_no_hash
|
||||||
|
end
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
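The tests above pin down the behaviour of Hashtag.search/2 (tokenised partial matching, "#" stripping, ranking, pagination) without showing its implementation. As a rough Ecto-based sketch only, with the query details and default limit being assumptions rather than the code added by this commit, something along these lines would satisfy them:

    def search(query, opts \\ []) do
      terms =
        query
        |> String.split(~r/\s+/, trim: true)
        |> Enum.map(&String.trim_leading(&1, "#"))
        |> Enum.reject(&(&1 == ""))

      substrings = Enum.map(terms, &"%#{&1}%")
      prefixes = Enum.map(terms, &"#{&1}%")

      from(h in Hashtag,
        where: fragment("? ILIKE ANY(?)", h.name, ^substrings),
        # exact matches first, then prefix matches, then shorter names
        order_by: [
          desc: fragment("? = ANY(?)", h.name, ^terms),
          desc: fragment("? ILIKE ANY(?)", h.name, ^prefixes),
          asc: fragment("length(?)", h.name)
        ],
        limit: ^Keyword.get(opts, :limit, 40),
        offset: ^Keyword.get(opts, :offset, 0),
        select: h.name
      )
      |> Repo.all()
    end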
||||||
|
|
@ -25,6 +25,9 @@ defmodule Pleroma.HTTPTest do
|
||||||
|
|
||||||
%{method: :post, url: "http://example.com/world"} ->
|
%{method: :post, url: "http://example.com/world"} ->
|
||||||
%Tesla.Env{status: 200, body: "world"}
|
%Tesla.Env{status: 200, body: "world"}
|
||||||
|
|
||||||
|
%{method: :get, url: "https://example.com/emoji/Pack%201/koronebless.png?foo=bar+baz"} ->
|
||||||
|
%Tesla.Env{status: 200, body: "emoji data"}
|
||||||
end)
|
end)
|
||||||
|
|
||||||
:ok
|
:ok
|
||||||
|
|
@ -67,4 +70,20 @@ defmodule Pleroma.HTTPTest do
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
test "URL encoding properly encodes URLs with spaces" do
|
||||||
|
clear_config(:test_url_encoding, true)
|
||||||
|
|
||||||
|
url_with_space = "https://example.com/emoji/Pack 1/koronebless.png?foo=bar baz"
|
||||||
|
|
||||||
|
{:ok, result} = HTTP.get(url_with_space)
|
||||||
|
|
||||||
|
assert result.status == 200
|
||||||
|
|
||||||
|
properly_encoded_url = "https://example.com/emoji/Pack%201/koronebless.png?foo=bar+baz"
|
||||||
|
|
||||||
|
{:ok, result} = HTTP.get(properly_encoded_url)
|
||||||
|
|
||||||
|
assert result.status == 200
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
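Both assertions hinge on the HTTP layer re-encoding a URL only when it is not already valid. One way to do that, shown here purely as a sketch (the helper name and the space heuristic are assumptions, not necessarily what this changeset does):

    defp maybe_encode_url(url) do
      if String.contains?(url, " ") do
        uri = URI.parse(url)

        %{
          uri
          | path: uri.path && URI.encode(uri.path),
            query: uri.query && uri.query |> URI.decode_query() |> URI.encode_query()
        }
        |> URI.to_string()
      else
        # already usable as-is: pass it through untouched
        url
      end
    end

For "https://example.com/emoji/Pack 1/koronebless.png?foo=bar baz" this yields the %20/+ form asserted above, while the already-encoded URL passes through unchanged.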
||||||
|
|
@ -3,7 +3,6 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Instances.InstanceTest do
|
defmodule Pleroma.Instances.InstanceTest do
|
||||||
alias Pleroma.Instances
|
|
||||||
alias Pleroma.Instances.Instance
|
alias Pleroma.Instances.Instance
|
||||||
alias Pleroma.Repo
|
alias Pleroma.Repo
|
||||||
|
|
||||||
|
|
@ -13,8 +12,6 @@ defmodule Pleroma.Instances.InstanceTest do
|
||||||
import ExUnit.CaptureLog
|
import ExUnit.CaptureLog
|
||||||
import Pleroma.Factory
|
import Pleroma.Factory
|
||||||
|
|
||||||
setup_all do: clear_config([:instance, :federation_reachability_timeout_days], 1)
|
|
||||||
|
|
||||||
describe "set_reachable/1" do
|
describe "set_reachable/1" do
|
||||||
test "clears `unreachable_since` of existing matching Instance record having non-nil `unreachable_since`" do
|
test "clears `unreachable_since` of existing matching Instance record having non-nil `unreachable_since`" do
|
||||||
unreachable_since = NaiveDateTime.to_iso8601(NaiveDateTime.utc_now())
|
unreachable_since = NaiveDateTime.to_iso8601(NaiveDateTime.utc_now())
|
||||||
|
|
@ -30,6 +27,32 @@ defmodule Pleroma.Instances.InstanceTest do
|
||||||
assert {:ok, instance} = Instance.set_reachable(instance.host)
|
assert {:ok, instance} = Instance.set_reachable(instance.host)
|
||||||
refute instance.unreachable_since
|
refute instance.unreachable_since
|
||||||
end
|
end
|
||||||
|
|
||||||
|
test "cancels all ReachabilityWorker jobs for the domain" do
|
||||||
|
domain = "cancelme.example.org"
|
||||||
|
insert(:instance, host: domain, unreachable_since: NaiveDateTime.utc_now())
|
||||||
|
|
||||||
|
# Insert a ReachabilityWorker job for this domain, scheduled 5 minutes in the future
|
||||||
|
scheduled_at = DateTime.add(DateTime.utc_now(), 300, :second)
|
||||||
|
|
||||||
|
{:ok, job} =
|
||||||
|
Pleroma.Workers.ReachabilityWorker.new(
|
||||||
|
%{"domain" => domain, "phase" => "phase_1min", "attempt" => 1},
|
||||||
|
scheduled_at: scheduled_at
|
||||||
|
)
|
||||||
|
|> Oban.insert()
|
||||||
|
|
||||||
|
# Ensure the job is present
|
||||||
|
job = Pleroma.Repo.get(Oban.Job, job.id)
|
||||||
|
assert job
|
||||||
|
|
||||||
|
# Call set_reachable, which should delete the job
|
||||||
|
assert {:ok, _} = Instance.set_reachable(domain)
|
||||||
|
|
||||||
|
# Reload the job and assert it is deleted
|
||||||
|
job = Pleroma.Repo.get(Oban.Job, job.id)
|
||||||
|
refute job
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
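Besides clearing unreachable_since, the test above expects Instance.set_reachable/1 to drop any pending ReachabilityWorker jobs for the host. A sketch of that cleanup step, with the query shape being an assumption:

    import Ecto.Query

    defp delete_reachability_jobs(host) do
      Oban.Job
      |> where([j], j.worker == "Pleroma.Workers.ReachabilityWorker")
      |> where([j], fragment("?->>'domain' = ?", j.args, ^host))
      |> Repo.delete_all()
    end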
||||||
describe "set_unreachable/1" do
|
describe "set_unreachable/1" do
|
||||||
|
|
@ -144,7 +167,11 @@ defmodule Pleroma.Instances.InstanceTest do
|
||||||
end
|
end
|
||||||
|
|
||||||
test "Doesn't scrapes unreachable instances" do
|
test "Doesn't scrapes unreachable instances" do
|
||||||
instance = insert(:instance, unreachable_since: Instances.reachability_datetime_threshold())
|
instance =
|
||||||
|
insert(:instance,
|
||||||
|
unreachable_since: NaiveDateTime.utc_now() |> NaiveDateTime.add(-:timer.hours(24))
|
||||||
|
)
|
||||||
|
|
||||||
url = "https://" <> instance.host
|
url = "https://" <> instance.host
|
||||||
|
|
||||||
assert capture_log(fn -> assert nil == Instance.get_or_update_favicon(URI.parse(url)) end) =~
|
assert capture_log(fn -> assert nil == Instance.get_or_update_favicon(URI.parse(url)) end) =~
|
||||||
|
|
@ -212,14 +239,44 @@ defmodule Pleroma.Instances.InstanceTest do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
test "delete_users_and_activities/1 schedules a job to delete the instance and users" do
|
test "delete/1 schedules a job to delete the instance and users" do
|
||||||
insert(:user, nickname: "mario@mushroom.kingdom", name: "Mario")
|
insert(:user, nickname: "mario@mushroom.kingdom", name: "Mario")
|
||||||
|
|
||||||
{:ok, _job} = Instance.delete_users_and_activities("mushroom.kingdom")
|
{:ok, _job} = Instance.delete("mushroom.kingdom")
|
||||||
|
|
||||||
assert_enqueued(
|
assert_enqueued(
|
||||||
worker: Pleroma.Workers.DeleteWorker,
|
worker: Pleroma.Workers.DeleteWorker,
|
||||||
args: %{"op" => "delete_instance", "host" => "mushroom.kingdom"}
|
args: %{"op" => "delete_instance", "host" => "mushroom.kingdom"}
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
describe "check_unreachable/1" do
|
||||||
|
test "schedules a ReachabilityWorker job for the given domain" do
|
||||||
|
domain = "test.example.com"
|
||||||
|
|
||||||
|
# Call check_unreachable
|
||||||
|
assert {:ok, _job} = Instance.check_unreachable(domain)
|
||||||
|
|
||||||
|
# Verify that a ReachabilityWorker job was scheduled
|
||||||
|
jobs = all_enqueued(worker: Pleroma.Workers.ReachabilityWorker)
|
||||||
|
assert length(jobs) == 1
|
||||||
|
[job] = jobs
|
||||||
|
assert job.args["domain"] == domain
|
||||||
|
end
|
||||||
|
|
||||||
|
test "handles multiple calls for the same domain (uniqueness enforced)" do
|
||||||
|
domain = "duplicate.example.com"
|
||||||
|
|
||||||
|
assert {:ok, _job1} = Instance.check_unreachable(domain)
|
||||||
|
|
||||||
|
# Second call for the same domain
|
||||||
|
assert {:ok, %Oban.Job{conflict?: true}} = Instance.check_unreachable(domain)
|
||||||
|
|
||||||
|
# Should only have one job due to uniqueness
|
||||||
|
jobs = all_enqueued(worker: Pleroma.Workers.ReachabilityWorker)
|
||||||
|
assert length(jobs) == 1
|
||||||
|
[job] = jobs
|
||||||
|
assert job.args["domain"] == domain
|
||||||
|
end
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
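The %Oban.Job{conflict?: true} pattern above is what Oban returns when a matching unique job already exists, so check_unreachable/1 presumably inserts the ReachabilityWorker job with a uniqueness constraint on the domain. Illustrative sketch (the unique options are assumptions):

    def check_unreachable(domain) do
      %{"domain" => domain, "phase" => "phase_1min", "attempt" => 1}
      |> Pleroma.Workers.ReachabilityWorker.new(unique: [keys: [:domain], period: 300])
      |> Oban.insert()
    end

The first call returns {:ok, %Oban.Job{}}; a second call inside the unique window returns the existing job with conflict? set to true, matching the assertion.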
|
||||||
|
|
@ -6,74 +6,42 @@ defmodule Pleroma.InstancesTest do
|
||||||
alias Pleroma.Instances
|
alias Pleroma.Instances
|
||||||
|
|
||||||
use Pleroma.DataCase
|
use Pleroma.DataCase
|
||||||
|
use Oban.Testing, repo: Pleroma.Repo
|
||||||
setup_all do: clear_config([:instance, :federation_reachability_timeout_days], 1)
|
|
||||||
|
|
||||||
describe "reachable?/1" do
|
describe "reachable?/1" do
|
||||||
test "returns `true` for host / url with unknown reachability status" do
|
test "returns `true` for host / url with unknown reachability status" do
|
||||||
assert Instances.reachable?("unknown.site")
|
assert Instances.reachable?("unknown.site")
|
||||||
assert Instances.reachable?("http://unknown.site")
|
assert Instances.reachable?("http://unknown.site")
|
||||||
end
|
end
|
||||||
|
|
||||||
test "returns `false` for host / url marked unreachable for at least `reachability_datetime_threshold()`" do
|
|
||||||
host = "consistently-unreachable.name"
|
|
||||||
Instances.set_consistently_unreachable(host)
|
|
||||||
|
|
||||||
refute Instances.reachable?(host)
|
|
||||||
refute Instances.reachable?("http://#{host}/path")
|
|
||||||
end
|
|
||||||
|
|
||||||
test "returns `true` for host / url marked unreachable for less than `reachability_datetime_threshold()`" do
|
|
||||||
url = "http://eventually-unreachable.name/path"
|
|
||||||
|
|
||||||
Instances.set_unreachable(url)
|
|
||||||
|
|
||||||
assert Instances.reachable?(url)
|
|
||||||
assert Instances.reachable?(URI.parse(url).host)
|
|
||||||
end
|
|
||||||
|
|
||||||
test "raises FunctionClauseError exception on non-binary input" do
|
|
||||||
assert_raise FunctionClauseError, fn -> Instances.reachable?(nil) end
|
|
||||||
assert_raise FunctionClauseError, fn -> Instances.reachable?(1) end
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "filter_reachable/1" do
|
describe "filter_reachable/1" do
|
||||||
setup do
|
setup do
|
||||||
host = "consistently-unreachable.name"
|
unreachable_host = "consistently-unreachable.name"
|
||||||
url1 = "http://eventually-unreachable.com/path"
|
reachable_host = "http://domain.com/path"
|
||||||
url2 = "http://domain.com/path"
|
|
||||||
|
|
||||||
Instances.set_consistently_unreachable(host)
|
Instances.set_unreachable(unreachable_host)
|
||||||
Instances.set_unreachable(url1)
|
|
||||||
|
|
||||||
result = Instances.filter_reachable([host, url1, url2, nil])
|
result = Instances.filter_reachable([unreachable_host, reachable_host, nil])
|
||||||
%{result: result, url1: url1, url2: url2}
|
%{result: result, reachable_host: reachable_host, unreachable_host: unreachable_host}
|
||||||
end
|
end
|
||||||
|
|
||||||
test "returns a map with keys containing 'not marked consistently unreachable' elements of supplied list",
|
test "returns a list of only reachable elements",
|
||||||
%{result: result, url1: url1, url2: url2} do
|
%{result: result, reachable_host: reachable_host} do
|
||||||
assert is_map(result)
|
assert is_list(result)
|
||||||
assert Enum.sort([url1, url2]) == result |> Map.keys() |> Enum.sort()
|
assert [reachable_host] == result
|
||||||
end
|
end
|
||||||
|
|
||||||
test "returns a map with `unreachable_since` values for keys",
|
test "returns an empty list when provided no data" do
|
||||||
%{result: result, url1: url1, url2: url2} do
|
assert [] == Instances.filter_reachable([])
|
||||||
assert is_map(result)
|
assert [] == Instances.filter_reachable([nil])
|
||||||
assert %NaiveDateTime{} = result[url1]
|
|
||||||
assert is_nil(result[url2])
|
|
||||||
end
|
|
||||||
|
|
||||||
test "returns an empty map for empty list or list containing no hosts / url" do
|
|
||||||
assert %{} == Instances.filter_reachable([])
|
|
||||||
assert %{} == Instances.filter_reachable([nil])
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
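filter_reachable/1 is now specified to return a plain list of the reachable entries instead of a map keyed by unreachable_since. A minimal sketch consistent with these tests (an illustration, not the actual implementation):

    def filter_reachable(hosts_or_urls) when is_list(hosts_or_urls) do
      hosts_or_urls
      |> Enum.filter(&is_binary/1)
      |> Enum.filter(&reachable?/1)
    end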
|
||||||
describe "set_reachable/1" do
|
describe "set_reachable/1" do
|
||||||
test "sets unreachable url or host reachable" do
|
test "sets unreachable url or host reachable" do
|
||||||
host = "domain.com"
|
host = "domain.com"
|
||||||
Instances.set_consistently_unreachable(host)
|
Instances.set_unreachable(host)
|
||||||
refute Instances.reachable?(host)
|
refute Instances.reachable?(host)
|
||||||
|
|
||||||
Instances.set_reachable(host)
|
Instances.set_reachable(host)
|
||||||
|
|
@ -103,22 +71,68 @@ defmodule Pleroma.InstancesTest do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "set_consistently_unreachable/1" do
|
describe "check_all_unreachable/0" do
|
||||||
test "sets reachable url or host unreachable" do
|
test "schedules ReachabilityWorker jobs for all unreachable instances" do
|
||||||
url = "http://domain.com?q="
|
domain1 = "unreachable1.example.com"
|
||||||
assert Instances.reachable?(url)
|
domain2 = "unreachable2.example.com"
|
||||||
|
domain3 = "unreachable3.example.com"
|
||||||
|
|
||||||
Instances.set_consistently_unreachable(url)
|
Instances.set_unreachable(domain1)
|
||||||
refute Instances.reachable?(url)
|
Instances.set_unreachable(domain2)
|
||||||
|
Instances.set_unreachable(domain3)
|
||||||
|
|
||||||
|
Instances.check_all_unreachable()
|
||||||
|
|
||||||
|
# Verify that ReachabilityWorker jobs were scheduled for all unreachable domains
|
||||||
|
jobs = all_enqueued(worker: Pleroma.Workers.ReachabilityWorker)
|
||||||
|
assert length(jobs) == 3
|
||||||
|
|
||||||
|
domains = Enum.map(jobs, & &1.args["domain"])
|
||||||
|
assert domain1 in domains
|
||||||
|
assert domain2 in domains
|
||||||
|
assert domain3 in domains
|
||||||
end
|
end
|
||||||
|
|
||||||
test "keeps unreachable url or host unreachable" do
|
test "does not schedule jobs for reachable instances" do
|
||||||
host = "site.name"
|
unreachable_domain = "unreachable.example.com"
|
||||||
Instances.set_consistently_unreachable(host)
|
reachable_domain = "reachable.example.com"
|
||||||
refute Instances.reachable?(host)
|
|
||||||
|
|
||||||
Instances.set_consistently_unreachable(host)
|
Instances.set_unreachable(unreachable_domain)
|
||||||
refute Instances.reachable?(host)
|
Instances.set_reachable(reachable_domain)
|
||||||
|
|
||||||
|
Instances.check_all_unreachable()
|
||||||
|
|
||||||
|
# Verify that only one job was scheduled (for the unreachable domain)
|
||||||
|
jobs = all_enqueued(worker: Pleroma.Workers.ReachabilityWorker)
|
||||||
|
assert length(jobs) == 1
|
||||||
|
[job] = jobs
|
||||||
|
assert job.args["domain"] == unreachable_domain
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
test "delete_all_unreachable/0 schedules DeleteWorker jobs for all unreachable instances" do
|
||||||
|
domain1 = "unreachable1.example.com"
|
||||||
|
domain2 = "unreachable2.example.com"
|
||||||
|
domain3 = "unreachable3.example.com"
|
||||||
|
|
||||||
|
Instances.set_unreachable(domain1)
|
||||||
|
Instances.set_unreachable(domain2)
|
||||||
|
Instances.set_unreachable(domain3)
|
||||||
|
|
||||||
|
Instances.delete_all_unreachable()
|
||||||
|
|
||||||
|
# Verify that DeleteWorker jobs were scheduled for all unreachable domains
|
||||||
|
jobs = all_enqueued(worker: Pleroma.Workers.DeleteWorker)
|
||||||
|
assert length(jobs) == 3
|
||||||
|
|
||||||
|
domains = Enum.map(jobs, & &1.args["host"])
|
||||||
|
assert domain1 in domains
|
||||||
|
assert domain2 in domains
|
||||||
|
assert domain3 in domains
|
||||||
|
|
||||||
|
# Verify all jobs are delete_instance operations
|
||||||
|
Enum.each(jobs, fn job ->
|
||||||
|
assert job.args["op"] == "delete_instance"
|
||||||
|
end)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
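Both bulk helpers exercised above walk the instances with a non-nil unreachable_since and enqueue one job per host. A rough sketch, where the worker args mirror the assertions and everything else is an assumption:

    import Ecto.Query

    def check_all_unreachable do
      Enum.each(unreachable_hosts(), &check_unreachable/1)
    end

    def delete_all_unreachable do
      Enum.each(unreachable_hosts(), fn host ->
        %{"op" => "delete_instance", "host" => host}
        |> Pleroma.Workers.DeleteWorker.new()
        |> Oban.insert()
      end)
    end

    defp unreachable_hosts do
      Pleroma.Instances.Instance
      |> where([i], not is_nil(i.unreachable_since))
      |> select([i], i.host)
      |> Repo.all()
    end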
|
||||||
|
|
@ -6,7 +6,6 @@ defmodule Pleroma.Object.FetcherTest do
|
||||||
use Pleroma.DataCase
|
use Pleroma.DataCase
|
||||||
|
|
||||||
alias Pleroma.Activity
|
alias Pleroma.Activity
|
||||||
alias Pleroma.Instances
|
|
||||||
alias Pleroma.Object
|
alias Pleroma.Object
|
||||||
alias Pleroma.Object.Fetcher
|
alias Pleroma.Object.Fetcher
|
||||||
alias Pleroma.Web.ActivityPub.ObjectValidator
|
alias Pleroma.Web.ActivityPub.ObjectValidator
|
||||||
|
|
@ -250,17 +249,6 @@ defmodule Pleroma.Object.FetcherTest do
|
||||||
result = Fetcher.fetch_object_from_id("https://example.com/objects/no-content-type")
|
result = Fetcher.fetch_object_from_id("https://example.com/objects/no-content-type")
|
||||||
assert {:fetch, {:error, nil}} = result
|
assert {:fetch, {:error, nil}} = result
|
||||||
end
|
end
|
||||||
|
|
||||||
test "it resets instance reachability on successful fetch" do
|
|
||||||
id = "http://mastodon.example.org/@admin/99541947525187367"
|
|
||||||
Instances.set_consistently_unreachable(id)
|
|
||||||
refute Instances.reachable?(id)
|
|
||||||
|
|
||||||
{:ok, _object} =
|
|
||||||
Fetcher.fetch_object_from_id("http://mastodon.example.org/@admin/99541947525187367")
|
|
||||||
|
|
||||||
assert Instances.reachable?(id)
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "implementation quirks" do
|
describe "implementation quirks" do
|
||||||
|
|
|
||||||
|
|
@ -395,4 +395,40 @@ defmodule Pleroma.ReverseProxyTest do
|
||||||
assert Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"]
|
assert Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"]
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
# Hackney is used for the Reverse Proxy when Hackney or Finch is the Tesla Adapter
|
||||||
|
# Gun is able to proxy through Tesla, so it does not need testing as the
|
||||||
|
# test cases in the Pleroma.HTTPTest module are sufficient
|
||||||
|
describe "Hackney URL encoding:" do
|
||||||
|
setup do
|
||||||
|
ClientMock
|
||||||
|
|> expect(:request, fn :get,
|
||||||
|
"https://example.com/emoji/Pack%201/koronebless.png?foo=bar+baz",
|
||||||
|
_headers,
|
||||||
|
_body,
|
||||||
|
_opts ->
|
||||||
|
{:ok, 200, [{"content-type", "image/png"}], "It works!"}
|
||||||
|
end)
|
||||||
|
|> stub(:stream_body, fn _ -> :done end)
|
||||||
|
|> stub(:close, fn _ -> :ok end)
|
||||||
|
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
|
||||||
|
test "properly encodes URLs with spaces", %{conn: conn} do
|
||||||
|
url_with_space = "https://example.com/emoji/Pack 1/koronebless.png?foo=bar baz"
|
||||||
|
|
||||||
|
result = ReverseProxy.call(conn, url_with_space)
|
||||||
|
|
||||||
|
assert result.status == 200
|
||||||
|
end
|
||||||
|
|
||||||
|
test "properly encoded URL should not be altered", %{conn: conn} do
|
||||||
|
properly_encoded_url = "https://example.com/emoji/Pack%201/koronebless.png?foo=bar+baz"
|
||||||
|
|
||||||
|
result = ReverseProxy.call(conn, properly_encoded_url)
|
||||||
|
|
||||||
|
assert result.status == 200
|
||||||
|
end
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
||||||
|
|
@ -51,7 +51,7 @@ defmodule Pleroma.Search.QdrantSearchTest do
|
||||||
})
|
})
|
||||||
|
|
||||||
Config
|
Config
|
||||||
|> expect(:get, 3, fn
|
|> expect(:get, 4, fn
|
||||||
[Pleroma.Search, :module], nil ->
|
[Pleroma.Search, :module], nil ->
|
||||||
QdrantSearch
|
QdrantSearch
|
||||||
|
|
||||||
|
|
@ -93,7 +93,7 @@ defmodule Pleroma.Search.QdrantSearchTest do
|
||||||
})
|
})
|
||||||
|
|
||||||
Config
|
Config
|
||||||
|> expect(:get, 3, fn
|
|> expect(:get, 4, fn
|
||||||
[Pleroma.Search, :module], nil ->
|
[Pleroma.Search, :module], nil ->
|
||||||
QdrantSearch
|
QdrantSearch
|
||||||
|
|
||||||
|
|
@ -158,7 +158,7 @@ defmodule Pleroma.Search.QdrantSearchTest do
|
||||||
end)
|
end)
|
||||||
|
|
||||||
Config
|
Config
|
||||||
|> expect(:get, 6, fn
|
|> expect(:get, 7, fn
|
||||||
[Pleroma.Search, :module], nil ->
|
[Pleroma.Search, :module], nil ->
|
||||||
QdrantSearch
|
QdrantSearch
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
||||||
|
|
||||||
alias Pleroma.Activity
|
alias Pleroma.Activity
|
||||||
alias Pleroma.Delivery
|
alias Pleroma.Delivery
|
||||||
alias Pleroma.Instances
|
|
||||||
alias Pleroma.Object
|
alias Pleroma.Object
|
||||||
alias Pleroma.Tests.ObanHelpers
|
alias Pleroma.Tests.ObanHelpers
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
|
|
@ -601,23 +600,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
||||||
assert Activity.get_by_ap_id(data["id"])
|
assert Activity.get_by_ap_id(data["id"])
|
||||||
end
|
end
|
||||||
|
|
||||||
test "it clears `unreachable` federation status of the sender", %{conn: conn} do
|
|
||||||
data = File.read!("test/fixtures/mastodon-post-activity.json") |> Jason.decode!()
|
|
||||||
|
|
||||||
sender_url = data["actor"]
|
|
||||||
Instances.set_consistently_unreachable(sender_url)
|
|
||||||
refute Instances.reachable?(sender_url)
|
|
||||||
|
|
||||||
conn =
|
|
||||||
conn
|
|
||||||
|> assign(:valid_signature, true)
|
|
||||||
|> put_req_header("content-type", "application/activity+json")
|
|
||||||
|> post("/inbox", data)
|
|
||||||
|
|
||||||
assert "ok" == json_response(conn, 200)
|
|
||||||
assert Instances.reachable?(sender_url)
|
|
||||||
end
|
|
||||||
|
|
||||||
test "accept follow activity", %{conn: conn} do
|
test "accept follow activity", %{conn: conn} do
|
||||||
clear_config([:instance, :federating], true)
|
clear_config([:instance, :federating], true)
|
||||||
relay = Relay.get_actor()
|
relay = Relay.get_actor()
|
||||||
|
|
@ -941,23 +923,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
||||||
assert Activity.get_by_ap_id(data["id"])
|
assert Activity.get_by_ap_id(data["id"])
|
||||||
end
|
end
|
||||||
|
|
||||||
test "it rejects an invalid incoming activity", %{conn: conn, data: data} do
|
|
||||||
user = insert(:user, is_active: false)
|
|
||||||
|
|
||||||
data =
|
|
||||||
data
|
|
||||||
|> Map.put("bcc", [user.ap_id])
|
|
||||||
|> Kernel.put_in(["object", "bcc"], [user.ap_id])
|
|
||||||
|
|
||||||
conn =
|
|
||||||
conn
|
|
||||||
|> assign(:valid_signature, true)
|
|
||||||
|> put_req_header("content-type", "application/activity+json")
|
|
||||||
|> post("/users/#{user.nickname}/inbox", data)
|
|
||||||
|
|
||||||
assert "Invalid request." == json_response(conn, 400)
|
|
||||||
end
|
|
||||||
|
|
||||||
test "it accepts messages with to as string instead of array", %{conn: conn, data: data} do
|
test "it accepts messages with to as string instead of array", %{conn: conn, data: data} do
|
||||||
user = insert(:user)
|
user = insert(:user)
|
||||||
|
|
||||||
|
|
@ -1108,24 +1073,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
||||||
assert response(conn, 200) =~ note_object.data["content"]
|
assert response(conn, 200) =~ note_object.data["content"]
|
||||||
end
|
end
|
||||||
|
|
||||||
test "it clears `unreachable` federation status of the sender", %{conn: conn, data: data} do
|
|
||||||
user = insert(:user)
|
|
||||||
data = Map.put(data, "bcc", [user.ap_id])
|
|
||||||
|
|
||||||
sender_host = URI.parse(data["actor"]).host
|
|
||||||
Instances.set_consistently_unreachable(sender_host)
|
|
||||||
refute Instances.reachable?(sender_host)
|
|
||||||
|
|
||||||
conn =
|
|
||||||
conn
|
|
||||||
|> assign(:valid_signature, true)
|
|
||||||
|> put_req_header("content-type", "application/activity+json")
|
|
||||||
|> post("/users/#{user.nickname}/inbox", data)
|
|
||||||
|
|
||||||
assert "ok" == json_response(conn, 200)
|
|
||||||
assert Instances.reachable?(sender_host)
|
|
||||||
end
|
|
||||||
|
|
||||||
test "it removes all follower collections but actor's", %{conn: conn} do
|
test "it removes all follower collections but actor's", %{conn: conn} do
|
||||||
[actor, recipient] = insert_pair(:user)
|
[actor, recipient] = insert_pair(:user)
|
||||||
|
|
||||||
|
|
@ -1341,6 +1288,50 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
||||||
ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
|
ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
|
||||||
assert Activity.get_by_ap_id(data["id"])
|
assert Activity.get_by_ap_id(data["id"])
|
||||||
end
|
end
|
||||||
|
|
||||||
|
test "it returns an error when receiving an activity sent to a deactivated user", %{
|
||||||
|
conn: conn,
|
||||||
|
data: data
|
||||||
|
} do
|
||||||
|
user = insert(:user)
|
||||||
|
{:ok, _} = User.set_activation(user, false)
|
||||||
|
|
||||||
|
data =
|
||||||
|
data
|
||||||
|
|> Map.put("bcc", [user.ap_id])
|
||||||
|
|> Kernel.put_in(["object", "bcc"], [user.ap_id])
|
||||||
|
|
||||||
|
conn =
|
||||||
|
conn
|
||||||
|
|> assign(:valid_signature, true)
|
||||||
|
|> put_req_header("content-type", "application/activity+json")
|
||||||
|
|> post("/users/#{user.nickname}/inbox", data)
|
||||||
|
|
||||||
|
assert "User deactivated" == json_response(conn, 404)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "it returns an error when receiving an activity sent from a deactivated user", %{
|
||||||
|
conn: conn,
|
||||||
|
data: data
|
||||||
|
} do
|
||||||
|
sender = insert(:user)
|
||||||
|
user = insert(:user)
|
||||||
|
{:ok, _} = User.set_activation(sender, false)
|
||||||
|
|
||||||
|
data =
|
||||||
|
data
|
||||||
|
|> Map.put("bcc", [user.ap_id])
|
||||||
|
|> Map.put("actor", sender.ap_id)
|
||||||
|
|> Kernel.put_in(["object", "bcc"], [user.ap_id])
|
||||||
|
|
||||||
|
conn =
|
||||||
|
conn
|
||||||
|
|> assign(:valid_signature, true)
|
||||||
|
|> put_req_header("content-type", "application/activity+json")
|
||||||
|
|> post("/users/#{user.nickname}/inbox", data)
|
||||||
|
|
||||||
|
assert "Sender deactivated" == json_response(conn, 404)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
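The two 404 assertions above imply an activation check early in the inbox pipeline, before the activity is handed off for processing. A sketch of such a check (control flow and function placement are assumptions; only the status codes and messages are taken from the tests):

    with {_, %User{is_active: true}} <- {:recipient, User.get_cached_by_nickname(nickname)},
         {_, %User{is_active: true}} <- {:sender, User.get_cached_by_ap_id(data["actor"])} do
      # enqueue the activity for the federator as before
    else
      {:recipient, _} -> conn |> put_status(:not_found) |> json("User deactivated")
      {:sender, _} -> conn |> put_status(:not_found) |> json("Sender deactivated")
    end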
|
||||||
describe "GET /users/:nickname/outbox" do
|
describe "GET /users/:nickname/outbox" do
|
||||||
|
|
|
||||||
|
|
@ -6,13 +6,11 @@ defmodule Pleroma.Web.ActivityPub.PublisherTest do
|
||||||
use Oban.Testing, repo: Pleroma.Repo
|
use Oban.Testing, repo: Pleroma.Repo
|
||||||
use Pleroma.Web.ConnCase
|
use Pleroma.Web.ConnCase
|
||||||
|
|
||||||
import ExUnit.CaptureLog
|
|
||||||
import Pleroma.Factory
|
import Pleroma.Factory
|
||||||
import Tesla.Mock
|
import Tesla.Mock
|
||||||
import Mock
|
import Mock
|
||||||
|
|
||||||
alias Pleroma.Activity
|
alias Pleroma.Activity
|
||||||
alias Pleroma.Instances
|
|
||||||
alias Pleroma.Object
|
alias Pleroma.Object
|
||||||
alias Pleroma.Tests.ObanHelpers
|
alias Pleroma.Tests.ObanHelpers
|
||||||
alias Pleroma.Web.ActivityPub.Publisher
|
alias Pleroma.Web.ActivityPub.Publisher
|
||||||
|
|
@ -167,115 +165,6 @@ defmodule Pleroma.Web.ActivityPub.PublisherTest do
|
||||||
})
|
})
|
||||||
|> Publisher.publish_one()
|
|> Publisher.publish_one()
|
||||||
end
|
end
|
||||||
|
|
||||||
test_with_mock "calls `Instances.set_reachable` on successful federation if `unreachable_since` is set",
|
|
||||||
Instances,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
_actor = insert(:user)
|
|
||||||
inbox = "http://200.site/users/nick1/inbox"
|
|
||||||
activity = insert(:note_activity)
|
|
||||||
|
|
||||||
assert {:ok, _} =
|
|
||||||
Publisher.prepare_one(%{
|
|
||||||
inbox: inbox,
|
|
||||||
activity_id: activity.id,
|
|
||||||
unreachable_since: NaiveDateTime.utc_now() |> NaiveDateTime.to_string()
|
|
||||||
})
|
|
||||||
|> Publisher.publish_one()
|
|
||||||
|
|
||||||
assert called(Instances.set_reachable(inbox))
|
|
||||||
end
|
|
||||||
|
|
||||||
test_with_mock "does NOT call `Instances.set_reachable` on successful federation if `unreachable_since` is nil",
|
|
||||||
Instances,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
_actor = insert(:user)
|
|
||||||
inbox = "http://200.site/users/nick1/inbox"
|
|
||||||
activity = insert(:note_activity)
|
|
||||||
|
|
||||||
assert {:ok, _} =
|
|
||||||
Publisher.prepare_one(%{
|
|
||||||
inbox: inbox,
|
|
||||||
activity_id: activity.id,
|
|
||||||
unreachable_since: nil
|
|
||||||
})
|
|
||||||
|> Publisher.publish_one()
|
|
||||||
|
|
||||||
refute called(Instances.set_reachable(inbox))
|
|
||||||
end
|
|
||||||
|
|
||||||
test_with_mock "calls `Instances.set_unreachable` on target inbox on non-2xx HTTP response code",
|
|
||||||
Instances,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
_actor = insert(:user)
|
|
||||||
inbox = "http://404.site/users/nick1/inbox"
|
|
||||||
activity = insert(:note_activity)
|
|
||||||
|
|
||||||
assert {:cancel, _} =
|
|
||||||
Publisher.prepare_one(%{inbox: inbox, activity_id: activity.id})
|
|
||||||
|> Publisher.publish_one()
|
|
||||||
|
|
||||||
assert called(Instances.set_unreachable(inbox))
|
|
||||||
end
|
|
||||||
|
|
||||||
test_with_mock "it calls `Instances.set_unreachable` on target inbox on request error of any kind",
|
|
||||||
Instances,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
_actor = insert(:user)
|
|
||||||
inbox = "http://connrefused.site/users/nick1/inbox"
|
|
||||||
activity = insert(:note_activity)
|
|
||||||
|
|
||||||
assert capture_log(fn ->
|
|
||||||
assert {:error, _} =
|
|
||||||
Publisher.prepare_one(%{
|
|
||||||
inbox: inbox,
|
|
||||||
activity_id: activity.id
|
|
||||||
})
|
|
||||||
|> Publisher.publish_one()
|
|
||||||
end) =~ "connrefused"
|
|
||||||
|
|
||||||
assert called(Instances.set_unreachable(inbox))
|
|
||||||
end
|
|
||||||
|
|
||||||
test_with_mock "does NOT call `Instances.set_unreachable` if target is reachable",
|
|
||||||
Instances,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
_actor = insert(:user)
|
|
||||||
inbox = "http://200.site/users/nick1/inbox"
|
|
||||||
activity = insert(:note_activity)
|
|
||||||
|
|
||||||
assert {:ok, _} =
|
|
||||||
Publisher.prepare_one(%{inbox: inbox, activity_id: activity.id})
|
|
||||||
|> Publisher.publish_one()
|
|
||||||
|
|
||||||
refute called(Instances.set_unreachable(inbox))
|
|
||||||
end
|
|
||||||
|
|
||||||
test_with_mock "does NOT call `Instances.set_unreachable` if target instance has non-nil `unreachable_since`",
|
|
||||||
Instances,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
_actor = insert(:user)
|
|
||||||
inbox = "http://connrefused.site/users/nick1/inbox"
|
|
||||||
activity = insert(:note_activity)
|
|
||||||
|
|
||||||
assert capture_log(fn ->
|
|
||||||
assert {:error, _} =
|
|
||||||
Publisher.prepare_one(%{
|
|
||||||
inbox: inbox,
|
|
||||||
activity_id: activity.id,
|
|
||||||
unreachable_since: NaiveDateTime.utc_now() |> NaiveDateTime.to_string()
|
|
||||||
})
|
|
||||||
|> Publisher.publish_one()
|
|
||||||
end) =~ "connrefused"
|
|
||||||
|
|
||||||
refute called(Instances.set_unreachable(inbox))
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "publish/2" do
|
describe "publish/2" do
|
||||||
|
|
|
||||||
|
|
@ -126,22 +126,17 @@ defmodule Pleroma.Web.FederatorTest do
|
||||||
inbox: inbox2
|
inbox: inbox2
|
||||||
})
|
})
|
||||||
|
|
||||||
dt = NaiveDateTime.utc_now()
|
Instances.set_unreachable(URI.parse(inbox2).host)
|
||||||
Instances.set_unreachable(inbox1, dt)
|
|
||||||
|
|
||||||
Instances.set_consistently_unreachable(URI.parse(inbox2).host)
|
|
||||||
|
|
||||||
{:ok, _activity} =
|
{:ok, _activity} =
|
||||||
CommonAPI.post(user, %{status: "HI @nick1@domain.com, @nick2@domain2.com!"})
|
CommonAPI.post(user, %{status: "HI @nick1@domain.com, @nick2@domain2.com!"})
|
||||||
|
|
||||||
expected_dt = NaiveDateTime.to_iso8601(dt)
|
|
||||||
|
|
||||||
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
|
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
|
||||||
|
|
||||||
assert ObanHelpers.member?(
|
assert ObanHelpers.member?(
|
||||||
%{
|
%{
|
||||||
"op" => "publish_one",
|
"op" => "publish_one",
|
||||||
"params" => %{"inbox" => inbox1, "unreachable_since" => expected_dt}
|
"params" => %{"inbox" => inbox1}
|
||||||
},
|
},
|
||||||
all_enqueued(worker: PublisherWorker)
|
all_enqueued(worker: PublisherWorker)
|
||||||
)
|
)
|
||||||
|
|
|
||||||
|
|
@@ -7,7 +7,6 @@ defmodule Pleroma.Web.MastodonAPI.SearchControllerTest do
  alias Pleroma.Object
  alias Pleroma.Web.CommonAPI
-  alias Pleroma.Web.Endpoint
  import Pleroma.Factory
  import ExUnit.CaptureLog
  import Tesla.Mock

@@ -66,9 +65,7 @@ defmodule Pleroma.Web.MastodonAPI.SearchControllerTest do
      [account | _] = results["accounts"]
      assert account["id"] == to_string(user_three.id)

-      assert results["hashtags"] == [
-               %{"name" => "private", "url" => "#{Endpoint.url()}/tag/private"}
-             ]
+      assert results["hashtags"] == []

      [status] = results["statuses"]
      assert status["id"] == to_string(activity.id)

@@ -77,9 +74,7 @@ defmodule Pleroma.Web.MastodonAPI.SearchControllerTest do
        get(conn, "/api/v2/search?q=天子")
        |> json_response_and_validate_schema(200)

-      assert results["hashtags"] == [
-               %{"name" => "天子", "url" => "#{Endpoint.url()}/tag/天子"}
-             ]
+      assert results["hashtags"] == []

      [status] = results["statuses"]
      assert status["id"] == to_string(activity.id)

@@ -130,84 +125,97 @@ defmodule Pleroma.Web.MastodonAPI.SearchControllerTest do
      assert [] = results["statuses"]
    end

-    test "constructs hashtags from search query", %{conn: conn} do
+    test "returns empty results when no hashtags match", %{conn: conn} do
      results =
        conn
-        |> get("/api/v2/search?#{URI.encode_query(%{q: "some text with #explicit #hashtags"})}")
+        |> get("/api/v2/search?#{URI.encode_query(%{q: "nonexistent"})}")
        |> json_response_and_validate_schema(200)

-      assert results["hashtags"] == [
-               %{"name" => "explicit", "url" => "#{Endpoint.url()}/tag/explicit"},
-               %{"name" => "hashtags", "url" => "#{Endpoint.url()}/tag/hashtags"}
-             ]
+      assert results["hashtags"] == []
+    end
+
+    test "searches hashtags by multiple words in query", %{conn: conn} do
+      user = insert(:user)
+
+      {:ok, _activity1} = CommonAPI.post(user, %{status: "This is my new #computer"})
+      {:ok, _activity2} = CommonAPI.post(user, %{status: "Check out this #laptop"})
+      {:ok, _activity3} = CommonAPI.post(user, %{status: "My #desktop setup"})
+      {:ok, _activity4} = CommonAPI.post(user, %{status: "New #phone arrived"})

      results =
        conn
-        |> get("/api/v2/search?#{URI.encode_query(%{q: "john doe JOHN DOE"})}")
+        |> get("/api/v2/search?#{URI.encode_query(%{q: "new computer"})}")
        |> json_response_and_validate_schema(200)

-      assert results["hashtags"] == [
-               %{"name" => "john", "url" => "#{Endpoint.url()}/tag/john"},
-               %{"name" => "doe", "url" => "#{Endpoint.url()}/tag/doe"},
-               %{"name" => "JohnDoe", "url" => "#{Endpoint.url()}/tag/JohnDoe"}
-             ]
+      hashtag_names = Enum.map(results["hashtags"], & &1["name"])
+      assert "computer" in hashtag_names
+      refute "laptop" in hashtag_names
+      refute "desktop" in hashtag_names
+      refute "phone" in hashtag_names

      results =
        conn
-        |> get("/api/v2/search?#{URI.encode_query(%{q: "accident-prone"})}")
+        |> get("/api/v2/search?#{URI.encode_query(%{q: "computer laptop"})}")
        |> json_response_and_validate_schema(200)

-      assert results["hashtags"] == [
-               %{"name" => "accident", "url" => "#{Endpoint.url()}/tag/accident"},
-               %{"name" => "prone", "url" => "#{Endpoint.url()}/tag/prone"},
-               %{"name" => "AccidentProne", "url" => "#{Endpoint.url()}/tag/AccidentProne"}
-             ]
-
-      results =
-        conn
-        |> get("/api/v2/search?#{URI.encode_query(%{q: "https://shpposter.club/users/shpuld"})}")
-        |> json_response_and_validate_schema(200)
-
-      assert results["hashtags"] == [
-               %{"name" => "shpuld", "url" => "#{Endpoint.url()}/tag/shpuld"}
-             ]
-
-      results =
-        conn
-        |> get(
-          "/api/v2/search?#{URI.encode_query(%{q: "https://www.washingtonpost.com/sports/2020/06/10/" <> "nascar-ban-display-confederate-flag-all-events-properties/"})}"
-        )
-        |> json_response_and_validate_schema(200)
-
-      assert results["hashtags"] == [
-               %{"name" => "nascar", "url" => "#{Endpoint.url()}/tag/nascar"},
-               %{"name" => "ban", "url" => "#{Endpoint.url()}/tag/ban"},
-               %{"name" => "display", "url" => "#{Endpoint.url()}/tag/display"},
-               %{"name" => "confederate", "url" => "#{Endpoint.url()}/tag/confederate"},
-               %{"name" => "flag", "url" => "#{Endpoint.url()}/tag/flag"},
-               %{"name" => "all", "url" => "#{Endpoint.url()}/tag/all"},
-               %{"name" => "events", "url" => "#{Endpoint.url()}/tag/events"},
-               %{"name" => "properties", "url" => "#{Endpoint.url()}/tag/properties"},
-               %{
-                 "name" => "NascarBanDisplayConfederateFlagAllEventsProperties",
-                 "url" =>
-                   "#{Endpoint.url()}/tag/NascarBanDisplayConfederateFlagAllEventsProperties"
-               }
-             ]
+      hashtag_names = Enum.map(results["hashtags"], & &1["name"])
+      assert "computer" in hashtag_names
+      assert "laptop" in hashtag_names
+      refute "desktop" in hashtag_names
+      refute "phone" in hashtag_names
    end

    test "supports pagination of hashtags search results", %{conn: conn} do
+      user = insert(:user)
+
+      {:ok, _activity1} = CommonAPI.post(user, %{status: "First #alpha hashtag"})
+      {:ok, _activity2} = CommonAPI.post(user, %{status: "Second #beta hashtag"})
+      {:ok, _activity3} = CommonAPI.post(user, %{status: "Third #gamma hashtag"})
+      {:ok, _activity4} = CommonAPI.post(user, %{status: "Fourth #delta hashtag"})
+
      results =
        conn
-        |> get(
-          "/api/v2/search?#{URI.encode_query(%{q: "#some #text #with #hashtags", limit: 2, offset: 1})}"
-        )
+        |> get("/api/v2/search?#{URI.encode_query(%{q: "a", limit: 2, offset: 1})}")
        |> json_response_and_validate_schema(200)

-      assert results["hashtags"] == [
-               %{"name" => "text", "url" => "#{Endpoint.url()}/tag/text"},
-               %{"name" => "with", "url" => "#{Endpoint.url()}/tag/with"}
-             ]
+      hashtag_names = Enum.map(results["hashtags"], & &1["name"])
+
+      # Should return 2 hashtags (alpha, beta, gamma, delta all contain 'a')
+      # With offset 1, we skip the first one, so we get 2 of the remaining 3
+      assert length(hashtag_names) == 2
+      assert Enum.all?(hashtag_names, &String.contains?(&1, "a"))
+    end
+
+    test "searches real hashtags from database", %{conn: conn} do
+      user = insert(:user)
+
+      {:ok, _activity1} = CommonAPI.post(user, %{status: "Check out this #car"})
+      {:ok, _activity2} = CommonAPI.post(user, %{status: "Fast #racecar on the track"})
+      {:ok, _activity3} = CommonAPI.post(user, %{status: "NASCAR #nascar racing"})
+
+      results =
+        conn
+        |> get("/api/v2/search?#{URI.encode_query(%{q: "car"})}")
+        |> json_response_and_validate_schema(200)
+
+      hashtag_names = Enum.map(results["hashtags"], & &1["name"])
+
+      # Should return car, racecar, and nascar since they all contain "car"
+      assert "car" in hashtag_names
+      assert "racecar" in hashtag_names
+      assert "nascar" in hashtag_names
+
+      # Search for "race" - should return racecar
+      results =
+        conn
+        |> get("/api/v2/search?#{URI.encode_query(%{q: "race"})}")
+        |> json_response_and_validate_schema(200)
+
+      hashtag_names = Enum.map(results["hashtags"], & &1["name"])
+
+      assert "racecar" in hashtag_names
+      refute "car" in hashtag_names
+      refute "nascar" in hashtag_names
    end

    test "excludes a blocked users from search results", %{conn: conn} do

@@ -314,7 +322,7 @@ defmodule Pleroma.Web.MastodonAPI.SearchControllerTest do
      [account | _] = results["accounts"]
      assert account["id"] == to_string(user_three.id)

-      assert results["hashtags"] == ["2hu"]
+      assert results["hashtags"] == []

      [status] = results["statuses"]
      assert status["id"] == to_string(activity.id)
@@ -292,10 +292,14 @@ defmodule Pleroma.Web.PleromaAPI.AccountControllerTest do
      User.endorse(user1, user2)
      User.endorse(user1, user3)

-      [%{"id" => ^id2}, %{"id" => ^id3}] =
+      response =
        conn
        |> get("/api/v1/pleroma/accounts/#{id1}/endorsements")
        |> json_response_and_validate_schema(200)
+
+      assert length(response) == 2
+      assert Enum.any?(response, fn user -> user["id"] == id2 end)
+      assert Enum.any?(response, fn user -> user["id"] == id3 end)
    end

    test "returns 404 error when specified user is not exist", %{conn: conn} do
@@ -0,0 +1,371 @@
# Pleroma: A lightweight social networking server
# Copyright © Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.PleromaAPI.EmojiPackControllerDownloadZipTest do
  use Pleroma.Web.ConnCase, async: false

  import Tesla.Mock
  import Pleroma.Factory

  setup_all do
    # Create a base temp directory for this test module
    base_temp_dir = Path.join(System.tmp_dir!(), "emoji_test_#{Ecto.UUID.generate()}")

    # Clean up when all tests in module are done
    on_exit(fn ->
      File.rm_rf!(base_temp_dir)
    end)

    {:ok, %{base_temp_dir: base_temp_dir}}
  end

  setup %{base_temp_dir: base_temp_dir} do
    # Create a unique subdirectory for each test
    test_id = Ecto.UUID.generate()
    temp_dir = Path.join(base_temp_dir, test_id)
    emoji_dir = Path.join(temp_dir, "emoji")

    # Create the directory structure
    File.mkdir_p!(emoji_dir)

    # Configure this test to use the temp directory
    clear_config([:instance, :static_dir], temp_dir)

    admin = insert(:user, is_admin: true)
    token = insert(:oauth_admin_token, user: admin)

    admin_conn =
      build_conn()
      |> assign(:user, admin)
      |> assign(:token, token)

    Pleroma.Emoji.reload()

    {:ok, %{admin_conn: admin_conn, emoji_path: emoji_dir}}
  end

  describe "POST /api/pleroma/emoji/packs/download_zip" do
    setup do
      clear_config([:instance, :admin_privileges], [:emoji_manage_emoji])
    end

    test "creates pack from uploaded ZIP file", %{admin_conn: admin_conn, emoji_path: emoji_path} do
      # Create a test ZIP file with emojis
      {:ok, zip_path} = create_test_emoji_zip()

      upload = %Plug.Upload{
        content_type: "application/zip",
        path: zip_path,
        filename: "test_pack.zip"
      }

      assert admin_conn
             |> put_req_header("content-type", "multipart/form-data")
             |> post("/api/pleroma/emoji/packs/download_zip", %{
               name: "test_zip_pack",
               file: upload
             })
             |> json_response_and_validate_schema(200) == "ok"

      # Verify pack was created
      assert File.exists?("#{emoji_path}/test_zip_pack/pack.json")
      assert File.exists?("#{emoji_path}/test_zip_pack/test_emoji.png")

      # Verify pack.json contents
      {:ok, pack_json} = File.read("#{emoji_path}/test_zip_pack/pack.json")
      pack_data = Jason.decode!(pack_json)

      assert pack_data["files"]["test_emoji"] == "test_emoji.png"
      assert pack_data["pack"]["src_sha256"] != nil

      # Clean up
      File.rm!(zip_path)
    end

    test "creates pack from URL", %{admin_conn: admin_conn, emoji_path: emoji_path} do
      # Mock HTTP request to download ZIP
      {:ok, zip_path} = create_test_emoji_zip()
      {:ok, zip_data} = File.read(zip_path)

      mock(fn
        %{method: :get, url: "https://example.com/emoji_pack.zip"} ->
          %Tesla.Env{status: 200, body: zip_data}
      end)

      assert admin_conn
             |> put_req_header("content-type", "multipart/form-data")
             |> post("/api/pleroma/emoji/packs/download_zip", %{
               name: "test_zip_pack_url",
               url: "https://example.com/emoji_pack.zip"
             })
             |> json_response_and_validate_schema(200) == "ok"

      # Verify pack was created
      assert File.exists?("#{emoji_path}/test_zip_pack_url/pack.json")
      assert File.exists?("#{emoji_path}/test_zip_pack_url/test_emoji.png")

      # Verify pack.json has URL as source
      {:ok, pack_json} = File.read("#{emoji_path}/test_zip_pack_url/pack.json")
      pack_data = Jason.decode!(pack_json)

      assert pack_data["pack"]["src"] == "https://example.com/emoji_pack.zip"
      assert pack_data["pack"]["src_sha256"] != nil

      # Clean up
      File.rm!(zip_path)
    end

    test "refuses to overwrite existing pack", %{admin_conn: admin_conn, emoji_path: emoji_path} do
      # Create existing pack
      pack_path = Path.join(emoji_path, "test_zip_pack")
      File.mkdir_p!(pack_path)
      File.write!(Path.join(pack_path, "pack.json"), Jason.encode!(%{files: %{}}))

      {:ok, zip_path} = create_test_emoji_zip()

      upload = %Plug.Upload{
        content_type: "application/zip",
        path: zip_path,
        filename: "test_pack.zip"
      }

      assert admin_conn
             |> put_req_header("content-type", "multipart/form-data")
             |> post("/api/pleroma/emoji/packs/download_zip", %{
               name: "test_zip_pack",
               file: upload
             })
             |> json_response_and_validate_schema(400) == %{
               "error" => "Pack already exists, refusing to import test_zip_pack"
             }

      # Clean up
      File.rm!(zip_path)
    end

    test "handles invalid ZIP file", %{admin_conn: admin_conn} do
      # Create invalid ZIP file
      invalid_zip_path = Path.join(System.tmp_dir!(), "invalid.zip")
      File.write!(invalid_zip_path, "not a zip file")

      upload = %Plug.Upload{
        content_type: "application/zip",
        path: invalid_zip_path,
        filename: "invalid.zip"
      }

      assert admin_conn
             |> put_req_header("content-type", "multipart/form-data")
             |> post("/api/pleroma/emoji/packs/download_zip", %{
               name: "test_invalid_pack",
               file: upload
             })
             |> json_response_and_validate_schema(400) == %{
               "error" => "Could not unzip pack"
             }

      # Clean up
      File.rm!(invalid_zip_path)
    end

    test "handles URL download failure", %{admin_conn: admin_conn} do
      mock(fn
        %{method: :get, url: "https://example.com/bad_pack.zip"} ->
          %Tesla.Env{status: 404, body: "Not found"}
      end)

      assert admin_conn
             |> put_req_header("content-type", "multipart/form-data")
             |> post("/api/pleroma/emoji/packs/download_zip", %{
               name: "test_bad_url_pack",
               url: "https://example.com/bad_pack.zip"
             })
             |> json_response_and_validate_schema(400) == %{
               "error" => "Could not download pack"
             }
    end

    test "requires either file or URL parameter", %{admin_conn: admin_conn} do
      assert admin_conn
             |> put_req_header("content-type", "multipart/form-data")
             |> post("/api/pleroma/emoji/packs/download_zip", %{
               name: "test_no_source_pack"
             })
             |> json_response_and_validate_schema(400) == %{
               "error" => "Neither file nor URL was present in the request"
             }
    end

    test "returns error when pack name is empty", %{admin_conn: admin_conn} do
      {:ok, zip_path} = create_test_emoji_zip()

      upload = %Plug.Upload{
        content_type: "application/zip",
        path: zip_path,
        filename: "test_pack.zip"
      }

      assert admin_conn
             |> put_req_header("content-type", "multipart/form-data")
             |> post("/api/pleroma/emoji/packs/download_zip", %{
               name: "",
               file: upload
             })
             |> json_response_and_validate_schema(400) == %{
               "error" => "Pack name cannot be empty"
             }

      # Clean up
      File.rm!(zip_path)
    end

    test "returns error when unable to create pack directory", %{
      admin_conn: admin_conn,
      emoji_path: emoji_path
    } do
      # Make the emoji directory read-only to trigger mkdir_p failure

      # Save original permissions
      {:ok, %{mode: original_mode}} = File.stat(emoji_path)

      # Make emoji directory read-only (no write permission)
      File.chmod!(emoji_path, 0o555)

      {:ok, zip_path} = create_test_emoji_zip()

      upload = %Plug.Upload{
        content_type: "application/zip",
        path: zip_path,
        filename: "test_pack.zip"
      }

      # Try to create a pack in the read-only emoji directory
      assert admin_conn
             |> put_req_header("content-type", "multipart/form-data")
             |> post("/api/pleroma/emoji/packs/download_zip", %{
               name: "test_readonly_pack",
               file: upload
             })
             |> json_response_and_validate_schema(400) == %{
               "error" => "Could not create the pack directory"
             }

      # Clean up - restore original permissions
      File.chmod!(emoji_path, original_mode)
      File.rm!(zip_path)
    end

    test "preserves existing pack.json if present in ZIP", %{
      admin_conn: admin_conn,
      emoji_path: emoji_path
    } do
      # Create ZIP with pack.json
      {:ok, zip_path} = create_test_emoji_zip_with_pack_json()

      upload = %Plug.Upload{
        content_type: "application/zip",
        path: zip_path,
        filename: "test_pack_with_json.zip"
      }

      assert admin_conn
             |> put_req_header("content-type", "multipart/form-data")
             |> post("/api/pleroma/emoji/packs/download_zip", %{
               name: "test_zip_pack_with_json",
               file: upload
             })
             |> json_response_and_validate_schema(200) == "ok"

      # Verify original pack.json was preserved
      {:ok, pack_json} = File.read("#{emoji_path}/test_zip_pack_with_json/pack.json")
      pack_data = Jason.decode!(pack_json)

      assert pack_data["pack"]["description"] == "Test pack from ZIP"
      assert pack_data["pack"]["license"] == "Test License"

      # Clean up
      File.rm!(zip_path)
    end

    test "rejects malicious pack names", %{admin_conn: admin_conn} do
      {:ok, zip_path} = create_test_emoji_zip()

      upload = %Plug.Upload{
        content_type: "application/zip",
        path: zip_path,
        filename: "test_pack.zip"
      }

      # Test path traversal attempts
      malicious_names = ["../evil", "../../evil", ".", "..", "evil/../../../etc"]

      Enum.each(malicious_names, fn name ->
        assert_raise RuntimeError, ~r/Invalid or malicious pack name/, fn ->
          admin_conn
          |> put_req_header("content-type", "multipart/form-data")
          |> post("/api/pleroma/emoji/packs/download_zip", %{
            name: name,
            file: upload
          })
        end
      end)

      # Clean up
      File.rm!(zip_path)
    end
  end

  defp create_test_emoji_zip do
    tmp_dir = System.tmp_dir!()
    zip_path = Path.join(tmp_dir, "test_emoji_pack_#{:rand.uniform(10000)}.zip")

    # 1x1 pixel PNG
    png_data =
      Base.decode64!(
        "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg=="
      )

    files = [
      {~c"test_emoji.png", png_data},
      # Will be treated as GIF based on extension
      {~c"another_emoji.gif", png_data}
    ]

    {:ok, {_name, zip_binary}} = :zip.zip(~c"test_pack.zip", files, [:memory])
    File.write!(zip_path, zip_binary)

    {:ok, zip_path}
  end

  defp create_test_emoji_zip_with_pack_json do
    tmp_dir = System.tmp_dir!()
    zip_path = Path.join(tmp_dir, "test_emoji_pack_json_#{:rand.uniform(10000)}.zip")

    png_data =
      Base.decode64!(
        "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg=="
      )

    pack_json =
      Jason.encode!(%{
        pack: %{
          description: "Test pack from ZIP",
          license: "Test License"
        },
        files: %{
          "test_emoji" => "test_emoji.png"
        }
      })

    files = [
      {~c"test_emoji.png", png_data},
      {~c"pack.json", pack_json}
    ]

    {:ok, {_name, zip_binary}} = :zip.zip(~c"test_pack.zip", files, [:memory])
    File.write!(zip_path, zip_binary)

    {:ok, zip_path}
  end
end
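A note on the two helpers above: they build the pack archive entirely in memory with Erlang's built-in :zip module, so no binary fixture has to be committed. As a rough sketch of the reverse direction (not part of this diff; the filename below is illustrative), the same module can unpack such an archive without touching disk:

# Sketch only: read a pack archive and list its entries in memory via :zip.
{:ok, zip_binary} = File.read("test_pack.zip")
{:ok, entries} = :zip.unzip(zip_binary, [:memory])

# With [:memory], entries is a list of {filename_charlist, binary} tuples.
Enum.each(entries, fn {name, data} ->
  IO.puts("#{name}: #{byte_size(data)} bytes")
end)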
@@ -7,16 +7,11 @@ defmodule Pleroma.Web.PleromaApi.InstancesControllerTest do
  alias Pleroma.Instances

-  setup_all do: clear_config([:instance, :federation_reachability_timeout_days], 1)
-
  setup do
    constant = "http://consistently-unreachable.name/"
-    eventual = "http://eventually-unreachable.com/path"

    {:ok, %Pleroma.Instances.Instance{unreachable_since: constant_unreachable}} =
-      Instances.set_consistently_unreachable(constant)
+      Instances.set_unreachable(constant)

-    _eventual_unreachable = Instances.set_unreachable(eventual)
-
    %{constant_unreachable: constant_unreachable, constant: constant}
  end
@@ -5,7 +5,8 @@
defmodule Pleroma.Web.Plugs.CacheTest do
  # Relies on Cachex, has to stay synchronous
  use Pleroma.DataCase
-  use Plug.Test
+  import Plug.Conn
+  import Plug.Test

  alias Pleroma.Web.Plugs.Cache
@@ -4,7 +4,8 @@
defmodule Pleroma.Web.Plugs.DigestPlugTest do
  use ExUnit.Case, async: true
-  use Plug.Test
+  import Plug.Conn
+  import Plug.Test

  test "digest algorithm is taken from digest header" do
    body = "{\"hello\": \"world\"}"
@@ -5,7 +5,8 @@
defmodule Pleroma.Web.Plugs.IdempotencyPlugTest do
  # Relies on Cachex, has to stay synchronous
  use Pleroma.DataCase
-  use Plug.Test
+  import Plug.Conn
+  import Plug.Test

  alias Pleroma.Web.Plugs.IdempotencyPlug
  alias Plug.Conn
@@ -4,7 +4,8 @@
defmodule Pleroma.Web.Plugs.RemoteIpTest do
  use ExUnit.Case
-  use Plug.Test
+  import Plug.Conn
+  import Plug.Test

  alias Pleroma.Web.Plugs.RemoteIp
@@ -4,7 +4,8 @@
defmodule Pleroma.Web.Plugs.SetFormatPlugTest do
  use ExUnit.Case, async: true
-  use Plug.Test
+  import Plug.Conn
+  import Plug.Test

  alias Pleroma.Web.Plugs.SetFormatPlug
@@ -4,7 +4,7 @@
defmodule Pleroma.Web.Plugs.SetLocalePlugTest do
  use ExUnit.Case, async: true
-  use Plug.Test
+  import Plug.Test

  alias Pleroma.Web.Plugs.SetLocalePlug
  alias Plug.Conn
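For context on the repeated `use Plug.Test` to `import Plug.Conn` / `import Plug.Test` change in the plug tests above: recent Plug releases deprecate `use Plug.Test` in favour of plain imports, which bring in the same test helpers. A minimal sketch of the resulting style (the route and header here are made up for illustration):

import Plug.Test
import Plug.Conn

# conn/2 comes from Plug.Test; put_req_header/3 comes from Plug.Conn.
conn =
  conn(:get, "/example")
  |> put_req_header("accept", "application/json")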
@@ -17,7 +17,7 @@ defmodule Pleroma.Workers.DeleteWorkerTest do
    user1 = insert(:user, nickname: "alice@example.com", name: "Alice")
    user2 = insert(:user, nickname: "bob@example.com", name: "Bob")

-    {:ok, job} = Instance.delete_users_and_activities("example.com")
+    {:ok, job} = Instance.delete("example.com")

    assert_enqueued(
      worker: DeleteWorker,
@@ -7,7 +7,9 @@ defmodule Pleroma.Workers.PublisherWorkerTest do
  use Oban.Testing, repo: Pleroma.Repo

  import Pleroma.Factory
+  import Mock

+  alias Pleroma.Instances
  alias Pleroma.Object
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Builder

@@ -37,4 +39,85 @@ defmodule Pleroma.Workers.PublisherWorkerTest do
      assert {:ok, %Oban.Job{priority: 0}} = Federator.publish(post)
    end
  end
+
+  describe "Server reachability:" do
+    setup do
+      user = insert(:user)
+      remote_user = insert(:user, local: false, inbox: "https://example.com/inbox")
+      {:ok, _, _} = Pleroma.User.follow(remote_user, user)
+      {:ok, activity} = CommonAPI.post(user, %{status: "Test post"})
+
+      %{
+        user: user,
+        remote_user: remote_user,
+        activity: activity
+      }
+    end
+
+    test "marks server as unreachable only on final failure", %{activity: activity} do
+      with_mock Pleroma.Web.Federator,
+        perform: fn :publish_one, _params -> {:error, :connection_error} end do
+        # First attempt
+        job = %Oban.Job{
+          args: %{
+            "op" => "publish_one",
+            "params" => %{
+              "inbox" => "https://example.com/inbox",
+              "activity_id" => activity.id
+            }
+          },
+          attempt: 1,
+          max_attempts: 5
+        }
+
+        assert {:error, :connection_error} = Pleroma.Workers.PublisherWorker.perform(job)
+        assert Instances.reachable?("https://example.com/inbox")
+
+        # Final attempt
+        job = %{job | attempt: 5}
+        assert {:error, :connection_error} = Pleroma.Workers.PublisherWorker.perform(job)
+        refute Instances.reachable?("https://example.com/inbox")
+      end
+    end
+
+    test "does not mark server as unreachable on successful publish", %{activity: activity} do
+      with_mock Pleroma.Web.Federator,
+        perform: fn :publish_one, _params -> {:ok, %{status: 200}} end do
+        job = %Oban.Job{
+          args: %{
+            "op" => "publish_one",
+            "params" => %{
+              "inbox" => "https://example.com/inbox",
+              "activity_id" => activity.id
+            }
+          },
+          attempt: 1,
+          max_attempts: 5
+        }
+
+        assert :ok = Pleroma.Workers.PublisherWorker.perform(job)
+        assert Instances.reachable?("https://example.com/inbox")
+      end
+    end
+
+    test "cancels job if server is unreachable", %{activity: activity} do
+      # First mark the server as unreachable
+      Instances.set_unreachable("https://example.com/inbox")
+      refute Instances.reachable?("https://example.com/inbox")
+
+      job = %Oban.Job{
+        args: %{
+          "op" => "publish_one",
+          "params" => %{
+            "inbox" => "https://example.com/inbox",
+            "activity_id" => activity.id
+          }
+        },
+        attempt: 1,
+        max_attempts: 5
+      }
+
+      assert {:cancel, :unreachable} = Pleroma.Workers.PublisherWorker.perform(job)
+    end
+  end
end
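The three new tests above pin down attempt-aware behaviour: intermediate failures leave the instance reachable, only the last allowed attempt marks it unreachable, and deliveries to an already-unreachable inbox are cancelled. A rough sketch of that decision logic, assuming nothing beyond what the tests exercise (the actual PublisherWorker implementation is not shown in this diff, and the module and function names below are illustrative):

# Hypothetical sketch, not the code under test.
defmodule PublishRetrySketch do
  alias Pleroma.Instances

  # Skip delivery outright when the target is already known to be down.
  def precheck(inbox) do
    if Instances.reachable?(inbox), do: :ok, else: {:cancel, :unreachable}
  end

  # Only the final allowed attempt marks the instance as unreachable.
  def handle_failure(%Oban.Job{attempt: attempt, max_attempts: max}, inbox, reason) do
    if attempt >= max, do: Instances.set_unreachable(inbox)
    {:error, reason}
  end
end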
test/pleroma/workers/reachability_worker_test.exs (new file, 226 lines)

@@ -0,0 +1,226 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.ReachabilityWorkerTest do
  use Pleroma.DataCase, async: true
  use Oban.Testing, repo: Pleroma.Repo

  import Mock

  alias Pleroma.Tests.ObanHelpers
  alias Pleroma.Workers.ReachabilityWorker

  setup do
    ObanHelpers.wipe_all()
    :ok
  end

  describe "progressive backoff phases" do
    test "starts with phase_1min and progresses through phases on failure" do
      domain = "example.com"

      with_mocks([
        {Pleroma.HTTP, [], [get: fn _ -> {:error, :timeout} end]},
        {Pleroma.Instances, [], [set_reachable: fn _ -> :ok end]}
      ]) do
        # Start with phase_1min
        job = %Oban.Job{
          args: %{"domain" => domain, "phase" => "phase_1min", "attempt" => 1}
        }

        # First attempt fails
        assert {:error, :timeout} = ReachabilityWorker.perform(job)

        # Should schedule retry for phase_1min (attempt 2)
        retry_jobs = all_enqueued(worker: ReachabilityWorker)
        assert length(retry_jobs) == 1
        [retry_job] = retry_jobs
        assert retry_job.args["phase"] == "phase_1min"
        assert retry_job.args["attempt"] == 2

        # Clear jobs and simulate second attempt failure
        ObanHelpers.wipe_all()

        retry_job = %Oban.Job{
          args: %{"domain" => domain, "phase" => "phase_1min", "attempt" => 2}
        }

        assert {:error, :timeout} = ReachabilityWorker.perform(retry_job)

        # Should schedule retry for phase_1min (attempt 3)
        retry_jobs = all_enqueued(worker: ReachabilityWorker)
        assert length(retry_jobs) == 1
        [retry_job] = retry_jobs
        assert retry_job.args["phase"] == "phase_1min"
        assert retry_job.args["attempt"] == 3

        # Clear jobs and simulate third attempt failure (final attempt for phase_1min)
        ObanHelpers.wipe_all()

        retry_job = %Oban.Job{
          args: %{"domain" => domain, "phase" => "phase_1min", "attempt" => 3}
        }

        assert {:error, :timeout} = ReachabilityWorker.perform(retry_job)

        # Should schedule retry for phase_1min (attempt 4)
        retry_jobs = all_enqueued(worker: ReachabilityWorker)
        assert length(retry_jobs) == 1
        [retry_job] = retry_jobs
        assert retry_job.args["phase"] == "phase_1min"
        assert retry_job.args["attempt"] == 4

        # Clear jobs and simulate fourth attempt failure (final attempt for phase_1min)
        ObanHelpers.wipe_all()

        retry_job = %Oban.Job{
          args: %{"domain" => domain, "phase" => "phase_1min", "attempt" => 4}
        }

        assert {:error, :timeout} = ReachabilityWorker.perform(retry_job)

        # Should schedule next phase (phase_15min)
        next_phase_jobs = all_enqueued(worker: ReachabilityWorker)
        assert length(next_phase_jobs) == 1
        [next_phase_job] = next_phase_jobs
        assert next_phase_job.args["phase"] == "phase_15min"
        assert next_phase_job.args["attempt"] == 1
      end
    end

    test "progresses through all phases correctly" do
      domain = "example.com"

      with_mocks([
        {Pleroma.HTTP, [], [get: fn _ -> {:error, :timeout} end]},
        {Pleroma.Instances, [], [set_reachable: fn _ -> :ok end]}
      ]) do
        # Simulate all phases failing
        phases = ["phase_1min", "phase_15min", "phase_1hour", "phase_8hour", "phase_24hour"]

        Enum.each(phases, fn phase ->
          {_interval, max_attempts, next_phase} = get_phase_config(phase)

          # Simulate all attempts failing for this phase
          Enum.each(1..max_attempts, fn attempt ->
            job = %Oban.Job{args: %{"domain" => domain, "phase" => phase, "attempt" => attempt}}
            assert {:error, :timeout} = ReachabilityWorker.perform(job)

            if attempt < max_attempts do
              # Should schedule retry for same phase
              retry_jobs = all_enqueued(worker: ReachabilityWorker)
              assert length(retry_jobs) == 1
              [retry_job] = retry_jobs
              assert retry_job.args["phase"] == phase
              assert retry_job.args["attempt"] == attempt + 1
              ObanHelpers.wipe_all()
            else
              # Should schedule next phase (except for final phase)
              if next_phase != "final" do
                next_phase_jobs = all_enqueued(worker: ReachabilityWorker)
                assert length(next_phase_jobs) == 1
                [next_phase_job] = next_phase_jobs
                assert next_phase_job.args["phase"] == next_phase
                assert next_phase_job.args["attempt"] == 1
                ObanHelpers.wipe_all()
              else
                # Final phase - no more jobs should be scheduled
                next_phase_jobs = all_enqueued(worker: ReachabilityWorker)
                assert length(next_phase_jobs) == 0
              end
            end
          end)
        end)
      end
    end

    test "succeeds and stops progression when instance becomes reachable" do
      domain = "example.com"

      with_mocks([
        {Pleroma.HTTP, [], [get: fn _ -> {:ok, %{status: 200}} end]},
        {Pleroma.Instances, [], [set_reachable: fn _ -> :ok end]}
      ]) do
        job = %Oban.Job{args: %{"domain" => domain, "phase" => "phase_1hour", "attempt" => 2}}

        # Should succeed and not schedule any more jobs
        assert :ok = ReachabilityWorker.perform(job)

        # Verify set_reachable was called
        assert_called(Pleroma.Instances.set_reachable("https://#{domain}"))

        # No more jobs should be scheduled
        next_jobs = all_enqueued(worker: ReachabilityWorker)
        assert length(next_jobs) == 0
      end
    end

    test "enforces uniqueness per domain using Oban's conflict detection" do
      domain = "example.com"

      # Insert first job for the domain
      job1 =
        %{
          "domain" => domain,
          "phase" => "phase_1min",
          "attempt" => 1
        }
        |> ReachabilityWorker.new()
        |> Oban.insert()

      assert {:ok, _} = job1

      # Try to insert a second job for the same domain with different phase/attempt
      job2 =
        %{
          "domain" => domain,
          "phase" => "phase_15min",
          "attempt" => 1
        }
        |> ReachabilityWorker.new()
        |> Oban.insert()

      # Should fail due to uniqueness constraint (conflict)
      assert {:ok, %Oban.Job{conflict?: true}} = job2

      # Verify only one job exists for this domain
      jobs = all_enqueued(worker: ReachabilityWorker)
      assert length(jobs) == 1
      [existing_job] = jobs
      assert existing_job.args["domain"] == domain
      assert existing_job.args["phase"] == "phase_1min"
    end

    test "handles new jobs with only domain argument and transitions them to the first phase" do
      domain = "legacy.example.com"

      with_mocks([
        {Pleroma.Instances, [], [set_reachable: fn _ -> :ok end]}
      ]) do
        # Create a job with only domain (legacy format)
        job = %Oban.Job{
          args: %{"domain" => domain}
        }

        # Should reschedule with phase_1min and attempt 1
        assert :ok = ReachabilityWorker.perform(job)

        # Check that a new job was scheduled with the correct format
        scheduled_jobs = all_enqueued(worker: ReachabilityWorker)
        assert length(scheduled_jobs) == 1
        [scheduled_job] = scheduled_jobs
        assert scheduled_job.args["domain"] == domain
        assert scheduled_job.args["phase"] == "phase_1min"
        assert scheduled_job.args["attempt"] == 1
      end
    end
  end

  defp get_phase_config("phase_1min"), do: {1, 4, "phase_15min"}
  defp get_phase_config("phase_15min"), do: {15, 4, "phase_1hour"}
  defp get_phase_config("phase_1hour"), do: {60, 4, "phase_8hour"}
  defp get_phase_config("phase_8hour"), do: {480, 4, "phase_24hour"}
  defp get_phase_config("phase_24hour"), do: {1440, 4, "final"}
  defp get_phase_config("final"), do: {nil, 0, nil}
end
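The get_phase_config/1 helper at the bottom of this new test mirrors the escalation ladder the worker is expected to walk: four attempts each at 1-minute, 15-minute, 1-hour, 8-hour and 24-hour spacing, then give up. As a small sketch of the scheduling decision these tests imply (assumptions only; the module and function names below are illustrative, not the worker's real API):

# Hypothetical sketch of the retry/phase transition implied by the tests above.
defmodule ReachabilitySketch do
  @phases %{
    "phase_1min" => {1, 4, "phase_15min"},
    "phase_15min" => {15, 4, "phase_1hour"},
    "phase_1hour" => {60, 4, "phase_8hour"},
    "phase_8hour" => {480, 4, "phase_24hour"},
    "phase_24hour" => {1440, 4, "final"}
  }

  # Given the args of a failed check, return the args of the next one,
  # or :give_up once the final phase has exhausted its attempts.
  def next_args(%{"domain" => domain, "phase" => phase, "attempt" => attempt}) do
    {_interval_minutes, max_attempts, next_phase} = Map.fetch!(@phases, phase)

    cond do
      attempt < max_attempts ->
        %{"domain" => domain, "phase" => phase, "attempt" => attempt + 1}

      next_phase != "final" ->
        %{"domain" => domain, "phase" => next_phase, "attempt" => 1}

      true ->
        :give_up
    end
  end
end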
@@ -3,13 +3,14 @@
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.ReceiverWorkerTest do
-  use Pleroma.DataCase
+  use Pleroma.DataCase, async: true
  use Oban.Testing, repo: Pleroma.Repo

  import Mock
  import Pleroma.Factory

  alias Pleroma.User
+  alias Pleroma.Web.CommonAPI
  alias Pleroma.Web.Federator
  alias Pleroma.Workers.ReceiverWorker

@@ -243,4 +244,62 @@ defmodule Pleroma.Workers.ReceiverWorkerTest do

    assert {:cancel, _} = ReceiverWorker.perform(oban_job)
  end
+
+  describe "Server reachability:" do
+    setup do
+      user = insert(:user)
+      remote_user = insert(:user, local: false, ap_id: "https://example.com/users/remote")
+      {:ok, _, _} = Pleroma.User.follow(user, remote_user)
+      {:ok, activity} = CommonAPI.post(remote_user, %{status: "Test post"})
+
+      %{
+        user: user,
+        remote_user: remote_user,
+        activity: activity
+      }
+    end
+
+    test "schedules ReachabilityWorker if host is unreachable", %{activity: activity} do
+      with_mocks [
+        {Pleroma.Web.ActivityPub.Transmogrifier, [],
+         [handle_incoming: fn _ -> {:ok, activity} end]},
+        {Pleroma.Instances, [], [reachable?: fn _ -> false end]},
+        {Pleroma.Web.Federator, [], [perform: fn :incoming_ap_doc, _params -> {:ok, nil} end]}
+      ] do
+        job = %Oban.Job{
+          args: %{
+            "op" => "incoming_ap_doc",
+            "params" => activity.data
+          }
+        }
+
+        Pleroma.Workers.ReceiverWorker.perform(job)
+
+        assert_enqueued(
+          worker: Pleroma.Workers.ReachabilityWorker,
+          args: %{"domain" => "example.com"}
+        )
+      end
+    end
+
+    test "does not schedule ReachabilityWorker if host is reachable", %{activity: activity} do
+      with_mocks [
+        {Pleroma.Web.ActivityPub.Transmogrifier, [],
+         [handle_incoming: fn _ -> {:ok, activity} end]},
+        {Pleroma.Instances, [], [reachable?: fn _ -> true end]},
+        {Pleroma.Web.Federator, [], [perform: fn :incoming_ap_doc, _params -> {:ok, nil} end]}
+      ] do
+        job = %Oban.Job{
+          args: %{
+            "op" => "incoming_ap_doc",
+            "params" => activity.data
+          }
+        }
+
+        Pleroma.Workers.ReceiverWorker.perform(job)
+
+        refute_enqueued(worker: Pleroma.Workers.ReachabilityWorker)
+      end
+    end
+  end
end
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.RemoteFetcherWorkerTest do
-  use Pleroma.DataCase
+  use Pleroma.DataCase, async: true
  use Oban.Testing, repo: Pleroma.Repo

  alias Pleroma.Workers.RemoteFetcherWorker