Merge remote-tracking branch 'origin/develop' into shigusegubu

commit 74cda5b78f
205 changed files with 4280 additions and 1037 deletions
.gitlab-ci.yml
@@ -2,7 +2,7 @@ image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.14.5-otp-25

 variables: &global_variables
   # Only used for the release
-  ELIXIR_VER: 1.14.5
+  ELIXIR_VER: 1.17.3
   POSTGRES_DB: pleroma_test
   POSTGRES_USER: postgres
   POSTGRES_PASSWORD: postgres

@@ -272,7 +272,8 @@ stop_review_app:

 amd64:
   stage: release
-  image: elixir:$ELIXIR_VER
+  image:
+    name: hexpm/elixir-amd64:1.17.3-erlang-26.2.5.6-ubuntu-focal-20241011
   only: &release-only
   - stable@pleroma/pleroma
   - develop@pleroma/pleroma

@@ -297,8 +298,9 @@ amd64:
   variables: &release-variables
     MIX_ENV: prod
+    VIX_COMPILATION_MODE: PLATFORM_PROVIDED_LIBVIPS
     DEBIAN_FRONTEND: noninteractive
   before_script: &before-release
-  - apt-get update && apt-get install -y cmake libmagic-dev libvips-dev erlang-dev
+  - apt-get update && apt-get install -y cmake libmagic-dev libvips-dev erlang-dev git
   - echo "import Config" > config/prod.secret.exs
   - mix local.hex --force
   - mix local.rebar --force

@@ -313,7 +315,8 @@ amd64-musl:
   stage: release
   artifacts: *release-artifacts
   only: *release-only
-  image: elixir:$ELIXIR_VER-alpine
+  image:
+    name: hexpm/elixir-amd64:1.17.3-erlang-26.2.5.6-alpine-3.17.9
   tags:
   - amd64
   cache: *release-cache

@@ -327,6 +330,7 @@ amd64-musl:

 arm:
   stage: release
+  allow_failure: true
   artifacts: *release-artifacts
   only: *release-only
   tags:

@@ -355,7 +359,8 @@ arm64:
   only: *release-only
   tags:
   - arm
-  image: arm64v8/elixir:$ELIXIR_VER
+  image:
+    name: hexpm/elixir-arm64:1.17.3-erlang-26.2.5.6-ubuntu-focal-20241011
   cache: *release-cache
   variables: *release-variables
   before_script: *before-release

@@ -367,7 +372,8 @@ arm64-musl:
   only: *release-only
   tags:
   - arm
-  image: arm64v8/elixir:$ELIXIR_VER-alpine
+  image:
+    name: hexpm/elixir-arm64:1.17.3-erlang-26.2.5.6-alpine-3.17.9
   cache: *release-cache
   variables: *release-variables
   before_script: *before-release-musl
CHANGELOG.md (+103)
@@ -4,6 +4,109 @@ All notable changes to this project will be documented in this file.

 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

+## 2.9.1
+
+### Security
+- Fix authorization checks for C2S Update activities to prevent unauthorized modifications of other users' content.
+- Fix content-type spoofing vulnerability that could allow users to upload ActivityPub objects as attachments
+- Reject cross-domain redirects when fetching ActivityPub objects to prevent bypassing domain-based security controls.
+- Limit emoji shortcodes to alphanumeric, dash, or underscore characters to prevent potential abuse.
+- Block attempts to fetch activities from the local instance to prevent spoofing.
+- Sanitize Content-Type headers in media proxy to prevent serving malicious ActivityPub content through proxied media.
+- Validate Content-Type headers when fetching remote ActivityPub objects to prevent spoofing attacks.
+
+### Changed
+- Include `pl-fe` in available frontends
+
+### Fixed
+- Remove trailing ` from end of line 75 which caused issues copy-pasting
+
+## 2.9.0
+
+### Security
+- Require HTTP signatures (if enabled) for routes used by both C2S and S2S AP API
+- Fix several spoofing vectors
+
+### Changed
+- Performance: Use 301 (permanent) redirect instead of 302 (temporary) when redirecting small images in media proxy. This allows browsers to cache the redirect response.
+
+### Added
+- Include "published" in actor view
+- Link to exported outbox/followers/following collections in backup actor.json
+- Hashtag following
+- Allow to specify post language
+
+### Fixed
+- Verify a local Update sent through AP C2S so users can only update their own objects
+- Fix Mastodon incoming edits with inlined "likes"
+- Allow incoming "Listen" activities
+- Fix missing check for domain presence in rich media ignore_host configuration
+- Fix Rich Media parsing of TwitterCards/OpenGraph to adhere to the spec and always choose the first image if multiple are provided.
+- Fix OpenGraph/TwitterCard meta tag ordering for posts with multiple attachments
+- Fix blurhash generation crashes
+
+### Removed
+- Retire MRFs DNSRBL, FODirectReply, and QuietReply
+
+## 2.8.0
+
+### Changed
+- Metadata: Do not include .atom feed links for remote accounts
+- Bumped `fast_html` to v2.3.0, which notably allows to use system-installed lexbor with passing `WITH_SYSTEM_LEXBOR=1` environment variable at build-time
+- Dedupe upload filter now uses a three-level sharding directory structure
+- Deprecate `/api/v1/pleroma/accounts/:id/subscribe`/`unsubscribe`
+- Restrict incoming activities from unknown actors to a subset that does not imply a previous relationship and early rejection of unrecognized activity types.
+- Elixir 1.14 and Erlang/OTP 23 is now the minimum supported release
+- Support `id` param in `GET /api/v1/statuses`
+- LDAP authentication has been refactored to operate as a GenServer process which will maintain an active connection to the LDAP server.
+- Fix 'Setting a marker should mark notifications as read'
+- Adjust more Oban workers to enforce unique job constraints.
+- Oban updated to 2.18.3
+- Publisher behavior improvement when snoozing Oban jobs due to Gun connection pool contention.
+- Poll results refreshing is handled asynchronously and will not attempt to keep fetching updates to a closed poll.
+- Tuning for release builds to lower CPU usage.
+- Rich Media preview fetching will skip making an HTTP HEAD request to check a URL for allowed content type and length if the Tesla adapter is Gun or Finch
+- Fix nonexisting user will not generate metadata for search engine opt-out
+- Update Oban to 2.18
+- Worker configuration is no longer available. This only affects custom max_retries values for a couple Oban queues.
+
+### Added
+- Add metadata provider for ActivityPub alternate links
+- Added support for argon2 passwords and their conversion for migration from Akkoma fork to upstream.
+- Respect :restrict_unauthenticated for hashtag rss/atom feeds
+- LDAP configuration now permits overriding the CA root certificate file for TLS validation.
+- LDAP now supports users changing their passwords
+- Include list id in StatusView
+- Added MRF.FODirectReply which changes replies to followers-only posts to be direct.
+- Add `id_filter` to MRF to filter URLs and their domain prior to fetching
+- Added MRF.QuietReply which prevents replies to public posts from being published to the timelines
+- Add `group_key` to notifications
+- Allow providing avatar/header descriptions
+- Added RemoteReportPolicy from Rebased for handling bogus federated reports
+- scrubbers/default: Allow "mention hashtag" classes used by Mastodon
+- Added dependencies for Swoosh's Mua mail adapter
+- Include session scopes in TokenView
+
+### Fixed
+- Verify a local Update sent through AP C2S so users can only update their own objects
+- Fixed malformed follow requests that cause them to appear stuck pending due to the recipient being unable to process them.
+- Fix incoming Block activities being rejected
+- STARTTLS certificate and hostname verification for LDAP authentication
+- LDAPS connections (implicit TLS) are now supported.
+- Fix /api/v2/media returning the wrong status code (202) for media processed synchronously
+- Miscellaneous fixes for Meilisearch support
+- Fix pleroma_ctl mix task calls sometimes not being found
+- Add a rate limiter to the OAuth App creation endpoint and ensure registered apps are assigned to users.
+- ReceiverWorker will cancel processing jobs instead of retrying if the user cannot be fetched due to 403, 404, or 410 errors or if the account is disabled locally.
+- Address case where instance reachability status couldn't be updated
+- Remote Fetcher Worker recognizes more permanent failure errors
+- StreamerView: Do not leak follows count if hidden
+- Imports of blocks, mutes, and follows would retry repeatedly due to incorrect error handling and all work executed in a single job
+- Make vapid_config return empty array, fixing preloading for instances without push notifications configured
+
+### Removed
+- Remove stub for /api/v1/accounts/:id/identity_proofs (deprecated by Mastodon 3.5.0)
+
 ## 2.7.1

 ### Changed
changelog.d: 49 one-line entry files deleted (each with hunk `@@ -1 +0,0 @@`); their contents were folded into the CHANGELOG.md entries above. The deleted lines, verbatim:
-Add metadata provider for ActivityPub alternate links
-Added support for argon2 passwords and their conversion for migration from Akkoma fork to upstream.
-Metadata: Do not include .atom feed links for remote accounts
-- Bumped `fast_html` to v2.3.0, which notably allows to use system-installed lexbor with passing `WITH_SYSTEM_LEXBOR=1` environment variable at build-time
-Fixed a formatting issue that had a required commend embedded in a textblock, and change the language to make it a bit more idiomatic.
-Dedupe upload filter now uses a three-level sharding directory structure
-Deprecate `/api/v1/pleroma/accounts/:id/subscribe`/`unsubscribe`
-Restrict incoming activities from unknown actors to a subset that does not imply a previous relationship and early rejection of unrecognized activity types.
-Elixir 1.14 and Erlang/OTP 23 is now the minimum supported release
-Fixed malformed follow requests that cause them to appear stuck pending due to the recipient being unable to process them.
-Support `id` param in `GET /api/v1/statuses`
-Repesct :restrict_unauthenticated for hashtag rss/atom feeds
-Remove stub for /api/v1/accounts/:id/identity_proofs (deprecated by Mastodon 3.5.0)
-Fix incoming Block activities being rejected
-LDAP configuration now permits overriding the CA root certificate file for TLS validation.
-LDAP now supports users changing their passwords
-LDAP authentication has been refactored to operate as a GenServer process which will maintain an active connection to the LDAP server.
-STARTTLS certificate and hostname verification for LDAP authentication
-LDAPS connections (implicit TLS) are now supported.
-Include list id in StatusView
-Fix /api/v2/media returning the wrong status code (202) for media processed synchronously
-Miscellaneous fixes for Meilisearch support
-Fix pleroma_ctl mix task calls sometimes not being found
-Added MRF.FODirectReply which changes replies to followers-only posts to be direct.
-Add `id_filter` to MRF to filter URLs and their domain prior to fetching
-Added MRF.QuietReply which prevents replies to public posts from being published to the timelines
-Add `group_key` to notifications
-Fix 'Setting a marker should mark notifications as read'
-Add a rate limiter to the OAuth App creation endpoint and ensure registered apps are assigned to users.
-ReceiverWorker will cancel processing jobs instead of retrying if the user cannot be fetched due to 403, 404, or 410 errors or if the account is disabled locally.
-Adjust more Oban workers to enforce unique job constraints.
-Oban updated to 2.18.3
-Publisher behavior improvement when snoozing Oban jobs due to Gun connection pool contention.
-Poll results refreshing is handled asynchronously and will not attempt to keep fetching updates to a closed poll.
-Allow providing avatar/header descriptions
-Address case where instance reachability status couldn't be updated
-Tuning for release builds to lower CPU usage.
-Remote Fetcher Worker recognizes more permanent failure errors
-Added RemoteReportPolicy from Rebased for handling bogus federated reports
-Rich Media preview fetching will skip making an HTTP HEAD request to check a URL for allowed content type and length if the Tesla adapter is Gun or Finch
-scrubbers/default: Allow "mention hashtag" classes used by Mastodon
-Fix nonexisting user will not generate metadata for search engine opt-out
-StreamerView: Do not leak follows count if hidden
-Added dependencies for Swoosh's Mua mail adapter
-Include session scopes in TokenView
-Update Oban to 2.18
-Imports of blocks, mutes, and follows would retry repeatedly due to incorrect error handling and all work executed in a single job
-Make vapid_config return empty array, fixing preloading for instances without push notifications configured
-Worker configuration is no longer available. This only affects custom max_retries values for a couple Oban queues.
config/config.exs
@@ -66,6 +66,7 @@ config :pleroma, Pleroma.Upload,
   filename_display_max_length: 30,
   default_description: :filename,
-  base_url: nil
+  base_url: nil,
+  allowed_mime_types: ["image", "audio", "video"]

 config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads"

@@ -150,7 +151,10 @@ config :mime, :types, %{
   "application/xrd+xml" => ["xrd+xml"],
   "application/jrd+json" => ["jrd+json"],
   "application/activity+json" => ["activity+json"],
-  "application/ld+json" => ["activity+json"]
+  "application/ld+json" => ["activity+json"],
+  # Can be removed when bumping MIME past 2.0.5
+  # see https://akkoma.dev/AkkomaGang/akkoma/issues/657
+  "image/apng" => ["apng"]
 }

 config :tesla, adapter: Tesla.Adapter.Hackney
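Note: the `image/apng` entry is a stopgap until the MIME dependency ships the mapping itself. A quick sketch of its effect; custom `:mime` types only take effect after the dependency is recompiled (e.g. `mix deps.clean mime --build`):

```elixir
# With "image/apng" => ["apng"] added under config :mime, :types,
# the mapping resolves in both directions:
MIME.type("apng")
#=> "image/apng"

MIME.extensions("image/apng")
#=> ["apng"]

# Without the override, MIME falls back to the generic default:
# MIME.type("apng") #=> "application/octet-stream"
```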
@@ -357,7 +361,8 @@ config :pleroma, :activitypub,
   follow_handshake_timeout: 500,
   note_replies_output_limit: 5,
   sign_object_fetches: true,
-  authorized_fetch_mode: false
+  authorized_fetch_mode: false,
+  client_api_enabled: false

 config :pleroma, :streamer,
   workers: 3,

@@ -426,11 +431,6 @@ config :pleroma, :mrf_vocabulary,
   accept: [],
   reject: []

-config :pleroma, :mrf_dnsrbl,
-  nameserver: "127.0.0.1",
-  port: 53,
-  zone: "bl.pleroma.com"
-
 # threshold of 7 days
 config :pleroma, :mrf_object_age,
   threshold: 604_800,

@@ -820,6 +820,13 @@ config :pleroma, :frontends,
       "https://lily-is.land/infra/glitch-lily/-/jobs/artifacts/${ref}/download?job=build",
     "ref" => "servant",
     "build_dir" => "public"
-  }
+  },
+  "pl-fe" => %{
+    "name" => "pl-fe",
+    "git" => "https://github.com/mkljczk/pl-fe",
+    "build_url" => "https://pl.mkljczk.pl/pl-fe.zip",
+    "ref" => "develop",
+    "build_dir" => "."
+  }
 }
config/description.exs
@@ -117,6 +117,19 @@ config :pleroma, :config_description, [
        key: :filename_display_max_length,
        type: :integer,
        description: "Set max length of a filename to display. 0 = no limit. Default: 30"
+      },
+      %{
+        key: :allowed_mime_types,
+        label: "Allowed MIME types",
+        type: {:list, :string},
+        description:
+          "List of MIME (main) types uploads are allowed to identify themselves with. Other types may still be uploaded, but will identify as a generic binary to clients. WARNING: Loosening this over the defaults can lead to security issues. Removing types is safe, but only add to the list if you are sure you know what you are doing.",
+        suggestions: [
+          "image",
+          "audio",
+          "video",
+          "font"
+        ]
       }
     ]
   },

@@ -1772,6 +1785,11 @@ config :pleroma, :config_description, [
        type: :integer,
        description: "Following handshake timeout",
        suggestions: [500]
+      },
+      %{
+        key: :client_api_enabled,
+        type: :boolean,
+        description: "Allow client to server ActivityPub interactions"
       }
     ]
   },

@@ -3302,8 +3320,7 @@ config :pleroma, :config_description, [
       suggestions: [
         Pleroma.Web.Preload.Providers.Instance,
         Pleroma.Web.Preload.Providers.User,
-        Pleroma.Web.Preload.Providers.Timelines,
-        Pleroma.Web.Preload.Providers.StatusNet
+        Pleroma.Web.Preload.Providers.Timelines
       ]
     }
   ]
config/test.exs
@@ -38,7 +38,10 @@ config :pleroma, :instance,
   external_user_synchronization: false,
   static_dir: "test/instance_static/"

-config :pleroma, :activitypub, sign_object_fetches: false, follow_handshake_timeout: 0
+config :pleroma, :activitypub,
+  sign_object_fetches: false,
+  follow_handshake_timeout: 0,
+  client_api_enabled: true

 # Configure your database
 config :pleroma, Pleroma.Repo,

@@ -144,6 +147,7 @@ config :pleroma, Pleroma.Search.Meilisearch, url: "http://127.0.0.1:7700/", priv
 config :phoenix, :plug_init_mode, :runtime

 config :pleroma, :config_impl, Pleroma.UnstubbedConfigMock
+config :pleroma, :datetime_impl, Pleroma.DateTimeMock

 config :pleroma, Pleroma.PromEx, disabled: true

@@ -158,6 +162,12 @@ config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMoc
 config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, config_impl: Pleroma.StaticStubbedConfigMock
 config :pleroma, Pleroma.Web.Plugs.HTTPSignaturePlug, config_impl: Pleroma.StaticStubbedConfigMock

+config :pleroma, Pleroma.Upload.Filter.AnonymizeFilename,
+  config_impl: Pleroma.StaticStubbedConfigMock
+
+config :pleroma, Pleroma.Upload.Filter.Mogrify, config_impl: Pleroma.StaticStubbedConfigMock
+config :pleroma, Pleroma.Upload.Filter.Mogrify, mogrify_impl: Pleroma.MogrifyMock
+
 config :pleroma, Pleroma.Signature, http_signatures_impl: Pleroma.StubbedHTTPSignaturesMock

 peer_module =
docs/configuration/cheatsheet.md
@@ -98,7 +98,7 @@ To add configuration to your config file, you can copy it from the base config.
 * `moderator_privileges`: A list of privileges a moderator has (e.g. delete messages, manage reports...)
   * Possible values are the same as for `admin_privileges`

-## :database
+## :features
 * `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).

 ## Background migrations
docs (source install guide)
@@ -72,7 +72,7 @@ sudo -Hu pleroma mix deps.get
 * Generate the configuration:

 ```shell
-sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen`
+sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen
 ```

 * During this process:
lib/mix/tasks/pleroma/emoji.ex
@@ -93,6 +93,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
       )

       files = fetch_and_decode!(files_loc)
+      files_to_unzip = for({_, f} <- files, do: f)

       IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))

@@ -103,17 +104,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
         pack_name
       ])

-      files_to_unzip =
-        Enum.map(
-          files,
-          fn {_, f} -> to_charlist(f) end
-        )
-
-      {:ok, _} =
-        :zip.unzip(binary_archive,
-          cwd: String.to_charlist(pack_path),
-          file_list: files_to_unzip
-        )
+      {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, pack_path, files_to_unzip)

       IO.puts(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name]))

@@ -201,7 +192,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do

       tmp_pack_dir = Path.join(System.tmp_dir!(), "emoji-pack-#{name}")

-      {:ok, _} = :zip.unzip(binary_archive, cwd: String.to_charlist(tmp_pack_dir))
+      {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, tmp_pack_dir)

       emoji_map = Pleroma.Emoji.Loader.make_shortcode_to_file_map(tmp_pack_dir, exts)
lib/pleroma/config.ex
@@ -27,6 +27,7 @@ defmodule Pleroma.Config do
     Application.get_env(:pleroma, key, default)
   end

+  @impl true
   def get!(key) do
     value = get(key, nil)
lib/pleroma/config/getting.ex
@@ -5,10 +5,13 @@
 defmodule Pleroma.Config.Getting do
   @callback get(any()) :: any()
   @callback get(any(), any()) :: any()
+  @callback get!(any()) :: any()

   def get(key), do: get(key, nil)
   def get(key, default), do: impl().get(key, default)

+  def get!(key), do: impl().get!(key)
+
   def impl do
     Application.get_env(:pleroma, :config_impl, Pleroma.Config)
   end
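Note: the `impl()` indirection is what lets config/test.exs swap in `Pleroma.UnstubbedConfigMock` (see above). A minimal sketch of exercising that seam, assuming the mock is defined with Mox against this behaviour, as Pleroma's test helpers do; the test module name is hypothetical:

```elixir
# Normally in test/test_helper.exs:
Mox.defmock(Pleroma.UnstubbedConfigMock, for: Pleroma.Config.Getting)

defmodule MyFeatureTest do
  use ExUnit.Case, async: true
  import Mox

  setup :verify_on_exit!

  test "reads config through the injected implementation" do
    # Config.Getting.get/2 dispatches to impl().get/2, which in the
    # test env resolves to the mock via :config_impl.
    expect(Pleroma.UnstubbedConfigMock, :get, fn [:instance, :name], _default ->
      "test instance"
    end)

    assert Pleroma.Config.Getting.get([:instance, :name], nil) == "test instance"
  end
end
```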
lib/pleroma/constants.ex
@@ -20,7 +20,8 @@ defmodule Pleroma.Constants do
       "deleted_activity_id",
       "pleroma_internal",
       "generator",
-      "rules"
+      "rules",
+      "language"
     ]
   )

@@ -36,10 +37,12 @@ defmodule Pleroma.Constants do
       "updated",
       "emoji",
       "content",
+      "contentMap",
       "summary",
       "sensitive",
       "attachment",
-      "generator"
+      "generator",
+      "language"
     ]
   )

@@ -100,7 +103,8 @@ defmodule Pleroma.Constants do
       "Announce",
       "Undo",
       "Flag",
-      "EmojiReact"
+      "EmojiReact",
+      "Listen"
     ]
   )
lib/pleroma/date_time.ex (new file, +3)
@@ -0,0 +1,3 @@
defmodule Pleroma.DateTime do
  @callback utc_now() :: NaiveDateTime.t()
end

lib/pleroma/date_time/impl.ex (new file, +6)
@@ -0,0 +1,6 @@
defmodule Pleroma.DateTime.Impl do
  @behaviour Pleroma.DateTime

  @impl true
  def utc_now, do: NaiveDateTime.utc_now()
end
lib/pleroma/ecto_type/activity_pub/object_validators/content_language_map.ex (new file, +49)
@@ -0,0 +1,49 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap do
  use Ecto.Type

  import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode,
    only: [good_locale_code?: 1]

  def type, do: :map

  def cast(%{} = object) do
    with {status, %{} = data} when status in [:modified, :ok] <- validate_map(object) do
      {:ok, data}
    else
      {_, nil} -> {:ok, nil}
      {:error, _} -> :error
    end
  end

  def cast(_), do: :error

  def dump(data), do: {:ok, data}

  def load(data), do: {:ok, data}

  defp validate_map(%{} = object) do
    {status, data} =
      object
      |> Enum.reduce({:ok, %{}}, fn
        {lang, value}, {status, acc} when is_binary(lang) and is_binary(value) ->
          if good_locale_code?(lang) do
            {status, Map.put(acc, lang, value)}
          else
            {:modified, acc}
          end

        _, {_status, acc} ->
          {:modified, acc}
      end)

    if data == %{} do
      {status, nil}
    else
      {status, data}
    end
  end
end
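Note: tracing the code above, the cast keeps valid entries, silently drops invalid ones, and collapses to `nil` when nothing survives. A short sketch of the resulting behaviour:

```elixir
alias Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap

# Valid locale keys with string values are kept as-is:
ContentLanguageMap.cast(%{"en" => "Hello", "de" => "Hallo"})
#=> {:ok, %{"en" => "Hello", "de" => "Hallo"}}

# Entries with bad keys (or non-string values) are dropped:
ContentLanguageMap.cast(%{"en" => "Hello", "bad key!" => "x"})
#=> {:ok, %{"en" => "Hello"}}

# If nothing survives validation, the whole map collapses to nil:
ContentLanguageMap.cast(%{"bad key!" => "x"})
#=> {:ok, nil}

# Non-map input is rejected outright:
ContentLanguageMap.cast("en")
#=> :error
```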
lib/pleroma/ecto_type/activity_pub/object_validators/language_code.ex (new file, +27)
@@ -0,0 +1,27 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode do
  use Ecto.Type

  def type, do: :string

  def cast(language) when is_binary(language) do
    if good_locale_code?(language) do
      {:ok, language}
    else
      {:error, :invalid_language}
    end
  end

  def cast(_), do: :error

  def dump(data), do: {:ok, data}

  def load(data), do: {:ok, data}

  def good_locale_code?(code) when is_binary(code), do: code =~ ~r<^[a-zA-Z0-9\-]+\z$>

  def good_locale_code?(_code), do: false
end
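Note: the regex anchors the whole string to `[a-zA-Z0-9-]+`, so well-formed BCP 47-style tags pass while anything with other characters is refused. A few illustrative calls:

```elixir
alias Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode

LanguageCode.good_locale_code?("en")      #=> true
LanguageCode.good_locale_code?("en-US")   #=> true
LanguageCode.good_locale_code?("zh-Hans") #=> true

# Underscores, spaces, and markup are rejected, which is the point:
LanguageCode.good_locale_code?("en_US")       #=> false
LanguageCode.good_locale_code?("<em>en</em>") #=> false
LanguageCode.good_locale_code?(:en)           #=> false
```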
lib/pleroma/emoji/pack.ex
@@ -24,12 +24,13 @@ defmodule Pleroma.Emoji.Pack do

   alias Pleroma.Emoji
   alias Pleroma.Emoji.Pack
+  alias Pleroma.SafeZip
   alias Pleroma.Utils

   @spec create(String.t()) :: {:ok, t()} | {:error, File.posix()} | {:error, :empty_values}
   def create(name) do
     with :ok <- validate_not_empty([name]),
-         dir <- Path.join(emoji_path(), name),
+         dir <- path_join_name_safe(emoji_path(), name),
          :ok <- File.mkdir(dir) do
       save_pack(%__MODULE__{pack_file: Path.join(dir, "pack.json")})
     end

@@ -65,43 +66,21 @@ defmodule Pleroma.Emoji.Pack do
           {:ok, [binary()]} | {:error, File.posix(), binary()} | {:error, :empty_values}
   def delete(name) do
     with :ok <- validate_not_empty([name]),
-         pack_path <- Path.join(emoji_path(), name) do
+         pack_path <- path_join_name_safe(emoji_path(), name) do
       File.rm_rf(pack_path)
     end
   end

-  @spec unpack_zip_emojies(list(tuple())) :: list(map())
-  defp unpack_zip_emojies(zip_files) do
-    Enum.reduce(zip_files, [], fn
-      {_, path, s, _, _, _}, acc when elem(s, 2) == :regular ->
-        with(
-          filename <- Path.basename(path),
-          shortcode <- Path.basename(filename, Path.extname(filename)),
-          false <- Emoji.exist?(shortcode)
-        ) do
-          [%{path: path, filename: path, shortcode: shortcode} | acc]
-        else
-          _ -> acc
-        end
-
-      _, acc ->
-        acc
-    end)
-  end
-
   @spec add_file(t(), String.t(), Path.t(), Plug.Upload.t()) ::
           {:ok, t()}
           | {:error, File.posix() | atom()}
   def add_file(%Pack{} = pack, _, _, %Plug.Upload{content_type: "application/zip"} = file) do
-    with {:ok, zip_files} <- :zip.table(to_charlist(file.path)),
-         [_ | _] = emojies <- unpack_zip_emojies(zip_files),
+    with {:ok, zip_files} <- SafeZip.list_dir_file(file.path),
+         [_ | _] = emojies <- map_zip_emojies(zip_files),
          {:ok, tmp_dir} <- Utils.tmp_dir("emoji") do
       try do
-        {:ok, _emoji_files} =
-          :zip.unzip(
-            to_charlist(file.path),
-            [{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, String.to_charlist(tmp_dir)}]
-          )
+        SafeZip.unzip_file(file.path, tmp_dir, Enum.map(emojies, & &1[:path]))

         {_, updated_pack} =
           Enum.map_reduce(emojies, pack, fn item, emoji_pack ->

@@ -292,7 +271,7 @@ defmodule Pleroma.Emoji.Pack do
   @spec load_pack(String.t()) :: {:ok, t()} | {:error, :file.posix()}
   def load_pack(name) do
     name = Path.basename(name)
-    pack_file = Path.join([emoji_path(), name, "pack.json"])
+    pack_file = path_join_name_safe(emoji_path(), name) |> Path.join("pack.json")

     with {:ok, _} <- File.stat(pack_file),
          {:ok, pack_data} <- File.read(pack_file) do

@@ -416,10 +395,9 @@ defmodule Pleroma.Emoji.Pack do
   end

   defp create_archive_and_cache(pack, hash) do
-    files = [~c"pack.json" | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]
-
-    {:ok, {_, result}} =
-      :zip.zip(~c"#{pack.name}.zip", files, [:memory, cwd: to_charlist(pack.path)])
+    pack_file_list = Enum.into(pack.files, [], fn {_, f} -> f end)
+    files = ["pack.json" | pack_file_list]
+    {:ok, {_, result}} = SafeZip.zip("#{pack.name}.zip", files, pack.path, true)

     ttl_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file])
     overall_ttl = :timer.seconds(ttl_per_file * Enum.count(files))

@@ -478,7 +456,7 @@ defmodule Pleroma.Emoji.Pack do
   end

   defp save_file(%Plug.Upload{path: upload_path}, pack, filename) do
-    file_path = Path.join(pack.path, filename)
+    file_path = path_join_safe(pack.path, filename)
     create_subdirs(file_path)

     with {:ok, _} <- File.copy(upload_path, file_path) do

@@ -497,8 +475,8 @@ defmodule Pleroma.Emoji.Pack do
   end

   defp rename_file(pack, filename, new_filename) do
-    old_path = Path.join(pack.path, filename)
-    new_path = Path.join(pack.path, new_filename)
+    old_path = path_join_safe(pack.path, filename)
+    new_path = path_join_safe(pack.path, new_filename)
     create_subdirs(new_path)

     with :ok <- File.rename(old_path, new_path) do

@@ -516,7 +494,7 @@ defmodule Pleroma.Emoji.Pack do

   defp remove_file(pack, shortcode) do
     with {:ok, filename} <- get_filename(pack, shortcode),
-         emoji <- Path.join(pack.path, filename),
+         emoji <- path_join_safe(pack.path, filename),
          :ok <- File.rm(emoji) do
       remove_dir_if_empty(emoji, filename)
     end

@@ -534,7 +512,7 @@ defmodule Pleroma.Emoji.Pack do

   defp get_filename(pack, shortcode) do
     with %{^shortcode => filename} when is_binary(filename) <- pack.files,
-         file_path <- Path.join(pack.path, filename),
+         file_path <- path_join_safe(pack.path, filename),
          {:ok, _} <- File.stat(file_path) do
       {:ok, filename}
     else

@@ -584,11 +562,10 @@ defmodule Pleroma.Emoji.Pack do

   defp unzip(archive, pack_info, remote_pack, local_pack) do
     with :ok <- File.mkdir_p!(local_pack.path) do
-      files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end)
+      files = Enum.map(remote_pack["files"], fn {_, path} -> path end)
       # Fallback cannot contain a pack.json file
-      files = if pack_info[:fallback], do: files, else: [~c"pack.json" | files]
-
-      :zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files)
+      files = if pack_info[:fallback], do: files, else: ["pack.json" | files]
+      SafeZip.unzip_data(archive, local_pack.path, files)
     end
   end

@@ -649,13 +626,43 @@ defmodule Pleroma.Emoji.Pack do
   end

   defp validate_has_all_files(pack, zip) do
-    with {:ok, f_list} <- :zip.unzip(zip, [:memory]) do
-      # Check if all files from the pack.json are in the archive
-      pack.files
-      |> Enum.all?(fn {_, from_manifest} ->
-        List.keyfind(f_list, to_charlist(from_manifest), 0)
-      end)
-      |> if(do: :ok, else: {:error, :incomplete})
-    end
+    # Check if all files from the pack.json are in the archive
+    eset =
+      Enum.reduce(pack.files, MapSet.new(), fn
+        {_, file}, s -> MapSet.put(s, to_charlist(file))
+      end)
+
+    if SafeZip.contains_all_data?(zip, eset),
+      do: :ok,
+      else: {:error, :incomplete}
   end

+  defp path_join_name_safe(dir, name) do
+    if to_string(name) != Path.basename(name) or name in ["..", ".", ""] do
+      raise "Invalid or malicious pack name: #{name}"
+    else
+      Path.join(dir, name)
+    end
+  end
+
+  defp path_join_safe(dir, path) do
+    {:ok, safe_path} = Path.safe_relative(path)
+    Path.join(dir, safe_path)
+  end
+
+  defp map_zip_emojies(zip_files) do
+    Enum.reduce(zip_files, [], fn path, acc ->
+      with(
+        filename <- Path.basename(path),
+        shortcode <- Path.basename(filename, Path.extname(filename)),
+        # note: this only checks the shortcode, if an emoji already exists on the same path, but
+        # with a different shortcode, the existing one will be degraded to an alias of the new
+        false <- Emoji.exist?(shortcode)
+      ) do
+        [%{path: path, filename: path, shortcode: shortcode} | acc]
+      else
+        _ -> acc
+      end
+    end)
+  end
 end
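Note: the two new helpers enforce different invariants. A short sketch of what they accept and reject; the literal paths are illustrative:

```elixir
# path_join_name_safe/2 only accepts a bare directory-entry name: the name
# must equal its own basename, so separators and dot-segments raise.
Path.basename("blobs")     #=> "blobs" (equals input, accepted)
Path.basename("../../etc") #=> "etc"   (differs from input, raises)

# path_join_safe/2 leans on Path.safe_relative/1 (Elixir >= 1.14), which
# refuses any path that would escape the base directory; the {:ok, _} match
# in the helper turns a rejection into a crash rather than a traversal.
Path.safe_relative("foo/bar.png") #=> {:ok, "foo/bar.png"}
Path.safe_relative("../bar.png")  #=> :error
Path.safe_relative("/etc/passwd") #=> :error
```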
lib/pleroma/frontend.ex
@@ -65,24 +65,12 @@ defmodule Pleroma.Frontend do
   end

   def unzip(zip, dest) do
-    with {:ok, unzipped} <- :zip.unzip(zip, [:memory]) do
-      File.rm_rf!(dest)
-      File.mkdir_p!(dest)
-
-      Enum.each(unzipped, fn {filename, data} ->
-        path = filename
-
-        new_file_path = Path.join(dest, path)
-
-        path
-        |> Path.dirname()
-        |> then(&Path.join(dest, &1))
-        |> File.mkdir_p!()
-
-        if not File.dir?(new_file_path) do
-          File.write!(new_file_path, data)
-        end
-      end)
+    File.rm_rf!(dest)
+    File.mkdir_p!(dest)
+
+    case Pleroma.SafeZip.unzip_data(zip, dest) do
+      {:ok, _} -> :ok
+      error -> error
     end
   end
lib/pleroma/hashtag.ex
@@ -12,6 +12,7 @@ defmodule Pleroma.Hashtag do
   alias Pleroma.Hashtag
   alias Pleroma.Object
   alias Pleroma.Repo
+  alias Pleroma.User.HashtagFollow

   schema "hashtags" do
     field(:name, :string)

@@ -27,6 +28,14 @@ defmodule Pleroma.Hashtag do
     |> String.trim()
   end

+  def get_by_id(id) do
+    Repo.get(Hashtag, id)
+  end
+
+  def get_by_name(name) do
+    Repo.get_by(Hashtag, name: normalize_name(name))
+  end
+
   def get_or_create_by_name(name) do
     changeset = changeset(%Hashtag{}, %{name: name})

@@ -103,4 +112,22 @@ defmodule Pleroma.Hashtag do
     {:ok, deleted_count}
   end
+
+  def get_followers(%Hashtag{id: hashtag_id}) do
+    from(hf in HashtagFollow)
+    |> where([hf], hf.hashtag_id == ^hashtag_id)
+    |> join(:inner, [hf], u in assoc(hf, :user))
+    |> select([hf, u], u.id)
+    |> Repo.all()
+  end
+
+  def get_recipients_for_activity(%Pleroma.Activity{object: %{hashtags: tags}})
+      when is_list(tags) do
+    tags
+    |> Enum.map(&get_followers/1)
+    |> List.flatten()
+    |> Enum.uniq()
+  end
+
+  def get_recipients_for_activity(_activity), do: []
 end
lib/pleroma/mogrify_behaviour.ex (new file, +15)
@@ -0,0 +1,15 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.MogrifyBehaviour do
  @moduledoc """
  Behaviour for Mogrify operations.
  This module defines the interface for Mogrify operations that can be mocked in tests.
  """

  @callback open(binary()) :: map()
  @callback custom(map(), binary()) :: map()
  @callback custom(map(), binary(), binary()) :: map()
  @callback save(map(), keyword()) :: map()
end

lib/pleroma/mogrify_wrapper.ex (new file, +30)
@@ -0,0 +1,30 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.MogrifyWrapper do
  @moduledoc """
  Default implementation of MogrifyBehaviour that delegates to Mogrify.
  """
  @behaviour Pleroma.MogrifyBehaviour

  @impl true
  def open(file) do
    Mogrify.open(file)
  end

  @impl true
  def custom(image, action) do
    Mogrify.custom(image, action)
  end

  @impl true
  def custom(image, action, options) do
    Mogrify.custom(image, action, options)
  end

  @impl true
  def save(image, opts) do
    Mogrify.save(image, opts)
  end
end
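Note: unlike the runtime `impl()` lookup in `Pleroma.Config.Getting`, the Mogrify seam is wired at compile time via `Application.compile_env` (see the filter changes further down), so the swap has to live in config/test.exs. A hypothetical Mox-based test sketch, assuming `Pleroma.MogrifyMock` is defined against this behaviour as the test config suggests:

```elixir
# Normally in test/test_helper.exs:
Mox.defmock(Pleroma.MogrifyMock, for: Pleroma.MogrifyBehaviour)

defmodule Pleroma.Upload.Filter.MogrifyTest do
  use ExUnit.Case, async: true
  import Mox

  setup :verify_on_exit!

  test "applies the configured conversion without shelling out" do
    # The filter reads its args through the stubbed config impl...
    stub(Pleroma.StaticStubbedConfigMock, :get!, fn [Pleroma.Upload.Filter.Mogrify, :args] ->
      "strip"
    end)

    image = %Mogrify.Image{path: "/tmp/blank.png"}

    # ...and pipes open/custom/save through the mocked wrapper.
    Pleroma.MogrifyMock
    |> expect(:open, fn "/tmp/blank.png" -> image end)
    |> expect(:custom, fn ^image, "strip" -> image end)
    |> expect(:save, fn ^image, [in_place: true] -> image end)

    upload = %Pleroma.Upload{tempfile: "/tmp/blank.png", content_type: "image/png"}
    assert {:ok, :filtered} = Pleroma.Upload.Filter.Mogrify.filter(upload)
  end
end
```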
lib/pleroma/object/containment.ex
@@ -47,6 +47,19 @@ defmodule Pleroma.Object.Containment do
   defp compare_uris(%URI{host: host} = _id_uri, %URI{host: host} = _other_uri), do: :ok
   defp compare_uris(_id_uri, _other_uri), do: :error

+  @doc """
+  Checks whether an URL to fetch from is from the local server.
+
+  We never want to fetch from ourselves; if it's not in the database
+  it can't be authentic and must be a counterfeit.
+  """
+  def contain_local_fetch(id) do
+    case compare_uris(URI.parse(id), Pleroma.Web.Endpoint.struct_url()) do
+      :ok -> :error
+      _ -> :ok
+    end
+  end
+
   @doc """
   Checks that an imported AP object's actor matches the host it came from.
   """
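Note: the inverted `:ok`/`:error` reads backwards at first glance; `compare_uris/2` matching means "same host", which here is the failure case. With a hypothetical instance at `https://example.social`:

```elixir
alias Pleroma.Object.Containment

# Same host as Endpoint.struct_url/0: refuse to "fetch" ourselves.
Containment.contain_local_fetch("https://example.social/objects/deadbeef")
#=> :error

# Different host: fetching may proceed.
Containment.contain_local_fetch("https://remote.example/objects/deadbeef")
#=> :ok
```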
lib/pleroma/object/fetcher.ex
@@ -19,6 +19,8 @@ defmodule Pleroma.Object.Fetcher do
   require Logger
   require Pleroma.Constants

+  @mix_env Mix.env()
+
   @spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()}
   defp reinject_object(%Object{data: %{}} = object, new_data) do
     Logger.debug("Reinjecting object #{new_data["id"]}")

@@ -146,6 +148,7 @@ defmodule Pleroma.Object.Fetcher do

     with {:scheme, true} <- {:scheme, String.starts_with?(id, "http")},
          {_, true} <- {:mrf, MRF.id_filter(id)},
+         {_, :ok} <- {:local_fetch, Containment.contain_local_fetch(id)},
          {:ok, body} <- get_object(id),
          {:ok, data} <- safe_json_decode(body),
          :ok <- Containment.contain_origin_from_id(id, data) do

@@ -158,6 +161,9 @@ defmodule Pleroma.Object.Fetcher do
       {:scheme, _} ->
         {:error, "Unsupported URI scheme"}

+      {:local_fetch, _} ->
+        {:error, "Trying to fetch local resource"}
+
       {:error, e} ->
         {:error, e}

@@ -172,6 +178,19 @@ defmodule Pleroma.Object.Fetcher do
   def fetch_and_contain_remote_object_from_id(_id),
     do: {:error, "id must be a string"}

+  defp check_crossdomain_redirect(final_host, original_url)
+
+  # Handle the common case in tests where responses don't include URLs
+  if @mix_env == :test do
+    defp check_crossdomain_redirect(nil, _) do
+      {:cross_domain_redirect, false}
+    end
+  end
+
+  defp check_crossdomain_redirect(final_host, original_url) do
+    {:cross_domain_redirect, final_host != URI.parse(original_url).host}
+  end
+
   defp get_object(id) do
     date = Pleroma.Signature.signed_date()

@@ -181,19 +200,29 @@ defmodule Pleroma.Object.Fetcher do
       |> sign_fetch(id, date)

     case HTTP.get(id, headers) do
+      {:ok, %{body: body, status: code, headers: headers, url: final_url}}
+      when code in 200..299 ->
+        remote_host = if final_url, do: URI.parse(final_url).host, else: nil
+
+        with {:cross_domain_redirect, false} <- check_crossdomain_redirect(remote_host, id),
+             {_, content_type} <- List.keyfind(headers, "content-type", 0),
+             {:ok, _media_type} <- verify_content_type(content_type) do
+          {:ok, body}
+        else
+          {:cross_domain_redirect, true} ->
+            {:error, {:cross_domain_redirect, true}}
+
+          error ->
+            error
+        end
+
+      # Handle the case where URL is not in the response (older HTTP library versions)
       {:ok, %{body: body, status: code, headers: headers}} when code in 200..299 ->
         case List.keyfind(headers, "content-type", 0) do
           {_, content_type} ->
-            case Plug.Conn.Utils.media_type(content_type) do
-              {:ok, "application", "activity+json", _} ->
-                {:ok, body}
-
-              {:ok, "application", "ld+json",
-               %{"profile" => "https://www.w3.org/ns/activitystreams"}} ->
-                {:ok, body}
-
-              _ ->
-                {:error, {:content_type, content_type}}
+            case verify_content_type(content_type) do
+              {:ok, _} -> {:ok, body}
+              error -> error
             end

           _ ->

@@ -216,4 +245,17 @@ defmodule Pleroma.Object.Fetcher do

   defp safe_json_decode(nil), do: {:ok, nil}
   defp safe_json_decode(json), do: Jason.decode(json)
+
+  defp verify_content_type(content_type) do
+    case Plug.Conn.Utils.media_type(content_type) do
+      {:ok, "application", "activity+json", _} ->
+        {:ok, :activity_json}
+
+      {:ok, "application", "ld+json", %{"profile" => "https://www.w3.org/ns/activitystreams"}} ->
+        {:ok, :ld_json}
+
+      _ ->
+        {:error, {:content_type, content_type}}
+    end
+  end
 end
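Note: `verify_content_type/1` builds on `Plug.Conn.Utils.media_type/1`, which parses a header value into `{:ok, type, subtype, params}`. A short sketch of the accepted shapes:

```elixir
Plug.Conn.Utils.media_type("application/activity+json")
#=> {:ok, "application", "activity+json", %{}}

Plug.Conn.Utils.media_type(
  ~s(application/ld+json; profile="https://www.w3.org/ns/activitystreams")
)
#=> {:ok, "application", "ld+json",
#    %{"profile" => "https://www.w3.org/ns/activitystreams"}}

# Anything else is rejected, closing the spoofing vector where a server
# returns AP-looking JSON under text/html or similar:
# verify_content_type("text/html") #=> {:error, {:content_type, "text/html"}}
```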
lib/pleroma/pagination.ex
@@ -89,9 +89,9 @@ defmodule Pleroma.Pagination do

   defp cast_params(params) do
     param_types = %{
-      min_id: :string,
-      since_id: :string,
-      max_id: :string,
+      min_id: params[:id_type] || :string,
+      since_id: params[:id_type] || :string,
+      max_id: params[:id_type] || :string,
       offset: :integer,
       limit: :integer,
       skip_extra_order: :boolean,
lib/pleroma/reverse_proxy.ex
@@ -17,6 +17,8 @@ defmodule Pleroma.ReverseProxy do
   @failed_request_ttl :timer.seconds(60)
   @methods ~w(GET HEAD)

+  @allowed_mime_types Pleroma.Config.get([Pleroma.Upload, :allowed_mime_types], [])
+
   @cachex Pleroma.Config.get([:cachex, :provider], Cachex)

   def max_read_duration_default, do: @max_read_duration

@@ -301,10 +303,26 @@ defmodule Pleroma.ReverseProxy do
     headers
     |> Enum.filter(fn {k, _} -> k in @keep_resp_headers end)
     |> build_resp_cache_headers(opts)
+    |> sanitise_content_type()
     |> build_resp_content_disposition_header(opts)
     |> Keyword.merge(Keyword.get(opts, :resp_headers, []))
   end

+  defp sanitise_content_type(headers) do
+    original_ct = get_content_type(headers)
+
+    safe_ct =
+      Pleroma.Web.Plugs.Utils.get_safe_mime_type(
+        %{allowed_mime_types: @allowed_mime_types},
+        original_ct
+      )
+
+    [
+      {"content-type", safe_ct}
+      | Enum.filter(headers, fn {k, _v} -> k != "content-type" end)
+    ]
+  end
+
   defp build_resp_cache_headers(headers, _opts) do
     has_cache? = Enum.any?(headers, fn {k, _} -> k in @resp_cache_headers end)
lib/pleroma/safe_zip.ex (new file, +216)
@@ -0,0 +1,216 @@
# Akkoma: Magically expressive social media
# Copyright © 2024 Akkoma Authors <https://akkoma.dev/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.SafeZip do
  @moduledoc """
  Wraps the subset of Erlang's zip module we'd like to use
  but enforces path-traversal safety everywhere and other checks.

  For convenience almost all functions accept both elixir strings and charlists,
  but output elixir strings themselves. However, this means the input parameter type
  can no longer be used to distinguish archive file paths from archive binary data in memory,
  thus where needed both a _data and _file variant are provided.
  """

  @type text() :: String.t() | [char()]

  defp safe_path?(path) do
    # Path accepts elixir's chardata()
    case Path.safe_relative(path) do
      {:ok, _} -> true
      _ -> false
    end
  end

  defp safe_type?(file_type) do
    if file_type in [:regular, :directory] do
      true
    else
      false
    end
  end

  defp maybe_add_file(_type, _path_charlist, nil), do: nil

  defp maybe_add_file(:regular, path_charlist, file_list),
    do: [to_string(path_charlist) | file_list]

  defp maybe_add_file(_type, _path_charlist, file_list), do: file_list

  @spec check_safe_archive_and_maybe_list_files(binary() | [char()], [term()], boolean()) ::
          {:ok, [String.t()]} | {:error, reason :: term()}
  defp check_safe_archive_and_maybe_list_files(archive, opts, list) do
    acc = if list, do: [], else: nil

    with {:ok, table} <- :zip.table(archive, opts) do
      Enum.reduce_while(table, {:ok, acc}, fn
        # ZIP comment
        {:zip_comment, _}, acc ->
          {:cont, acc}

        # File entry
        {:zip_file, path, info, _comment, _offset, _comp_size}, {:ok, fl} ->
          with {_, type} <- {:get_type, elem(info, 2)},
               {_, true} <- {:type, safe_type?(type)},
               {_, true} <- {:safe_path, safe_path?(path)} do
            {:cont, {:ok, maybe_add_file(type, path, fl)}}
          else
            {:get_type, e} ->
              {:halt,
               {:error, "Couldn't determine file type of ZIP entry at #{path} (#{inspect(e)})"}}

            {:type, _} ->
              {:halt, {:error, "Potentially unsafe file type in ZIP at: #{path}"}}

            {:safe_path, _} ->
              {:halt, {:error, "Unsafe path in ZIP: #{path}"}}
          end

        # new OTP version?
        _, _acc ->
          {:halt, {:error, "Unknown ZIP record type"}}
      end)
    end
  end

  @spec check_safe_archive_and_list_files(binary() | [char()], [term()]) ::
          {:ok, [String.t()]} | {:error, reason :: term()}
  defp check_safe_archive_and_list_files(archive, opts \\ []) do
    check_safe_archive_and_maybe_list_files(archive, opts, true)
  end

  @spec check_safe_archive(binary() | [char()], [term()]) :: :ok | {:error, reason :: term()}
  defp check_safe_archive(archive, opts \\ []) do
    case check_safe_archive_and_maybe_list_files(archive, opts, false) do
      {:ok, _} -> :ok
      error -> error
    end
  end

  @spec check_safe_file_list([text()], text()) :: :ok | {:error, term()}
  defp check_safe_file_list([], _), do: :ok

  defp check_safe_file_list([path | tail], cwd) do
    with {_, true} <- {:path, safe_path?(path)},
         {_, {:ok, fstat}} <- {:stat, File.stat(Path.expand(path, cwd))},
         {_, true} <- {:type, safe_type?(fstat.type)} do
      check_safe_file_list(tail, cwd)
    else
      {:path, _} ->
        {:error, "Unsafe path escaping cwd: #{path}"}

      {:stat, e} ->
        {:error, "Unable to check file type of #{path}: #{inspect(e)}"}

      {:type, _} ->
        {:error, "Unsafe type at #{path}"}
    end
  end

  defp check_safe_file_list(_, _), do: {:error, "Malformed file_list"}

  @doc """
  Checks whether the archive data contains file entries for all paths from fset

  Note this really only accepts entries corresponding to regular _files_,
  if a path is contained as for example a directory, this does not count as a match.
  """
  @spec contains_all_data?(binary(), MapSet.t()) :: true | false
  def contains_all_data?(archive_data, fset) do
    with {:ok, table} <- :zip.table(archive_data) do
      remaining =
        Enum.reduce(table, fset, fn
          {:zip_file, path, info, _comment, _offset, _comp_size}, fset ->
            if elem(info, 2) == :regular do
              MapSet.delete(fset, path)
            else
              fset
            end

          _, _ ->
            fset
        end)
        |> MapSet.size()

      if remaining == 0, do: true, else: false
    else
      _ -> false
    end
  end

  @doc """
  List all file entries in ZIP, or error if invalid or unsafe.

  Note this really only lists regular files, no directories, ZIP comments or other types!
  """
  @spec list_dir_file(text()) :: {:ok, [String.t()]} | {:error, reason :: term()}
  def list_dir_file(archive) do
    path = to_charlist(archive)
    check_safe_archive_and_list_files(path)
  end

  defp stringify_zip({:ok, {fname, data}}), do: {:ok, {to_string(fname), data}}
  defp stringify_zip({:ok, fname}), do: {:ok, to_string(fname)}
  defp stringify_zip(ret), do: ret

  @spec zip(text(), text(), [text()], boolean()) ::
          {:ok, file_name :: String.t()}
          | {:ok, {file_name :: String.t(), file_data :: binary()}}
          | {:error, reason :: term()}
  def zip(name, file_list, cwd, memory \\ false) do
    opts = [{:cwd, to_charlist(cwd)}]
    opts = if memory, do: [:memory | opts], else: opts

    with :ok <- check_safe_file_list(file_list, cwd) do
      file_list = for f <- file_list, do: to_charlist(f)
      name = to_charlist(name)
      stringify_zip(:zip.zip(name, file_list, opts))
    end
  end

  @spec unzip_file(text(), text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  def unzip_file(archive, target_dir, file_list \\ nil) do
    do_unzip(to_charlist(archive), to_charlist(target_dir), file_list)
  end

  @spec unzip_data(binary(), text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  def unzip_data(archive, target_dir, file_list \\ nil) do
    do_unzip(archive, to_charlist(target_dir), file_list)
  end

  defp stringify_unzip({:ok, [{_fname, _data} | _] = filebinlist}),
    do: {:ok, Enum.map(filebinlist, fn {fname, data} -> {to_string(fname), data} end)}

  defp stringify_unzip({:ok, [_fname | _] = filelist}),
    do: {:ok, Enum.map(filelist, fn fname -> to_string(fname) end)}

  defp stringify_unzip({:error, {fname, term}}), do: {:error, {to_string(fname), term}}
  defp stringify_unzip(ret), do: ret

  @spec do_unzip(binary() | [char()], text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  defp do_unzip(archive, target_dir, file_list) do
    opts =
      if file_list != nil do
        [
          file_list: for(f <- file_list, do: to_charlist(f)),
          cwd: target_dir
        ]
      else
        [cwd: target_dir]
      end

    with :ok <- check_safe_archive(archive) do
      stringify_unzip(:zip.unzip(archive, opts))
    end
  end
end
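Note: a hypothetical round trip through the new module; the paths are illustrative and the files are assumed to exist under the given cwd:

```elixir
files = ["pack.json", "blank.png"]

# Zip to a file on disk (memory defaults to false)...
{:ok, zip_path} = Pleroma.SafeZip.zip("pack.zip", files, "/tmp/my_pack")

# ...or into memory (memory: true), as create_archive_and_cache/2 does:
{:ok, {_name, zip_data}} = Pleroma.SafeZip.zip("pack.zip", files, "/tmp/my_pack", true)

# Listing and extraction walk :zip.table/1 first and refuse archives with
# unsafe entry types (symlinks etc.) or escaping paths, returning
# {:error, reason} instead of touching the filesystem:
{:ok, entries} = Pleroma.SafeZip.list_dir_file(zip_path)
{:ok, extracted} = Pleroma.SafeZip.unzip_data(zip_data, "/tmp/extracted", entries)
```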
lib/pleroma/upload/filter/analyze_metadata.ex
@@ -90,9 +90,13 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
       {:ok, rgb} =
         if Image.has_alpha?(resized_image) do
           # remove alpha channel
-          resized_image
-          |> Operation.extract_band!(0, n: 3)
-          |> Image.write_to_binary()
+          case Operation.extract_band(resized_image, 0, n: 3) do
+            {:ok, data} ->
+              Image.write_to_binary(data)
+
+            _ ->
+              Image.write_to_binary(resized_image)
+          end
         else
           Image.write_to_binary(resized_image)
         end
lib/pleroma/upload/filter/anonymize_filename.ex
@@ -10,7 +10,7 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilename do
   """
   @behaviour Pleroma.Upload.Filter

-  alias Pleroma.Config
+  @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
   alias Pleroma.Upload

   def filter(%Upload{name: name} = upload) do

@@ -23,7 +23,7 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilename do

   @spec predefined_name(String.t()) :: String.t() | nil
   defp predefined_name(extension) do
-    with name when not is_nil(name) <- Config.get([__MODULE__, :text]),
+    with name when not is_nil(name) <- @config_impl.get([__MODULE__, :text]),
          do: String.replace(name, "{extension}", extension)
   end
lib/pleroma/upload/filter/mogrify.ex
@@ -8,9 +8,16 @@ defmodule Pleroma.Upload.Filter.Mogrify do
   @type conversion :: action :: String.t() | {action :: String.t(), opts :: String.t()}
   @type conversions :: conversion() | [conversion()]

+  @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+  @mogrify_impl Application.compile_env(
+                  :pleroma,
+                  [__MODULE__, :mogrify_impl],
+                  Pleroma.MogrifyWrapper
+                )
+
   def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
     try do
-      do_filter(file, Pleroma.Config.get!([__MODULE__, :args]))
+      do_filter(file, @config_impl.get!([__MODULE__, :args]))
       {:ok, :filtered}
     rescue
       e in ErlangError ->

@@ -22,9 +29,9 @@ defmodule Pleroma.Upload.Filter.Mogrify do

   def do_filter(file, filters) do
     file
-    |> Mogrify.open()
+    |> @mogrify_impl.open()
     |> mogrify_filter(filters)
-    |> Mogrify.save(in_place: true)
+    |> @mogrify_impl.save(in_place: true)
   end

   defp mogrify_filter(mogrify, nil), do: mogrify

@@ -38,10 +45,10 @@ defmodule Pleroma.Upload.Filter.Mogrify do
   defp mogrify_filter(mogrify, []), do: mogrify

   defp mogrify_filter(mogrify, {action, options}) do
-    Mogrify.custom(mogrify, action, options)
+    @mogrify_impl.custom(mogrify, action, options)
   end

   defp mogrify_filter(mogrify, action) when is_binary(action) do
-    Mogrify.custom(mogrify, action)
+    @mogrify_impl.custom(mogrify, action)
   end
 end
lib/pleroma/user.ex
@@ -19,6 +19,7 @@ defmodule Pleroma.User do
   alias Pleroma.Emoji
   alias Pleroma.FollowingRelationship
   alias Pleroma.Formatter
+  alias Pleroma.Hashtag
   alias Pleroma.HTML
   alias Pleroma.Keys
   alias Pleroma.MFA

@@ -27,6 +28,7 @@ defmodule Pleroma.User do
   alias Pleroma.Registration
   alias Pleroma.Repo
   alias Pleroma.User
+  alias Pleroma.User.HashtagFollow
   alias Pleroma.UserRelationship
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Builder

@@ -174,6 +176,12 @@ defmodule Pleroma.User do
     has_many(:outgoing_relationships, UserRelationship, foreign_key: :source_id)
     has_many(:incoming_relationships, UserRelationship, foreign_key: :target_id)

+    many_to_many(:followed_hashtags, Hashtag,
+      on_replace: :delete,
+      on_delete: :delete_all,
+      join_through: HashtagFollow
+    )
+
     for {relationship_type,
          [
            {outgoing_relation, outgoing_relation_target},

@@ -2861,4 +2869,54 @@ defmodule Pleroma.User do
       birthday_month: month
     })
   end
+
+  defp maybe_load_followed_hashtags(%User{followed_hashtags: follows} = user)
+       when is_list(follows),
+       do: user
+
+  defp maybe_load_followed_hashtags(%User{} = user) do
+    followed_hashtags = HashtagFollow.get_by_user(user)
+    %{user | followed_hashtags: followed_hashtags}
+  end
+
+  def followed_hashtags(%User{followed_hashtags: follows})
+      when is_list(follows),
+      do: follows
+
+  def followed_hashtags(%User{} = user) do
+    {:ok, user} =
+      user
+      |> maybe_load_followed_hashtags()
+      |> set_cache()
+
+    user.followed_hashtags
+  end
+
+  def follow_hashtag(%User{} = user, %Hashtag{} = hashtag) do
+    Logger.debug("Follow hashtag #{hashtag.name} for user #{user.nickname}")
+    user = maybe_load_followed_hashtags(user)
+
+    with {:ok, _} <- HashtagFollow.new(user, hashtag),
+         follows <- HashtagFollow.get_by_user(user),
+         %User{} = user <- user |> Map.put(:followed_hashtags, follows) do
+      user
+      |> set_cache()
+    end
+  end
+
+  def unfollow_hashtag(%User{} = user, %Hashtag{} = hashtag) do
+    Logger.debug("Unfollow hashtag #{hashtag.name} for user #{user.nickname}")
+    user = maybe_load_followed_hashtags(user)
+
+    with {:ok, _} <- HashtagFollow.delete(user, hashtag),
+         follows <- HashtagFollow.get_by_user(user),
+         %User{} = user <- user |> Map.put(:followed_hashtags, follows) do
+      user
+      |> set_cache()
+    end
+  end
+
+  def following_hashtag?(%User{} = user, %Hashtag{} = hashtag) do
+    not is_nil(HashtagFollow.get(user, hashtag))
+  end
 end
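Note: putting the hashtag-following pieces together, a hypothetical console session (the nickname is illustrative; `follow_hashtag/2` and `unfollow_hashtag/2` return the `set_cache/1` result):

```elixir
{:ok, hashtag} = Pleroma.Hashtag.get_or_create_by_name("elixir")
user = Pleroma.User.get_cached_by_nickname("lain")

{:ok, user} = Pleroma.User.follow_hashtag(user, hashtag)
Pleroma.User.following_hashtag?(user, hashtag) #=> true

# Followers of a tag are resolved when an activity carries it:
Pleroma.Hashtag.get_followers(hashtag) #=> [user.id, ...]

{:ok, user} = Pleroma.User.unfollow_hashtag(user, hashtag)
Pleroma.User.following_hashtag?(user, hashtag) #=> false
```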
@ -16,6 +16,7 @@ defmodule Pleroma.User.Backup do
|
|||
alias Pleroma.Bookmark
|
||||
alias Pleroma.Config
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.SafeZip
|
||||
alias Pleroma.Uploaders.Uploader
|
||||
alias Pleroma.User
|
||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||
|
@ -179,12 +180,12 @@ defmodule Pleroma.User.Backup do
|
|||
end
|
||||
|
||||
@files [
|
||||
~c"actor.json",
|
||||
~c"outbox.json",
|
||||
~c"likes.json",
|
||||
~c"bookmarks.json",
|
||||
~c"followers.json",
|
||||
~c"following.json"
|
||||
"actor.json",
|
||||
"outbox.json",
|
||||
"likes.json",
|
||||
"bookmarks.json",
|
||||
"followers.json",
|
||||
"following.json"
|
||||
]
|
||||
|
||||
@spec run(t()) :: {:ok, t()} | {:error, :failed}
|
||||
|
@ -200,7 +201,7 @@ defmodule Pleroma.User.Backup do
|
|||
{_, :ok} <- {:followers, followers(backup.tempdir, backup.user)},
|
||||
{_, :ok} <- {:following, following(backup.tempdir, backup.user)},
|
||||
{_, {:ok, _zip_path}} <-
|
||||
{:zip, :zip.create(to_charlist(tempfile), @files, cwd: to_charlist(backup.tempdir))},
|
||||
{:zip, SafeZip.zip(tempfile, @files, backup.tempdir)},
|
||||
{_, {:ok, %File.Stat{size: zip_size}}} <- {:filestat, File.stat(tempfile)},
|
||||
{:ok, updated_backup} <- update_record(backup, %{file_size: zip_size}) do
|
||||
{:ok, updated_backup}
|
||||
|
@ -246,7 +247,13 @@ defmodule Pleroma.User.Backup do
|
|||
defp actor(dir, user) do
|
||||
with {:ok, json} <-
|
||||
UserView.render("user.json", %{user: user})
|
||||
|> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"})
|
||||
|> Map.merge(%{
|
||||
"bookmarks" => "bookmarks.json",
|
||||
"likes" => "likes.json",
|
||||
"outbox" => "outbox.json",
|
||||
"followers" => "followers.json",
|
||||
"following" => "following.json"
|
||||
})
|
||||
|> Jason.encode() do
|
||||
File.write(Path.join(dir, "actor.json"), json)
|
||||
end
|
||||
|
|
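The zip change belongs to this merge's security work: the raw Erlang `:zip.create/3` call (which required charlists, hence the `@files` change from `~c"..."` to plain binaries) is replaced by the new `SafeZip` wrapper, which presumably also validates entry names. An illustrative sketch of that kind of check, explicitly not the actual `SafeZip` source:

```elixir
defmodule SafeZipSketch do
  # Illustrative only: a "safe" zip wrapper's core concern is rejecting entry
  # names that would escape the working directory via ".." or an absolute path.
  def safe_entry?(name) when is_binary(name) do
    parts = Path.split(name)
    ".." not in parts and not String.starts_with?(name, "/")
  end
end

SafeZipSketch.safe_entry?("actor.json")        # => true
SafeZipSketch.safe_entry?("../../etc/passwd")  # => false
```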
lib/pleroma/user/hashtag_follow.ex (new file, 55 lines)

@@ -0,0 +1,55 @@
+defmodule Pleroma.User.HashtagFollow do
+  use Ecto.Schema
+  import Ecto.Query
+  import Ecto.Changeset
+
+  alias Pleroma.Hashtag
+  alias Pleroma.Repo
+  alias Pleroma.User
+
+  schema "user_follows_hashtag" do
+    belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
+    belongs_to(:hashtag, Hashtag)
+  end
+
+  def changeset(%__MODULE__{} = user_hashtag_follow, attrs) do
+    user_hashtag_follow
+    |> cast(attrs, [:user_id, :hashtag_id])
+    |> unique_constraint(:hashtag_id,
+      name: :user_hashtag_follows_user_id_hashtag_id_index,
+      message: "already following"
+    )
+    |> validate_required([:user_id, :hashtag_id])
+  end
+
+  def new(%User{} = user, %Hashtag{} = hashtag) do
+    %__MODULE__{}
+    |> changeset(%{user_id: user.id, hashtag_id: hashtag.id})
+    |> Repo.insert(on_conflict: :nothing)
+  end
+
+  def delete(%User{} = user, %Hashtag{} = hashtag) do
+    with %__MODULE__{} = user_hashtag_follow <- get(user, hashtag) do
+      Repo.delete(user_hashtag_follow)
+    else
+      _ -> {:ok, nil}
+    end
+  end
+
+  def get(%User{} = user, %Hashtag{} = hashtag) do
+    from(hf in __MODULE__)
+    |> where([hf], hf.user_id == ^user.id and hf.hashtag_id == ^hashtag.id)
+    |> Repo.one()
+  end
+
+  def get_by_user(%User{} = user) do
+    user
+    |> followed_hashtags_query()
+    |> Repo.all()
+  end
+
+  def followed_hashtags_query(%User{} = user) do
+    Ecto.assoc(user, :followed_hashtags)
+    |> Ecto.Query.order_by([h], desc: h.id)
+  end
+end
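Because `new/2` inserts with `on_conflict: :nothing`, following the same hashtag twice is harmless: Ecto still returns an `{:ok, struct}` tuple on the conflicting insert and no duplicate row is created.

```elixir
# Sketch: repeated follows neither raise nor duplicate rows.
{:ok, _} = Pleroma.User.HashtagFollow.new(user, hashtag)
{:ok, _} = Pleroma.User.HashtagFollow.new(user, hashtag)
```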
@@ -55,9 +55,13 @@ defmodule Pleroma.UserRelationship do

   def user_relationship_mappings, do: Pleroma.UserRelationship.Type.__enum_map__()

+  def datetime_impl do
+    Application.get_env(:pleroma, :datetime_impl, Pleroma.DateTime.Impl)
+  end
+
   def changeset(%UserRelationship{} = user_relationship, params \\ %{}) do
     user_relationship
-    |> cast(params, [:relationship_type, :source_id, :target_id, :expires_at])
+    |> cast(params, [:relationship_type, :source_id, :target_id, :expires_at, :inserted_at])
     |> validate_required([:relationship_type, :source_id, :target_id])
     |> unique_constraint(:relationship_type,
       name: :user_relationships_source_id_relationship_type_target_id_index

@@ -65,6 +69,7 @@ defmodule Pleroma.UserRelationship do
     |> validate_not_self_relationship()
   end

+  @spec exists?(any(), Pleroma.User.t(), Pleroma.User.t()) :: boolean()
   def exists?(relationship_type, %User{} = source, %User{} = target) do
     UserRelationship
     |> where(relationship_type: ^relationship_type, source_id: ^source.id, target_id: ^target.id)

@@ -90,7 +95,8 @@ defmodule Pleroma.UserRelationship do
       relationship_type: relationship_type,
       source_id: source.id,
       target_id: target.id,
-      expires_at: expires_at
+      expires_at: expires_at,
+      inserted_at: datetime_impl().utc_now()
     })
     |> Repo.insert(
       on_conflict: {:replace_all_except, [:id, :inserted_at]},
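`datetime_impl/0` is another injection seam: the clock is read from the application environment, so tests can freeze time when asserting on `inserted_at`. A hypothetical test setup; the `FrozenDateTime` module is invented for illustration:

```elixir
defmodule FrozenDateTime do
  # Stand-in clock exposing just the utc_now/0 call used by changeset/2.
  def utc_now, do: ~U[2025-01-01 00:00:00Z]
end

Application.put_env(:pleroma, :datetime_impl, FrozenDateTime)
```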
@@ -924,6 +924,31 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
     )
   end

+  # Essentially, either look for activities addressed to `recipients`, _OR_ ones
+  # that reference a hashtag the user follows.
+  # First, two fallbacks for when there is no hashtag constraint, or the user
+  # doesn't follow any.
+  defp restrict_recipients_or_hashtags(query, recipients, user, nil) do
+    restrict_recipients(query, recipients, user)
+  end
+
+  defp restrict_recipients_or_hashtags(query, recipients, user, []) do
+    restrict_recipients(query, recipients, user)
+  end
+
+  defp restrict_recipients_or_hashtags(query, recipients, _user, hashtag_ids) do
+    from([activity, object] in query)
+    |> join(:left, [activity, object], hto in "hashtags_objects",
+      on: hto.object_id == object.id,
+      as: :hto
+    )
+    |> where(
+      [activity, object, hto: hto],
+      (hto.hashtag_id in ^hashtag_ids and ^Constants.as_public() in activity.recipients) or
+        fragment("? && ?", ^recipients, activity.recipients)
+    )
+  end
+
   defp restrict_local(query, %{local_only: true}) do
     from(activity in query, where: activity.local == true)
   end

@@ -1414,7 +1439,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
     |> maybe_preload_report_notes(opts)
     |> maybe_set_thread_muted_field(opts)
     |> maybe_order(opts)
-    |> restrict_recipients(recipients, opts[:user])
+    |> restrict_recipients_or_hashtags(recipients, opts[:user], opts[:followed_hashtags])
     |> restrict_replies(opts)
     |> restrict_since(opts)
     |> restrict_local(opts)
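For the hashtag branch of `restrict_recipients_or_hashtags/4` to fire, a caller has to supply `opts[:followed_hashtags]`; public activities matching any of those hashtag IDs are then OR-ed in alongside the usual recipient check (the `&& `fragment is Postgres array overlap). A hypothetical sketch of how a home-timeline caller might populate the option, not taken verbatim from this diff:

```elixir
# Sketch: feed the IDs of the user's followed hashtags into the query opts.
followed_hashtag_ids =
  user
  |> Pleroma.User.followed_hashtags()
  |> Enum.map(& &1.id)

opts = Map.put(opts, :followed_hashtags, followed_hashtag_ids)
```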
@@ -482,7 +482,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
         |> put_status(:forbidden)
         |> json(message)

-      {:error, message} ->
+      {:error, message} when is_binary(message) ->
         conn
         |> put_status(:bad_request)
         |> json(message)
lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex (deleted)

@@ -1,146 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy do
-  @moduledoc """
-  Dynamic activity filtering based on an RBL database
-
-  This MRF makes queries to a custom DNS server which will
-  respond with values indicating the classification of the domain
-  the activity originated from. This method has been widely used
-  in the email anti-spam industry for very fast reputation checks.
-
-  e.g., if the DNS response is 127.0.0.1 or empty, the domain is OK.
-  Other values such as 127.0.0.2 may be used for specific classifications.
-
-  Information for why the host is blocked can be stored in a corresponding TXT record.
-
-  This method is fail-open, so if the queries fail the activities are accepted.
-
-  An example of software meant for this purpose is rbldnsd, which can be found
-  at http://www.corpit.ru/mjt/rbldnsd.html or mirrored at
-  https://git.pleroma.social/feld/rbldnsd
-
-  It is highly recommended that you run your own copy of rbldnsd and use an
-  external mechanism to sync/share the contents of the zone file. This is
-  important to keep the latency on the queries as low as possible and prevent
-  your DNS server from being attacked so it fails and content is permitted.
-  """
-
-  @behaviour Pleroma.Web.ActivityPub.MRF.Policy
-
-  alias Pleroma.Config
-
-  require Logger
-
-  @query_retries 1
-  @query_timeout 500
-
-  @impl true
-  def filter(%{"actor" => actor} = activity) do
-    actor_info = URI.parse(actor)
-
-    with {:ok, activity} <- check_rbl(actor_info, activity) do
-      {:ok, activity}
-    else
-      _ -> {:reject, "[DNSRBLPolicy]"}
-    end
-  end
-
-  @impl true
-  def filter(activity), do: {:ok, activity}
-
-  @impl true
-  def describe do
-    mrf_dnsrbl =
-      Config.get(:mrf_dnsrbl)
-      |> Enum.into(%{})
-
-    {:ok, %{mrf_dnsrbl: mrf_dnsrbl}}
-  end
-
-  @impl true
-  def config_description do
-    %{
-      key: :mrf_dnsrbl,
-      related_policy: "Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy",
-      label: "MRF DNSRBL",
-      description: "DNS RealTime Blackhole Policy",
-      children: [
-        %{
-          key: :nameserver,
-          type: {:string},
-          description: "DNSRBL Nameserver to Query (IP or hostname)",
-          suggestions: ["127.0.0.1"]
-        },
-        %{
-          key: :port,
-          type: {:string},
-          description: "Nameserver port",
-          suggestions: ["53"]
-        },
-        %{
-          key: :zone,
-          type: {:string},
-          description: "Root zone for querying",
-          suggestions: ["bl.pleroma.com"]
-        }
-      ]
-    }
-  end
-
-  defp check_rbl(%{host: actor_host}, activity) do
-    with false <- match?(^actor_host, Pleroma.Web.Endpoint.host()),
-         zone when not is_nil(zone) <- Keyword.get(Config.get([:mrf_dnsrbl]), :zone) do
-      query =
-        Enum.join([actor_host, zone], ".")
-        |> String.to_charlist()
-
-      rbl_response = rblquery(query)
-
-      if Enum.empty?(rbl_response) do
-        {:ok, activity}
-      else
-        Task.start(fn ->
-          reason =
-            case rblquery(query, :txt) do
-              [[result]] -> result
-              _ -> "undefined"
-            end
-
-          Logger.warning(
-            "DNSRBL Rejected activity from #{actor_host} for reason: #{inspect(reason)}"
-          )
-        end)
-
-        :error
-      end
-    else
-      _ -> {:ok, activity}
-    end
-  end
-
-  defp get_rblhost_ip(rblhost) do
-    case rblhost |> String.to_charlist() |> :inet_parse.address() do
-      {:ok, _} -> rblhost |> String.to_charlist() |> :inet_parse.address()
-      _ -> {:ok, rblhost |> String.to_charlist() |> :inet_res.lookup(:in, :a) |> Enum.random()}
-    end
-  end
-
-  defp rblquery(query, type \\ :a) do
-    config = Config.get([:mrf_dnsrbl])
-
-    case get_rblhost_ip(config[:nameserver]) do
-      {:ok, rblnsip} ->
-        :inet_res.lookup(query, :in, type,
-          nameservers: [{rblnsip, config[:port]}],
-          timeout: @query_timeout,
-          retry: @query_retries
-        )
-
-      _ ->
-        []
-    end
-  end
-end
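For reference, the lookup the deleted policy performed amounts to prepending the actor's host to the configured zone and resolving it as an A record; a simplified sketch using an example host (the real code additionally pinned the nameserver, timeout, and retry count):

```elixir
# Simplified sketch of an RBL check: an empty answer means "not listed".
query = String.to_charlist("remote.example.bl.pleroma.com")
listed? = :inet_res.lookup(query, :in, :a) != []
```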
lib/pleroma/web/activity_pub/mrf/fo_direct_reply.ex (deleted)

@@ -1,53 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.ActivityPub.MRF.FODirectReply do
-  @moduledoc """
-  FODirectReply alters the scope of replies to Followers Only activities, making them Direct. The purpose of this policy is to prevent broken threads for the reply author's followers, since the reply is addressed to a user they are not necessarily also following.
-  """
-
-  alias Pleroma.Object
-  alias Pleroma.User
-  alias Pleroma.Web.ActivityPub.Visibility
-
-  @behaviour Pleroma.Web.ActivityPub.MRF.Policy
-
-  @impl true
-  def filter(
-        %{
-          "type" => "Create",
-          "to" => to,
-          "object" => %{
-            "actor" => actor,
-            "type" => "Note",
-            "inReplyTo" => in_reply_to
-          }
-        } = activity
-      ) do
-    with true <- is_binary(in_reply_to),
-         %User{follower_address: followers_collection, local: true} <- User.get_by_ap_id(actor),
-         %Object{} = in_reply_to_object <- Object.get_by_ap_id(in_reply_to),
-         "private" <- Visibility.get_visibility(in_reply_to_object) do
-      direct_to = to -- [followers_collection]
-
-      updated_activity =
-        activity
-        |> Map.put("cc", [])
-        |> Map.put("to", direct_to)
-        |> Map.put("directMessage", true)
-        |> put_in(["object", "cc"], [])
-        |> put_in(["object", "to"], direct_to)
-
-      {:ok, updated_activity}
-    else
-      _ -> {:ok, activity}
-    end
-  end
-
-  @impl true
-  def filter(activity), do: {:ok, activity}
-
-  @impl true
-  def describe, do: {:ok, %{}}
-end
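Before their removal, policies like the two deleted above were enabled the same way as any other MRF policy, by listing the module under the `:mrf` key; a config sketch:

```elixir
# Sketch (config/config.exs): MRF policies are activated by listing their
# modules under :mrf, :policies.
import Config

config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.FODirectReply]
```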
Some files were not shown because too many files have changed in this diff.