diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 39947c75e..675d0e067 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -2,7 +2,7 @@ image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.14.5-otp-25 variables: &global_variables # Only used for the release - ELIXIR_VER: 1.14.5 + ELIXIR_VER: 1.17.3 POSTGRES_DB: pleroma_test POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres @@ -272,7 +272,8 @@ stop_review_app: amd64: stage: release - image: elixir:$ELIXIR_VER + image: + name: hexpm/elixir-amd64:1.17.3-erlang-26.2.5.6-ubuntu-focal-20241011 only: &release-only - stable@pleroma/pleroma - develop@pleroma/pleroma @@ -297,8 +298,9 @@ amd64: variables: &release-variables MIX_ENV: prod VIX_COMPILATION_MODE: PLATFORM_PROVIDED_LIBVIPS + DEBIAN_FRONTEND: noninteractive before_script: &before-release - - apt-get update && apt-get install -y cmake libmagic-dev libvips-dev erlang-dev + - apt-get update && apt-get install -y cmake libmagic-dev libvips-dev erlang-dev git - echo "import Config" > config/prod.secret.exs - mix local.hex --force - mix local.rebar --force @@ -313,7 +315,8 @@ amd64-musl: stage: release artifacts: *release-artifacts only: *release-only - image: elixir:$ELIXIR_VER-alpine + image: + name: hexpm/elixir-amd64:1.17.3-erlang-26.2.5.6-alpine-3.17.9 tags: - amd64 cache: *release-cache @@ -327,6 +330,7 @@ amd64-musl: arm: stage: release + allow_failure: true artifacts: *release-artifacts only: *release-only tags: @@ -355,7 +359,8 @@ arm64: only: *release-only tags: - arm - image: arm64v8/elixir:$ELIXIR_VER + image: + name: hexpm/elixir-arm64:1.17.3-erlang-26.2.5.6-ubuntu-focal-20241011 cache: *release-cache variables: *release-variables before_script: *before-release @@ -367,7 +372,8 @@ arm64-musl: only: *release-only tags: - arm - image: arm64v8/elixir:$ELIXIR_VER-alpine + image: + name: hexpm/elixir-arm64:1.17.3-erlang-26.2.5.6-alpine-3.17.9 cache: *release-cache variables: *release-variables before_script: *before-release-musl diff --git a/CHANGELOG.md b/CHANGELOG.md index 424a9afbb..19b87f09a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,109 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). +## 2.9.1 + +### Security +- Fix authorization checks for C2S Update activities to prevent unauthorized modifications of other users' content. +- Fix content-type spoofing vulnerability that could allow users to upload ActivityPub objects as attachments +- Reject cross-domain redirects when fetching ActivityPub objects to prevent bypassing domain-based security controls. +- Limit emoji shortcodes to alphanumeric, dash, or underscore characters to prevent potential abuse. +- Block attempts to fetch activities from the local instance to prevent spoofing. +- Sanitize Content-Type headers in media proxy to prevent serving malicious ActivityPub content through proxied media. +- Validate Content-Type headers when fetching remote ActivityPub objects to prevent spoofing attacks. + +### Changed +- Include `pl-fe` in available frontends + +### Fixed +- Remove trailing ` from end of line 75 which caused issues copy-pasting + +## 2.9.0 + +### Security +- Require HTTP signatures (if enabled) for routes used by both C2S and S2S AP API +- Fix several spoofing vectors + +### Changed +- Performance: Use 301 (permanent) redirect instead of 302 (temporary) when redirecting small images in media proxy. This allows browsers to cache the redirect response. + +### Added +- Include "published" in actor view +- Link to exported outbox/followers/following collections in backup actor.json +- Hashtag following +- Allow to specify post language + +### Fixed +- Verify a local Update sent through AP C2S so users can only update their own objects +- Fix Mastodon incoming edits with inlined "likes" +- Allow incoming "Listen" activities +- Fix missing check for domain presence in rich media ignore_host configuration +- Fix Rich Media parsing of TwitterCards/OpenGraph to adhere to the spec and always choose the first image if multiple are provided. +- Fix OpenGraph/TwitterCard meta tag ordering for posts with multiple attachments +- Fix blurhash generation crashes + +### Removed +- Retire MRFs DNSRBL, FODirectReply, and QuietReply + +## 2.8.0 + +### Changed +- Metadata: Do not include .atom feed links for remote accounts +- Bumped `fast_html` to v2.3.0, which notably allows to use system-installed lexbor with passing `WITH_SYSTEM_LEXBOR=1` environment variable at build-time +- Dedupe upload filter now uses a three-level sharding directory structure +- Deprecate `/api/v1/pleroma/accounts/:id/subscribe`/`unsubscribe` +- Restrict incoming activities from unknown actors to a subset that does not imply a previous relationship and early rejection of unrecognized activity types. +- Elixir 1.14 and Erlang/OTP 23 is now the minimum supported release +- Support `id` param in `GET /api/v1/statuses` +- LDAP authentication has been refactored to operate as a GenServer process which will maintain an active connection to the LDAP server. +- Fix 'Setting a marker should mark notifications as read' +- Adjust more Oban workers to enforce unique job constraints. +- Oban updated to 2.18.3 +- Publisher behavior improvement when snoozing Oban jobs due to Gun connection pool contention. +- Poll results refreshing is handled asynchronously and will not attempt to keep fetching updates to a closed poll. +- Tuning for release builds to lower CPU usage. +- Rich Media preview fetching will skip making an HTTP HEAD request to check a URL for allowed content type and length if the Tesla adapter is Gun or Finch +- Fix nonexisting user will not generate metadata for search engine opt-out +- Update Oban to 2.18 +- Worker configuration is no longer available. This only affects custom max_retries values for a couple Oban queues. + +### Added +- Add metadata provider for ActivityPub alternate links +- Added support for argon2 passwords and their conversion for migration from Akkoma fork to upstream. +- Respect :restrict_unauthenticated for hashtag rss/atom feeds +- LDAP configuration now permits overriding the CA root certificate file for TLS validation. +- LDAP now supports users changing their passwords +- Include list id in StatusView +- Added MRF.FODirectReply which changes replies to followers-only posts to be direct. +- Add `id_filter` to MRF to filter URLs and their domain prior to fetching +- Added MRF.QuietReply which prevents replies to public posts from being published to the timelines +- Add `group_key` to notifications +- Allow providing avatar/header descriptions +- Added RemoteReportPolicy from Rebased for handling bogus federated reports +- scrubbers/default: Allow "mention hashtag" classes used by Mastodon +- Added dependencies for Swoosh's Mua mail adapter +- Include session scopes in TokenView + +### Fixed +- Verify a local Update sent through AP C2S so users can only update their own objects +- Fixed malformed follow requests that cause them to appear stuck pending due to the recipient being unable to process them. +- Fix incoming Block activities being rejected +- STARTTLS certificate and hostname verification for LDAP authentication +- LDAPS connections (implicit TLS) are now supported. +- Fix /api/v2/media returning the wrong status code (202) for media processed synchronously +- Miscellaneous fixes for Meilisearch support +- Fix pleroma_ctl mix task calls sometimes not being found +- Add a rate limiter to the OAuth App creation endpoint and ensure registered apps are assigned to users. +- ReceiverWorker will cancel processing jobs instead of retrying if the user cannot be fetched due to 403, 404, or 410 errors or if the account is disabled locally. +- Address case where instance reachability status couldn't be updated +- Remote Fetcher Worker recognizes more permanent failure errors +- StreamerView: Do not leak follows count if hidden +- Imports of blocks, mutes, and follows would retry repeatedly due to incorrect error handling and all work executed in a single job +- Make vapid_config return empty array, fixing preloading for instances without push notifications configured + +### Removed +- Remove stub for /api/v1/accounts/:id/identity_proofs (deprecated by Mastodon 3.5.0) + ## 2.7.1 ### Changed diff --git a/changelog.d/activity-pub-metadata.add b/changelog.d/activity-pub-metadata.add deleted file mode 100644 index 2ad3d7b2d..000000000 --- a/changelog.d/activity-pub-metadata.add +++ /dev/null @@ -1 +0,0 @@ -Add metadata provider for ActivityPub alternate links diff --git a/changelog.d/argon2-passwords.add b/changelog.d/argon2-passwords.add deleted file mode 100644 index 36fd7faf2..000000000 --- a/changelog.d/argon2-passwords.add +++ /dev/null @@ -1 +0,0 @@ -Added support for argon2 passwords and their conversion for migration from Akkoma fork to upstream. diff --git a/changelog.d/atom-tag.change b/changelog.d/atom-tag.change deleted file mode 100644 index 1b3590dea..000000000 --- a/changelog.d/atom-tag.change +++ /dev/null @@ -1 +0,0 @@ -Metadata: Do not include .atom feed links for remote accounts diff --git a/changelog.d/bump-lexbor.change b/changelog.d/bump-lexbor.change deleted file mode 100644 index 2c7061a81..000000000 --- a/changelog.d/bump-lexbor.change +++ /dev/null @@ -1 +0,0 @@ -- Bumped `fast_html` to v2.3.0, which notably allows to use system-installed lexbor with passing `WITH_SYSTEM_LEXBOR=1` environment variable at build-time \ No newline at end of file diff --git a/changelog.d/ci-git-fetch.skip b/changelog.d/ci-git-fetch.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/commonapi.skip b/changelog.d/commonapi.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/debian-install-improve.skip b/changelog.d/debian-install-improve.skip deleted file mode 100644 index 6068a3066..000000000 --- a/changelog.d/debian-install-improve.skip +++ /dev/null @@ -1 +0,0 @@ -Fixed a formatting issue that had a required commend embedded in a textblock, and change the language to make it a bit more idiomatic. \ No newline at end of file diff --git a/changelog.d/dedupe-sharding.change b/changelog.d/dedupe-sharding.change deleted file mode 100644 index 2e140d8a2..000000000 --- a/changelog.d/dedupe-sharding.change +++ /dev/null @@ -1 +0,0 @@ -Dedupe upload filter now uses a three-level sharding directory structure diff --git a/changelog.d/deprecate-subscribe.change b/changelog.d/deprecate-subscribe.change deleted file mode 100644 index bd7e8aec7..000000000 --- a/changelog.d/deprecate-subscribe.change +++ /dev/null @@ -1 +0,0 @@ -Deprecate `/api/v1/pleroma/accounts/:id/subscribe`/`unsubscribe` \ No newline at end of file diff --git a/changelog.d/dialyzer.skip b/changelog.d/dialyzer.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/docs-fix.skip b/changelog.d/docs-fix.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/docs-vips.skip b/changelog.d/docs-vips.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/drop-unwanted.change b/changelog.d/drop-unwanted.change deleted file mode 100644 index 459d4bfe6..000000000 --- a/changelog.d/drop-unwanted.change +++ /dev/null @@ -1 +0,0 @@ -Restrict incoming activities from unknown actors to a subset that does not imply a previous relationship and early rejection of unrecognized activity types. diff --git a/changelog.d/elixir-1.14-docker.skip b/changelog.d/elixir-1.14-docker.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/elixir.change b/changelog.d/elixir.change deleted file mode 100644 index 779c01562..000000000 --- a/changelog.d/elixir.change +++ /dev/null @@ -1 +0,0 @@ -Elixir 1.14 and Erlang/OTP 23 is now the minimum supported release diff --git a/changelog.d/follow-request.fix b/changelog.d/follow-request.fix deleted file mode 100644 index 59d34e9bf..000000000 --- a/changelog.d/follow-request.fix +++ /dev/null @@ -1 +0,0 @@ -Fixed malformed follow requests that cause them to appear stuck pending due to the recipient being unable to process them. diff --git a/changelog.d/freebsd-docs.skip b/changelog.d/freebsd-docs.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/get-statuses-param.change b/changelog.d/get-statuses-param.change deleted file mode 100644 index 3edcad268..000000000 --- a/changelog.d/get-statuses-param.change +++ /dev/null @@ -1 +0,0 @@ -Support `id` param in `GET /api/v1/statuses` \ No newline at end of file diff --git a/changelog.d/hashtag-feeds-restricted.add b/changelog.d/hashtag-feeds-restricted.add deleted file mode 100644 index accac9c9c..000000000 --- a/changelog.d/hashtag-feeds-restricted.add +++ /dev/null @@ -1 +0,0 @@ -Repesct :restrict_unauthenticated for hashtag rss/atom feeds \ No newline at end of file diff --git a/changelog.d/identity-proofs.remove b/changelog.d/identity-proofs.remove deleted file mode 100644 index efe1c34f5..000000000 --- a/changelog.d/identity-proofs.remove +++ /dev/null @@ -1 +0,0 @@ -Remove stub for /api/v1/accounts/:id/identity_proofs (deprecated by Mastodon 3.5.0) \ No newline at end of file diff --git a/changelog.d/incoming-blocks.fix b/changelog.d/incoming-blocks.fix deleted file mode 100644 index 3228d7318..000000000 --- a/changelog.d/incoming-blocks.fix +++ /dev/null @@ -1 +0,0 @@ -Fix incoming Block activities being rejected diff --git a/changelog.d/ldap-ca.add b/changelog.d/ldap-ca.add deleted file mode 100644 index 32ecbb5c0..000000000 --- a/changelog.d/ldap-ca.add +++ /dev/null @@ -1 +0,0 @@ -LDAP configuration now permits overriding the CA root certificate file for TLS validation. diff --git a/changelog.d/ldap-password-change.add b/changelog.d/ldap-password-change.add deleted file mode 100644 index 7ca555ee4..000000000 --- a/changelog.d/ldap-password-change.add +++ /dev/null @@ -1 +0,0 @@ -LDAP now supports users changing their passwords diff --git a/changelog.d/ldap-refactor.change b/changelog.d/ldap-refactor.change deleted file mode 100644 index 1510eea6a..000000000 --- a/changelog.d/ldap-refactor.change +++ /dev/null @@ -1 +0,0 @@ -LDAP authentication has been refactored to operate as a GenServer process which will maintain an active connection to the LDAP server. diff --git a/changelog.d/ldap-tls.fix b/changelog.d/ldap-tls.fix deleted file mode 100644 index b15137d77..000000000 --- a/changelog.d/ldap-tls.fix +++ /dev/null @@ -1 +0,0 @@ -STARTTLS certificate and hostname verification for LDAP authentication diff --git a/changelog.d/ldap-warning.skip b/changelog.d/ldap-warning.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/ldaps.fix b/changelog.d/ldaps.fix deleted file mode 100644 index a1dc901ab..000000000 --- a/changelog.d/ldaps.fix +++ /dev/null @@ -1 +0,0 @@ -LDAPS connections (implicit TLS) are now supported. diff --git a/changelog.d/list-id-visibility.add b/changelog.d/list-id-visibility.add deleted file mode 100644 index 2fea2d771..000000000 --- a/changelog.d/list-id-visibility.add +++ /dev/null @@ -1 +0,0 @@ -Include list id in StatusView \ No newline at end of file diff --git a/changelog.d/manifest-icon-size.skip b/changelog.d/manifest-icon-size.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/mediav2_status.fix b/changelog.d/mediav2_status.fix deleted file mode 100644 index 28e93e030..000000000 --- a/changelog.d/mediav2_status.fix +++ /dev/null @@ -1 +0,0 @@ -Fix /api/v2/media returning the wrong status code (202) for media processed synchronously diff --git a/changelog.d/meilisearch-misc-fixes.fix b/changelog.d/meilisearch-misc-fixes.fix deleted file mode 100644 index 0f127d3a8..000000000 --- a/changelog.d/meilisearch-misc-fixes.fix +++ /dev/null @@ -1 +0,0 @@ -Miscellaneous fixes for Meilisearch support diff --git a/changelog.d/module-search-in-pleroma-ctl.fix b/changelog.d/module-search-in-pleroma-ctl.fix deleted file mode 100644 index d32fe3f33..000000000 --- a/changelog.d/module-search-in-pleroma-ctl.fix +++ /dev/null @@ -1 +0,0 @@ -Fix pleroma_ctl mix task calls sometimes not being found diff --git a/changelog.d/mogrify.skip b/changelog.d/mogrify.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/mrf-cleanup.skip b/changelog.d/mrf-cleanup.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/mrf-fodirectreply.add b/changelog.d/mrf-fodirectreply.add deleted file mode 100644 index 10fd5d16a..000000000 --- a/changelog.d/mrf-fodirectreply.add +++ /dev/null @@ -1 +0,0 @@ -Added MRF.FODirectReply which changes replies to followers-only posts to be direct. diff --git a/changelog.d/mrf-id_filter.add b/changelog.d/mrf-id_filter.add deleted file mode 100644 index f556f9bc4..000000000 --- a/changelog.d/mrf-id_filter.add +++ /dev/null @@ -1 +0,0 @@ -Add `id_filter` to MRF to filter URLs and their domain prior to fetching \ No newline at end of file diff --git a/changelog.d/mrf-quietreply.add b/changelog.d/mrf-quietreply.add deleted file mode 100644 index 4ed20bce6..000000000 --- a/changelog.d/mrf-quietreply.add +++ /dev/null @@ -1 +0,0 @@ -Added MRF.QuietReply which prevents replies to public posts from being published to the timelines diff --git a/changelog.d/notifications-group-key.add b/changelog.d/notifications-group-key.add deleted file mode 100644 index 386927f4a..000000000 --- a/changelog.d/notifications-group-key.add +++ /dev/null @@ -1 +0,0 @@ -Add `group_key` to notifications \ No newline at end of file diff --git a/changelog.d/notifications-marker.change b/changelog.d/notifications-marker.change deleted file mode 100644 index 9e350a95c..000000000 --- a/changelog.d/notifications-marker.change +++ /dev/null @@ -1 +0,0 @@ -Fix 'Setting a marker should mark notifications as read' \ No newline at end of file diff --git a/changelog.d/oauth-app-spam.fix b/changelog.d/oauth-app-spam.fix deleted file mode 100644 index cdc2e816d..000000000 --- a/changelog.d/oauth-app-spam.fix +++ /dev/null @@ -1 +0,0 @@ -Add a rate limiter to the OAuth App creation endpoint and ensure registered apps are assigned to users. diff --git a/changelog.d/oban-recevier-improvements.fix b/changelog.d/oban-recevier-improvements.fix deleted file mode 100644 index f91502ed2..000000000 --- a/changelog.d/oban-recevier-improvements.fix +++ /dev/null @@ -1 +0,0 @@ -ReceiverWorker will cancel processing jobs instead of retrying if the user cannot be fetched due to 403, 404, or 410 errors or if the account is disabled locally. diff --git a/changelog.d/oban-uniques.change b/changelog.d/oban-uniques.change deleted file mode 100644 index d9deb4696..000000000 --- a/changelog.d/oban-uniques.change +++ /dev/null @@ -1 +0,0 @@ -Adjust more Oban workers to enforce unique job constraints. diff --git a/changelog.d/oban-update.change b/changelog.d/oban-update.change deleted file mode 100644 index 48a54ed2d..000000000 --- a/changelog.d/oban-update.change +++ /dev/null @@ -1 +0,0 @@ -Oban updated to 2.18.3 diff --git a/changelog.d/oban_gun_snooze.change b/changelog.d/oban_gun_snooze.change deleted file mode 100644 index c94525b2a..000000000 --- a/changelog.d/oban_gun_snooze.change +++ /dev/null @@ -1 +0,0 @@ -Publisher behavior improvement when snoozing Oban jobs due to Gun connection pool contention. diff --git a/changelog.d/poll-refresh.change b/changelog.d/poll-refresh.change deleted file mode 100644 index b755128a1..000000000 --- a/changelog.d/poll-refresh.change +++ /dev/null @@ -1 +0,0 @@ -Poll results refreshing is handled asynchronously and will not attempt to keep fetching updates to a closed poll. diff --git a/changelog.d/profile-image-descriptions.add b/changelog.d/profile-image-descriptions.add deleted file mode 100644 index 85cc48083..000000000 --- a/changelog.d/profile-image-descriptions.add +++ /dev/null @@ -1 +0,0 @@ -Allow providing avatar/header descriptions \ No newline at end of file diff --git a/changelog.d/profile-image-descriptions.skip b/changelog.d/profile-image-descriptions.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/publisher-reachability.fix b/changelog.d/publisher-reachability.fix deleted file mode 100644 index 3f50be581..000000000 --- a/changelog.d/publisher-reachability.fix +++ /dev/null @@ -1 +0,0 @@ -Address case where instance reachability status couldn't be updated diff --git a/changelog.d/release-tuning.change b/changelog.d/release-tuning.change deleted file mode 100644 index bf9abc3ad..000000000 --- a/changelog.d/release-tuning.change +++ /dev/null @@ -1 +0,0 @@ -Tuning for release builds to lower CPU usage. diff --git a/changelog.d/remote-object-fetcher.fix b/changelog.d/remote-object-fetcher.fix deleted file mode 100644 index dcf2b1b31..000000000 --- a/changelog.d/remote-object-fetcher.fix +++ /dev/null @@ -1 +0,0 @@ -Remote Fetcher Worker recognizes more permanent failure errors diff --git a/changelog.d/remote-report-policy.add b/changelog.d/remote-report-policy.add deleted file mode 100644 index 1cf25b1a8..000000000 --- a/changelog.d/remote-report-policy.add +++ /dev/null @@ -1 +0,0 @@ -Added RemoteReportPolicy from Rebased for handling bogus federated reports diff --git a/changelog.d/rich-media-no-heads.change b/changelog.d/rich-media-no-heads.change deleted file mode 100644 index 0bab323aa..000000000 --- a/changelog.d/rich-media-no-heads.change +++ /dev/null @@ -1 +0,0 @@ -Rich Media preview fetching will skip making an HTTP HEAD request to check a URL for allowed content type and length if the Tesla adapter is Gun or Finch diff --git a/changelog.d/scrubbers-allow-mention-hashtag.add b/changelog.d/scrubbers-allow-mention-hashtag.add deleted file mode 100644 index c12ab1ffb..000000000 --- a/changelog.d/scrubbers-allow-mention-hashtag.add +++ /dev/null @@ -1 +0,0 @@ -scrubbers/default: Allow "mention hashtag" classes used by Mastodon \ No newline at end of file diff --git a/changelog.d/se-opt-out.change b/changelog.d/se-opt-out.change deleted file mode 100644 index dd694033f..000000000 --- a/changelog.d/se-opt-out.change +++ /dev/null @@ -1 +0,0 @@ -Fix nonexisting user will not generate metadata for search engine opt-out diff --git a/changelog.d/stream-follow-relationships-count.fix b/changelog.d/stream-follow-relationships-count.fix deleted file mode 100644 index 68452a88b..000000000 --- a/changelog.d/stream-follow-relationships-count.fix +++ /dev/null @@ -1 +0,0 @@ -StreamerView: Do not leak follows count if hidden \ No newline at end of file diff --git a/changelog.d/swoosh-mua.add b/changelog.d/swoosh-mua.add deleted file mode 100644 index d4c4bbd08..000000000 --- a/changelog.d/swoosh-mua.add +++ /dev/null @@ -1 +0,0 @@ -Added dependencies for Swoosh's Mua mail adapter diff --git a/changelog.d/text-extensions.skip b/changelog.d/text-extensions.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/todo-cleanup.skip b/changelog.d/todo-cleanup.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/token-view-scopes.add b/changelog.d/token-view-scopes.add deleted file mode 100644 index e24fa38e6..000000000 --- a/changelog.d/token-view-scopes.add +++ /dev/null @@ -1 +0,0 @@ -Include session scopes in TokenView \ No newline at end of file diff --git a/changelog.d/update-oban.change b/changelog.d/update-oban.change deleted file mode 100644 index a67b3e3cf..000000000 --- a/changelog.d/update-oban.change +++ /dev/null @@ -1 +0,0 @@ -Update Oban to 2.18 diff --git a/changelog.d/user-factory.skip b/changelog.d/user-factory.skip deleted file mode 100644 index e69de29bb..000000000 diff --git a/changelog.d/user-imports.fix b/changelog.d/user-imports.fix deleted file mode 100644 index 0076c73d7..000000000 --- a/changelog.d/user-imports.fix +++ /dev/null @@ -1 +0,0 @@ -Imports of blocks, mutes, and follows would retry repeatedly due to incorrect error handling and all work executed in a single job diff --git a/changelog.d/vapid_keyword_fallback.fix b/changelog.d/vapid_keyword_fallback.fix deleted file mode 100644 index aa48f8938..000000000 --- a/changelog.d/vapid_keyword_fallback.fix +++ /dev/null @@ -1 +0,0 @@ -Make vapid_config return empty array, fixing preloading for instances without push notifications configured \ No newline at end of file diff --git a/changelog.d/workerhelper.change b/changelog.d/workerhelper.change deleted file mode 100644 index 539c9b54f..000000000 --- a/changelog.d/workerhelper.change +++ /dev/null @@ -1 +0,0 @@ -Worker configuration is no longer available. This only affects custom max_retries values for a couple Oban queues. diff --git a/config/config.exs b/config/config.exs index cecb6b424..3e50b4207 100644 --- a/config/config.exs +++ b/config/config.exs @@ -66,6 +66,7 @@ config :pleroma, Pleroma.Upload, filename_display_max_length: 30, default_description: :filename, base_url: nil + allowed_mime_types: ["image", "audio", "video"] config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads" @@ -150,7 +151,10 @@ config :mime, :types, %{ "application/xrd+xml" => ["xrd+xml"], "application/jrd+json" => ["jrd+json"], "application/activity+json" => ["activity+json"], - "application/ld+json" => ["activity+json"] + "application/ld+json" => ["activity+json"], + # Can be removed when bumping MIME past 2.0.5 + # see https://akkoma.dev/AkkomaGang/akkoma/issues/657 + "image/apng" => ["apng"] } config :tesla, adapter: Tesla.Adapter.Hackney @@ -357,7 +361,8 @@ config :pleroma, :activitypub, follow_handshake_timeout: 500, note_replies_output_limit: 5, sign_object_fetches: true, - authorized_fetch_mode: false + authorized_fetch_mode: false, + client_api_enabled: false config :pleroma, :streamer, workers: 3, @@ -426,11 +431,6 @@ config :pleroma, :mrf_vocabulary, accept: [], reject: [] -config :pleroma, :mrf_dnsrbl, - nameserver: "127.0.0.1", - port: 53, - zone: "bl.pleroma.com" - # threshold of 7 days config :pleroma, :mrf_object_age, threshold: 604_800, @@ -820,6 +820,13 @@ config :pleroma, :frontends, "https://lily-is.land/infra/glitch-lily/-/jobs/artifacts/${ref}/download?job=build", "ref" => "servant", "build_dir" => "public" + }, + "pl-fe" => %{ + "name" => "pl-fe", + "git" => "https://github.com/mkljczk/pl-fe", + "build_url" => "https://pl.mkljczk.pl/pl-fe.zip", + "ref" => "develop", + "build_dir" => "." } } diff --git a/config/description.exs b/config/description.exs index 47f4771eb..996978298 100644 --- a/config/description.exs +++ b/config/description.exs @@ -117,6 +117,19 @@ config :pleroma, :config_description, [ key: :filename_display_max_length, type: :integer, description: "Set max length of a filename to display. 0 = no limit. Default: 30" + }, + %{ + key: :allowed_mime_types, + label: "Allowed MIME types", + type: {:list, :string}, + description: + "List of MIME (main) types uploads are allowed to identify themselves with. Other types may still be uploaded, but will identify as a generic binary to clients. WARNING: Loosening this over the defaults can lead to security issues. Removing types is safe, but only add to the list if you are sure you know what you are doing.", + suggestions: [ + "image", + "audio", + "video", + "font" + ] } ] }, @@ -1772,6 +1785,11 @@ config :pleroma, :config_description, [ type: :integer, description: "Following handshake timeout", suggestions: [500] + }, + %{ + key: :client_api_enabled, + type: :boolean, + description: "Allow client to server ActivityPub interactions" } ] }, @@ -3302,8 +3320,7 @@ config :pleroma, :config_description, [ suggestions: [ Pleroma.Web.Preload.Providers.Instance, Pleroma.Web.Preload.Providers.User, - Pleroma.Web.Preload.Providers.Timelines, - Pleroma.Web.Preload.Providers.StatusNet + Pleroma.Web.Preload.Providers.Timelines ] } ] diff --git a/config/test.exs b/config/test.exs index 6fe84478a..0f8b12ffe 100644 --- a/config/test.exs +++ b/config/test.exs @@ -38,7 +38,10 @@ config :pleroma, :instance, external_user_synchronization: false, static_dir: "test/instance_static/" -config :pleroma, :activitypub, sign_object_fetches: false, follow_handshake_timeout: 0 +config :pleroma, :activitypub, + sign_object_fetches: false, + follow_handshake_timeout: 0, + client_api_enabled: true # Configure your database config :pleroma, Pleroma.Repo, @@ -144,6 +147,7 @@ config :pleroma, Pleroma.Search.Meilisearch, url: "http://127.0.0.1:7700/", priv config :phoenix, :plug_init_mode, :runtime config :pleroma, :config_impl, Pleroma.UnstubbedConfigMock +config :pleroma, :datetime_impl, Pleroma.DateTimeMock config :pleroma, Pleroma.PromEx, disabled: true @@ -158,6 +162,12 @@ config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMoc config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, config_impl: Pleroma.StaticStubbedConfigMock config :pleroma, Pleroma.Web.Plugs.HTTPSignaturePlug, config_impl: Pleroma.StaticStubbedConfigMock +config :pleroma, Pleroma.Upload.Filter.AnonymizeFilename, + config_impl: Pleroma.StaticStubbedConfigMock + +config :pleroma, Pleroma.Upload.Filter.Mogrify, config_impl: Pleroma.StaticStubbedConfigMock +config :pleroma, Pleroma.Upload.Filter.Mogrify, mogrify_impl: Pleroma.MogrifyMock + config :pleroma, Pleroma.Signature, http_signatures_impl: Pleroma.StubbedHTTPSignaturesMock peer_module = diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 36e9cbba2..6e2fddcb6 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -98,7 +98,7 @@ To add configuration to your config file, you can copy it from the base config. * `moderator_privileges`: A list of privileges a moderator has (e.g. delete messages, manage reports...) * Possible values are the same as for `admin_privileges` -## :database +## :features * `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes). ## Background migrations diff --git a/docs/installation/debian_based_en.md b/docs/installation/debian_based_en.md index 21cfe2bff..30f48792d 100644 --- a/docs/installation/debian_based_en.md +++ b/docs/installation/debian_based_en.md @@ -72,7 +72,7 @@ sudo -Hu pleroma mix deps.get * Generate the configuration: ```shell -sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen` +sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen ``` * During this process: diff --git a/lib/mix/tasks/pleroma/emoji.ex b/lib/mix/tasks/pleroma/emoji.ex index 8b9c921c8..b656f161f 100644 --- a/lib/mix/tasks/pleroma/emoji.ex +++ b/lib/mix/tasks/pleroma/emoji.ex @@ -93,6 +93,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do ) files = fetch_and_decode!(files_loc) + files_to_unzip = for({_, f} <- files, do: f) IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name])) @@ -103,17 +104,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do pack_name ]) - files_to_unzip = - Enum.map( - files, - fn {_, f} -> to_charlist(f) end - ) - - {:ok, _} = - :zip.unzip(binary_archive, - cwd: String.to_charlist(pack_path), - file_list: files_to_unzip - ) + {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, pack_path, files_to_unzip) IO.puts(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name])) @@ -201,7 +192,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do tmp_pack_dir = Path.join(System.tmp_dir!(), "emoji-pack-#{name}") - {:ok, _} = :zip.unzip(binary_archive, cwd: String.to_charlist(tmp_pack_dir)) + {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, tmp_pack_dir) emoji_map = Pleroma.Emoji.Loader.make_shortcode_to_file_map(tmp_pack_dir, exts) diff --git a/lib/pleroma/config.ex b/lib/pleroma/config.ex index cf1453c9b..1bc371dec 100644 --- a/lib/pleroma/config.ex +++ b/lib/pleroma/config.ex @@ -27,6 +27,7 @@ defmodule Pleroma.Config do Application.get_env(:pleroma, key, default) end + @impl true def get!(key) do value = get(key, nil) diff --git a/lib/pleroma/config/getting.ex b/lib/pleroma/config/getting.ex index ec93fd02a..adf764f89 100644 --- a/lib/pleroma/config/getting.ex +++ b/lib/pleroma/config/getting.ex @@ -5,10 +5,13 @@ defmodule Pleroma.Config.Getting do @callback get(any()) :: any() @callback get(any(), any()) :: any() + @callback get!(any()) :: any() def get(key), do: get(key, nil) def get(key, default), do: impl().get(key, default) + def get!(key), do: impl().get!(key) + def impl do Application.get_env(:pleroma, :config_impl, Pleroma.Config) end diff --git a/lib/pleroma/constants.ex b/lib/pleroma/constants.ex index 46c87182f..2cb8112ec 100644 --- a/lib/pleroma/constants.ex +++ b/lib/pleroma/constants.ex @@ -20,7 +20,8 @@ defmodule Pleroma.Constants do "deleted_activity_id", "pleroma_internal", "generator", - "rules" + "rules", + "language" ] ) @@ -36,10 +37,12 @@ defmodule Pleroma.Constants do "updated", "emoji", "content", + "contentMap", "summary", "sensitive", "attachment", - "generator" + "generator", + "language" ] ) @@ -100,7 +103,8 @@ defmodule Pleroma.Constants do "Announce", "Undo", "Flag", - "EmojiReact" + "EmojiReact", + "Listen" ] ) diff --git a/lib/pleroma/date_time.ex b/lib/pleroma/date_time.ex new file mode 100644 index 000000000..d79cb848b --- /dev/null +++ b/lib/pleroma/date_time.ex @@ -0,0 +1,3 @@ +defmodule Pleroma.DateTime do + @callback utc_now() :: NaiveDateTime.t() +end diff --git a/lib/pleroma/date_time/impl.ex b/lib/pleroma/date_time/impl.ex new file mode 100644 index 000000000..102be047b --- /dev/null +++ b/lib/pleroma/date_time/impl.ex @@ -0,0 +1,6 @@ +defmodule Pleroma.DateTime.Impl do + @behaviour Pleroma.DateTime + + @impl true + def utc_now, do: NaiveDateTime.utc_now() +end diff --git a/lib/pleroma/ecto_type/activity_pub/object_validators/content_language_map.ex b/lib/pleroma/ecto_type/activity_pub/object_validators/content_language_map.ex new file mode 100644 index 000000000..dcdab19f8 --- /dev/null +++ b/lib/pleroma/ecto_type/activity_pub/object_validators/content_language_map.ex @@ -0,0 +1,49 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2023 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap do + use Ecto.Type + + import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode, + only: [good_locale_code?: 1] + + def type, do: :map + + def cast(%{} = object) do + with {status, %{} = data} when status in [:modified, :ok] <- validate_map(object) do + {:ok, data} + else + {_, nil} -> {:ok, nil} + {:error, _} -> :error + end + end + + def cast(_), do: :error + + def dump(data), do: {:ok, data} + + def load(data), do: {:ok, data} + + defp validate_map(%{} = object) do + {status, data} = + object + |> Enum.reduce({:ok, %{}}, fn + {lang, value}, {status, acc} when is_binary(lang) and is_binary(value) -> + if good_locale_code?(lang) do + {status, Map.put(acc, lang, value)} + else + {:modified, acc} + end + + _, {_status, acc} -> + {:modified, acc} + end) + + if data == %{} do + {status, nil} + else + {status, data} + end + end +end diff --git a/lib/pleroma/ecto_type/activity_pub/object_validators/language_code.ex b/lib/pleroma/ecto_type/activity_pub/object_validators/language_code.ex new file mode 100644 index 000000000..4779deeb0 --- /dev/null +++ b/lib/pleroma/ecto_type/activity_pub/object_validators/language_code.ex @@ -0,0 +1,27 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2023 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode do + use Ecto.Type + + def type, do: :string + + def cast(language) when is_binary(language) do + if good_locale_code?(language) do + {:ok, language} + else + {:error, :invalid_language} + end + end + + def cast(_), do: :error + + def dump(data), do: {:ok, data} + + def load(data), do: {:ok, data} + + def good_locale_code?(code) when is_binary(code), do: code =~ ~r<^[a-zA-Z0-9\-]+\z$> + + def good_locale_code?(_code), do: false +end diff --git a/lib/pleroma/emoji/pack.ex b/lib/pleroma/emoji/pack.ex index 785fdb8b2..c58748d3c 100644 --- a/lib/pleroma/emoji/pack.ex +++ b/lib/pleroma/emoji/pack.ex @@ -24,12 +24,13 @@ defmodule Pleroma.Emoji.Pack do alias Pleroma.Emoji alias Pleroma.Emoji.Pack + alias Pleroma.SafeZip alias Pleroma.Utils @spec create(String.t()) :: {:ok, t()} | {:error, File.posix()} | {:error, :empty_values} def create(name) do with :ok <- validate_not_empty([name]), - dir <- Path.join(emoji_path(), name), + dir <- path_join_name_safe(emoji_path(), name), :ok <- File.mkdir(dir) do save_pack(%__MODULE__{pack_file: Path.join(dir, "pack.json")}) end @@ -65,43 +66,21 @@ defmodule Pleroma.Emoji.Pack do {:ok, [binary()]} | {:error, File.posix(), binary()} | {:error, :empty_values} def delete(name) do with :ok <- validate_not_empty([name]), - pack_path <- Path.join(emoji_path(), name) do + pack_path <- path_join_name_safe(emoji_path(), name) do File.rm_rf(pack_path) end end - @spec unpack_zip_emojies(list(tuple())) :: list(map()) - defp unpack_zip_emojies(zip_files) do - Enum.reduce(zip_files, [], fn - {_, path, s, _, _, _}, acc when elem(s, 2) == :regular -> - with( - filename <- Path.basename(path), - shortcode <- Path.basename(filename, Path.extname(filename)), - false <- Emoji.exist?(shortcode) - ) do - [%{path: path, filename: path, shortcode: shortcode} | acc] - else - _ -> acc - end - - _, acc -> - acc - end) - end - @spec add_file(t(), String.t(), Path.t(), Plug.Upload.t()) :: {:ok, t()} | {:error, File.posix() | atom()} def add_file(%Pack{} = pack, _, _, %Plug.Upload{content_type: "application/zip"} = file) do - with {:ok, zip_files} <- :zip.table(to_charlist(file.path)), - [_ | _] = emojies <- unpack_zip_emojies(zip_files), + with {:ok, zip_files} <- SafeZip.list_dir_file(file.path), + [_ | _] = emojies <- map_zip_emojies(zip_files), {:ok, tmp_dir} <- Utils.tmp_dir("emoji") do try do {:ok, _emoji_files} = - :zip.unzip( - to_charlist(file.path), - [{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, String.to_charlist(tmp_dir)}] - ) + SafeZip.unzip_file(file.path, tmp_dir, Enum.map(emojies, & &1[:path])) {_, updated_pack} = Enum.map_reduce(emojies, pack, fn item, emoji_pack -> @@ -292,7 +271,7 @@ defmodule Pleroma.Emoji.Pack do @spec load_pack(String.t()) :: {:ok, t()} | {:error, :file.posix()} def load_pack(name) do name = Path.basename(name) - pack_file = Path.join([emoji_path(), name, "pack.json"]) + pack_file = path_join_name_safe(emoji_path(), name) |> Path.join("pack.json") with {:ok, _} <- File.stat(pack_file), {:ok, pack_data} <- File.read(pack_file) do @@ -416,10 +395,9 @@ defmodule Pleroma.Emoji.Pack do end defp create_archive_and_cache(pack, hash) do - files = [~c"pack.json" | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)] - - {:ok, {_, result}} = - :zip.zip(~c"#{pack.name}.zip", files, [:memory, cwd: to_charlist(pack.path)]) + pack_file_list = Enum.into(pack.files, [], fn {_, f} -> f end) + files = ["pack.json" | pack_file_list] + {:ok, {_, result}} = SafeZip.zip("#{pack.name}.zip", files, pack.path, true) ttl_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file]) overall_ttl = :timer.seconds(ttl_per_file * Enum.count(files)) @@ -478,7 +456,7 @@ defmodule Pleroma.Emoji.Pack do end defp save_file(%Plug.Upload{path: upload_path}, pack, filename) do - file_path = Path.join(pack.path, filename) + file_path = path_join_safe(pack.path, filename) create_subdirs(file_path) with {:ok, _} <- File.copy(upload_path, file_path) do @@ -497,8 +475,8 @@ defmodule Pleroma.Emoji.Pack do end defp rename_file(pack, filename, new_filename) do - old_path = Path.join(pack.path, filename) - new_path = Path.join(pack.path, new_filename) + old_path = path_join_safe(pack.path, filename) + new_path = path_join_safe(pack.path, new_filename) create_subdirs(new_path) with :ok <- File.rename(old_path, new_path) do @@ -516,7 +494,7 @@ defmodule Pleroma.Emoji.Pack do defp remove_file(pack, shortcode) do with {:ok, filename} <- get_filename(pack, shortcode), - emoji <- Path.join(pack.path, filename), + emoji <- path_join_safe(pack.path, filename), :ok <- File.rm(emoji) do remove_dir_if_empty(emoji, filename) end @@ -534,7 +512,7 @@ defmodule Pleroma.Emoji.Pack do defp get_filename(pack, shortcode) do with %{^shortcode => filename} when is_binary(filename) <- pack.files, - file_path <- Path.join(pack.path, filename), + file_path <- path_join_safe(pack.path, filename), {:ok, _} <- File.stat(file_path) do {:ok, filename} else @@ -584,11 +562,10 @@ defmodule Pleroma.Emoji.Pack do defp unzip(archive, pack_info, remote_pack, local_pack) do with :ok <- File.mkdir_p!(local_pack.path) do - files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end) + files = Enum.map(remote_pack["files"], fn {_, path} -> path end) # Fallback cannot contain a pack.json file - files = if pack_info[:fallback], do: files, else: [~c"pack.json" | files] - - :zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files) + files = if pack_info[:fallback], do: files, else: ["pack.json" | files] + SafeZip.unzip_data(archive, local_pack.path, files) end end @@ -649,13 +626,43 @@ defmodule Pleroma.Emoji.Pack do end defp validate_has_all_files(pack, zip) do - with {:ok, f_list} <- :zip.unzip(zip, [:memory]) do - # Check if all files from the pack.json are in the archive - pack.files - |> Enum.all?(fn {_, from_manifest} -> - List.keyfind(f_list, to_charlist(from_manifest), 0) + # Check if all files from the pack.json are in the archive + eset = + Enum.reduce(pack.files, MapSet.new(), fn + {_, file}, s -> MapSet.put(s, to_charlist(file)) end) - |> if(do: :ok, else: {:error, :incomplete}) + + if SafeZip.contains_all_data?(zip, eset), + do: :ok, + else: {:error, :incomplete} + end + + defp path_join_name_safe(dir, name) do + if to_string(name) != Path.basename(name) or name in ["..", ".", ""] do + raise "Invalid or malicious pack name: #{name}" + else + Path.join(dir, name) end end + + defp path_join_safe(dir, path) do + {:ok, safe_path} = Path.safe_relative(path) + Path.join(dir, safe_path) + end + + defp map_zip_emojies(zip_files) do + Enum.reduce(zip_files, [], fn path, acc -> + with( + filename <- Path.basename(path), + shortcode <- Path.basename(filename, Path.extname(filename)), + # note: this only checks the shortcode, if an emoji already exists on the same path, but + # with a different shortcode, the existing one will be degraded to an alias of the new + false <- Emoji.exist?(shortcode) + ) do + [%{path: path, filename: path, shortcode: shortcode} | acc] + else + _ -> acc + end + end) + end end diff --git a/lib/pleroma/frontend.ex b/lib/pleroma/frontend.ex index a4f427ae5..fe7f525ea 100644 --- a/lib/pleroma/frontend.ex +++ b/lib/pleroma/frontend.ex @@ -65,24 +65,12 @@ defmodule Pleroma.Frontend do end def unzip(zip, dest) do - with {:ok, unzipped} <- :zip.unzip(zip, [:memory]) do - File.rm_rf!(dest) - File.mkdir_p!(dest) + File.rm_rf!(dest) + File.mkdir_p!(dest) - Enum.each(unzipped, fn {filename, data} -> - path = filename - - new_file_path = Path.join(dest, path) - - path - |> Path.dirname() - |> then(&Path.join(dest, &1)) - |> File.mkdir_p!() - - if not File.dir?(new_file_path) do - File.write!(new_file_path, data) - end - end) + case Pleroma.SafeZip.unzip_data(zip, dest) do + {:ok, _} -> :ok + error -> error end end diff --git a/lib/pleroma/hashtag.ex b/lib/pleroma/hashtag.ex index a43d88220..3682f0c14 100644 --- a/lib/pleroma/hashtag.ex +++ b/lib/pleroma/hashtag.ex @@ -12,6 +12,7 @@ defmodule Pleroma.Hashtag do alias Pleroma.Hashtag alias Pleroma.Object alias Pleroma.Repo + alias Pleroma.User.HashtagFollow schema "hashtags" do field(:name, :string) @@ -27,6 +28,14 @@ defmodule Pleroma.Hashtag do |> String.trim() end + def get_by_id(id) do + Repo.get(Hashtag, id) + end + + def get_by_name(name) do + Repo.get_by(Hashtag, name: normalize_name(name)) + end + def get_or_create_by_name(name) do changeset = changeset(%Hashtag{}, %{name: name}) @@ -103,4 +112,22 @@ defmodule Pleroma.Hashtag do {:ok, deleted_count} end end + + def get_followers(%Hashtag{id: hashtag_id}) do + from(hf in HashtagFollow) + |> where([hf], hf.hashtag_id == ^hashtag_id) + |> join(:inner, [hf], u in assoc(hf, :user)) + |> select([hf, u], u.id) + |> Repo.all() + end + + def get_recipients_for_activity(%Pleroma.Activity{object: %{hashtags: tags}}) + when is_list(tags) do + tags + |> Enum.map(&get_followers/1) + |> List.flatten() + |> Enum.uniq() + end + + def get_recipients_for_activity(_activity), do: [] end diff --git a/lib/pleroma/mogrify_behaviour.ex b/lib/pleroma/mogrify_behaviour.ex new file mode 100644 index 000000000..234cb86cf --- /dev/null +++ b/lib/pleroma/mogrify_behaviour.ex @@ -0,0 +1,15 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.MogrifyBehaviour do + @moduledoc """ + Behaviour for Mogrify operations. + This module defines the interface for Mogrify operations that can be mocked in tests. + """ + + @callback open(binary()) :: map() + @callback custom(map(), binary()) :: map() + @callback custom(map(), binary(), binary()) :: map() + @callback save(map(), keyword()) :: map() +end diff --git a/lib/pleroma/mogrify_wrapper.ex b/lib/pleroma/mogrify_wrapper.ex new file mode 100644 index 000000000..17174fd97 --- /dev/null +++ b/lib/pleroma/mogrify_wrapper.ex @@ -0,0 +1,30 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.MogrifyWrapper do + @moduledoc """ + Default implementation of MogrifyBehaviour that delegates to Mogrify. + """ + @behaviour Pleroma.MogrifyBehaviour + + @impl true + def open(file) do + Mogrify.open(file) + end + + @impl true + def custom(image, action) do + Mogrify.custom(image, action) + end + + @impl true + def custom(image, action, options) do + Mogrify.custom(image, action, options) + end + + @impl true + def save(image, opts) do + Mogrify.save(image, opts) + end +end diff --git a/lib/pleroma/object/containment.ex b/lib/pleroma/object/containment.ex index f6106cb3f..77fac12c0 100644 --- a/lib/pleroma/object/containment.ex +++ b/lib/pleroma/object/containment.ex @@ -47,6 +47,19 @@ defmodule Pleroma.Object.Containment do defp compare_uris(%URI{host: host} = _id_uri, %URI{host: host} = _other_uri), do: :ok defp compare_uris(_id_uri, _other_uri), do: :error + @doc """ + Checks whether an URL to fetch from is from the local server. + + We never want to fetch from ourselves; if it's not in the database + it can't be authentic and must be a counterfeit. + """ + def contain_local_fetch(id) do + case compare_uris(URI.parse(id), Pleroma.Web.Endpoint.struct_url()) do + :ok -> :error + _ -> :ok + end + end + @doc """ Checks that an imported AP object's actor matches the host it came from. """ diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex index c85a8b09f..b54ef9ce5 100644 --- a/lib/pleroma/object/fetcher.ex +++ b/lib/pleroma/object/fetcher.ex @@ -19,6 +19,8 @@ defmodule Pleroma.Object.Fetcher do require Logger require Pleroma.Constants + @mix_env Mix.env() + @spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()} defp reinject_object(%Object{data: %{}} = object, new_data) do Logger.debug("Reinjecting object #{new_data["id"]}") @@ -146,6 +148,7 @@ defmodule Pleroma.Object.Fetcher do with {:scheme, true} <- {:scheme, String.starts_with?(id, "http")}, {_, true} <- {:mrf, MRF.id_filter(id)}, + {_, :ok} <- {:local_fetch, Containment.contain_local_fetch(id)}, {:ok, body} <- get_object(id), {:ok, data} <- safe_json_decode(body), :ok <- Containment.contain_origin_from_id(id, data) do @@ -158,6 +161,9 @@ defmodule Pleroma.Object.Fetcher do {:scheme, _} -> {:error, "Unsupported URI scheme"} + {:local_fetch, _} -> + {:error, "Trying to fetch local resource"} + {:error, e} -> {:error, e} @@ -172,6 +178,19 @@ defmodule Pleroma.Object.Fetcher do def fetch_and_contain_remote_object_from_id(_id), do: {:error, "id must be a string"} + defp check_crossdomain_redirect(final_host, original_url) + + # Handle the common case in tests where responses don't include URLs + if @mix_env == :test do + defp check_crossdomain_redirect(nil, _) do + {:cross_domain_redirect, false} + end + end + + defp check_crossdomain_redirect(final_host, original_url) do + {:cross_domain_redirect, final_host != URI.parse(original_url).host} + end + defp get_object(id) do date = Pleroma.Signature.signed_date() @@ -181,19 +200,29 @@ defmodule Pleroma.Object.Fetcher do |> sign_fetch(id, date) case HTTP.get(id, headers) do + {:ok, %{body: body, status: code, headers: headers, url: final_url}} + when code in 200..299 -> + remote_host = if final_url, do: URI.parse(final_url).host, else: nil + + with {:cross_domain_redirect, false} <- check_crossdomain_redirect(remote_host, id), + {_, content_type} <- List.keyfind(headers, "content-type", 0), + {:ok, _media_type} <- verify_content_type(content_type) do + {:ok, body} + else + {:cross_domain_redirect, true} -> + {:error, {:cross_domain_redirect, true}} + + error -> + error + end + + # Handle the case where URL is not in the response (older HTTP library versions) {:ok, %{body: body, status: code, headers: headers}} when code in 200..299 -> case List.keyfind(headers, "content-type", 0) do {_, content_type} -> - case Plug.Conn.Utils.media_type(content_type) do - {:ok, "application", "activity+json", _} -> - {:ok, body} - - {:ok, "application", "ld+json", - %{"profile" => "https://www.w3.org/ns/activitystreams"}} -> - {:ok, body} - - _ -> - {:error, {:content_type, content_type}} + case verify_content_type(content_type) do + {:ok, _} -> {:ok, body} + error -> error end _ -> @@ -216,4 +245,17 @@ defmodule Pleroma.Object.Fetcher do defp safe_json_decode(nil), do: {:ok, nil} defp safe_json_decode(json), do: Jason.decode(json) + + defp verify_content_type(content_type) do + case Plug.Conn.Utils.media_type(content_type) do + {:ok, "application", "activity+json", _} -> + {:ok, :activity_json} + + {:ok, "application", "ld+json", %{"profile" => "https://www.w3.org/ns/activitystreams"}} -> + {:ok, :ld_json} + + _ -> + {:error, {:content_type, content_type}} + end + end end diff --git a/lib/pleroma/pagination.ex b/lib/pleroma/pagination.ex index 8db732cc9..66812b17b 100644 --- a/lib/pleroma/pagination.ex +++ b/lib/pleroma/pagination.ex @@ -89,9 +89,9 @@ defmodule Pleroma.Pagination do defp cast_params(params) do param_types = %{ - min_id: :string, - since_id: :string, - max_id: :string, + min_id: params[:id_type] || :string, + since_id: params[:id_type] || :string, + max_id: params[:id_type] || :string, offset: :integer, limit: :integer, skip_extra_order: :boolean, diff --git a/lib/pleroma/reverse_proxy.ex b/lib/pleroma/reverse_proxy.ex index 8aec4ae58..3c82f9996 100644 --- a/lib/pleroma/reverse_proxy.ex +++ b/lib/pleroma/reverse_proxy.ex @@ -17,6 +17,8 @@ defmodule Pleroma.ReverseProxy do @failed_request_ttl :timer.seconds(60) @methods ~w(GET HEAD) + @allowed_mime_types Pleroma.Config.get([Pleroma.Upload, :allowed_mime_types], []) + @cachex Pleroma.Config.get([:cachex, :provider], Cachex) def max_read_duration_default, do: @max_read_duration @@ -301,10 +303,26 @@ defmodule Pleroma.ReverseProxy do headers |> Enum.filter(fn {k, _} -> k in @keep_resp_headers end) |> build_resp_cache_headers(opts) + |> sanitise_content_type() |> build_resp_content_disposition_header(opts) |> Keyword.merge(Keyword.get(opts, :resp_headers, [])) end + defp sanitise_content_type(headers) do + original_ct = get_content_type(headers) + + safe_ct = + Pleroma.Web.Plugs.Utils.get_safe_mime_type( + %{allowed_mime_types: @allowed_mime_types}, + original_ct + ) + + [ + {"content-type", safe_ct} + | Enum.filter(headers, fn {k, _v} -> k != "content-type" end) + ] + end + defp build_resp_cache_headers(headers, _opts) do has_cache? = Enum.any?(headers, fn {k, _} -> k in @resp_cache_headers end) diff --git a/lib/pleroma/safe_zip.ex b/lib/pleroma/safe_zip.ex new file mode 100644 index 000000000..25fe434d6 --- /dev/null +++ b/lib/pleroma/safe_zip.ex @@ -0,0 +1,216 @@ +# Akkoma: Magically expressive social media +# Copyright © 2024 Akkoma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.SafeZip do + @moduledoc """ + Wraps the subset of Erlang's zip module we’d like to use + but enforces path-traversal safety everywhere and other checks. + + For convenience almost all functions accept both elixir strings and charlists, + but output elixir strings themselves. However, this means the input parameter type + can no longer be used to distinguish archive file paths from archive binary data in memory, + thus where needed both a _data and _file variant are provided. + """ + + @type text() :: String.t() | [char()] + + defp safe_path?(path) do + # Path accepts elixir’s chardata() + case Path.safe_relative(path) do + {:ok, _} -> true + _ -> false + end + end + + defp safe_type?(file_type) do + if file_type in [:regular, :directory] do + true + else + false + end + end + + defp maybe_add_file(_type, _path_charlist, nil), do: nil + + defp maybe_add_file(:regular, path_charlist, file_list), + do: [to_string(path_charlist) | file_list] + + defp maybe_add_file(_type, _path_charlist, file_list), do: file_list + + @spec check_safe_archive_and_maybe_list_files(binary() | [char()], [term()], boolean()) :: + {:ok, [String.t()]} | {:error, reason :: term()} + defp check_safe_archive_and_maybe_list_files(archive, opts, list) do + acc = if list, do: [], else: nil + + with {:ok, table} <- :zip.table(archive, opts) do + Enum.reduce_while(table, {:ok, acc}, fn + # ZIP comment + {:zip_comment, _}, acc -> + {:cont, acc} + + # File entry + {:zip_file, path, info, _comment, _offset, _comp_size}, {:ok, fl} -> + with {_, type} <- {:get_type, elem(info, 2)}, + {_, true} <- {:type, safe_type?(type)}, + {_, true} <- {:safe_path, safe_path?(path)} do + {:cont, {:ok, maybe_add_file(type, path, fl)}} + else + {:get_type, e} -> + {:halt, + {:error, "Couldn't determine file type of ZIP entry at #{path} (#{inspect(e)})"}} + + {:type, _} -> + {:halt, {:error, "Potentially unsafe file type in ZIP at: #{path}"}} + + {:safe_path, _} -> + {:halt, {:error, "Unsafe path in ZIP: #{path}"}} + end + + # new OTP version? + _, _acc -> + {:halt, {:error, "Unknown ZIP record type"}} + end) + end + end + + @spec check_safe_archive_and_list_files(binary() | [char()], [term()]) :: + {:ok, [String.t()]} | {:error, reason :: term()} + defp check_safe_archive_and_list_files(archive, opts \\ []) do + check_safe_archive_and_maybe_list_files(archive, opts, true) + end + + @spec check_safe_archive(binary() | [char()], [term()]) :: :ok | {:error, reason :: term()} + defp check_safe_archive(archive, opts \\ []) do + case check_safe_archive_and_maybe_list_files(archive, opts, false) do + {:ok, _} -> :ok + error -> error + end + end + + @spec check_safe_file_list([text()], text()) :: :ok | {:error, term()} + defp check_safe_file_list([], _), do: :ok + + defp check_safe_file_list([path | tail], cwd) do + with {_, true} <- {:path, safe_path?(path)}, + {_, {:ok, fstat}} <- {:stat, File.stat(Path.expand(path, cwd))}, + {_, true} <- {:type, safe_type?(fstat.type)} do + check_safe_file_list(tail, cwd) + else + {:path, _} -> + {:error, "Unsafe path escaping cwd: #{path}"} + + {:stat, e} -> + {:error, "Unable to check file type of #{path}: #{inspect(e)}"} + + {:type, _} -> + {:error, "Unsafe type at #{path}"} + end + end + + defp check_safe_file_list(_, _), do: {:error, "Malformed file_list"} + + @doc """ + Checks whether the archive data contais file entries for all paths from fset + + Note this really only accepts entries corresponding to regular _files_, + if a path is contained as for example an directory, this does not count as a match. + """ + @spec contains_all_data?(binary(), MapSet.t()) :: true | false + def contains_all_data?(archive_data, fset) do + with {:ok, table} <- :zip.table(archive_data) do + remaining = + Enum.reduce(table, fset, fn + {:zip_file, path, info, _comment, _offset, _comp_size}, fset -> + if elem(info, 2) == :regular do + MapSet.delete(fset, path) + else + fset + end + + _, _ -> + fset + end) + |> MapSet.size() + + if remaining == 0, do: true, else: false + else + _ -> false + end + end + + @doc """ + List all file entries in ZIP, or error if invalid or unsafe. + + Note this really only lists regular files, no directories, ZIP comments or other types! + """ + @spec list_dir_file(text()) :: {:ok, [String.t()]} | {:error, reason :: term()} + def list_dir_file(archive) do + path = to_charlist(archive) + check_safe_archive_and_list_files(path) + end + + defp stringify_zip({:ok, {fname, data}}), do: {:ok, {to_string(fname), data}} + defp stringify_zip({:ok, fname}), do: {:ok, to_string(fname)} + defp stringify_zip(ret), do: ret + + @spec zip(text(), text(), [text()], boolean()) :: + {:ok, file_name :: String.t()} + | {:ok, {file_name :: String.t(), file_data :: binary()}} + | {:error, reason :: term()} + def zip(name, file_list, cwd, memory \\ false) do + opts = [{:cwd, to_charlist(cwd)}] + opts = if memory, do: [:memory | opts], else: opts + + with :ok <- check_safe_file_list(file_list, cwd) do + file_list = for f <- file_list, do: to_charlist(f) + name = to_charlist(name) + stringify_zip(:zip.zip(name, file_list, opts)) + end + end + + @spec unzip_file(text(), text(), [text()] | nil) :: + {:ok, [String.t()]} + | {:error, reason :: term()} + | {:error, {name :: text(), reason :: term()}} + def unzip_file(archive, target_dir, file_list \\ nil) do + do_unzip(to_charlist(archive), to_charlist(target_dir), file_list) + end + + @spec unzip_data(binary(), text(), [text()] | nil) :: + {:ok, [String.t()]} + | {:error, reason :: term()} + | {:error, {name :: text(), reason :: term()}} + def unzip_data(archive, target_dir, file_list \\ nil) do + do_unzip(archive, to_charlist(target_dir), file_list) + end + + defp stringify_unzip({:ok, [{_fname, _data} | _] = filebinlist}), + do: {:ok, Enum.map(filebinlist, fn {fname, data} -> {to_string(fname), data} end)} + + defp stringify_unzip({:ok, [_fname | _] = filelist}), + do: {:ok, Enum.map(filelist, fn fname -> to_string(fname) end)} + + defp stringify_unzip({:error, {fname, term}}), do: {:error, {to_string(fname), term}} + defp stringify_unzip(ret), do: ret + + @spec do_unzip(binary() | [char()], text(), [text()] | nil) :: + {:ok, [String.t()]} + | {:error, reason :: term()} + | {:error, {name :: text(), reason :: term()}} + defp do_unzip(archive, target_dir, file_list) do + opts = + if file_list != nil do + [ + file_list: for(f <- file_list, do: to_charlist(f)), + cwd: target_dir + ] + else + [cwd: target_dir] + end + + with :ok <- check_safe_archive(archive) do + stringify_unzip(:zip.unzip(archive, opts)) + end + end +end diff --git a/lib/pleroma/upload/filter/analyze_metadata.ex b/lib/pleroma/upload/filter/analyze_metadata.ex index 7ee643277..a8480bf36 100644 --- a/lib/pleroma/upload/filter/analyze_metadata.ex +++ b/lib/pleroma/upload/filter/analyze_metadata.ex @@ -90,9 +90,13 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do {:ok, rgb} = if Image.has_alpha?(resized_image) do # remove alpha channel - resized_image - |> Operation.extract_band!(0, n: 3) - |> Image.write_to_binary() + case Operation.extract_band(resized_image, 0, n: 3) do + {:ok, data} -> + Image.write_to_binary(data) + + _ -> + Image.write_to_binary(resized_image) + end else Image.write_to_binary(resized_image) end diff --git a/lib/pleroma/upload/filter/anonymize_filename.ex b/lib/pleroma/upload/filter/anonymize_filename.ex index 234ccb6bb..c0ad70368 100644 --- a/lib/pleroma/upload/filter/anonymize_filename.ex +++ b/lib/pleroma/upload/filter/anonymize_filename.ex @@ -10,7 +10,7 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilename do """ @behaviour Pleroma.Upload.Filter - alias Pleroma.Config + @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config) alias Pleroma.Upload def filter(%Upload{name: name} = upload) do @@ -23,7 +23,7 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilename do @spec predefined_name(String.t()) :: String.t() | nil defp predefined_name(extension) do - with name when not is_nil(name) <- Config.get([__MODULE__, :text]), + with name when not is_nil(name) <- @config_impl.get([__MODULE__, :text]), do: String.replace(name, "{extension}", extension) end diff --git a/lib/pleroma/upload/filter/mogrify.ex b/lib/pleroma/upload/filter/mogrify.ex index d1e166022..7c7431db6 100644 --- a/lib/pleroma/upload/filter/mogrify.ex +++ b/lib/pleroma/upload/filter/mogrify.ex @@ -8,9 +8,16 @@ defmodule Pleroma.Upload.Filter.Mogrify do @type conversion :: action :: String.t() | {action :: String.t(), opts :: String.t()} @type conversions :: conversion() | [conversion()] + @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config) + @mogrify_impl Application.compile_env( + :pleroma, + [__MODULE__, :mogrify_impl], + Pleroma.MogrifyWrapper + ) + def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do try do - do_filter(file, Pleroma.Config.get!([__MODULE__, :args])) + do_filter(file, @config_impl.get!([__MODULE__, :args])) {:ok, :filtered} rescue e in ErlangError -> @@ -22,9 +29,9 @@ defmodule Pleroma.Upload.Filter.Mogrify do def do_filter(file, filters) do file - |> Mogrify.open() + |> @mogrify_impl.open() |> mogrify_filter(filters) - |> Mogrify.save(in_place: true) + |> @mogrify_impl.save(in_place: true) end defp mogrify_filter(mogrify, nil), do: mogrify @@ -38,10 +45,10 @@ defmodule Pleroma.Upload.Filter.Mogrify do defp mogrify_filter(mogrify, []), do: mogrify defp mogrify_filter(mogrify, {action, options}) do - Mogrify.custom(mogrify, action, options) + @mogrify_impl.custom(mogrify, action, options) end defp mogrify_filter(mogrify, action) when is_binary(action) do - Mogrify.custom(mogrify, action) + @mogrify_impl.custom(mogrify, action) end end diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex index 7a36ece77..d9da9ede1 100644 --- a/lib/pleroma/user.ex +++ b/lib/pleroma/user.ex @@ -19,6 +19,7 @@ defmodule Pleroma.User do alias Pleroma.Emoji alias Pleroma.FollowingRelationship alias Pleroma.Formatter + alias Pleroma.Hashtag alias Pleroma.HTML alias Pleroma.Keys alias Pleroma.MFA @@ -27,6 +28,7 @@ defmodule Pleroma.User do alias Pleroma.Registration alias Pleroma.Repo alias Pleroma.User + alias Pleroma.User.HashtagFollow alias Pleroma.UserRelationship alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Builder @@ -174,6 +176,12 @@ defmodule Pleroma.User do has_many(:outgoing_relationships, UserRelationship, foreign_key: :source_id) has_many(:incoming_relationships, UserRelationship, foreign_key: :target_id) + many_to_many(:followed_hashtags, Hashtag, + on_replace: :delete, + on_delete: :delete_all, + join_through: HashtagFollow + ) + for {relationship_type, [ {outgoing_relation, outgoing_relation_target}, @@ -2861,4 +2869,54 @@ defmodule Pleroma.User do birthday_month: month }) end + + defp maybe_load_followed_hashtags(%User{followed_hashtags: follows} = user) + when is_list(follows), + do: user + + defp maybe_load_followed_hashtags(%User{} = user) do + followed_hashtags = HashtagFollow.get_by_user(user) + %{user | followed_hashtags: followed_hashtags} + end + + def followed_hashtags(%User{followed_hashtags: follows}) + when is_list(follows), + do: follows + + def followed_hashtags(%User{} = user) do + {:ok, user} = + user + |> maybe_load_followed_hashtags() + |> set_cache() + + user.followed_hashtags + end + + def follow_hashtag(%User{} = user, %Hashtag{} = hashtag) do + Logger.debug("Follow hashtag #{hashtag.name} for user #{user.nickname}") + user = maybe_load_followed_hashtags(user) + + with {:ok, _} <- HashtagFollow.new(user, hashtag), + follows <- HashtagFollow.get_by_user(user), + %User{} = user <- user |> Map.put(:followed_hashtags, follows) do + user + |> set_cache() + end + end + + def unfollow_hashtag(%User{} = user, %Hashtag{} = hashtag) do + Logger.debug("Unfollow hashtag #{hashtag.name} for user #{user.nickname}") + user = maybe_load_followed_hashtags(user) + + with {:ok, _} <- HashtagFollow.delete(user, hashtag), + follows <- HashtagFollow.get_by_user(user), + %User{} = user <- user |> Map.put(:followed_hashtags, follows) do + user + |> set_cache() + end + end + + def following_hashtag?(%User{} = user, %Hashtag{} = hashtag) do + not is_nil(HashtagFollow.get(user, hashtag)) + end end diff --git a/lib/pleroma/user/backup.ex b/lib/pleroma/user/backup.ex index d77d49890..244b08adb 100644 --- a/lib/pleroma/user/backup.ex +++ b/lib/pleroma/user/backup.ex @@ -16,6 +16,7 @@ defmodule Pleroma.User.Backup do alias Pleroma.Bookmark alias Pleroma.Config alias Pleroma.Repo + alias Pleroma.SafeZip alias Pleroma.Uploaders.Uploader alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub @@ -179,12 +180,12 @@ defmodule Pleroma.User.Backup do end @files [ - ~c"actor.json", - ~c"outbox.json", - ~c"likes.json", - ~c"bookmarks.json", - ~c"followers.json", - ~c"following.json" + "actor.json", + "outbox.json", + "likes.json", + "bookmarks.json", + "followers.json", + "following.json" ] @spec run(t()) :: {:ok, t()} | {:error, :failed} @@ -200,7 +201,7 @@ defmodule Pleroma.User.Backup do {_, :ok} <- {:followers, followers(backup.tempdir, backup.user)}, {_, :ok} <- {:following, following(backup.tempdir, backup.user)}, {_, {:ok, _zip_path}} <- - {:zip, :zip.create(to_charlist(tempfile), @files, cwd: to_charlist(backup.tempdir))}, + {:zip, SafeZip.zip(tempfile, @files, backup.tempdir)}, {_, {:ok, %File.Stat{size: zip_size}}} <- {:filestat, File.stat(tempfile)}, {:ok, updated_backup} <- update_record(backup, %{file_size: zip_size}) do {:ok, updated_backup} @@ -246,7 +247,13 @@ defmodule Pleroma.User.Backup do defp actor(dir, user) do with {:ok, json} <- UserView.render("user.json", %{user: user}) - |> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"}) + |> Map.merge(%{ + "bookmarks" => "bookmarks.json", + "likes" => "likes.json", + "outbox" => "outbox.json", + "followers" => "followers.json", + "following" => "following.json" + }) |> Jason.encode() do File.write(Path.join(dir, "actor.json"), json) end diff --git a/lib/pleroma/user/hashtag_follow.ex b/lib/pleroma/user/hashtag_follow.ex new file mode 100644 index 000000000..3e28b130b --- /dev/null +++ b/lib/pleroma/user/hashtag_follow.ex @@ -0,0 +1,55 @@ +defmodule Pleroma.User.HashtagFollow do + use Ecto.Schema + import Ecto.Query + import Ecto.Changeset + + alias Pleroma.Hashtag + alias Pleroma.Repo + alias Pleroma.User + + schema "user_follows_hashtag" do + belongs_to(:user, User, type: FlakeId.Ecto.CompatType) + belongs_to(:hashtag, Hashtag) + end + + def changeset(%__MODULE__{} = user_hashtag_follow, attrs) do + user_hashtag_follow + |> cast(attrs, [:user_id, :hashtag_id]) + |> unique_constraint(:hashtag_id, + name: :user_hashtag_follows_user_id_hashtag_id_index, + message: "already following" + ) + |> validate_required([:user_id, :hashtag_id]) + end + + def new(%User{} = user, %Hashtag{} = hashtag) do + %__MODULE__{} + |> changeset(%{user_id: user.id, hashtag_id: hashtag.id}) + |> Repo.insert(on_conflict: :nothing) + end + + def delete(%User{} = user, %Hashtag{} = hashtag) do + with %__MODULE__{} = user_hashtag_follow <- get(user, hashtag) do + Repo.delete(user_hashtag_follow) + else + _ -> {:ok, nil} + end + end + + def get(%User{} = user, %Hashtag{} = hashtag) do + from(hf in __MODULE__) + |> where([hf], hf.user_id == ^user.id and hf.hashtag_id == ^hashtag.id) + |> Repo.one() + end + + def get_by_user(%User{} = user) do + user + |> followed_hashtags_query() + |> Repo.all() + end + + def followed_hashtags_query(%User{} = user) do + Ecto.assoc(user, :followed_hashtags) + |> Ecto.Query.order_by([h], desc: h.id) + end +end diff --git a/lib/pleroma/user_relationship.ex b/lib/pleroma/user_relationship.ex index 82fcc1cdd..5b48d321a 100644 --- a/lib/pleroma/user_relationship.ex +++ b/lib/pleroma/user_relationship.ex @@ -55,9 +55,13 @@ defmodule Pleroma.UserRelationship do def user_relationship_mappings, do: Pleroma.UserRelationship.Type.__enum_map__() + def datetime_impl do + Application.get_env(:pleroma, :datetime_impl, Pleroma.DateTime.Impl) + end + def changeset(%UserRelationship{} = user_relationship, params \\ %{}) do user_relationship - |> cast(params, [:relationship_type, :source_id, :target_id, :expires_at]) + |> cast(params, [:relationship_type, :source_id, :target_id, :expires_at, :inserted_at]) |> validate_required([:relationship_type, :source_id, :target_id]) |> unique_constraint(:relationship_type, name: :user_relationships_source_id_relationship_type_target_id_index @@ -65,6 +69,7 @@ defmodule Pleroma.UserRelationship do |> validate_not_self_relationship() end + @spec exists?(any(), Pleroma.User.t(), Pleroma.User.t()) :: boolean() def exists?(relationship_type, %User{} = source, %User{} = target) do UserRelationship |> where(relationship_type: ^relationship_type, source_id: ^source.id, target_id: ^target.id) @@ -90,7 +95,8 @@ defmodule Pleroma.UserRelationship do relationship_type: relationship_type, source_id: source.id, target_id: target.id, - expires_at: expires_at + expires_at: expires_at, + inserted_at: datetime_impl().utc_now() }) |> Repo.insert( on_conflict: {:replace_all_except, [:id, :inserted_at]}, diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index df8795fe4..62c7a7b31 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -924,6 +924,31 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do ) end + # Essentially, either look for activities addressed to `recipients`, _OR_ ones + # that reference a hashtag that the user follows + # Firstly, two fallbacks in case there's no hashtag constraint, or the user doesn't + # follow any + defp restrict_recipients_or_hashtags(query, recipients, user, nil) do + restrict_recipients(query, recipients, user) + end + + defp restrict_recipients_or_hashtags(query, recipients, user, []) do + restrict_recipients(query, recipients, user) + end + + defp restrict_recipients_or_hashtags(query, recipients, _user, hashtag_ids) do + from([activity, object] in query) + |> join(:left, [activity, object], hto in "hashtags_objects", + on: hto.object_id == object.id, + as: :hto + ) + |> where( + [activity, object, hto: hto], + (hto.hashtag_id in ^hashtag_ids and ^Constants.as_public() in activity.recipients) or + fragment("? && ?", ^recipients, activity.recipients) + ) + end + defp restrict_local(query, %{local_only: true}) do from(activity in query, where: activity.local == true) end @@ -1414,7 +1439,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do |> maybe_preload_report_notes(opts) |> maybe_set_thread_muted_field(opts) |> maybe_order(opts) - |> restrict_recipients(recipients, opts[:user]) + |> restrict_recipients_or_hashtags(recipients, opts[:user], opts[:followed_hashtags]) |> restrict_replies(opts) |> restrict_since(opts) |> restrict_local(opts) diff --git a/lib/pleroma/web/activity_pub/activity_pub_controller.ex b/lib/pleroma/web/activity_pub/activity_pub_controller.ex index a08eda5f4..7ac0bbab4 100644 --- a/lib/pleroma/web/activity_pub/activity_pub_controller.ex +++ b/lib/pleroma/web/activity_pub/activity_pub_controller.ex @@ -482,7 +482,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do |> put_status(:forbidden) |> json(message) - {:error, message} -> + {:error, message} when is_binary(message) -> conn |> put_status(:bad_request) |> json(message) diff --git a/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex b/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex deleted file mode 100644 index ca41c464c..000000000 --- a/lib/pleroma/web/activity_pub/mrf/dnsrbl_policy.ex +++ /dev/null @@ -1,146 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2024 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy do - @moduledoc """ - Dynamic activity filtering based on an RBL database - - This MRF makes queries to a custom DNS server which will - respond with values indicating the classification of the domain - the activity originated from. This method has been widely used - in the email anti-spam industry for very fast reputation checks. - - e.g., if the DNS response is 127.0.0.1 or empty, the domain is OK - Other values such as 127.0.0.2 may be used for specific classifications. - - Information for why the host is blocked can be stored in a corresponding TXT record. - - This method is fail-open so if the queries fail the activites are accepted. - - An example of software meant for this purpsoe is rbldnsd which can be found - at http://www.corpit.ru/mjt/rbldnsd.html or mirrored at - https://git.pleroma.social/feld/rbldnsd - - It is highly recommended that you run your own copy of rbldnsd and use an - external mechanism to sync/share the contents of the zone file. This is - important to keep the latency on the queries as low as possible and prevent - your DNS server from being attacked so it fails and content is permitted. - """ - - @behaviour Pleroma.Web.ActivityPub.MRF.Policy - - alias Pleroma.Config - - require Logger - - @query_retries 1 - @query_timeout 500 - - @impl true - def filter(%{"actor" => actor} = activity) do - actor_info = URI.parse(actor) - - with {:ok, activity} <- check_rbl(actor_info, activity) do - {:ok, activity} - else - _ -> {:reject, "[DNSRBLPolicy]"} - end - end - - @impl true - def filter(activity), do: {:ok, activity} - - @impl true - def describe do - mrf_dnsrbl = - Config.get(:mrf_dnsrbl) - |> Enum.into(%{}) - - {:ok, %{mrf_dnsrbl: mrf_dnsrbl}} - end - - @impl true - def config_description do - %{ - key: :mrf_dnsrbl, - related_policy: "Pleroma.Web.ActivityPub.MRF.DNSRBLPolicy", - label: "MRF DNSRBL", - description: "DNS RealTime Blackhole Policy", - children: [ - %{ - key: :nameserver, - type: {:string}, - description: "DNSRBL Nameserver to Query (IP or hostame)", - suggestions: ["127.0.0.1"] - }, - %{ - key: :port, - type: {:string}, - description: "Nameserver port", - suggestions: ["53"] - }, - %{ - key: :zone, - type: {:string}, - description: "Root zone for querying", - suggestions: ["bl.pleroma.com"] - } - ] - } - end - - defp check_rbl(%{host: actor_host}, activity) do - with false <- match?(^actor_host, Pleroma.Web.Endpoint.host()), - zone when not is_nil(zone) <- Keyword.get(Config.get([:mrf_dnsrbl]), :zone) do - query = - Enum.join([actor_host, zone], ".") - |> String.to_charlist() - - rbl_response = rblquery(query) - - if Enum.empty?(rbl_response) do - {:ok, activity} - else - Task.start(fn -> - reason = - case rblquery(query, :txt) do - [[result]] -> result - _ -> "undefined" - end - - Logger.warning( - "DNSRBL Rejected activity from #{actor_host} for reason: #{inspect(reason)}" - ) - end) - - :error - end - else - _ -> {:ok, activity} - end - end - - defp get_rblhost_ip(rblhost) do - case rblhost |> String.to_charlist() |> :inet_parse.address() do - {:ok, _} -> rblhost |> String.to_charlist() |> :inet_parse.address() - _ -> {:ok, rblhost |> String.to_charlist() |> :inet_res.lookup(:in, :a) |> Enum.random()} - end - end - - defp rblquery(query, type \\ :a) do - config = Config.get([:mrf_dnsrbl]) - - case get_rblhost_ip(config[:nameserver]) do - {:ok, rblnsip} -> - :inet_res.lookup(query, :in, type, - nameservers: [{rblnsip, config[:port]}], - timeout: @query_timeout, - retry: @query_retries - ) - - _ -> - [] - end - end -end diff --git a/lib/pleroma/web/activity_pub/mrf/fo_direct_reply.ex b/lib/pleroma/web/activity_pub/mrf/fo_direct_reply.ex deleted file mode 100644 index 2cf22745a..000000000 --- a/lib/pleroma/web/activity_pub/mrf/fo_direct_reply.ex +++ /dev/null @@ -1,53 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2024 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Web.ActivityPub.MRF.FODirectReply do - @moduledoc """ - FODirectReply alters the scope of replies to activities which are Followers Only to be Direct. The purpose of this policy is to prevent broken threads for followers of the reply author because their response was to a user that they are not also following. - """ - - alias Pleroma.Object - alias Pleroma.User - alias Pleroma.Web.ActivityPub.Visibility - - @behaviour Pleroma.Web.ActivityPub.MRF.Policy - - @impl true - def filter( - %{ - "type" => "Create", - "to" => to, - "object" => %{ - "actor" => actor, - "type" => "Note", - "inReplyTo" => in_reply_to - } - } = activity - ) do - with true <- is_binary(in_reply_to), - %User{follower_address: followers_collection, local: true} <- User.get_by_ap_id(actor), - %Object{} = in_reply_to_object <- Object.get_by_ap_id(in_reply_to), - "private" <- Visibility.get_visibility(in_reply_to_object) do - direct_to = to -- [followers_collection] - - updated_activity = - activity - |> Map.put("cc", []) - |> Map.put("to", direct_to) - |> Map.put("directMessage", true) - |> put_in(["object", "cc"], []) - |> put_in(["object", "to"], direct_to) - - {:ok, updated_activity} - else - _ -> {:ok, activity} - end - end - - @impl true - def filter(activity), do: {:ok, activity} - - @impl true - def describe, do: {:ok, %{}} -end diff --git a/lib/pleroma/web/activity_pub/mrf/quiet_reply.ex b/lib/pleroma/web/activity_pub/mrf/quiet_reply.ex deleted file mode 100644 index b07dc3b56..000000000 --- a/lib/pleroma/web/activity_pub/mrf/quiet_reply.ex +++ /dev/null @@ -1,60 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2023 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Web.ActivityPub.MRF.QuietReply do - @moduledoc """ - QuietReply alters the scope of activities from local users when replying by enforcing them to be "Unlisted" or "Quiet Public". This delivers the activity to all the expected recipients and instances, but it will not be published in the Federated / The Whole Known Network timelines. It will still be published to the Home timelines of the user's followers and visible to anyone who opens the thread. - """ - require Pleroma.Constants - - alias Pleroma.User - - @behaviour Pleroma.Web.ActivityPub.MRF.Policy - - @impl true - def history_awareness, do: :auto - - @impl true - def filter( - %{ - "type" => "Create", - "to" => to, - "cc" => cc, - "object" => %{ - "actor" => actor, - "type" => "Note", - "inReplyTo" => in_reply_to - } - } = activity - ) do - with true <- is_binary(in_reply_to), - false <- match?([], cc), - %User{follower_address: followers_collection, local: true} <- - User.get_by_ap_id(actor) do - updated_to = - to - |> Kernel.++([followers_collection]) - |> Kernel.--([Pleroma.Constants.as_public()]) - - updated_cc = [Pleroma.Constants.as_public()] - - updated_activity = - activity - |> Map.put("to", updated_to) - |> Map.put("cc", updated_cc) - |> put_in(["object", "to"], updated_to) - |> put_in(["object", "cc"], updated_cc) - - {:ok, updated_activity} - else - _ -> {:ok, activity} - end - end - - @impl true - def filter(activity), do: {:ok, activity} - - @impl true - def describe, do: {:ok, %{}} -end diff --git a/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex b/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex index 6edfb124e..49d17d8b9 100644 --- a/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/steal_emoji_policy.ex @@ -20,6 +20,19 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do String.match?(shortcode, pattern) end + defp reject_emoji?({shortcode, _url}, installed_emoji) do + valid_shortcode? = String.match?(shortcode, ~r/^[a-zA-Z0-9_-]+$/) + + rejected_shortcode? = + [:mrf_steal_emoji, :rejected_shortcodes] + |> Config.get([]) + |> Enum.any?(fn pattern -> shortcode_matches?(shortcode, pattern) end) + + emoji_installed? = Enum.member?(installed_emoji, shortcode) + + !valid_shortcode? or rejected_shortcode? or emoji_installed? + end + defp steal_emoji({shortcode, url}, emoji_dir_path) do url = Pleroma.Web.MediaProxy.url(url) @@ -78,16 +91,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do new_emojis = foreign_emojis - |> Enum.reject(fn {shortcode, _url} -> shortcode in installed_emoji end) - |> Enum.reject(fn {shortcode, _url} -> String.contains?(shortcode, ["/", "\\"]) end) - |> Enum.filter(fn {shortcode, _url} -> - reject_emoji? = - [:mrf_steal_emoji, :rejected_shortcodes] - |> Config.get([]) - |> Enum.find(false, fn pattern -> shortcode_matches?(shortcode, pattern) end) - - !reject_emoji? - end) + |> Enum.reject(&reject_emoji?(&1, installed_emoji)) |> Enum.map(&steal_emoji(&1, emoji_dir_path)) |> Enum.filter(& &1) diff --git a/lib/pleroma/web/activity_pub/object_validator.ex b/lib/pleroma/web/activity_pub/object_validator.ex index 35774d410..ee12f3ebf 100644 --- a/lib/pleroma/web/activity_pub/object_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validator.ex @@ -26,6 +26,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do alias Pleroma.Web.ActivityPub.ObjectValidators.AudioImageVideoValidator alias Pleroma.Web.ActivityPub.ObjectValidators.BlockValidator alias Pleroma.Web.ActivityPub.ObjectValidators.ChatMessageValidator + alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.ObjectValidators.CreateChatMessageValidator alias Pleroma.Web.ActivityPub.ObjectValidators.CreateGenericValidator alias Pleroma.Web.ActivityPub.ObjectValidators.DeleteValidator @@ -115,7 +116,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do meta ) when objtype in ~w[Question Answer Audio Video Image Event Article Note Page] do - with {:ok, object_data} <- cast_and_apply_and_stringify_with_history(object), + with {:ok, object_data} <- + object + |> CommonFixes.maybe_add_language_from_activity(create_activity) + |> cast_and_apply_and_stringify_with_history(), meta = Keyword.put(meta, :object_data, object_data), {:ok, create_activity} <- create_activity @@ -165,11 +169,15 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do ) when objtype in ~w[Question Answer Audio Video Event Article Note Page] do with {_, false} <- {:local, Access.get(meta, :local, false)}, - {_, {:ok, object_data, _}} <- {:object_validation, validate(object, meta)}, + {_, {:ok, object_data, _}} <- + {:object_validation, + object + |> CommonFixes.maybe_add_language_from_activity(update_activity) + |> validate(meta)}, meta = Keyword.put(meta, :object_data, object_data), {:ok, update_activity} <- update_activity - |> UpdateValidator.cast_and_validate() + |> UpdateValidator.cast_and_validate(meta) |> Ecto.Changeset.apply_action(:insert) do update_activity = stringify_keys(update_activity) {:ok, update_activity, meta} @@ -177,7 +185,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do {:local, _} -> with {:ok, object} <- update_activity - |> UpdateValidator.cast_and_validate() + |> UpdateValidator.cast_and_validate(meta) |> Ecto.Changeset.apply_action(:insert) do object = stringify_keys(object) {:ok, object, meta} @@ -207,9 +215,16 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do "Answer" -> AnswerValidator end + cast_func = + if type == "Update" do + fn o -> validator.cast_and_validate(o, meta) end + else + fn o -> validator.cast_and_validate(o) end + end + with {:ok, object} <- object - |> validator.cast_and_validate() + |> cast_func.() |> Ecto.Changeset.apply_action(:insert) do object = stringify_keys(object) {:ok, object, meta} diff --git a/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex b/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex index 1b5b2e8fb..81ab354fe 100644 --- a/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex @@ -30,7 +30,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator do def cast_and_apply(data) do data - |> cast_data + |> cast_data() |> apply_action(:insert) end @@ -85,8 +85,11 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator do |> fix_replies() |> fix_attachments() |> CommonFixes.fix_quote_url() + |> CommonFixes.fix_likes() |> Transmogrifier.fix_emoji() |> Transmogrifier.fix_content_map() + |> CommonFixes.maybe_add_language() + |> CommonFixes.maybe_add_content_map() end def changeset(struct, data) do diff --git a/lib/pleroma/web/activity_pub/object_validators/audio_image_video_validator.ex b/lib/pleroma/web/activity_pub/object_validators/audio_image_video_validator.ex index 65ac6bb93..034c6f33f 100644 --- a/lib/pleroma/web/activity_pub/object_validators/audio_image_video_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/audio_image_video_validator.ex @@ -100,6 +100,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioImageVideoValidator do |> CommonFixes.fix_actor() |> CommonFixes.fix_object_defaults() |> CommonFixes.fix_quote_url() + |> CommonFixes.fix_likes() |> Transmogrifier.fix_emoji() |> fix_url() |> fix_content() diff --git a/lib/pleroma/web/activity_pub/object_validators/common_fields.ex b/lib/pleroma/web/activity_pub/object_validators/common_fields.ex index 1a5d02601..22cf0cc05 100644 --- a/lib/pleroma/web/activity_pub/object_validators/common_fields.ex +++ b/lib/pleroma/web/activity_pub/object_validators/common_fields.ex @@ -31,6 +31,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFields do defmacro object_fields do quote bind_quoted: binding() do field(:content, :string) + field(:contentMap, ObjectValidators.ContentLanguageMap) field(:published, ObjectValidators.DateTime) field(:updated, ObjectValidators.DateTime) @@ -58,6 +59,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFields do field(:like_count, :integer, default: 0) field(:announcement_count, :integer, default: 0) field(:quotes_count, :integer, default: 0) + field(:language, ObjectValidators.LanguageCode) field(:inReplyTo, ObjectValidators.ObjectID) field(:quoteUrl, ObjectValidators.ObjectID) field(:url, ObjectValidators.BareUri) diff --git a/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex b/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex index 4699029d4..87d3e0c8f 100644 --- a/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex +++ b/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex @@ -11,6 +11,11 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.Utils + import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode, + only: [good_locale_code?: 1] + + import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1] + require Pleroma.Constants def cast_and_filter_recipients(message, field, follower_collection, field_fallback \\ []) do @@ -114,6 +119,13 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do def fix_quote_url(data), do: data + # On Mastodon, `"likes"` attribute includes an inlined `Collection` with `totalItems`, + # not a list of users. + # https://github.com/mastodon/mastodon/pull/32007 + def fix_likes(%{"likes" => %{}} = data), do: Map.drop(data, ["likes"]) + + def fix_likes(data), do: data + # https://codeberg.org/fediverse/fep/src/branch/main/fep/e232/fep-e232.md def object_link_tag?(%{ "type" => "Link", @@ -125,4 +137,60 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do end def object_link_tag?(_), do: false + + def maybe_add_language_from_activity(object, activity) do + language = get_language_from_context(activity) + + if language do + Map.put(object, "language", language) + else + object + end + end + + def maybe_add_language(object) do + language = + [ + get_language_from_context(object), + get_language_from_content_map(object) + ] + |> Enum.find(&good_locale_code?(&1)) + + if language do + Map.put(object, "language", language) + else + object + end + end + + defp get_language_from_context(%{"@context" => context}) when is_list(context) do + case context + |> Enum.find(fn + %{"@language" => language} -> language != "und" + _ -> nil + end) do + %{"@language" => language} -> language + _ -> nil + end + end + + defp get_language_from_context(_), do: nil + + defp get_language_from_content_map(%{"contentMap" => content_map, "content" => source_content}) do + content_groups = Map.to_list(content_map) + + case Enum.find(content_groups, fn {_, content} -> content == source_content end) do + {language, _} -> language + _ -> nil + end + end + + defp get_language_from_content_map(_), do: nil + + def maybe_add_content_map(%{"language" => language, "content" => content} = object) + when not_empty_string(language) do + Map.put(object, "contentMap", Map.put(%{}, language, content)) + end + + def maybe_add_content_map(object), do: object end diff --git a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex index ab204f69a..ea14d6aca 100644 --- a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex @@ -28,7 +28,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do def cast_and_apply(data) do data - |> cast_data + |> cast_data() |> apply_action(:insert) end @@ -38,6 +38,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do |> validate_data() end + @spec cast_data(map()) :: map() def cast_data(data) do %__MODULE__{} |> changeset(data) @@ -47,7 +48,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do data |> CommonFixes.fix_actor() |> CommonFixes.fix_object_defaults() + |> CommonFixes.fix_likes() |> Transmogrifier.fix_emoji() + |> CommonFixes.maybe_add_language() + |> CommonFixes.maybe_add_content_map() end def changeset(struct, data) do diff --git a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex index 7f9d4d648..21940f4f1 100644 --- a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex @@ -64,6 +64,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do |> CommonFixes.fix_actor() |> CommonFixes.fix_object_defaults() |> CommonFixes.fix_quote_url() + |> CommonFixes.fix_likes() |> Transmogrifier.fix_emoji() |> fix_closed() end diff --git a/lib/pleroma/web/activity_pub/object_validators/update_validator.ex b/lib/pleroma/web/activity_pub/object_validators/update_validator.ex index 1e940a400..aab90235f 100644 --- a/lib/pleroma/web/activity_pub/object_validators/update_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/update_validator.ex @@ -6,6 +6,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.UpdateValidator do use Ecto.Schema alias Pleroma.EctoType.ActivityPub.ObjectValidators + alias Pleroma.Object + alias Pleroma.User import Ecto.Changeset import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations @@ -31,23 +33,50 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.UpdateValidator do |> cast(data, __schema__(:fields)) end - defp validate_data(cng) do + defp validate_data(cng, meta) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Update"]) |> validate_actor_presence() - |> validate_updating_rights() + |> validate_updating_rights(meta) end - def cast_and_validate(data) do + def cast_and_validate(data, meta \\ []) do data |> cast_data - |> validate_data + |> validate_data(meta) end - # For now we only support updating users, and here the rule is easy: - # object id == actor id - def validate_updating_rights(cng) do + def validate_updating_rights(cng, meta) do + if meta[:local] do + validate_updating_rights_local(cng) + else + validate_updating_rights_remote(cng) + end + end + + # For local Updates, verify the actor can edit the object + def validate_updating_rights_local(cng) do + actor = get_field(cng, :actor) + updated_object = get_field(cng, :object) + + if {:ok, actor} == ObjectValidators.ObjectID.cast(updated_object) do + cng + else + with %User{} = user <- User.get_cached_by_ap_id(actor), + {_, %Object{} = orig_object} <- {:object, Object.normalize(updated_object)}, + :ok <- Object.authorize_access(orig_object, user) do + cng + else + _e -> + cng + |> add_error(:object, "Can't be updated by this actor") + end + end + end + + # For remote Updates, verify the host is the same. + def validate_updating_rights_remote(cng) do with actor = get_field(cng, :actor), object = get_field(cng, :object), {:ok, object_id} <- ObjectValidators.ObjectID.cast(object), diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 2f8a7f8f2..1e6ee7dc8 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -16,12 +16,14 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Builder alias Pleroma.Web.ActivityPub.ObjectValidator + alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.Pipeline alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.Federator import Ecto.Query + import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1] require Pleroma.Constants @@ -41,6 +43,38 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do |> fix_content_map() |> fix_addressing() |> fix_summary() + |> fix_history(&fix_object/1) + end + + defp maybe_fix_object(%{"attributedTo" => _} = object), do: fix_object(object) + defp maybe_fix_object(object), do: object + + defp fix_history(%{"formerRepresentations" => %{"orderedItems" => list}} = obj, fix_fun) + when is_list(list) do + update_in(obj["formerRepresentations"]["orderedItems"], fn h -> Enum.map(h, fix_fun) end) + end + + defp fix_history(obj, _), do: obj + + defp fix_recursive(obj, fun) do + # unlike Erlang, Elixir does not support recursive inline functions + # which would allow us to avoid reconstructing this on every recursion + rec_fun = fn + obj when is_map(obj) -> fix_recursive(obj, fun) + # there may be simple AP IDs in history (or object field) + obj -> obj + end + + obj + |> fun.() + |> fix_history(rec_fun) + |> then(fn + %{"object" => object} = doc when is_map(object) -> + update_in(doc["object"], rec_fun) + + apdoc -> + apdoc + end) end def fix_summary(%{"summary" => nil} = object) do @@ -166,7 +200,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do def fix_quote_url_and_maybe_fetch(object, options \\ []) do quote_url = - case Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes.fix_quote_url(object) do + case CommonFixes.fix_quote_url(object) do %{"quoteUrl" => quote_url} -> quote_url _ -> nil end @@ -336,6 +370,9 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do def fix_tag(object), do: object + # prefer content over contentMap + def fix_content_map(%{"content" => content} = object) when not_empty_string(content), do: object + # content map usually only has one language so this will do for now. def fix_content_map(%{"contentMap" => content_map} = object) do content_groups = Map.to_list(content_map) @@ -370,11 +407,18 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end) end - def handle_incoming(data, options \\ []) + def handle_incoming(data, options \\ []) do + data + |> fix_recursive(&strip_internal_fields/1) + |> handle_incoming_normalized(options) + end # Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them # with nil ID. - def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do + defp handle_incoming_normalized( + %{"type" => "Flag", "object" => objects, "actor" => actor} = data, + _options + ) do with context <- data["context"] || Utils.generate_context_id(), content <- data["content"] || "", %User{} = actor <- User.get_cached_by_ap_id(actor), @@ -395,16 +439,17 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end # disallow objects with bogus IDs - def handle_incoming(%{"id" => nil}, _options), do: :error - def handle_incoming(%{"id" => ""}, _options), do: :error + defp handle_incoming_normalized(%{"id" => nil}, _options), do: :error + defp handle_incoming_normalized(%{"id" => ""}, _options), do: :error # length of https:// = 8, should validate better, but good enough for now. - def handle_incoming(%{"id" => id}, _options) when is_binary(id) and byte_size(id) < 8, - do: :error + defp handle_incoming_normalized(%{"id" => id}, _options) + when is_binary(id) and byte_size(id) < 8, + do: :error - def handle_incoming( - %{"type" => "Listen", "object" => %{"type" => "Audio"} = object} = data, - options - ) do + defp handle_incoming_normalized( + %{"type" => "Listen", "object" => %{"type" => "Audio"} = object} = data, + options + ) do actor = Containment.get_actor(data) data = @@ -446,25 +491,25 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do "star" => "⭐" } - @doc "Rewrite misskey likes into EmojiReacts" - def handle_incoming( - %{ - "type" => "Like", - "_misskey_reaction" => reaction - } = data, - options - ) do + # Rewrite misskey likes into EmojiReacts + defp handle_incoming_normalized( + %{ + "type" => "Like", + "_misskey_reaction" => reaction + } = data, + options + ) do data |> Map.put("type", "EmojiReact") |> Map.put("content", @misskey_reactions[reaction] || reaction) - |> handle_incoming(options) + |> handle_incoming_normalized(options) end - def handle_incoming( - %{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data, - options - ) - when objtype in ~w{Question Answer ChatMessage Audio Video Event Article Note Page Image} do + defp handle_incoming_normalized( + %{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data, + options + ) + when objtype in ~w{Question Answer ChatMessage Audio Video Event Article Note Page Image} do fetch_options = Keyword.put(options, :depth, (options[:depth] || 0) + 1) object = @@ -487,8 +532,8 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end end - def handle_incoming(%{"type" => type} = data, _options) - when type in ~w{Like EmojiReact Announce Add Remove} do + defp handle_incoming_normalized(%{"type" => type} = data, _options) + when type in ~w{Like EmojiReact Announce Add Remove} do with :ok <- ObjectValidator.fetch_actor_and_object(data), {:ok, activity, _meta} <- Pipeline.common_pipeline(data, local: false) do @@ -498,11 +543,14 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end end - def handle_incoming( - %{"type" => type} = data, - _options - ) - when type in ~w{Update Block Follow Accept Reject} do + defp handle_incoming_normalized( + %{"type" => type} = data, + _options + ) + when type in ~w{Update Block Follow Accept Reject} do + fixed_obj = maybe_fix_object(data["object"]) + data = if fixed_obj != nil, do: %{data | "object" => fixed_obj}, else: data + with {:ok, %User{}} <- ObjectValidator.fetch_actor(data), {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do @@ -510,10 +558,10 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end end - def handle_incoming( - %{"type" => "Delete"} = data, - _options - ) do + defp handle_incoming_normalized( + %{"type" => "Delete"} = data, + _options + ) do with {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do {:ok, activity} @@ -536,15 +584,15 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end end - def handle_incoming( - %{ - "type" => "Undo", - "object" => %{"type" => "Follow", "object" => followed}, - "actor" => follower, - "id" => id - } = _data, - _options - ) do + defp handle_incoming_normalized( + %{ + "type" => "Undo", + "object" => %{"type" => "Follow", "object" => followed}, + "actor" => follower, + "id" => id + } = _data, + _options + ) do with %User{local: true} = followed <- User.get_cached_by_ap_id(followed), {:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower), {:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do @@ -555,46 +603,46 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end end - def handle_incoming( - %{ - "type" => "Undo", - "object" => %{"type" => type} - } = data, - _options - ) - when type in ["Like", "EmojiReact", "Announce", "Block"] do + defp handle_incoming_normalized( + %{ + "type" => "Undo", + "object" => %{"type" => type} + } = data, + _options + ) + when type in ["Like", "EmojiReact", "Announce", "Block"] do with {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do {:ok, activity} end end # For Undos that don't have the complete object attached, try to find it in our database. - def handle_incoming( - %{ - "type" => "Undo", - "object" => object - } = activity, - options - ) - when is_binary(object) do + defp handle_incoming_normalized( + %{ + "type" => "Undo", + "object" => object + } = activity, + options + ) + when is_binary(object) do with %Activity{data: data} <- Activity.get_by_ap_id(object) do activity |> Map.put("object", data) - |> handle_incoming(options) + |> handle_incoming_normalized(options) else _e -> :error end end - def handle_incoming( - %{ - "type" => "Move", - "actor" => origin_actor, - "object" => origin_actor, - "target" => target_actor - }, - _options - ) do + defp handle_incoming_normalized( + %{ + "type" => "Move", + "actor" => origin_actor, + "object" => origin_actor, + "target" => target_actor + }, + _options + ) do with %User{} = origin_user <- User.get_cached_by_ap_id(origin_actor), {:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_actor), true <- origin_actor in target_user.also_known_as do @@ -604,7 +652,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do end end - def handle_incoming(_, _), do: :error + defp handle_incoming_normalized(_, _), do: :error @spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil def get_obj_helper(id, options \\ []) do @@ -716,6 +764,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do |> set_reply_to_uri |> set_quote_url |> set_replies + |> CommonFixes.maybe_add_content_map() |> strip_internal_fields |> strip_internal_tags |> set_type @@ -750,12 +799,11 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do object_id |> Object.normalize(fetch: false) |> Map.get(:data) - |> prepare_object data = data - |> Map.put("object", object) - |> Map.merge(Utils.make_json_ld_header()) + |> Map.put("object", prepare_object(object)) + |> Map.merge(Utils.make_json_ld_header(object)) |> Map.delete("bcc") {:ok, data} @@ -763,14 +811,10 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do def prepare_outgoing(%{"type" => "Update", "object" => %{"type" => objtype} = object} = data) when objtype in Pleroma.Constants.updatable_object_types() do - object = - object - |> prepare_object - data = data - |> Map.put("object", object) - |> Map.merge(Utils.make_json_ld_header()) + |> Map.put("object", prepare_object(object)) + |> Map.merge(Utils.make_json_ld_header(object)) |> Map.delete("bcc") {:ok, data} @@ -840,7 +884,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do data |> strip_internal_fields |> maybe_fix_object_url - |> Map.merge(Utils.make_json_ld_header()) + |> Map.merge(Utils.make_json_ld_header(data)) {:ok, data} end diff --git a/lib/pleroma/web/activity_pub/utils.ex b/lib/pleroma/web/activity_pub/utils.ex index 6c792804d..f30c92abf 100644 --- a/lib/pleroma/web/activity_pub/utils.ex +++ b/lib/pleroma/web/activity_pub/utils.ex @@ -20,6 +20,7 @@ defmodule Pleroma.Web.ActivityPub.Utils do alias Pleroma.Web.Router.Helpers import Ecto.Query + import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1] require Logger require Pleroma.Constants @@ -109,18 +110,24 @@ defmodule Pleroma.Web.ActivityPub.Utils do end end - def make_json_ld_header do + def make_json_ld_header(data \\ %{}) do %{ "@context" => [ "https://www.w3.org/ns/activitystreams", "#{Endpoint.url()}/schemas/litepub-0.1.jsonld", %{ - "@language" => "und" + "@language" => get_language(data) } ] } end + defp get_language(%{"language" => language}) when not_empty_string(language) do + language + end + + defp get_language(_), do: "und" + def make_date do DateTime.utc_now() |> DateTime.to_iso8601() end diff --git a/lib/pleroma/web/activity_pub/views/object_view.ex b/lib/pleroma/web/activity_pub/views/object_view.ex index 63caa915c..13b5b2542 100644 --- a/lib/pleroma/web/activity_pub/views/object_view.ex +++ b/lib/pleroma/web/activity_pub/views/object_view.ex @@ -9,7 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do alias Pleroma.Web.ActivityPub.Transmogrifier def render("object.json", %{object: %Object{} = object}) do - base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header() + base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(object.data) additional = Transmogrifier.prepare_object(object.data) Map.merge(base, additional) @@ -17,7 +17,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do def render("object.json", %{object: %Activity{data: %{"type" => activity_type}} = activity}) when activity_type in ["Create", "Listen"] do - base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header() + base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(activity.data) object = Object.normalize(activity, fetch: false) additional = @@ -28,7 +28,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do end def render("object.json", %{object: %Activity{} = activity}) do - base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header() + base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(activity.data) object_id = Object.normalize(activity, id_only: true) additional = diff --git a/lib/pleroma/web/activity_pub/views/user_view.ex b/lib/pleroma/web/activity_pub/views/user_view.ex index cd485ed64..61975387b 100644 --- a/lib/pleroma/web/activity_pub/views/user_view.ex +++ b/lib/pleroma/web/activity_pub/views/user_view.ex @@ -127,7 +127,8 @@ defmodule Pleroma.Web.ActivityPub.UserView do "capabilities" => capabilities, "alsoKnownAs" => user.also_known_as, "vcard:bday" => birthday, - "webfinger" => "acct:#{User.full_nickname(user)}" + "webfinger" => "acct:#{User.full_nickname(user)}", + "published" => Pleroma.Web.CommonAPI.Utils.to_masto_date(user.inserted_at) } |> Map.merge( maybe_make_image( diff --git a/lib/pleroma/web/api_spec.ex b/lib/pleroma/web/api_spec.ex index 314782818..63409870e 100644 --- a/lib/pleroma/web/api_spec.ex +++ b/lib/pleroma/web/api_spec.ex @@ -139,7 +139,8 @@ defmodule Pleroma.Web.ApiSpec do "Search", "Status actions", "Media attachments", - "Bookmark folders" + "Bookmark folders", + "Tags" ] }, %{ diff --git a/lib/pleroma/web/api_spec/operations/tag_operation.ex b/lib/pleroma/web/api_spec/operations/tag_operation.ex new file mode 100644 index 000000000..ce4f4ad5b --- /dev/null +++ b/lib/pleroma/web/api_spec/operations/tag_operation.ex @@ -0,0 +1,103 @@ +defmodule Pleroma.Web.ApiSpec.TagOperation do + alias OpenApiSpex.Operation + alias OpenApiSpex.Schema + alias Pleroma.Web.ApiSpec.Schemas.ApiError + alias Pleroma.Web.ApiSpec.Schemas.Tag + + def open_api_operation(action) do + operation = String.to_existing_atom("#{action}_operation") + apply(__MODULE__, operation, []) + end + + def show_operation do + %Operation{ + tags: ["Tags"], + summary: "Hashtag", + description: "View a hashtag", + security: [%{"oAuth" => ["read"]}], + parameters: [id_param()], + operationId: "TagController.show", + responses: %{ + 200 => Operation.response("Hashtag", "application/json", Tag), + 404 => Operation.response("Not Found", "application/json", ApiError) + } + } + end + + def follow_operation do + %Operation{ + tags: ["Tags"], + summary: "Follow a hashtag", + description: "Follow a hashtag", + security: [%{"oAuth" => ["write:follows"]}], + parameters: [id_param()], + operationId: "TagController.follow", + responses: %{ + 200 => Operation.response("Hashtag", "application/json", Tag), + 404 => Operation.response("Not Found", "application/json", ApiError) + } + } + end + + def unfollow_operation do + %Operation{ + tags: ["Tags"], + summary: "Unfollow a hashtag", + description: "Unfollow a hashtag", + security: [%{"oAuth" => ["write:follows"]}], + parameters: [id_param()], + operationId: "TagController.unfollow", + responses: %{ + 200 => Operation.response("Hashtag", "application/json", Tag), + 404 => Operation.response("Not Found", "application/json", ApiError) + } + } + end + + def show_followed_operation do + %Operation{ + tags: ["Tags"], + summary: "Followed hashtags", + description: "View a list of hashtags the currently authenticated user is following", + parameters: pagination_params(), + security: [%{"oAuth" => ["read:follows"]}], + operationId: "TagController.show_followed", + responses: %{ + 200 => + Operation.response("Hashtags", "application/json", %Schema{ + type: :array, + items: Tag + }), + 403 => Operation.response("Forbidden", "application/json", ApiError), + 404 => Operation.response("Not Found", "application/json", ApiError) + } + } + end + + defp id_param do + Operation.parameter( + :id, + :path, + %Schema{type: :string}, + "Name of the hashtag" + ) + end + + def pagination_params do + [ + Operation.parameter(:max_id, :query, :integer, "Return items older than this ID"), + Operation.parameter( + :min_id, + :query, + :integer, + "Return the oldest items newer than this ID" + ), + Operation.parameter( + :limit, + :query, + %Schema{type: :integer, default: 20}, + "Maximum number of items to return. Will be ignored if it's more than 40" + ) + ] + end +end diff --git a/lib/pleroma/web/api_spec/schemas/tag.ex b/lib/pleroma/web/api_spec/schemas/tag.ex index 66bf0ca71..05ff10cd3 100644 --- a/lib/pleroma/web/api_spec/schemas/tag.ex +++ b/lib/pleroma/web/api_spec/schemas/tag.ex @@ -17,11 +17,22 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Tag do type: :string, format: :uri, description: "A link to the hashtag on the instance" + }, + following: %Schema{ + type: :boolean, + description: "Whether the authenticated user is following the hashtag" + }, + history: %Schema{ + type: :array, + items: %Schema{type: :string}, + description: + "A list of historical uses of the hashtag (not implemented, for compatibility only)" } }, example: %{ name: "cofe", - url: "https://lain.com/tag/cofe" + url: "https://lain.com/tag/cofe", + following: false } }) end diff --git a/lib/pleroma/web/common_api/activity_draft.ex b/lib/pleroma/web/common_api/activity_draft.ex index 8aa1e258d..4220757df 100644 --- a/lib/pleroma/web/common_api/activity_draft.ex +++ b/lib/pleroma/web/common_api/activity_draft.ex @@ -11,6 +11,9 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do alias Pleroma.Web.CommonAPI alias Pleroma.Web.CommonAPI.Utils + import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode, + only: [good_locale_code?: 1] + import Pleroma.Web.Gettext import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1] @@ -38,6 +41,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do cc: [], context: nil, sensitive: false, + language: nil, object: nil, preview?: false, changes: %{} @@ -64,6 +68,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do |> content() |> with_valid(&to_and_cc/1) |> with_valid(&context/1) + |> with_valid(&language/1) |> sensitive() |> with_valid(&object/1) |> preview?() @@ -249,6 +254,16 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do %__MODULE__{draft | sensitive: sensitive} end + defp language(draft) do + language = draft.params[:language] + + if good_locale_code?(language) do + %__MODULE__{draft | language: language} + else + draft + end + end + defp object(draft) do emoji = Map.merge(Pleroma.Emoji.Formatter.get_emoji_map(draft.full_payload), draft.emoji) @@ -288,6 +303,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do "mediaType" => Utils.get_content_type(draft.params[:content_type]) }) |> Map.put("generator", draft.params[:generator]) + |> Map.put("language", draft.language) %__MODULE__{draft | object: object} end diff --git a/lib/pleroma/web/mastodon_api/controllers/tag_controller.ex b/lib/pleroma/web/mastodon_api/controllers/tag_controller.ex new file mode 100644 index 000000000..21c21e984 --- /dev/null +++ b/lib/pleroma/web/mastodon_api/controllers/tag_controller.ex @@ -0,0 +1,77 @@ +defmodule Pleroma.Web.MastodonAPI.TagController do + @moduledoc "Hashtag routes for mastodon API" + use Pleroma.Web, :controller + + alias Pleroma.Hashtag + alias Pleroma.Pagination + alias Pleroma.User + + import Pleroma.Web.ControllerHelper, + only: [ + add_link_headers: 2 + ] + + plug(Pleroma.Web.ApiSpec.CastAndValidate) + + plug( + Pleroma.Web.Plugs.OAuthScopesPlug, + %{scopes: ["read"]} when action in [:show] + ) + + plug( + Pleroma.Web.Plugs.OAuthScopesPlug, + %{scopes: ["read:follows"]} when action in [:show_followed] + ) + + plug( + Pleroma.Web.Plugs.OAuthScopesPlug, + %{scopes: ["write:follows"]} when action in [:follow, :unfollow] + ) + + defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.TagOperation + + def show(conn, %{id: id}) do + with %Hashtag{} = hashtag <- Hashtag.get_by_name(id) do + render(conn, "show.json", tag: hashtag, for_user: conn.assigns.user) + else + _ -> conn |> render_error(:not_found, "Hashtag not found") + end + end + + def follow(conn, %{id: id}) do + with %Hashtag{} = hashtag <- Hashtag.get_by_name(id), + %User{} = user <- conn.assigns.user, + {:ok, _} <- + User.follow_hashtag(user, hashtag) do + render(conn, "show.json", tag: hashtag, for_user: user) + else + _ -> render_error(conn, :not_found, "Hashtag not found") + end + end + + def unfollow(conn, %{id: id}) do + with %Hashtag{} = hashtag <- Hashtag.get_by_name(id), + %User{} = user <- conn.assigns.user, + {:ok, _} <- + User.unfollow_hashtag(user, hashtag) do + render(conn, "show.json", tag: hashtag, for_user: user) + else + _ -> render_error(conn, :not_found, "Hashtag not found") + end + end + + def show_followed(conn, params) do + with %{assigns: %{user: %User{} = user}} <- conn do + params = Map.put(params, :id_type, :integer) + + hashtags = + user + |> User.HashtagFollow.followed_hashtags_query() + |> Pagination.fetch_paginated(params) + + conn + |> add_link_headers(hashtags) + |> render("index.json", tags: hashtags, for_user: user) + end + end +end diff --git a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex index 293c61b41..5ee74a80e 100644 --- a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex @@ -40,6 +40,11 @@ defmodule Pleroma.Web.MastodonAPI.TimelineController do # GET /api/v1/timelines/home def home(%{assigns: %{user: user}} = conn, params) do + followed_hashtags = + user + |> User.followed_hashtags() + |> Enum.map(& &1.id) + params = params |> Map.put(:type, ["Create", "Announce"]) @@ -49,6 +54,7 @@ defmodule Pleroma.Web.MastodonAPI.TimelineController do |> Map.put(:announce_filtering_user, user) |> Map.put(:user, user) |> Map.put(:local_only, params[:local]) + |> Map.put(:followed_hashtags, followed_hashtags) |> Map.delete(:local) activities = diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index 3bf870c24..10966edd6 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -227,7 +227,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do mentions: mentions, tags: reblogged[:tags] || [], application: build_application(object.data["generator"]), - language: nil, + language: get_language(object), emojis: [], pleroma: %{ local: activity.local, @@ -445,7 +445,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do mentions: mentions, tags: build_tags(tags), application: build_application(object.data["generator"]), - language: nil, + language: get_language(object), emojis: build_emojis(object.data["emoji"]), pleroma: %{ local: activity.local, @@ -829,6 +829,10 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do Utils.get_content_type(nil) end + defp get_language(%{data: %{"language" => "und"}}), do: nil + + defp get_language(object), do: object.data["language"] + defp proxied_url(url, page_url_data) do if is_binary(url) do build_image_url(URI.parse(url), page_url_data) |> MediaProxy.url() diff --git a/lib/pleroma/web/mastodon_api/views/tag_view.ex b/lib/pleroma/web/mastodon_api/views/tag_view.ex new file mode 100644 index 000000000..e24d423c2 --- /dev/null +++ b/lib/pleroma/web/mastodon_api/views/tag_view.ex @@ -0,0 +1,25 @@ +defmodule Pleroma.Web.MastodonAPI.TagView do + use Pleroma.Web, :view + alias Pleroma.User + alias Pleroma.Web.Router.Helpers + + def render("index.json", %{tags: tags, for_user: user}) do + safe_render_many(tags, __MODULE__, "show.json", %{for_user: user}) + end + + def render("show.json", %{tag: tag, for_user: user}) do + following = + with %User{} <- user do + User.following_hashtag?(user, tag) + else + _ -> false + end + + %{ + name: tag.name, + url: Helpers.tag_feed_url(Pleroma.Web.Endpoint, :feed, tag.name), + history: [], + following: following + } + end +end diff --git a/lib/pleroma/web/media_proxy/media_proxy_controller.ex b/lib/pleroma/web/media_proxy/media_proxy_controller.ex index 0b446e0a6..a0aafc32e 100644 --- a/lib/pleroma/web/media_proxy/media_proxy_controller.ex +++ b/lib/pleroma/web/media_proxy/media_proxy_controller.ex @@ -71,11 +71,15 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do drop_static_param_and_redirect(conn) content_type == "image/gif" -> - redirect(conn, external: media_proxy_url) + conn + |> put_status(301) + |> redirect(external: media_proxy_url) min_content_length_for_preview() > 0 and content_length > 0 and content_length < min_content_length_for_preview() -> - redirect(conn, external: media_proxy_url) + conn + |> put_status(301) + |> redirect(external: media_proxy_url) true -> handle_preview(content_type, conn, media_proxy_url) diff --git a/lib/pleroma/web/metadata/providers/open_graph.ex b/lib/pleroma/web/metadata/providers/open_graph.ex index fa5fbe553..604434df2 100644 --- a/lib/pleroma/web/metadata/providers/open_graph.ex +++ b/lib/pleroma/web/metadata/providers/open_graph.ex @@ -78,10 +78,10 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraph do # object when a Video or GIF is attached it will display that in Whatsapp Rich Preview. case Utils.fetch_media_type(@media_types, url["mediaType"]) do "audio" -> - [ - {:meta, [property: "og:audio", content: MediaProxy.url(url["href"])], []} - | acc - ] + acc ++ + [ + {:meta, [property: "og:audio", content: MediaProxy.url(url["href"])], []} + ] # Not using preview_url for this. It saves bandwidth, but the image dimensions will # be wrong. We generate it on the fly and have no way to capture or analyze the @@ -89,18 +89,18 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraph do # in timelines too, but you can get clever with the aspect ratio metadata as a # workaround. "image" -> - [ - {:meta, [property: "og:image", content: MediaProxy.url(url["href"])], []}, - {:meta, [property: "og:image:alt", content: attachment["name"]], []} - | acc - ] + (acc ++ + [ + {:meta, [property: "og:image", content: MediaProxy.url(url["href"])], []}, + {:meta, [property: "og:image:alt", content: attachment["name"]], []} + ]) |> maybe_add_dimensions(url) "video" -> - [ - {:meta, [property: "og:video", content: MediaProxy.url(url["href"])], []} - | acc - ] + (acc ++ + [ + {:meta, [property: "og:video", content: MediaProxy.url(url["href"])], []} + ]) |> maybe_add_dimensions(url) |> maybe_add_video_thumbnail(url) diff --git a/lib/pleroma/web/metadata/providers/twitter_card.ex b/lib/pleroma/web/metadata/providers/twitter_card.ex index 7f50877c3..212fa85ed 100644 --- a/lib/pleroma/web/metadata/providers/twitter_card.ex +++ b/lib/pleroma/web/metadata/providers/twitter_card.ex @@ -61,13 +61,13 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCard do Enum.reduce(attachment["url"], [], fn url, acc -> case Utils.fetch_media_type(@media_types, url["mediaType"]) do "audio" -> - [ - {:meta, [name: "twitter:card", content: "player"], []}, - {:meta, [name: "twitter:player:width", content: "480"], []}, - {:meta, [name: "twitter:player:height", content: "80"], []}, - {:meta, [name: "twitter:player", content: player_url(id)], []} - | acc - ] + acc ++ + [ + {:meta, [name: "twitter:card", content: "player"], []}, + {:meta, [name: "twitter:player:width", content: "480"], []}, + {:meta, [name: "twitter:player:height", content: "80"], []}, + {:meta, [name: "twitter:player", content: player_url(id)], []} + ] # Not using preview_url for this. It saves bandwidth, but the image dimensions will # be wrong. We generate it on the fly and have no way to capture or analyze the @@ -75,16 +75,16 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCard do # in timelines too, but you can get clever with the aspect ratio metadata as a # workaround. "image" -> - [ - {:meta, [name: "twitter:card", content: "summary_large_image"], []}, - {:meta, + (acc ++ [ - name: "twitter:image", - content: MediaProxy.url(url["href"]) - ], []}, - {:meta, [name: "twitter:image:alt", content: truncate(attachment["name"])], []} - | acc - ] + {:meta, [name: "twitter:card", content: "summary_large_image"], []}, + {:meta, + [ + name: "twitter:image", + content: MediaProxy.url(url["href"]) + ], []}, + {:meta, [name: "twitter:image:alt", content: truncate(attachment["name"])], []} + ]) |> maybe_add_dimensions(url) "video" -> @@ -92,17 +92,17 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCard do height = url["height"] || 480 width = url["width"] || 480 - [ - {:meta, [name: "twitter:card", content: "player"], []}, - {:meta, [name: "twitter:player", content: player_url(id)], []}, - {:meta, [name: "twitter:player:width", content: "#{width}"], []}, - {:meta, [name: "twitter:player:height", content: "#{height}"], []}, - {:meta, [name: "twitter:player:stream", content: MediaProxy.url(url["href"])], - []}, - {:meta, [name: "twitter:player:stream:content_type", content: url["mediaType"]], - []} - | acc - ] + acc ++ + [ + {:meta, [name: "twitter:card", content: "player"], []}, + {:meta, [name: "twitter:player", content: player_url(id)], []}, + {:meta, [name: "twitter:player:width", content: "#{width}"], []}, + {:meta, [name: "twitter:player:height", content: "#{height}"], []}, + {:meta, [name: "twitter:player:stream", content: MediaProxy.url(url["href"])], + []}, + {:meta, [name: "twitter:player:stream:content_type", content: url["mediaType"]], + []} + ] _ -> acc diff --git a/lib/pleroma/web/plugs/ap_client_api_enabled_plug.ex b/lib/pleroma/web/plugs/ap_client_api_enabled_plug.ex new file mode 100644 index 000000000..6807673f9 --- /dev/null +++ b/lib/pleroma/web/plugs/ap_client_api_enabled_plug.ex @@ -0,0 +1,34 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2024 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.Plugs.APClientApiEnabledPlug do + import Plug.Conn + import Phoenix.Controller, only: [text: 2] + + @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config) + @enabled_path [:activitypub, :client_api_enabled] + + def init(options \\ []), do: Map.new(options) + + def call(conn, %{allow_server: true}) do + if @config_impl.get(@enabled_path, false) do + conn + else + conn + |> assign(:user, nil) + |> assign(:token, nil) + end + end + + def call(conn, _) do + if @config_impl.get(@enabled_path, false) do + conn + else + conn + |> put_status(:forbidden) + |> text("C2S not enabled") + |> halt() + end + end +end diff --git a/lib/pleroma/web/plugs/http_signature_plug.ex b/lib/pleroma/web/plugs/http_signature_plug.ex index 67974599a..2e16212ce 100644 --- a/lib/pleroma/web/plugs/http_signature_plug.ex +++ b/lib/pleroma/web/plugs/http_signature_plug.ex @@ -19,8 +19,16 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do options end - def call(%{assigns: %{valid_signature: true}} = conn, _opts) do - conn + def call(%{assigns: %{valid_signature: true}} = conn, _opts), do: conn + + # skip for C2S requests from authenticated users + def call(%{assigns: %{user: %Pleroma.User{}}} = conn, _opts) do + if get_format(conn) in ["json", "activity+json"] do + # ensure access token is provided for 2FA + Pleroma.Web.Plugs.EnsureAuthenticatedPlug.call(conn, %{}) + else + conn + end end def call(conn, _opts) do diff --git a/lib/pleroma/web/plugs/instance_static.ex b/lib/pleroma/web/plugs/instance_static.ex index 75bfdd65b..f82b9a098 100644 --- a/lib/pleroma/web/plugs/instance_static.ex +++ b/lib/pleroma/web/plugs/instance_static.ex @@ -4,6 +4,7 @@ defmodule Pleroma.Web.Plugs.InstanceStatic do require Pleroma.Constants + import Plug.Conn, only: [put_resp_header: 3] @moduledoc """ This is a shim to call `Plug.Static` but with runtime `from` configuration. @@ -44,10 +45,31 @@ defmodule Pleroma.Web.Plugs.InstanceStatic do end defp call_static(conn, opts, from) do + # Prevent content-type spoofing by setting content_types: false opts = opts |> Map.put(:from, from) + |> Map.put(:content_types, false) + conn = set_content_type(conn, conn.request_path) + + # Call Plug.Static with our sanitized content-type Plug.Static.call(conn, opts) end + + defp set_content_type(conn, "/emoji/" <> filepath) do + real_mime = MIME.from_path(filepath) + + clean_mime = + Pleroma.Web.Plugs.Utils.get_safe_mime_type(%{allowed_mime_types: ["image"]}, real_mime) + + put_resp_header(conn, "content-type", clean_mime) + end + + defp set_content_type(conn, filepath) do + real_mime = MIME.from_path(filepath) + put_resp_header(conn, "content-type", real_mime) + end end + +# I think this needs to be uncleaned except for emoji. diff --git a/lib/pleroma/web/plugs/uploaded_media.ex b/lib/pleroma/web/plugs/uploaded_media.ex index f1076da1b..abacf965b 100644 --- a/lib/pleroma/web/plugs/uploaded_media.ex +++ b/lib/pleroma/web/plugs/uploaded_media.ex @@ -11,6 +11,7 @@ defmodule Pleroma.Web.Plugs.UploadedMedia do require Logger alias Pleroma.Web.MediaProxy + alias Pleroma.Web.Plugs.Utils @behaviour Plug # no slashes @@ -28,7 +29,9 @@ defmodule Pleroma.Web.Plugs.UploadedMedia do |> Keyword.put(:at, "/__unconfigured_media_plug") |> Plug.Static.init() - %{static_plug_opts: static_plug_opts} + allowed_mime_types = Pleroma.Config.get([Pleroma.Upload, :allowed_mime_types]) + + %{static_plug_opts: static_plug_opts, allowed_mime_types: allowed_mime_types} end def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do @@ -69,13 +72,23 @@ defmodule Pleroma.Web.Plugs.UploadedMedia do defp media_is_banned(_, _), do: false + defp set_content_type(conn, opts, filepath) do + real_mime = MIME.from_path(filepath) + clean_mime = Utils.get_safe_mime_type(opts, real_mime) + put_resp_header(conn, "content-type", clean_mime) + end + defp get_media(conn, {:static_dir, directory}, _, opts) do static_opts = Map.get(opts, :static_plug_opts) |> Map.put(:at, [@path]) |> Map.put(:from, directory) + |> Map.put(:content_types, false) - conn = Plug.Static.call(conn, static_opts) + conn = + conn + |> set_content_type(opts, conn.request_path) + |> Plug.Static.call(static_opts) if conn.halted do conn diff --git a/lib/pleroma/web/plugs/utils.ex b/lib/pleroma/web/plugs/utils.ex new file mode 100644 index 000000000..05e0fbe84 --- /dev/null +++ b/lib/pleroma/web/plugs/utils.ex @@ -0,0 +1,14 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.Plugs.Utils do + @moduledoc """ + Some helper functions shared across several plugs + """ + + def get_safe_mime_type(%{allowed_mime_types: allowed_mime_types} = _opts, mime) do + [maintype | _] = String.split(mime, "/", parts: 2) + if maintype in allowed_mime_types, do: mime, else: "application/octet-stream" + end +end diff --git a/lib/pleroma/web/rich_media/card.ex b/lib/pleroma/web/rich_media/card.ex index abad4957e..6b4bb9555 100644 --- a/lib/pleroma/web/rich_media/card.ex +++ b/lib/pleroma/web/rich_media/card.ex @@ -54,7 +54,10 @@ defmodule Pleroma.Web.RichMedia.Card do @spec get_by_url(String.t() | nil) :: t() | nil | :error def get_by_url(url) when is_binary(url) do - if @config_impl.get([:rich_media, :enabled]) do + host = URI.parse(url).host + + with true <- @config_impl.get([:rich_media, :enabled]), + true <- host not in @config_impl.get([:rich_media, :ignore_hosts], []) do url_hash = url_to_hash(url) @cachex.fetch!(:rich_media_cache, url_hash, fn _ -> @@ -69,7 +72,7 @@ defmodule Pleroma.Web.RichMedia.Card do end end) else - :error + false -> :error end end @@ -77,7 +80,10 @@ defmodule Pleroma.Web.RichMedia.Card do @spec get_or_backfill_by_url(String.t(), keyword()) :: t() | nil def get_or_backfill_by_url(url, opts \\ []) do - if @config_impl.get([:rich_media, :enabled]) do + host = URI.parse(url).host + + with true <- @config_impl.get([:rich_media, :enabled]), + true <- host not in @config_impl.get([:rich_media, :ignore_hosts], []) do case get_by_url(url) do %__MODULE__{} = card -> card @@ -94,7 +100,7 @@ defmodule Pleroma.Web.RichMedia.Card do nil end else - nil + false -> nil end end diff --git a/lib/pleroma/web/rich_media/parsers/meta_tags_parser.ex b/lib/pleroma/web/rich_media/parsers/meta_tags_parser.ex index 320a5f515..c42e2c96b 100644 --- a/lib/pleroma/web/rich_media/parsers/meta_tags_parser.ex +++ b/lib/pleroma/web/rich_media/parsers/meta_tags_parser.ex @@ -9,7 +9,7 @@ defmodule Pleroma.Web.RichMedia.Parsers.MetaTagsParser do |> Enum.reduce(data, fn el, acc -> attributes = normalize_attributes(el, prefix, key_name, value_name) - Map.merge(acc, attributes) + Map.merge(attributes, acc) end) |> maybe_put_title(html) end diff --git a/lib/pleroma/web/rich_media/parsers/twitter_card.ex b/lib/pleroma/web/rich_media/parsers/twitter_card.ex index cc653729d..6f6f8b2ae 100644 --- a/lib/pleroma/web/rich_media/parsers/twitter_card.ex +++ b/lib/pleroma/web/rich_media/parsers/twitter_card.ex @@ -11,5 +11,16 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCard do |> MetaTagsParser.parse(html, "og", "property") |> MetaTagsParser.parse(html, "twitter", "name") |> MetaTagsParser.parse(html, "twitter", "property") + |> filter_tags() + end + + defp filter_tags(tags) do + Map.filter(tags, fn {k, _v} -> + cond do + k in ["card", "description", "image", "title", "ttl", "type", "url"] -> true + String.starts_with?(k, "image:") -> true + true -> false + end + end) end end diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex index 0423ca9e2..bf8ebf3e4 100644 --- a/lib/pleroma/web/router.ex +++ b/lib/pleroma/web/router.ex @@ -755,6 +755,11 @@ defmodule Pleroma.Web.Router do get("/announcements", AnnouncementController, :index) post("/announcements/:id/dismiss", AnnouncementController, :mark_read) + + get("/tags/:id", TagController, :show) + post("/tags/:id/follow", TagController, :follow) + post("/tags/:id/unfollow", TagController, :unfollow) + get("/followed_tags", TagController, :show_followed) end scope "/api/v1", Pleroma.Web.MastodonAPI do @@ -902,22 +907,37 @@ defmodule Pleroma.Web.Router do # Client to Server (C2S) AP interactions pipeline :activitypub_client do plug(:ap_service_actor) + plug(Pleroma.Web.Plugs.APClientApiEnabledPlug) plug(:fetch_session) plug(:authenticate) plug(:after_auth) end + # AP interactions used by both S2S and C2S + pipeline :activitypub_server_or_client do + plug(:ap_service_actor) + plug(:fetch_session) + plug(:authenticate) + plug(Pleroma.Web.Plugs.APClientApiEnabledPlug, allow_server: true) + plug(:after_auth) + plug(:http_signature) + end + scope "/", Pleroma.Web.ActivityPub do pipe_through([:activitypub_client]) get("/api/ap/whoami", ActivityPubController, :whoami) get("/users/:nickname/inbox", ActivityPubController, :read_inbox) - get("/users/:nickname/outbox", ActivityPubController, :outbox) post("/users/:nickname/outbox", ActivityPubController, :update_outbox) post("/api/ap/upload_media", ActivityPubController, :upload_media) + end + + scope "/", Pleroma.Web.ActivityPub do + pipe_through([:activitypub_server_or_client]) + + get("/users/:nickname/outbox", ActivityPubController, :outbox) - # The following two are S2S as well, see `ActivityPub.fetch_follow_information_for_user/1`: get("/users/:nickname/followers", ActivityPubController, :followers) get("/users/:nickname/following", ActivityPubController, :following) get("/users/:nickname/collections/featured", ActivityPubController, :pinned) diff --git a/lib/pleroma/web/streamer.ex b/lib/pleroma/web/streamer.ex index 76dc0f42d..cc149e04c 100644 --- a/lib/pleroma/web/streamer.ex +++ b/lib/pleroma/web/streamer.ex @@ -19,6 +19,7 @@ defmodule Pleroma.Web.Streamer do alias Pleroma.Web.OAuth.Token alias Pleroma.Web.Plugs.OAuthScopesPlug alias Pleroma.Web.StreamerView + require Pleroma.Constants @registry Pleroma.Web.StreamerRegistry @@ -305,7 +306,17 @@ defmodule Pleroma.Web.Streamer do User.get_recipients_from_activity(item) |> Enum.map(fn %{id: id} -> "user:#{id}" end) - Enum.each(recipient_topics, fn topic -> + hashtag_recipients = + if Pleroma.Constants.as_public() in item.recipients do + Pleroma.Hashtag.get_recipients_for_activity(item) + |> Enum.map(fn id -> "user:#{id}" end) + else + [] + end + + all_recipients = Enum.uniq(recipient_topics ++ hashtag_recipients) + + Enum.each(all_recipients, fn topic -> push_to_socket(topic, item) end) end diff --git a/mix.exs b/mix.exs index 6e071cd1f..808a2b12c 100644 --- a/mix.exs +++ b/mix.exs @@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do def project do [ app: :pleroma, - version: version("2.7.51"), + version: version("2.9.1"), elixir: "~> 1.14", elixirc_paths: elixirc_paths(Mix.env()), compilers: Mix.compilers(), diff --git a/priv/repo/migrations/20221203232118_add_user_follows_hashtag.exs b/priv/repo/migrations/20221203232118_add_user_follows_hashtag.exs new file mode 100644 index 000000000..2b5ae91be --- /dev/null +++ b/priv/repo/migrations/20221203232118_add_user_follows_hashtag.exs @@ -0,0 +1,14 @@ +defmodule Pleroma.Repo.Migrations.AddUserFollowsHashtag do + use Ecto.Migration + + def change do + create table(:user_follows_hashtag) do + add(:hashtag_id, references(:hashtags)) + add(:user_id, references(:users, type: :uuid, on_delete: :delete_all)) + end + + create(unique_index(:user_follows_hashtag, [:user_id, :hashtag_id])) + + create_if_not_exists(index(:user_follows_hashtag, [:hashtag_id])) + end +end diff --git a/test/fixtures/break_analyze.png b/test/fixtures/break_analyze.png new file mode 100644 index 000000000..b5e91b08a Binary files /dev/null and b/test/fixtures/break_analyze.png differ diff --git a/test/fixtures/fulmo.html b/test/fixtures/fulmo.html new file mode 100644 index 000000000..e54eaf8d8 --- /dev/null +++ b/test/fixtures/fulmo.html @@ -0,0 +1,151 @@ + + + + + + + + + + + + + Fulmo + + + + + + + + + + + + + + + + +
+ + + +
+
+ +
+
+
+

Fulmo

+

Skribis Tirifto

+ +
+

»Kial ĉiam mi? Tio ne justas! Oni kulpigas min, sed ja ne mi kulpas!« La nubofeo lamentis, dum ĝi ordigis restaĵojn de falinta arbo. Plejparto el la pingloj estis brulintaj, kaj el la trunko ankoraŭ leviĝis fumo.

+

Subite aŭdeblis ekstraj kraketoj deapude. Ĝi rigardis flanken, kaj vidis iun kaŭri apud la arbo, derompi branĉetojn, kaj orde ilin amasigi. Ŝajnis, ke ekde sia rimarkiĝo, la nekonatulo laŭeble kuntiriĝis, kaj strebis labori kiel eble plej silente.

+

»Saluton…?« La nubofeo stariĝis, alporolante la eston. Tiu kvazaŭ frostiĝis, sed timeme ankaŭ stariĝis.

+

»S- Saluton…« Ĝi respondis sen kuraĝo rigardi ĝiadirekten. Nun stare, videblis ke ĝi estas verdanta florofeo.

+

»… kion vi faras tie ĉi?« La nubofeo demandis.

+

»Nu… tiel kaj tiel… mi ordigas.«

+

»Ho. Mi ricevis taskon ordigi ĉi tie… se vi povas atendi, vi ne bezonas peni!«

+

»N- Nu… mi tamen volus…« Parolis la florofeo, plu deturnante la kapon.

+

»Nu… bone, se vi tion deziras… dankon!« La nubofeo dankis, kaj returniĝis al sia laboro.

+

Fojfoje ĝi scivole rigardis al sia nova kunlaboranto, kaj fojfoje renkontis similan rigardon de ĝia flanko, kiuokaze ambaŭ rigardoj rapide revenis al la ordigataj pingloj kaj branĉetoj. »(Kial tiom volonte helpi min?)« Pensis al si la nubofeo. »(Ĉu ĝi simple tiom bonkoras? Ĝi ja tre bele floras; eble ankaŭ ĝia koro tiel same belas…)« Kaj vere, ĝiaj surfloroj grandanime malfermis siajn belkolorajn folietojn, kaj bonodoris al mondo.

+
+ + + Meze de arbaro kuŝas falinta trunko, sen pingloj kaj kun branĉoj derompitaj. Post ĝi videblas du feoj: florofeo maldekstre kaj nubofeo dekstre. La florofeo iom kaŝas sin post la trunko. La nubofeo staras kaj tenas amason da pigloj. Ili iom rigardas al si. + +
+ Pinglordigado +
+ © Tirifto + Emblemo: Permesilo de arto libera +
+
+
+

Post iom da tempo, ĉiu feo tralaboris ĝis la trunkomezo, kaj proksimiĝis al la alia feo. Kaj tiam ekpezis sur ili devosento rompi la silenton.

+

»… kia bela vetero, ĉu ne?« Diris la nubofeo, tuj rimarkonte, ke mallumiĝas, kaj la ĉielo restas kovrita de nuboj.

+

»Jes ja! Tre nube. Mi ŝatas nubojn!« Respondis la alia entuziasme, sed tuj haltetis kaj deturnis la kapon. Ambaŭ feoj daŭrigis laboron silente, kaj plu proksimiĝis, ĝis tiu preskaŭ estis finita.

+

»H… H… Ho ne…!« Subite ekdiris la nubofeo urĝe.

+

»Kio okazas?!«

+

»T… Tern…!«

+

»Jen! Tenu!« La florofeo etendis manon kun granda folio. La nubofeo ĝin prenis, kaj tien ternis. Aperis ekfulmo, kaj la cindriĝinta folio disfalis.

+

»Pardonu… mi ne volis…« Bedaŭris la nubofeo. »Mi ne scias, kial tio ĉiam okazas! Tiom plaĉas al mi promeni tere, sed ĉiuj diras, ke mi maldevus, ĉar ial ĝi ĉiam finiĝas tiel ĉi.« Ĝi montris al la arbo. »Eble ili pravas…«

+

»Nu…« diris la florofeo bedaŭre, kaj etendis la manon.

+

»H… H… Ne ree…!«

+

Ekfulmis. Alia ĵus metita folio cindriĝis en la manoj de la florofeo, time ferminta la okulojn.

+

»Dankegon… mi tre ŝatas vian helpon! Kaj mi ne… ne…«

+

Metiĝis. Ekfulmis. Cindriĝis.

+

»Io tre iritas mian nazon!« Plendis la nubofeo. Poste ĝi rimarkis la florpolvon, kiu disŝutiĝis el la florofeo en la tutan ĉirkaŭaĵon, kaj eĉ tuj antaŭ la nubofeon.

+

»N- Nu…« Diris la florofeo, honte rigardanta la teron. »… pardonu.«

+
+ + +
+ Historio +
+
+
Unua publikigo.
+
+
+
+ Permesilo +

Ĉi tiun verkon vi rajtas libere kopii, disdoni, kaj ŝanĝi, laŭ kondiĉoj de la Permesilo de arto libera. (Resume: Vi devas mencii la aŭtoron kaj doni ligilon al la verko. Se vi ŝanĝas la verkon, vi devas laŭeble noti la faritajn ŝanĝojn, ilian daton, kaj eldoni ilin sub la sama permesilo.)

+ Emblemo: Permesilo de arto libera +
+
+
+
+
+ + + diff --git a/test/fixtures/mastodon-update-with-likes.json b/test/fixtures/mastodon-update-with-likes.json new file mode 100644 index 000000000..3bdb3ba3d --- /dev/null +++ b/test/fixtures/mastodon-update-with-likes.json @@ -0,0 +1,90 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + { + "atomUri": "ostatus:atomUri", + "conversation": "ostatus:conversation", + "inReplyToAtomUri": "ostatus:inReplyToAtomUri", + "ostatus": "http://ostatus.org#", + "sensitive": "as:sensitive", + "toot": "http://joinmastodon.org/ns#", + "votersCount": "toot:votersCount" + }, + "https://w3id.org/security/v1" + ], + "actor": "https://pol.social/users/mkljczk", + "cc": ["https://www.w3.org/ns/activitystreams#Public", + "https://pol.social/users/aemstuz", "https://gts.mkljczk.pl/users/mkljczk", + "https://pl.fediverse.pl/users/mkljczk", + "https://fedi.kutno.pl/users/mkljczk"], + "id": "https://pol.social/users/mkljczk/statuses/113907871635572263#updates/1738096776", + "object": { + "atomUri": "https://pol.social/users/mkljczk/statuses/113907871635572263", + "attachment": [], + "attributedTo": "https://pol.social/users/mkljczk", + "cc": ["https://www.w3.org/ns/activitystreams#Public", + "https://pol.social/users/aemstuz", "https://gts.mkljczk.pl/users/mkljczk", + "https://pl.fediverse.pl/users/mkljczk", + "https://fedi.kutno.pl/users/mkljczk"], + "content": "

test

", + "contentMap": { + "pl": "

test

" + }, + "conversation": "https://fedi.kutno.pl/contexts/43c14c70-d3fb-42b4-a36d-4eacfab9695a", + "id": "https://pol.social/users/mkljczk/statuses/113907871635572263", + "inReplyTo": "https://pol.social/users/aemstuz/statuses/113907854282654767", + "inReplyToAtomUri": "https://pol.social/users/aemstuz/statuses/113907854282654767", + "likes": { + "id": "https://pol.social/users/mkljczk/statuses/113907871635572263/likes", + "totalItems": 1, + "type": "Collection" + }, + "published": "2025-01-28T20:29:45Z", + "replies": { + "first": { + "items": [], + "next": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies?only_other_accounts=true&page=true", + "partOf": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies", + "type": "CollectionPage" + }, + "id": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies", + "type": "Collection" + }, + "sensitive": false, + "shares": { + "id": "https://pol.social/users/mkljczk/statuses/113907871635572263/shares", + "totalItems": 0, + "type": "Collection" + }, + "summary": null, + "tag": [ + { + "href": "https://pol.social/users/aemstuz", + "name": "@aemstuz", + "type": "Mention" + }, + { + "href": "https://gts.mkljczk.pl/users/mkljczk", + "name": "@mkljczk@gts.mkljczk.pl", + "type": "Mention" + }, + { + "href": "https://pl.fediverse.pl/users/mkljczk", + "name": "@mkljczk@fediverse.pl", + "type": "Mention" + }, + { + "href": "https://fedi.kutno.pl/users/mkljczk", + "name": "@mkljczk@fedi.kutno.pl", + "type": "Mention" + } + ], + "to": ["https://pol.social/users/mkljczk/followers"], + "type": "Note", + "updated": "2025-01-28T20:39:36Z", + "url": "https://pol.social/@mkljczk/113907871635572263" + }, + "published": "2025-01-28T20:39:36Z", + "to": ["https://pol.social/users/mkljczk/followers"], + "type": "Update" +} diff --git a/test/fixtures/users_mock/friendica_followers.json b/test/fixtures/users_mock/friendica_followers.json index 7b86b5fe2..f58c1d56c 100644 --- a/test/fixtures/users_mock/friendica_followers.json +++ b/test/fixtures/users_mock/friendica_followers.json @@ -13,7 +13,7 @@ "directMessage": "litepub:directMessage" } ], - "id": "http://localhost:8080/followers/fuser3", + "id": "https://remote.org/followers/fuser3", "type": "OrderedCollection", "totalItems": 296 } diff --git a/test/fixtures/users_mock/friendica_following.json b/test/fixtures/users_mock/friendica_following.json index 7c526befc..f3930f42c 100644 --- a/test/fixtures/users_mock/friendica_following.json +++ b/test/fixtures/users_mock/friendica_following.json @@ -13,7 +13,7 @@ "directMessage": "litepub:directMessage" } ], - "id": "http://localhost:8080/following/fuser3", + "id": "https://remote.org/following/fuser3", "type": "OrderedCollection", "totalItems": 32 } diff --git a/test/fixtures/users_mock/masto_closed_followers.json b/test/fixtures/users_mock/masto_closed_followers.json index da296892d..89bb9cba9 100644 --- a/test/fixtures/users_mock/masto_closed_followers.json +++ b/test/fixtures/users_mock/masto_closed_followers.json @@ -1,7 +1,7 @@ { "@context": "https://www.w3.org/ns/activitystreams", - "id": "http://localhost:4001/users/masto_closed/followers", + "id": "https://remote.org/users/masto_closed/followers", "type": "OrderedCollection", "totalItems": 437, - "first": "http://localhost:4001/users/masto_closed/followers?page=1" + "first": "https://remote.org/users/masto_closed/followers?page=1" } diff --git a/test/fixtures/users_mock/masto_closed_followers_page.json b/test/fixtures/users_mock/masto_closed_followers_page.json index 04ab0c4d3..4e9cb315f 100644 --- a/test/fixtures/users_mock/masto_closed_followers_page.json +++ b/test/fixtures/users_mock/masto_closed_followers_page.json @@ -1 +1 @@ -{"@context":"https://www.w3.org/ns/activitystreams","id":"http://localhost:4001/users/masto_closed/followers?page=1","type":"OrderedCollectionPage","totalItems":437,"next":"http://localhost:4001/users/masto_closed/followers?page=2","partOf":"http://localhost:4001/users/masto_closed/followers","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]} +{"@context":"https://www.w3.org/ns/activitystreams","id":"https://remote.org/users/masto_closed/followers?page=1","type":"OrderedCollectionPage","totalItems":437,"next":"https://remote.org/users/masto_closed/followers?page=2","partOf":"https://remote.org/users/masto_closed/followers","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]} diff --git a/test/fixtures/users_mock/masto_closed_following.json b/test/fixtures/users_mock/masto_closed_following.json index 146d49f9c..aa74f8e78 100644 --- a/test/fixtures/users_mock/masto_closed_following.json +++ b/test/fixtures/users_mock/masto_closed_following.json @@ -1,7 +1,7 @@ { "@context": "https://www.w3.org/ns/activitystreams", - "id": "http://localhost:4001/users/masto_closed/following", + "id": "https://remote.org/users/masto_closed/following", "type": "OrderedCollection", "totalItems": 152, - "first": "http://localhost:4001/users/masto_closed/following?page=1" + "first": "https://remote.org/users/masto_closed/following?page=1" } diff --git a/test/fixtures/users_mock/masto_closed_following_page.json b/test/fixtures/users_mock/masto_closed_following_page.json index 8d8324699..b017413cc 100644 --- a/test/fixtures/users_mock/masto_closed_following_page.json +++ b/test/fixtures/users_mock/masto_closed_following_page.json @@ -1 +1 @@ -{"@context":"https://www.w3.org/ns/activitystreams","id":"http://localhost:4001/users/masto_closed/following?page=1","type":"OrderedCollectionPage","totalItems":152,"next":"http://localhost:4001/users/masto_closed/following?page=2","partOf":"http://localhost:4001/users/masto_closed/following","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]} +{"@context":"https://www.w3.org/ns/activitystreams","id":"https://remote.org/users/masto_closed/following?page=1","type":"OrderedCollectionPage","totalItems":152,"next":"https://remote.org/users/masto_closed/following?page=2","partOf":"https://remote.org/users/masto_closed/following","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]} diff --git a/test/fixtures/users_mock/pleroma_followers.json b/test/fixtures/users_mock/pleroma_followers.json index db71d084b..6ac3bfee0 100644 --- a/test/fixtures/users_mock/pleroma_followers.json +++ b/test/fixtures/users_mock/pleroma_followers.json @@ -1,18 +1,18 @@ { "type": "OrderedCollection", "totalItems": 527, - "id": "http://localhost:4001/users/fuser2/followers", + "id": "https://remote.org/users/fuser2/followers", "first": { "type": "OrderedCollectionPage", "totalItems": 527, - "partOf": "http://localhost:4001/users/fuser2/followers", + "partOf": "https://remote.org/users/fuser2/followers", "orderedItems": [], - "next": "http://localhost:4001/users/fuser2/followers?page=2", - "id": "http://localhost:4001/users/fuser2/followers?page=1" + "next": "https://remote.org/users/fuser2/followers?page=2", + "id": "https://remote.org/users/fuser2/followers?page=1" }, "@context": [ "https://www.w3.org/ns/activitystreams", - "http://localhost:4001/schemas/litepub-0.1.jsonld", + "https://remote.org/schemas/litepub-0.1.jsonld", { "@language": "und" } diff --git a/test/fixtures/users_mock/pleroma_following.json b/test/fixtures/users_mock/pleroma_following.json index 33d087703..c8306806a 100644 --- a/test/fixtures/users_mock/pleroma_following.json +++ b/test/fixtures/users_mock/pleroma_following.json @@ -1,18 +1,18 @@ { "type": "OrderedCollection", "totalItems": 267, - "id": "http://localhost:4001/users/fuser2/following", + "id": "https://remote.org/users/fuser2/following", "first": { "type": "OrderedCollectionPage", "totalItems": 267, - "partOf": "http://localhost:4001/users/fuser2/following", + "partOf": "https://remote.org/users/fuser2/following", "orderedItems": [], - "next": "http://localhost:4001/users/fuser2/following?page=2", - "id": "http://localhost:4001/users/fuser2/following?page=1" + "next": "https://remote.org/users/fuser2/following?page=2", + "id": "https://remote.org/users/fuser2/following?page=1" }, "@context": [ "https://www.w3.org/ns/activitystreams", - "http://localhost:4001/schemas/litepub-0.1.jsonld", + "https://remote.org/schemas/litepub-0.1.jsonld", { "@language": "und" } diff --git a/test/mix/tasks/pleroma/database_test.exs b/test/mix/tasks/pleroma/database_test.exs index 96a925528..38ed096ae 100644 --- a/test/mix/tasks/pleroma/database_test.exs +++ b/test/mix/tasks/pleroma/database_test.exs @@ -411,7 +411,7 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do ["scheduled_activities"], ["schema_migrations"], ["thread_mutes"], - # ["user_follows_hashtag"], # not in pleroma + ["user_follows_hashtag"], # ["user_frontend_setting_profiles"], # not in pleroma ["user_invite_tokens"], ["user_notes"], diff --git a/test/mix/tasks/pleroma/digest_test.exs b/test/mix/tasks/pleroma/digest_test.exs index 08482aadb..0d1804cdb 100644 --- a/test/mix/tasks/pleroma/digest_test.exs +++ b/test/mix/tasks/pleroma/digest_test.exs @@ -24,7 +24,7 @@ defmodule Mix.Tasks.Pleroma.DigestTest do setup do: clear_config([Pleroma.Emails.Mailer, :enabled], true) setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/mix/tasks/pleroma/user_test.exs b/test/mix/tasks/pleroma/user_test.exs index c9bcf2951..7ce5e92cb 100644 --- a/test/mix/tasks/pleroma/user_test.exs +++ b/test/mix/tasks/pleroma/user_test.exs @@ -21,7 +21,7 @@ defmodule Mix.Tasks.Pleroma.UserTest do import Pleroma.Factory setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/pleroma/conversation_test.exs b/test/pleroma/conversation_test.exs index 809c1951a..02b5de615 100644 --- a/test/pleroma/conversation_test.exs +++ b/test/pleroma/conversation_test.exs @@ -14,7 +14,7 @@ defmodule Pleroma.ConversationTest do setup_all do: clear_config([:instance, :federating], true) setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/pleroma/ecto_type/activity_pub/object_validators/content_language_map_test.exs b/test/pleroma/ecto_type/activity_pub/object_validators/content_language_map_test.exs new file mode 100644 index 000000000..a05871a6f --- /dev/null +++ b/test/pleroma/ecto_type/activity_pub/object_validators/content_language_map_test.exs @@ -0,0 +1,56 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2023 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMapTest do + use Pleroma.DataCase, async: true + + alias Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap + + test "it validates" do + data = %{ + "en-US" => "mew mew", + "en-GB" => "meow meow" + } + + assert {:ok, ^data} = ContentLanguageMap.cast(data) + end + + test "it validates empty strings" do + data = %{ + "en-US" => "mew mew", + "en-GB" => "" + } + + assert {:ok, ^data} = ContentLanguageMap.cast(data) + end + + test "it ignores non-strings within the map" do + data = %{ + "en-US" => "mew mew", + "en-GB" => 123 + } + + assert {:ok, validated_data} = ContentLanguageMap.cast(data) + + assert validated_data == %{"en-US" => "mew mew"} + end + + test "it ignores bad locale codes" do + data = %{ + "en-US" => "mew mew", + "en_GB" => "meow meow", + "en<<#@!$#!@%!GB" => "meow meow" + } + + assert {:ok, validated_data} = ContentLanguageMap.cast(data) + + assert validated_data == %{"en-US" => "mew mew"} + end + + test "it complains with non-map data" do + assert :error = ContentLanguageMap.cast("mew") + assert :error = ContentLanguageMap.cast(["mew"]) + assert :error = ContentLanguageMap.cast([%{"en-US" => "mew"}]) + end +end diff --git a/test/pleroma/ecto_type/activity_pub/object_validators/language_code_test.exs b/test/pleroma/ecto_type/activity_pub/object_validators/language_code_test.exs new file mode 100644 index 000000000..086bb3e97 --- /dev/null +++ b/test/pleroma/ecto_type/activity_pub/object_validators/language_code_test.exs @@ -0,0 +1,29 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2023 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCodeTest do + use Pleroma.DataCase, async: true + + alias Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode + + test "it accepts language code" do + text = "pl" + assert {:ok, ^text} = LanguageCode.cast(text) + end + + test "it accepts language code with region" do + text = "pl-PL" + assert {:ok, ^text} = LanguageCode.cast(text) + end + + test "errors for invalid language code" do + assert {:error, :invalid_language} = LanguageCode.cast("ru_RU") + assert {:error, :invalid_language} = LanguageCode.cast(" ") + assert {:error, :invalid_language} = LanguageCode.cast("en-US\n") + end + + test "errors for non-text" do + assert :error == LanguageCode.cast(42) + end +end diff --git a/test/pleroma/emoji/pack_test.exs b/test/pleroma/emoji/pack_test.exs index 00001abfc..6ab3e657e 100644 --- a/test/pleroma/emoji/pack_test.exs +++ b/test/pleroma/emoji/pack_test.exs @@ -4,6 +4,7 @@ defmodule Pleroma.Emoji.PackTest do use Pleroma.DataCase + alias Pleroma.Emoji alias Pleroma.Emoji.Pack @emoji_path Path.join( @@ -53,6 +54,63 @@ defmodule Pleroma.Emoji.PackTest do assert updated_pack.files_count == 5 end + + test "skips existing emojis when adding from zip file", %{pack: pack} do + # First, let's create a test pack with a "bear" emoji + test_pack_path = Path.join(@emoji_path, "test_bear_pack") + File.mkdir_p(test_pack_path) + + # Create a pack.json file + File.write!(Path.join(test_pack_path, "pack.json"), """ + { + "files": { "bear": "bear.png" }, + "pack": { + "description": "Bear Pack", "homepage": "https://pleroma.social", + "license": "Test license", "share-files": true + }} + """) + + # Copy a test image to use as the bear emoji + File.cp!( + Path.absname("test/instance_static/emoji/test_pack/blank.png"), + Path.join(test_pack_path, "bear.png") + ) + + # Load the pack to register the "bear" emoji in the global registry + {:ok, _bear_pack} = Pleroma.Emoji.Pack.load_pack("test_bear_pack") + + # Reload emoji to make sure the bear emoji is in the global registry + Emoji.reload() + + # Verify that the bear emoji exists in the global registry + assert Emoji.exist?("bear") + + # Now try to add a zip file that contains an emoji with the same shortcode + file = %Plug.Upload{ + content_type: "application/zip", + filename: "emojis.zip", + path: Path.absname("test/fixtures/emojis.zip") + } + + {:ok, updated_pack} = Pack.add_file(pack, nil, nil, file) + + # Verify that the "bear" emoji was skipped + refute Map.has_key?(updated_pack.files, "bear") + + # Other emojis should be added + assert Map.has_key?(updated_pack.files, "a_trusted_friend-128") + assert Map.has_key?(updated_pack.files, "auroraborealis") + assert Map.has_key?(updated_pack.files, "baby_in_a_box") + assert Map.has_key?(updated_pack.files, "bear-128") + + # Total count should be 4 (all emojis except "bear") + assert updated_pack.files_count == 4 + + # Clean up the test pack + on_exit(fn -> + File.rm_rf!(test_pack_path) + end) + end end test "returns error when zip file is bad", %{pack: pack} do @@ -62,7 +120,7 @@ defmodule Pleroma.Emoji.PackTest do path: Path.absname("test/instance_static/emoji/test_pack/blank.png") } - assert Pack.add_file(pack, nil, nil, file) == {:error, :einval} + assert {:error, _} = Pack.add_file(pack, nil, nil, file) end test "returns pack when zip file is empty", %{pack: pack} do diff --git a/test/pleroma/notification_test.exs b/test/pleroma/notification_test.exs index e595c5c53..4b20e07cf 100644 --- a/test/pleroma/notification_test.exs +++ b/test/pleroma/notification_test.exs @@ -19,7 +19,7 @@ defmodule Pleroma.NotificationTest do alias Pleroma.Web.MastodonAPI.NotificationView setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/pleroma/object/fetcher_test.exs b/test/pleroma/object/fetcher_test.exs index 215fca570..7ba5090e1 100644 --- a/test/pleroma/object/fetcher_test.exs +++ b/test/pleroma/object/fetcher_test.exs @@ -166,6 +166,91 @@ defmodule Pleroma.Object.FetcherTest do ) end + test "it does not fetch from local instance" do + local_url = Pleroma.Web.Endpoint.url() <> "/objects/local_resource" + + assert {:fetch, {:error, "Trying to fetch local resource"}} = + Fetcher.fetch_object_from_id(local_url) + end + + test "it validates content-type headers according to ActivityPub spec" do + # Setup a mock for an object with invalid content-type + mock(fn + %{method: :get, url: "https://example.com/objects/invalid-content-type"} -> + %Tesla.Env{ + status: 200, + # Not a valid AP content-type + headers: [{"content-type", "application/json"}], + body: + Jason.encode!(%{ + "id" => "https://example.com/objects/invalid-content-type", + "type" => "Note", + "content" => "This has an invalid content type", + "actor" => "https://example.com/users/actor", + "attributedTo" => "https://example.com/users/actor" + }) + } + end) + + assert {:fetch, {:error, {:content_type, "application/json"}}} = + Fetcher.fetch_object_from_id("https://example.com/objects/invalid-content-type") + end + + test "it accepts objects with application/ld+json and ActivityStreams profile" do + # Setup a mock for an object with ld+json content-type and AS profile + mock(fn + %{method: :get, url: "https://example.com/objects/valid-ld-json"} -> + %Tesla.Env{ + status: 200, + headers: [ + {"content-type", + "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""} + ], + body: + Jason.encode!(%{ + "id" => "https://example.com/objects/valid-ld-json", + "type" => "Note", + "content" => "This has a valid ld+json content type", + "actor" => "https://example.com/users/actor", + "attributedTo" => "https://example.com/users/actor" + }) + } + end) + + # This should pass if content-type validation works correctly + assert {:ok, object} = + Fetcher.fetch_and_contain_remote_object_from_id( + "https://example.com/objects/valid-ld-json" + ) + + assert object["content"] == "This has a valid ld+json content type" + end + + test "it rejects objects with no content-type header" do + # Setup a mock for an object with no content-type header + mock(fn + %{method: :get, url: "https://example.com/objects/no-content-type"} -> + %Tesla.Env{ + status: 200, + # No content-type header + headers: [], + body: + Jason.encode!(%{ + "id" => "https://example.com/objects/no-content-type", + "type" => "Note", + "content" => "This has no content type header", + "actor" => "https://example.com/users/actor", + "attributedTo" => "https://example.com/users/actor" + }) + } + end) + + # We want to test that the request fails with a missing content-type error + # but the actual error is {:fetch, {:error, nil}} - we'll check for this format + result = Fetcher.fetch_object_from_id("https://example.com/objects/no-content-type") + assert {:fetch, {:error, nil}} = result + end + test "it resets instance reachability on successful fetch" do id = "http://mastodon.example.org/@admin/99541947525187367" Instances.set_consistently_unreachable(id) @@ -534,6 +619,110 @@ defmodule Pleroma.Object.FetcherTest do end end + describe "cross-domain redirect handling" do + setup do + mock(fn + # Cross-domain redirect with original domain in id + %{method: :get, url: "https://original.test/objects/123"} -> + %Tesla.Env{ + status: 200, + url: "https://media.test/objects/123", + headers: [{"content-type", "application/activity+json"}], + body: + Jason.encode!(%{ + "id" => "https://original.test/objects/123", + "type" => "Note", + "content" => "This is redirected content", + "actor" => "https://original.test/users/actor", + "attributedTo" => "https://original.test/users/actor" + }) + } + + # Cross-domain redirect with final domain in id + %{method: :get, url: "https://original.test/objects/final-domain-id"} -> + %Tesla.Env{ + status: 200, + url: "https://media.test/objects/final-domain-id", + headers: [{"content-type", "application/activity+json"}], + body: + Jason.encode!(%{ + "id" => "https://media.test/objects/final-domain-id", + "type" => "Note", + "content" => "This has final domain in id", + "actor" => "https://original.test/users/actor", + "attributedTo" => "https://original.test/users/actor" + }) + } + + # No redirect - same domain + %{method: :get, url: "https://original.test/objects/same-domain-redirect"} -> + %Tesla.Env{ + status: 200, + url: "https://original.test/objects/different-path", + headers: [{"content-type", "application/activity+json"}], + body: + Jason.encode!(%{ + "id" => "https://original.test/objects/same-domain-redirect", + "type" => "Note", + "content" => "This has a same-domain redirect", + "actor" => "https://original.test/users/actor", + "attributedTo" => "https://original.test/users/actor" + }) + } + + # Test case with missing url field in response (common in tests) + %{method: :get, url: "https://original.test/objects/missing-url"} -> + %Tesla.Env{ + status: 200, + # No url field + headers: [{"content-type", "application/activity+json"}], + body: + Jason.encode!(%{ + "id" => "https://original.test/objects/missing-url", + "type" => "Note", + "content" => "This has no URL field in response", + "actor" => "https://original.test/users/actor", + "attributedTo" => "https://original.test/users/actor" + }) + } + end) + + :ok + end + + test "it rejects objects from cross-domain redirects with original domain in id" do + assert {:error, {:cross_domain_redirect, true}} = + Fetcher.fetch_and_contain_remote_object_from_id( + "https://original.test/objects/123" + ) + end + + test "it rejects objects from cross-domain redirects with final domain in id" do + assert {:error, {:cross_domain_redirect, true}} = + Fetcher.fetch_and_contain_remote_object_from_id( + "https://original.test/objects/final-domain-id" + ) + end + + test "it accepts objects with same-domain redirects" do + assert {:ok, data} = + Fetcher.fetch_and_contain_remote_object_from_id( + "https://original.test/objects/same-domain-redirect" + ) + + assert data["content"] == "This has a same-domain redirect" + end + + test "it handles responses without URL field (common in tests)" do + assert {:ok, data} = + Fetcher.fetch_and_contain_remote_object_from_id( + "https://original.test/objects/missing-url" + ) + + assert data["content"] == "This has no URL field in response" + end + end + describe "fetch with history" do setup do object2 = %{ diff --git a/test/pleroma/repo/migrations/autolinker_to_linkify_test.exs b/test/pleroma/repo/migrations/autolinker_to_linkify_test.exs index 9847781f0..99522994a 100644 --- a/test/pleroma/repo/migrations/autolinker_to_linkify_test.exs +++ b/test/pleroma/repo/migrations/autolinker_to_linkify_test.exs @@ -3,12 +3,11 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Repo.Migrations.AutolinkerToLinkifyTest do - use Pleroma.DataCase + use Pleroma.DataCase, async: true import Pleroma.Factory import Pleroma.Tests.Helpers alias Pleroma.ConfigDB - setup do: clear_config(Pleroma.Formatter) setup_all do: require_migration("20200716195806_autolinker_to_linkify") test "change/0 converts auto_linker opts for Pleroma.Formatter", %{migration: migration} do diff --git a/test/pleroma/reverse_proxy_test.exs b/test/pleroma/reverse_proxy_test.exs index fb330232a..85e1d0910 100644 --- a/test/pleroma/reverse_proxy_test.exs +++ b/test/pleroma/reverse_proxy_test.exs @@ -63,7 +63,11 @@ defmodule Pleroma.ReverseProxyTest do |> Plug.Conn.put_req_header("user-agent", "fake/1.0") |> ReverseProxy.call("/user-agent") - assert json_response(conn, 200) == %{"user-agent" => Pleroma.Application.user_agent()} + # Convert the response to a map without relying on json_response + body = conn.resp_body + assert conn.status == 200 + response = Jason.decode!(body) + assert response == %{"user-agent" => Pleroma.Application.user_agent()} end test "closed connection", %{conn: conn} do @@ -138,11 +142,14 @@ defmodule Pleroma.ReverseProxyTest do test "common", %{conn: conn} do ClientMock |> expect(:request, fn :head, "/head", _, _, _ -> - {:ok, 200, [{"content-type", "text/html; charset=utf-8"}]} + {:ok, 200, [{"content-type", "image/png"}]} end) conn = ReverseProxy.call(Map.put(conn, :method, "HEAD"), "/head") - assert html_response(conn, 200) == "" + + assert conn.status == 200 + assert Conn.get_resp_header(conn, "content-type") == ["image/png"] + assert conn.resp_body == "" end end @@ -249,7 +256,10 @@ defmodule Pleroma.ReverseProxyTest do ) |> ReverseProxy.call("/headers") - %{"headers" => headers} = json_response(conn, 200) + body = conn.resp_body + assert conn.status == 200 + response = Jason.decode!(body) + headers = response["headers"] assert headers["Accept"] == "text/html" end @@ -262,7 +272,10 @@ defmodule Pleroma.ReverseProxyTest do ) |> ReverseProxy.call("/headers") - %{"headers" => headers} = json_response(conn, 200) + body = conn.resp_body + assert conn.status == 200 + response = Jason.decode!(body) + headers = response["headers"] refute headers["Accept-Language"] end end @@ -328,4 +341,58 @@ defmodule Pleroma.ReverseProxyTest do assert {"content-disposition", "attachment; filename=\"filename.jpg\""} in conn.resp_headers end end + + describe "content-type sanitisation" do + test "preserves allowed image type", %{conn: conn} do + ClientMock + |> expect(:request, fn :get, "/content", _, _, _ -> + {:ok, 200, [{"content-type", "image/png"}], %{url: "/content"}} + end) + |> expect(:stream_body, fn _ -> :done end) + + conn = ReverseProxy.call(conn, "/content") + + assert conn.status == 200 + assert Conn.get_resp_header(conn, "content-type") == ["image/png"] + end + + test "preserves allowed video type", %{conn: conn} do + ClientMock + |> expect(:request, fn :get, "/content", _, _, _ -> + {:ok, 200, [{"content-type", "video/mp4"}], %{url: "/content"}} + end) + |> expect(:stream_body, fn _ -> :done end) + + conn = ReverseProxy.call(conn, "/content") + + assert conn.status == 200 + assert Conn.get_resp_header(conn, "content-type") == ["video/mp4"] + end + + test "sanitizes ActivityPub content type", %{conn: conn} do + ClientMock + |> expect(:request, fn :get, "/content", _, _, _ -> + {:ok, 200, [{"content-type", "application/activity+json"}], %{url: "/content"}} + end) + |> expect(:stream_body, fn _ -> :done end) + + conn = ReverseProxy.call(conn, "/content") + + assert conn.status == 200 + assert Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"] + end + + test "sanitizes LD-JSON content type", %{conn: conn} do + ClientMock + |> expect(:request, fn :get, "/content", _, _, _ -> + {:ok, 200, [{"content-type", "application/ld+json"}], %{url: "/content"}} + end) + |> expect(:stream_body, fn _ -> :done end) + + conn = ReverseProxy.call(conn, "/content") + + assert conn.status == 200 + assert Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"] + end + end end diff --git a/test/pleroma/safe_zip_test.exs b/test/pleroma/safe_zip_test.exs new file mode 100644 index 000000000..3312d4e63 --- /dev/null +++ b/test/pleroma/safe_zip_test.exs @@ -0,0 +1,496 @@ +defmodule Pleroma.SafeZipTest do + # Not making this async because it creates and deletes files + use ExUnit.Case + + alias Pleroma.SafeZip + + @fixtures_dir "test/fixtures" + @tmp_dir "test/zip_tmp" + + setup do + # Ensure tmp directory exists + File.mkdir_p!(@tmp_dir) + + on_exit(fn -> + # Clean up any files created during tests + File.rm_rf!(@tmp_dir) + File.mkdir_p!(@tmp_dir) + end) + + :ok + end + + describe "list_dir_file/1" do + test "lists files in a valid zip" do + {:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "emojis.zip")) + assert is_list(files) + assert length(files) > 0 + end + + test "returns an empty list for empty zip" do + {:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "empty.zip")) + assert files == [] + end + + test "returns error for non-existent file" do + assert {:error, _} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "nonexistent.zip")) + end + + test "only lists regular files, not directories" do + # Create a zip with both files and directories + zip_path = create_zip_with_directory() + + # List files with SafeZip + {:ok, files} = SafeZip.list_dir_file(zip_path) + + # Verify only regular files are listed, not directories + assert "file_in_dir/test_file.txt" in files + assert "root_file.txt" in files + + # Directory entries should not be included in the list + refute "file_in_dir/" in files + end + end + + describe "contains_all_data?/2" do + test "returns true when all files are in the archive" do + # For this test, we'll create our own zip file with known content + # to ensure we can test the contains_all_data? function properly + zip_path = create_zip_with_directory() + archive_data = File.read!(zip_path) + + # Check if the archive contains the root file + # Note: The function expects charlists (Erlang strings) in the MapSet + assert SafeZip.contains_all_data?(archive_data, MapSet.new([~c"root_file.txt"])) + end + + test "returns false when files are missing" do + archive_path = Path.join(@fixtures_dir, "emojis.zip") + archive_data = File.read!(archive_path) + + # Create a MapSet with non-existent files + fset = MapSet.new([~c"nonexistent.txt"]) + + refute SafeZip.contains_all_data?(archive_data, fset) + end + + test "returns false for invalid archive data" do + refute SafeZip.contains_all_data?("invalid data", MapSet.new([~c"file.txt"])) + end + + test "only checks for regular files, not directories" do + # Create a zip with both files and directories + zip_path = create_zip_with_directory() + archive_data = File.read!(zip_path) + + # Check if the archive contains a directory (should return false) + refute SafeZip.contains_all_data?(archive_data, MapSet.new([~c"file_in_dir/"])) + + # For this test, we'll manually check if the file exists in the archive + # by extracting it and verifying it exists + extract_dir = Path.join(@tmp_dir, "extract_check") + File.mkdir_p!(extract_dir) + {:ok, files} = SafeZip.unzip_file(zip_path, extract_dir) + + # Verify the root file was extracted + assert Enum.any?(files, fn file -> + Path.basename(file) == "root_file.txt" + end) + + # Verify the file exists on disk + assert File.exists?(Path.join(extract_dir, "root_file.txt")) + end + end + + describe "zip/4" do + test "creates a zip file on disk" do + # Create a test file + test_file_path = Path.join(@tmp_dir, "test_file.txt") + File.write!(test_file_path, "test content") + + # Create a zip file + zip_path = Path.join(@tmp_dir, "test.zip") + assert {:ok, ^zip_path} = SafeZip.zip(zip_path, ["test_file.txt"], @tmp_dir, false) + + # Verify the zip file exists + assert File.exists?(zip_path) + end + + test "creates a zip file in memory" do + # Create a test file + test_file_path = Path.join(@tmp_dir, "test_file.txt") + File.write!(test_file_path, "test content") + + # Create a zip file in memory + zip_name = Path.join(@tmp_dir, "test.zip") + + assert {:ok, {^zip_name, zip_data}} = + SafeZip.zip(zip_name, ["test_file.txt"], @tmp_dir, true) + + # Verify the zip data is binary + assert is_binary(zip_data) + end + + test "returns error for unsafe paths" do + # Try to zip a file with path traversal + assert {:error, _} = + SafeZip.zip( + Path.join(@tmp_dir, "test.zip"), + ["../fixtures/test.txt"], + @tmp_dir, + false + ) + end + + test "can create zip with directories" do + # Create a directory structure + dir_path = Path.join(@tmp_dir, "test_dir") + File.mkdir_p!(dir_path) + + file_in_dir_path = Path.join(dir_path, "file_in_dir.txt") + File.write!(file_in_dir_path, "file in directory") + + # Create a zip file + zip_path = Path.join(@tmp_dir, "dir_test.zip") + + assert {:ok, ^zip_path} = + SafeZip.zip( + zip_path, + ["test_dir/file_in_dir.txt"], + @tmp_dir, + false + ) + + # Verify the zip file exists + assert File.exists?(zip_path) + + # Extract and verify the directory structure is preserved + extract_dir = Path.join(@tmp_dir, "extract") + {:ok, files} = SafeZip.unzip_file(zip_path, extract_dir) + + # Check if the file path is in the list, accounting for possible full paths + assert Enum.any?(files, fn file -> + String.ends_with?(file, "file_in_dir.txt") + end) + + # Verify the file exists in the expected location + assert File.exists?(Path.join([extract_dir, "test_dir", "file_in_dir.txt"])) + end + end + + describe "unzip_file/3" do + test "extracts files from a zip archive" do + archive_path = Path.join(@fixtures_dir, "emojis.zip") + + # Extract the archive + assert {:ok, files} = SafeZip.unzip_file(archive_path, @tmp_dir) + + # Verify files were extracted + assert is_list(files) + assert length(files) > 0 + + # Verify at least one file exists + first_file = List.first(files) + + # Simply check that the file exists in the tmp directory + assert File.exists?(first_file) + end + + test "extracts specific files from a zip archive" do + archive_path = Path.join(@fixtures_dir, "emojis.zip") + + # Get list of files in the archive + {:ok, all_files} = SafeZip.list_dir_file(archive_path) + file_to_extract = List.first(all_files) + + # Extract only one file + assert {:ok, [extracted_file]} = + SafeZip.unzip_file(archive_path, @tmp_dir, [file_to_extract]) + + # Verify only the specified file was extracted + assert Path.basename(extracted_file) == Path.basename(file_to_extract) + + # Check that the file exists in the tmp directory + assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract))) + end + + test "returns error for invalid zip file" do + invalid_path = Path.join(@tmp_dir, "invalid.zip") + File.write!(invalid_path, "not a zip file") + + assert {:error, _} = SafeZip.unzip_file(invalid_path, @tmp_dir) + end + + test "creates directories when extracting files in subdirectories" do + # Create a zip with files in subdirectories + zip_path = create_zip_with_directory() + + # Extract the archive + assert {:ok, files} = SafeZip.unzip_file(zip_path, @tmp_dir) + + # Verify files were extracted - handle both relative and absolute paths + assert Enum.any?(files, fn file -> + Path.basename(file) == "test_file.txt" && + String.contains?(file, "file_in_dir") + end) + + assert Enum.any?(files, fn file -> + Path.basename(file) == "root_file.txt" + end) + + # Verify directory was created + dir_path = Path.join(@tmp_dir, "file_in_dir") + assert File.exists?(dir_path) + assert File.dir?(dir_path) + + # Verify file in directory was extracted + file_path = Path.join(dir_path, "test_file.txt") + assert File.exists?(file_path) + end + end + + describe "unzip_data/3" do + test "extracts files from zip data" do + archive_path = Path.join(@fixtures_dir, "emojis.zip") + archive_data = File.read!(archive_path) + + # Extract the archive from data + assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir) + + # Verify files were extracted + assert is_list(files) + assert length(files) > 0 + + # Verify at least one file exists + first_file = List.first(files) + + # Simply check that the file exists in the tmp directory + assert File.exists?(first_file) + end + + test "extracts specific files from zip data" do + archive_path = Path.join(@fixtures_dir, "emojis.zip") + archive_data = File.read!(archive_path) + + # Get list of files in the archive + {:ok, all_files} = SafeZip.list_dir_file(archive_path) + file_to_extract = List.first(all_files) + + # Extract only one file + assert {:ok, extracted_files} = + SafeZip.unzip_data(archive_data, @tmp_dir, [file_to_extract]) + + # Verify only the specified file was extracted + assert Enum.any?(extracted_files, fn path -> + Path.basename(path) == Path.basename(file_to_extract) + end) + + # Simply check that the file exists in the tmp directory + assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract))) + end + + test "returns error for invalid zip data" do + assert {:error, _} = SafeZip.unzip_data("not a zip file", @tmp_dir) + end + + test "creates directories when extracting files in subdirectories from data" do + # Create a zip with files in subdirectories + zip_path = create_zip_with_directory() + archive_data = File.read!(zip_path) + + # Extract the archive from data + assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir) + + # Verify files were extracted - handle both relative and absolute paths + assert Enum.any?(files, fn file -> + Path.basename(file) == "test_file.txt" && + String.contains?(file, "file_in_dir") + end) + + assert Enum.any?(files, fn file -> + Path.basename(file) == "root_file.txt" + end) + + # Verify directory was created + dir_path = Path.join(@tmp_dir, "file_in_dir") + assert File.exists?(dir_path) + assert File.dir?(dir_path) + + # Verify file in directory was extracted + file_path = Path.join(dir_path, "test_file.txt") + assert File.exists?(file_path) + end + end + + # Security tests + describe "security checks" do + test "prevents path traversal in zip extraction" do + # Create a malicious zip file with path traversal + malicious_zip_path = create_malicious_zip_with_path_traversal() + + # Try to extract it with SafeZip + assert {:error, _} = SafeZip.unzip_file(malicious_zip_path, @tmp_dir) + + # Verify the file was not extracted outside the target directory + refute File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt")) + end + + test "prevents directory traversal in zip listing" do + # Create a malicious zip file with path traversal + malicious_zip_path = create_malicious_zip_with_path_traversal() + + # Try to list files with SafeZip + assert {:error, _} = SafeZip.list_dir_file(malicious_zip_path) + end + + test "prevents path traversal in zip data extraction" do + # Create a malicious zip file with path traversal + malicious_zip_path = create_malicious_zip_with_path_traversal() + malicious_data = File.read!(malicious_zip_path) + + # Try to extract it with SafeZip + assert {:error, _} = SafeZip.unzip_data(malicious_data, @tmp_dir) + + # Verify the file was not extracted outside the target directory + refute File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt")) + end + + test "handles zip bomb attempts" do + # Create a zip bomb (a zip with many files or large files) + zip_bomb_path = create_zip_bomb() + + # The SafeZip module should handle this gracefully + # Either by successfully extracting it (if it's not too large) + # or by returning an error (if it detects a potential zip bomb) + result = SafeZip.unzip_file(zip_bomb_path, @tmp_dir) + + case result do + {:ok, _} -> + # If it successfully extracts, make sure it didn't fill up the disk + # This is a simple check to ensure the extraction was controlled + assert File.exists?(@tmp_dir) + + {:error, _} -> + # If it returns an error, that's also acceptable + # The important thing is that it doesn't crash or hang + assert true + end + end + + test "handles deeply nested directory structures" do + # Create a zip with deeply nested directories + deep_nest_path = create_deeply_nested_zip() + + # The SafeZip module should handle this gracefully + result = SafeZip.unzip_file(deep_nest_path, @tmp_dir) + + case result do + {:ok, files} -> + # If it successfully extracts, verify the files were extracted + assert is_list(files) + assert length(files) > 0 + + {:error, _} -> + # If it returns an error, that's also acceptable + # The important thing is that it doesn't crash or hang + assert true + end + end + end + + # Helper functions to create test fixtures + + # Creates a zip file with a path traversal attempt + defp create_malicious_zip_with_path_traversal do + malicious_zip_path = Path.join(@tmp_dir, "path_traversal.zip") + + # Create a file to include in the zip + test_file_path = Path.join(@tmp_dir, "test_file.txt") + File.write!(test_file_path, "malicious content") + + # Use Erlang's zip module directly to create a zip with path traversal + {:ok, charlist_path} = + :zip.create( + String.to_charlist(malicious_zip_path), + [{String.to_charlist("../traversal_attempt.txt"), File.read!(test_file_path)}] + ) + + to_string(charlist_path) + end + + # Creates a zip file with directory entries + defp create_zip_with_directory do + zip_path = Path.join(@tmp_dir, "with_directory.zip") + + # Create files to include in the zip + root_file_path = Path.join(@tmp_dir, "root_file.txt") + File.write!(root_file_path, "root file content") + + # Create a directory and a file in it + dir_path = Path.join(@tmp_dir, "file_in_dir") + File.mkdir_p!(dir_path) + + file_in_dir_path = Path.join(dir_path, "test_file.txt") + File.write!(file_in_dir_path, "file in directory content") + + # Use Erlang's zip module to create a zip with directory structure + {:ok, charlist_path} = + :zip.create( + String.to_charlist(zip_path), + [ + {String.to_charlist("root_file.txt"), File.read!(root_file_path)}, + {String.to_charlist("file_in_dir/test_file.txt"), File.read!(file_in_dir_path)} + ] + ) + + to_string(charlist_path) + end + + # Creates a zip bomb (a zip with many small files) + defp create_zip_bomb do + zip_path = Path.join(@tmp_dir, "zip_bomb.zip") + + # Create a small file to duplicate many times + small_file_path = Path.join(@tmp_dir, "small_file.txt") + File.write!(small_file_path, String.duplicate("A", 100)) + + # Create a list of many files to include in the zip + file_entries = + for i <- 1..100 do + {String.to_charlist("file_#{i}.txt"), File.read!(small_file_path)} + end + + # Use Erlang's zip module to create a zip with many files + {:ok, charlist_path} = + :zip.create( + String.to_charlist(zip_path), + file_entries + ) + + to_string(charlist_path) + end + + # Creates a zip with deeply nested directories + defp create_deeply_nested_zip do + zip_path = Path.join(@tmp_dir, "deep_nest.zip") + + # Create a file to include in the zip + file_content = "test content" + + # Create a list of deeply nested files + file_entries = + for i <- 1..10 do + nested_path = Enum.reduce(1..i, "nested", fn j, acc -> "#{acc}/level_#{j}" end) + {String.to_charlist("#{nested_path}/file.txt"), file_content} + end + + # Use Erlang's zip module to create a zip with deeply nested directories + {:ok, charlist_path} = + :zip.create( + String.to_charlist(zip_path), + file_entries + ) + + to_string(charlist_path) + end +end diff --git a/test/pleroma/upload/filter/analyze_metadata_test.exs b/test/pleroma/upload/filter/analyze_metadata_test.exs index e4ac673b2..6e1f2afaf 100644 --- a/test/pleroma/upload/filter/analyze_metadata_test.exs +++ b/test/pleroma/upload/filter/analyze_metadata_test.exs @@ -34,6 +34,20 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadataTest do assert meta.blurhash == "eXJi-E:SwCEm5rCmn$+YWYn+15K#5A$xxCi{SiV]s*W:Efa#s.jE-T" end + test "it gets dimensions for grayscale images" do + upload = %Pleroma.Upload{ + name: "break_analyze.png", + content_type: "image/png", + path: Path.absname("test/fixtures/break_analyze.png"), + tempfile: Path.absname("test/fixtures/break_analyze.png") + } + + {:ok, :filtered, meta} = AnalyzeMetadata.filter(upload) + + assert %{width: 1410, height: 2048} = meta + assert is_nil(meta.blurhash) + end + test "adds the dimensions for videos" do upload = %Pleroma.Upload{ name: "coolvideo.mp4", diff --git a/test/pleroma/upload/filter/anonymize_filename_test.exs b/test/pleroma/upload/filter/anonymize_filename_test.exs index 9b94b91c3..5dae62003 100644 --- a/test/pleroma/upload/filter/anonymize_filename_test.exs +++ b/test/pleroma/upload/filter/anonymize_filename_test.exs @@ -3,8 +3,10 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Upload.Filter.AnonymizeFilenameTest do - use Pleroma.DataCase + use Pleroma.DataCase, async: true + import Mox + alias Pleroma.StaticStubbedConfigMock, as: ConfigMock alias Pleroma.Upload setup do @@ -19,21 +21,26 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilenameTest do %{upload_file: upload_file} end - setup do: clear_config([Pleroma.Upload.Filter.AnonymizeFilename, :text]) - test "it replaces filename on pre-defined text", %{upload_file: upload_file} do - clear_config([Upload.Filter.AnonymizeFilename, :text], "custom-file.png") + ConfigMock + |> stub(:get, fn [Upload.Filter.AnonymizeFilename, :text] -> "custom-file.png" end) + {:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file) assert name == "custom-file.png" end test "it replaces filename on pre-defined text expression", %{upload_file: upload_file} do - clear_config([Upload.Filter.AnonymizeFilename, :text], "custom-file.{extension}") + ConfigMock + |> stub(:get, fn [Upload.Filter.AnonymizeFilename, :text] -> "custom-file.{extension}" end) + {:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file) assert name == "custom-file.jpg" end test "it replaces filename on random text", %{upload_file: upload_file} do + ConfigMock + |> stub(:get, fn [Upload.Filter.AnonymizeFilename, :text] -> nil end) + {:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file) assert <<_::bytes-size(14)>> <> ".jpg" = name refute name == "an… image.jpg" diff --git a/test/pleroma/upload/filter/mogrifun_test.exs b/test/pleroma/upload/filter/mogrifun_test.exs index bf9b65589..77a9c1666 100644 --- a/test/pleroma/upload/filter/mogrifun_test.exs +++ b/test/pleroma/upload/filter/mogrifun_test.exs @@ -3,9 +3,10 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Upload.Filter.MogrifunTest do - use Pleroma.DataCase - import Mock + use Pleroma.DataCase, async: true + import Mox + alias Pleroma.MogrifyMock alias Pleroma.Upload alias Pleroma.Upload.Filter @@ -22,23 +23,12 @@ defmodule Pleroma.Upload.Filter.MogrifunTest do tempfile: Path.absname("test/fixtures/image_tmp.jpg") } - task = - Task.async(fn -> - assert_receive {:apply_filter, {}}, 4_000 - end) + MogrifyMock + |> stub(:open, fn _file -> %{} end) + |> stub(:custom, fn _image, _action -> %{} end) + |> stub(:custom, fn _image, _action, _options -> %{} end) + |> stub(:save, fn _image, [in_place: true] -> :ok end) - with_mocks([ - {Mogrify, [], - [ - open: fn _f -> %Mogrify.Image{} end, - custom: fn _m, _a -> send(task.pid, {:apply_filter, {}}) end, - custom: fn _m, _a, _o -> send(task.pid, {:apply_filter, {}}) end, - save: fn _f, _o -> :ok end - ]} - ]) do - assert Filter.Mogrifun.filter(upload) == {:ok, :filtered} - end - - Task.await(task) + assert Filter.Mogrifun.filter(upload) == {:ok, :filtered} end end diff --git a/test/pleroma/upload/filter/mogrify_test.exs b/test/pleroma/upload/filter/mogrify_test.exs index 208da57ca..f8ed6e8dd 100644 --- a/test/pleroma/upload/filter/mogrify_test.exs +++ b/test/pleroma/upload/filter/mogrify_test.exs @@ -3,13 +3,18 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Upload.Filter.MogrifyTest do - use Pleroma.DataCase - import Mock + use Pleroma.DataCase, async: true + import Mox + alias Pleroma.MogrifyMock + alias Pleroma.StaticStubbedConfigMock, as: ConfigMock alias Pleroma.Upload.Filter + setup :verify_on_exit! + test "apply mogrify filter" do - clear_config(Filter.Mogrify, args: [{"tint", "40"}]) + ConfigMock + |> stub(:get!, fn [Filter.Mogrify, :args] -> [{"tint", "40"}] end) File.cp!( "test/fixtures/image.jpg", @@ -23,19 +28,11 @@ defmodule Pleroma.Upload.Filter.MogrifyTest do tempfile: Path.absname("test/fixtures/image_tmp.jpg") } - task = - Task.async(fn -> - assert_receive {:apply_filter, {_, "tint", "40"}}, 4_000 - end) + MogrifyMock + |> expect(:open, fn _file -> %{} end) + |> expect(:custom, fn _image, "tint", "40" -> %{} end) + |> expect(:save, fn _image, [in_place: true] -> :ok end) - with_mock Mogrify, - open: fn _f -> %Mogrify.Image{} end, - custom: fn _m, _a -> :ok end, - custom: fn m, a, o -> send(task.pid, {:apply_filter, {m, a, o}}) end, - save: fn _f, _o -> :ok end do - assert Filter.Mogrify.filter(upload) == {:ok, :filtered} - end - - Task.await(task) + assert Filter.Mogrify.filter(upload) == {:ok, :filtered} end end diff --git a/test/pleroma/upload/filter_test.exs b/test/pleroma/upload/filter_test.exs index 706fc9ac7..a369a723a 100644 --- a/test/pleroma/upload/filter_test.exs +++ b/test/pleroma/upload/filter_test.exs @@ -5,12 +5,13 @@ defmodule Pleroma.Upload.FilterTest do use Pleroma.DataCase + import Mox + alias Pleroma.StaticStubbedConfigMock, as: ConfigMock alias Pleroma.Upload.Filter - setup do: clear_config([Pleroma.Upload.Filter.AnonymizeFilename, :text]) - test "applies filters" do - clear_config([Pleroma.Upload.Filter.AnonymizeFilename, :text], "custom-file.png") + ConfigMock + |> stub(:get, fn [Pleroma.Upload.Filter.AnonymizeFilename, :text] -> "custom-file.png" end) File.cp!( "test/fixtures/image.jpg", diff --git a/test/pleroma/user/backup_test.exs b/test/pleroma/user/backup_test.exs index 24fe09f7e..f4b92adf8 100644 --- a/test/pleroma/user/backup_test.exs +++ b/test/pleroma/user/backup_test.exs @@ -185,13 +185,13 @@ defmodule Pleroma.User.BackupTest do %{"@language" => "und"} ], "bookmarks" => "bookmarks.json", - "followers" => "http://cofe.io/users/cofe/followers", - "following" => "http://cofe.io/users/cofe/following", + "followers" => "followers.json", + "following" => "following.json", "id" => "http://cofe.io/users/cofe", "inbox" => "http://cofe.io/users/cofe/inbox", "likes" => "likes.json", "name" => "Cofe", - "outbox" => "http://cofe.io/users/cofe/outbox", + "outbox" => "outbox.json", "preferredUsername" => "cofe", "publicKey" => %{ "id" => "http://cofe.io/users/cofe#main-key", diff --git a/test/pleroma/user_relationship_test.exs b/test/pleroma/user_relationship_test.exs index 7d205a746..5b43cb2b6 100644 --- a/test/pleroma/user_relationship_test.exs +++ b/test/pleroma/user_relationship_test.exs @@ -3,11 +3,12 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.UserRelationshipTest do + alias Pleroma.DateTimeMock alias Pleroma.UserRelationship - use Pleroma.DataCase, async: false + use Pleroma.DataCase, async: true - import Mock + import Mox import Pleroma.Factory describe "*_exists?/2" do @@ -52,6 +53,9 @@ defmodule Pleroma.UserRelationshipTest do end test "creates user relationship record if it doesn't exist", %{users: [user1, user2]} do + DateTimeMock + |> stub_with(Pleroma.DateTime.Impl) + for relationship_type <- [ :block, :mute, @@ -80,13 +84,15 @@ defmodule Pleroma.UserRelationshipTest do end test "if record already exists, returns it", %{users: [user1, user2]} do - user_block = - with_mock NaiveDateTime, [:passthrough], utc_now: fn -> ~N[2017-03-17 17:09:58] end do - {:ok, %{inserted_at: ~N[2017-03-17 17:09:58]}} = - UserRelationship.create_block(user1, user2) - end + fixed_datetime = ~N[2017-03-17 17:09:58] - assert user_block == UserRelationship.create_block(user1, user2) + Pleroma.DateTimeMock + |> expect(:utc_now, 2, fn -> fixed_datetime end) + + {:ok, %{inserted_at: ^fixed_datetime}} = UserRelationship.create_block(user1, user2) + + # Test the idempotency without caring about the exact time + assert {:ok, _} = UserRelationship.create_block(user1, user2) end end diff --git a/test/pleroma/user_test.exs b/test/pleroma/user_test.exs index 06afc0709..176e70ef9 100644 --- a/test/pleroma/user_test.exs +++ b/test/pleroma/user_test.exs @@ -20,7 +20,7 @@ defmodule Pleroma.UserTest do import Swoosh.TestAssertions setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end @@ -2405,8 +2405,8 @@ defmodule Pleroma.UserTest do other_user = insert(:user, local: false, - follower_address: "http://localhost:4001/users/masto_closed/followers", - following_address: "http://localhost:4001/users/masto_closed/following" + follower_address: "https://remote.org/users/masto_closed/followers", + following_address: "https://remote.org/users/masto_closed/following" ) assert other_user.following_count == 0 @@ -2426,8 +2426,8 @@ defmodule Pleroma.UserTest do other_user = insert(:user, local: false, - follower_address: "http://localhost:4001/users/masto_closed/followers", - following_address: "http://localhost:4001/users/masto_closed/following" + follower_address: "https://remote.org/users/masto_closed/followers", + following_address: "https://remote.org/users/masto_closed/following" ) assert other_user.following_count == 0 @@ -2447,8 +2447,8 @@ defmodule Pleroma.UserTest do other_user = insert(:user, local: false, - follower_address: "http://localhost:4001/users/masto_closed/followers", - following_address: "http://localhost:4001/users/masto_closed/following" + follower_address: "https://remote.org/users/masto_closed/followers", + following_address: "https://remote.org/users/masto_closed/following" ) assert other_user.following_count == 0 @@ -2919,4 +2919,74 @@ defmodule Pleroma.UserTest do assert [%{"verified_at" => ^verified_at}] = user.fields end + + describe "follow_hashtag/2" do + test "should follow a hashtag" do + user = insert(:user) + hashtag = insert(:hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + + user = User.get_cached_by_ap_id(user.ap_id) + + assert user.followed_hashtags |> Enum.count() == 1 + assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end) + end + + test "should not follow a hashtag twice" do + user = insert(:user) + hashtag = insert(:hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + + user = User.get_cached_by_ap_id(user.ap_id) + + assert user.followed_hashtags |> Enum.count() == 1 + assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end) + end + + test "can follow multiple hashtags" do + user = insert(:user) + hashtag = insert(:hashtag) + other_hashtag = insert(:hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + assert {:ok, _} = user |> User.follow_hashtag(other_hashtag) + + user = User.get_cached_by_ap_id(user.ap_id) + + assert user.followed_hashtags |> Enum.count() == 2 + assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end) + assert other_hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end) + end + end + + describe "unfollow_hashtag/2" do + test "should unfollow a hashtag" do + user = insert(:user) + hashtag = insert(:hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + assert {:ok, _} = user |> User.unfollow_hashtag(hashtag) + + user = User.get_cached_by_ap_id(user.ap_id) + + assert user.followed_hashtags |> Enum.count() == 0 + end + + test "should not error when trying to unfollow a hashtag twice" do + user = insert(:user) + hashtag = insert(:hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + assert {:ok, _} = user |> User.unfollow_hashtag(hashtag) + assert {:ok, _} = user |> User.unfollow_hashtag(hashtag) + + user = User.get_cached_by_ap_id(user.ap_id) + + assert user.followed_hashtags |> Enum.count() == 0 + end + end end diff --git a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs index d4175b56f..46b3d5f0d 100644 --- a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs @@ -26,7 +26,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do require Pleroma.Constants setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end @@ -1344,6 +1344,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do end describe "GET /users/:nickname/outbox" do + setup do + Mox.stub_with(Pleroma.StaticStubbedConfigMock, Pleroma.Config) + :ok + end + test "it paginates correctly", %{conn: conn} do user = insert(:user) conn = assign(conn, :user, user) @@ -1432,6 +1437,22 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do assert %{"orderedItems" => []} = resp end + test "it does not return a local note activity when C2S API is disabled", %{conn: conn} do + clear_config([:activitypub, :client_api_enabled], false) + user = insert(:user) + reader = insert(:user) + {:ok, _note_activity} = CommonAPI.post(user, %{status: "mew mew", visibility: "local"}) + + resp = + conn + |> assign(:user, reader) + |> put_req_header("accept", "application/activity+json") + |> get("/users/#{user.nickname}/outbox?page=true") + |> json_response(200) + + assert %{"orderedItems" => []} = resp + end + test "it returns a note activity in a collection", %{conn: conn} do note_activity = insert(:note_activity) note_object = Object.normalize(note_activity, fetch: false) @@ -1483,6 +1504,35 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do assert [answer_outbox] = outbox_get["orderedItems"] assert answer_outbox["id"] == activity.data["id"] end + + test "it works with authorized fetch forced when authenticated" do + clear_config([:activitypub, :authorized_fetch_mode], true) + + user = insert(:user) + outbox_endpoint = user.ap_id <> "/outbox" + + conn = + build_conn() + |> assign(:user, user) + |> put_req_header("accept", "application/activity+json") + |> get(outbox_endpoint) + + assert json_response(conn, 200) + end + + test "it fails with authorized fetch forced when unauthenticated", %{conn: conn} do + clear_config([:activitypub, :authorized_fetch_mode], true) + + user = insert(:user) + outbox_endpoint = user.ap_id <> "/outbox" + + conn = + conn + |> put_req_header("accept", "application/activity+json") + |> get(outbox_endpoint) + + assert response(conn, 401) + end end describe "POST /users/:nickname/outbox (C2S)" do @@ -1644,6 +1694,28 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do assert json_response(conn, 403) end + test "it rejects update activity of object from other actor", %{conn: conn} do + note_activity = insert(:note_activity) + note_object = Object.normalize(note_activity, fetch: false) + user = insert(:user) + + data = %{ + type: "Update", + object: %{ + id: note_object.data["id"] + } + } + + conn = + conn + |> assign(:user, user) + |> put_req_header("content-type", "application/activity+json") + |> post("/users/#{user.nickname}/outbox", data) + + assert json_response(conn, 400) + assert note_object == Object.normalize(note_activity, fetch: false) + end + test "it increases like count when receiving a like action", %{conn: conn} do note_activity = insert(:note_activity) note_object = Object.normalize(note_activity, fetch: false) @@ -2131,6 +2203,30 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do |> post("/api/ap/upload_media", %{"file" => image, "description" => desc}) |> json_response(403) end + + test "they don't work when C2S API is disabled", %{conn: conn} do + clear_config([:activitypub, :client_api_enabled], false) + + user = insert(:user) + + assert conn + |> assign(:user, user) + |> get("/api/ap/whoami") + |> response(403) + + desc = "Description of the image" + + image = %Plug.Upload{ + content_type: "image/jpeg", + path: Path.absname("test/fixtures/image.jpg"), + filename: "an_image.jpg" + } + + assert conn + |> assign(:user, user) + |> post("/api/ap/upload_media", %{"file" => image, "description" => desc}) + |> response(403) + end end test "pinned collection", %{conn: conn} do diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index 72222ae88..dbc3aa532 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -867,6 +867,33 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do end end + describe "fetch activities for followed hashtags" do + test "it should return public activities that reference a given hashtag" do + hashtag = insert(:hashtag, name: "tenshi") + user = insert(:user) + other_user = insert(:user) + + {:ok, normally_visible} = + CommonAPI.post(other_user, %{status: "hello :)", visibility: "public"}) + + {:ok, public} = CommonAPI.post(user, %{status: "maji #tenshi", visibility: "public"}) + {:ok, _unrelated} = CommonAPI.post(user, %{status: "dai #tensh", visibility: "public"}) + {:ok, unlisted} = CommonAPI.post(user, %{status: "maji #tenshi", visibility: "unlisted"}) + {:ok, _private} = CommonAPI.post(user, %{status: "maji #tenshi", visibility: "private"}) + + activities = + ActivityPub.fetch_activities([other_user.follower_address], %{ + followed_hashtags: [hashtag.id] + }) + + assert length(activities) == 3 + normal_id = normally_visible.id + public_id = public.id + unlisted_id = unlisted.id + assert [%{id: ^normal_id}, %{id: ^public_id}, %{id: ^unlisted_id}] = activities + end + end + describe "fetch activities in context" do test "retrieves activities that have a given context" do {:ok, activity} = ActivityBuilder.insert(%{"type" => "Create", "context" => "2hu"}) @@ -1758,8 +1785,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do user = insert(:user, local: false, - follower_address: "http://localhost:4001/users/fuser2/followers", - following_address: "http://localhost:4001/users/fuser2/following" + follower_address: "https://remote.org/users/fuser2/followers", + following_address: "https://remote.org/users/fuser2/following" ) {:ok, info} = ActivityPub.fetch_follow_information_for_user(user) @@ -1770,7 +1797,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do test "detects hidden followers" do mock(fn env -> case env.url do - "http://localhost:4001/users/masto_closed/followers?page=1" -> + "https://remote.org/users/masto_closed/followers?page=1" -> %Tesla.Env{status: 403, body: ""} _ -> @@ -1781,8 +1808,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do user = insert(:user, local: false, - follower_address: "http://localhost:4001/users/masto_closed/followers", - following_address: "http://localhost:4001/users/masto_closed/following" + follower_address: "https://remote.org/users/masto_closed/followers", + following_address: "https://remote.org/users/masto_closed/following" ) {:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user) @@ -1793,7 +1820,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do test "detects hidden follows" do mock(fn env -> case env.url do - "http://localhost:4001/users/masto_closed/following?page=1" -> + "https://remote.org/users/masto_closed/following?page=1" -> %Tesla.Env{status: 403, body: ""} _ -> @@ -1804,8 +1831,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do user = insert(:user, local: false, - follower_address: "http://localhost:4001/users/masto_closed/followers", - following_address: "http://localhost:4001/users/masto_closed/following" + follower_address: "https://remote.org/users/masto_closed/followers", + following_address: "https://remote.org/users/masto_closed/following" ) {:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user) @@ -1817,8 +1844,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do user = insert(:user, local: false, - follower_address: "http://localhost:8080/followers/fuser3", - following_address: "http://localhost:8080/following/fuser3" + follower_address: "https://remote.org/followers/fuser3", + following_address: "https://remote.org/following/fuser3" ) {:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user) @@ -1831,28 +1858,28 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do test "doesn't crash when follower and following counters are hidden" do mock(fn env -> case env.url do - "http://localhost:4001/users/masto_hidden_counters/following" -> + "https://remote.org/users/masto_hidden_counters/following" -> json( %{ "@context" => "https://www.w3.org/ns/activitystreams", - "id" => "http://localhost:4001/users/masto_hidden_counters/followers" + "id" => "https://remote.org/users/masto_hidden_counters/followers" }, headers: HttpRequestMock.activitypub_object_headers() ) - "http://localhost:4001/users/masto_hidden_counters/following?page=1" -> + "https://remote.org/users/masto_hidden_counters/following?page=1" -> %Tesla.Env{status: 403, body: ""} - "http://localhost:4001/users/masto_hidden_counters/followers" -> + "https://remote.org/users/masto_hidden_counters/followers" -> json( %{ "@context" => "https://www.w3.org/ns/activitystreams", - "id" => "http://localhost:4001/users/masto_hidden_counters/following" + "id" => "https://remote.org/users/masto_hidden_counters/following" }, headers: HttpRequestMock.activitypub_object_headers() ) - "http://localhost:4001/users/masto_hidden_counters/followers?page=1" -> + "https://remote.org/users/masto_hidden_counters/followers?page=1" -> %Tesla.Env{status: 403, body: ""} end end) @@ -1860,8 +1887,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do user = insert(:user, local: false, - follower_address: "http://localhost:4001/users/masto_hidden_counters/followers", - following_address: "http://localhost:4001/users/masto_hidden_counters/following" + follower_address: "https://remote.org/users/masto_hidden_counters/followers", + following_address: "https://remote.org/users/masto_hidden_counters/following" ) {:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user) diff --git a/test/pleroma/web/activity_pub/mrf/fo_direct_reply_test.exs b/test/pleroma/web/activity_pub/mrf/fo_direct_reply_test.exs deleted file mode 100644 index 2d6af3b68..000000000 --- a/test/pleroma/web/activity_pub/mrf/fo_direct_reply_test.exs +++ /dev/null @@ -1,117 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2022 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Web.ActivityPub.MRF.FODirectReplyTest do - use Pleroma.DataCase - import Pleroma.Factory - - require Pleroma.Constants - - alias Pleroma.Object - alias Pleroma.Web.ActivityPub.MRF.FODirectReply - alias Pleroma.Web.CommonAPI - - test "replying to followers-only/private is changed to direct" do - batman = insert(:user, nickname: "batman") - robin = insert(:user, nickname: "robin") - - {:ok, post} = - CommonAPI.post(batman, %{ - status: "Has anyone seen Selina Kyle's latest selfies?", - visibility: "private" - }) - - reply = %{ - "type" => "Create", - "actor" => robin.ap_id, - "to" => [batman.ap_id, robin.follower_address], - "cc" => [], - "object" => %{ - "type" => "Note", - "actor" => robin.ap_id, - "content" => "@batman 🤤 ❤️ 🐈‍⬛", - "to" => [batman.ap_id, robin.follower_address], - "cc" => [], - "inReplyTo" => Object.normalize(post).data["id"] - } - } - - expected_to = [batman.ap_id] - expected_cc = [] - - assert {:ok, filtered} = FODirectReply.filter(reply) - - assert expected_to == filtered["to"] - assert expected_cc == filtered["cc"] - assert expected_to == filtered["object"]["to"] - assert expected_cc == filtered["object"]["cc"] - end - - test "replies to unlisted posts are unmodified" do - batman = insert(:user, nickname: "batman") - robin = insert(:user, nickname: "robin") - - {:ok, post} = - CommonAPI.post(batman, %{ - status: "Has anyone seen Selina Kyle's latest selfies?", - visibility: "unlisted" - }) - - reply = %{ - "type" => "Create", - "actor" => robin.ap_id, - "to" => [batman.ap_id, robin.follower_address], - "cc" => [], - "object" => %{ - "type" => "Note", - "actor" => robin.ap_id, - "content" => "@batman 🤤 ❤️ 🐈<200d>⬛", - "to" => [batman.ap_id, robin.follower_address], - "cc" => [], - "inReplyTo" => Object.normalize(post).data["id"] - } - } - - assert {:ok, filtered} = FODirectReply.filter(reply) - - assert match?(^filtered, reply) - end - - test "replies to public posts are unmodified" do - batman = insert(:user, nickname: "batman") - robin = insert(:user, nickname: "robin") - - {:ok, post} = - CommonAPI.post(batman, %{status: "Has anyone seen Selina Kyle's latest selfies?"}) - - reply = %{ - "type" => "Create", - "actor" => robin.ap_id, - "to" => [batman.ap_id, robin.follower_address], - "cc" => [], - "object" => %{ - "type" => "Note", - "actor" => robin.ap_id, - "content" => "@batman 🤤 ❤️ 🐈<200d>⬛", - "to" => [batman.ap_id, robin.follower_address], - "cc" => [], - "inReplyTo" => Object.normalize(post).data["id"] - } - } - - assert {:ok, filtered} = FODirectReply.filter(reply) - - assert match?(^filtered, reply) - end - - test "non-reply posts are unmodified" do - batman = insert(:user, nickname: "batman") - - {:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"}) - - assert {:ok, filtered} = FODirectReply.filter(post) - - assert match?(^filtered, post) - end -end diff --git a/test/pleroma/web/activity_pub/mrf/quiet_reply_test.exs b/test/pleroma/web/activity_pub/mrf/quiet_reply_test.exs deleted file mode 100644 index 79e64d650..000000000 --- a/test/pleroma/web/activity_pub/mrf/quiet_reply_test.exs +++ /dev/null @@ -1,140 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2022 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Web.ActivityPub.MRF.QuietReplyTest do - use Pleroma.DataCase - import Pleroma.Factory - - require Pleroma.Constants - - alias Pleroma.Object - alias Pleroma.Web.ActivityPub.MRF.QuietReply - alias Pleroma.Web.CommonAPI - - test "replying to public post is forced to be quiet" do - batman = insert(:user, nickname: "batman") - robin = insert(:user, nickname: "robin") - - {:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"}) - - reply = %{ - "type" => "Create", - "actor" => robin.ap_id, - "to" => [ - batman.ap_id, - Pleroma.Constants.as_public() - ], - "cc" => [robin.follower_address], - "object" => %{ - "type" => "Note", - "actor" => robin.ap_id, - "content" => "@batman Wait up, I forgot my spandex!", - "to" => [ - batman.ap_id, - Pleroma.Constants.as_public() - ], - "cc" => [robin.follower_address], - "inReplyTo" => Object.normalize(post).data["id"] - } - } - - expected_to = [batman.ap_id, robin.follower_address] - expected_cc = [Pleroma.Constants.as_public()] - - assert {:ok, filtered} = QuietReply.filter(reply) - - assert expected_to == filtered["to"] - assert expected_cc == filtered["cc"] - assert expected_to == filtered["object"]["to"] - assert expected_cc == filtered["object"]["cc"] - end - - test "replying to unlisted post is unmodified" do - batman = insert(:user, nickname: "batman") - robin = insert(:user, nickname: "robin") - - {:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!", visibility: "private"}) - - reply = %{ - "type" => "Create", - "actor" => robin.ap_id, - "to" => [batman.ap_id], - "cc" => [], - "object" => %{ - "type" => "Note", - "actor" => robin.ap_id, - "content" => "@batman Wait up, I forgot my spandex!", - "to" => [batman.ap_id], - "cc" => [], - "inReplyTo" => Object.normalize(post).data["id"] - } - } - - assert {:ok, filtered} = QuietReply.filter(reply) - - assert match?(^filtered, reply) - end - - test "replying direct is unmodified" do - batman = insert(:user, nickname: "batman") - robin = insert(:user, nickname: "robin") - - {:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"}) - - reply = %{ - "type" => "Create", - "actor" => robin.ap_id, - "to" => [batman.ap_id], - "cc" => [], - "object" => %{ - "type" => "Note", - "actor" => robin.ap_id, - "content" => "@batman Wait up, I forgot my spandex!", - "to" => [batman.ap_id], - "cc" => [], - "inReplyTo" => Object.normalize(post).data["id"] - } - } - - assert {:ok, filtered} = QuietReply.filter(reply) - - assert match?(^filtered, reply) - end - - test "replying followers-only is unmodified" do - batman = insert(:user, nickname: "batman") - robin = insert(:user, nickname: "robin") - - {:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"}) - - reply = %{ - "type" => "Create", - "actor" => robin.ap_id, - "to" => [batman.ap_id, robin.follower_address], - "cc" => [], - "object" => %{ - "type" => "Note", - "actor" => robin.ap_id, - "content" => "@batman Wait up, I forgot my spandex!", - "to" => [batman.ap_id, robin.follower_address], - "cc" => [], - "inReplyTo" => Object.normalize(post).data["id"] - } - } - - assert {:ok, filtered} = QuietReply.filter(reply) - - assert match?(^filtered, reply) - end - - test "non-reply posts are unmodified" do - batman = insert(:user, nickname: "batman") - - {:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"}) - - assert {:ok, filtered} = QuietReply.filter(post) - - assert match?(^filtered, post) - end -end diff --git a/test/pleroma/web/activity_pub/mrf/steal_emoji_policy_test.exs b/test/pleroma/web/activity_pub/mrf/steal_emoji_policy_test.exs index 2c7497da5..61c162bc9 100644 --- a/test/pleroma/web/activity_pub/mrf/steal_emoji_policy_test.exs +++ b/test/pleroma/web/activity_pub/mrf/steal_emoji_policy_test.exs @@ -87,7 +87,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicyTest do assert File.exists?(fullpath) end - test "rejects invalid shortcodes", %{path: path} do + test "rejects invalid shortcodes with slashes", %{path: path} do message = %{ "type" => "Create", "object" => %{ @@ -113,6 +113,58 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicyTest do refute File.exists?(fullpath) end + test "rejects invalid shortcodes with dots", %{path: path} do + message = %{ + "type" => "Create", + "object" => %{ + "emoji" => [{"fired.fox", "https://example.org/emoji/firedfox"}], + "actor" => "https://example.org/users/admin" + } + } + + fullpath = Path.join(path, "fired.fox.png") + + Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/firedfox"} -> + %Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")} + end) + + clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468) + + refute "fired.fox" in installed() + refute File.exists?(path) + + assert {:ok, _message} = StealEmojiPolicy.filter(message) + + refute "fired.fox" in installed() + refute File.exists?(fullpath) + end + + test "rejects invalid shortcodes with special characters", %{path: path} do + message = %{ + "type" => "Create", + "object" => %{ + "emoji" => [{"fired:fox", "https://example.org/emoji/firedfox"}], + "actor" => "https://example.org/users/admin" + } + } + + fullpath = Path.join(path, "fired:fox.png") + + Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/firedfox"} -> + %Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")} + end) + + clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468) + + refute "fired:fox" in installed() + refute File.exists?(path) + + assert {:ok, _message} = StealEmojiPolicy.filter(message) + + refute "fired:fox" in installed() + refute File.exists?(fullpath) + end + test "reject regex shortcode", %{message: message} do refute "firedfox" in installed() @@ -171,5 +223,74 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicyTest do refute "firedfox" in installed() end + test "accepts valid alphanum shortcodes", %{path: path} do + message = %{ + "type" => "Create", + "object" => %{ + "emoji" => [{"fire1fox", "https://example.org/emoji/fire1fox.png"}], + "actor" => "https://example.org/users/admin" + } + } + + Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/fire1fox.png"} -> + %Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")} + end) + + clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468) + + refute "fire1fox" in installed() + refute File.exists?(path) + + assert {:ok, _message} = StealEmojiPolicy.filter(message) + + assert "fire1fox" in installed() + end + + test "accepts valid shortcodes with underscores", %{path: path} do + message = %{ + "type" => "Create", + "object" => %{ + "emoji" => [{"fire_fox", "https://example.org/emoji/fire_fox.png"}], + "actor" => "https://example.org/users/admin" + } + } + + Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/fire_fox.png"} -> + %Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")} + end) + + clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468) + + refute "fire_fox" in installed() + refute File.exists?(path) + + assert {:ok, _message} = StealEmojiPolicy.filter(message) + + assert "fire_fox" in installed() + end + + test "accepts valid shortcodes with hyphens", %{path: path} do + message = %{ + "type" => "Create", + "object" => %{ + "emoji" => [{"fire-fox", "https://example.org/emoji/fire-fox.png"}], + "actor" => "https://example.org/users/admin" + } + } + + Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/fire-fox.png"} -> + %Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")} + end) + + clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468) + + refute "fire-fox" in installed() + refute File.exists?(path) + + assert {:ok, _message} = StealEmojiPolicy.filter(message) + + assert "fire-fox" in installed() + end + defp installed, do: Emoji.get_all() |> Enum.map(fn {k, _} -> k end) end diff --git a/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs b/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs index e1dbb20c3..829598246 100644 --- a/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs +++ b/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs @@ -128,6 +128,17 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidatorTest %{valid?: true} = ArticleNotePageValidator.cast_and_validate(note) end + test "a Note with validated likes collection validates" do + insert(:user, ap_id: "https://pol.social/users/mkljczk") + + %{"object" => note} = + "test/fixtures/mastodon-update-with-likes.json" + |> File.read!() + |> Jason.decode!() + + %{valid?: true} = ArticleNotePageValidator.cast_and_validate(note) + end + test "Fedibird quote post" do insert(:user, ap_id: "https://fedibird.com/users/noellabo") @@ -176,4 +187,71 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidatorTest name: "RE: https://server.example/objects/123" } end + + describe "Note language" do + test "it detects language from JSON-LD context" do + user = insert(:user) + + note_activity = %{ + "@context" => ["https://www.w3.org/ns/activitystreams", %{"@language" => "pl"}], + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "type" => "Create", + "object" => %{ + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "id" => Utils.generate_object_id(), + "type" => "Note", + "content" => "Szczęść Boże", + "attributedTo" => user.ap_id + }, + "actor" => user.ap_id + } + + {:ok, _create_activity, meta} = ObjectValidator.validate(note_activity, []) + + assert meta[:object_data]["language"] == "pl" + end + + test "it detects language from contentMap" do + user = insert(:user) + + note = %{ + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "id" => Utils.generate_object_id(), + "type" => "Note", + "content" => "Szczęść Boże", + "contentMap" => %{ + "de" => "Gott segne", + "pl" => "Szczęść Boże" + }, + "attributedTo" => user.ap_id + } + + {:ok, object} = ArticleNotePageValidator.cast_and_apply(note) + + assert object.language == "pl" + end + + test "it adds contentMap if language is specified" do + user = insert(:user) + + note = %{ + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "id" => Utils.generate_object_id(), + "type" => "Note", + "content" => "тест", + "language" => "uk", + "attributedTo" => user.ap_id + } + + {:ok, object} = ArticleNotePageValidator.cast_and_apply(note) + + assert object.contentMap == %{ + "uk" => "тест" + } + end + end end diff --git a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs index 6627fa6db..744ae8704 100644 --- a/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs +++ b/test/pleroma/web/activity_pub/object_validators/attachment_validator_test.exs @@ -13,6 +13,23 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidatorTest do import Pleroma.Factory describe "attachments" do + test "works with apng" do + attachment = + %{ + "mediaType" => "image/apng", + "name" => "", + "type" => "Document", + "url" => + "https://media.misskeyusercontent.com/io/2859c26e-cd43-4550-848b-b6243bc3fe28.apng" + } + + assert {:ok, attachment} = + AttachmentValidator.cast_and_validate(attachment) + |> Ecto.Changeset.apply_action(:insert) + + assert attachment.mediaType == "image/apng" + end + test "fails without url" do attachment = %{ "mediaType" => "", diff --git a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs index ed71dcb90..fd7a3c772 100644 --- a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs @@ -219,6 +219,36 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier.NoteHandlingTest do "

@lain

" end + test "it only uses contentMap if content is not present" do + user = insert(:user) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "type" => "Create", + "object" => %{ + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "id" => Utils.generate_object_id(), + "type" => "Note", + "content" => "Hi", + "contentMap" => %{ + "de" => "Hallo", + "uk" => "Привіт" + }, + "inReplyTo" => nil, + "attributedTo" => user.ap_id + }, + "actor" => user.ap_id + } + + {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(message) + object = Object.normalize(data["object"], fetch: false) + + assert object.data["content"] == "Hi" + end + test "it works for incoming notices with a nil contentMap (firefish)" do data = File.read!("test/fixtures/mastodon-post-activity-contentmap.json") diff --git a/test/pleroma/web/activity_pub/transmogrifier_test.exs b/test/pleroma/web/activity_pub/transmogrifier_test.exs index 6da7e4a89..e0395d7bb 100644 --- a/test/pleroma/web/activity_pub/transmogrifier_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier_test.exs @@ -156,6 +156,246 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do # It fetched the quoted post assert Object.normalize("https://misskey.io/notes/8vs6wxufd0") end + + test "doesn't allow remote edits to fake local likes" do + # as a spot check for no internal fields getting injected + now = DateTime.utc_now() + pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3))) + edit_date = DateTime.to_iso8601(now) + + local_user = insert(:user) + + create_data = %{ + "type" => "Create", + "id" => "http://mastodon.example.org/users/admin/statuses/2619539638/activity", + "actor" => "http://mastodon.example.org/users/admin", + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "object" => %{ + "type" => "Note", + "id" => "http://mastodon.example.org/users/admin/statuses/2619539638", + "attributedTo" => "http://mastodon.example.org/users/admin", + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "published" => pub_date, + "content" => "miaow", + "likes" => [local_user.ap_id] + } + } + + update_data = + create_data + |> Map.put("type", "Update") + |> Map.put("id", create_data["object"]["id"] <> "/update/1") + |> put_in(["object", "content"], "miaow :3") + |> put_in(["object", "updated"], edit_date) + |> put_in(["object", "formerRepresentations"], %{ + "type" => "OrderedCollection", + "totalItems" => 1, + "orderedItems" => [create_data["object"]] + }) + + {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data) + %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]) + assert object.data["content"] == "miaow" + assert object.data["likes"] == [] + assert object.data["like_count"] == 0 + + {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data) + %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"]) + assert object.data["content"] == "miaow :3" + assert object.data["likes"] == [] + assert object.data["like_count"] == 0 + end + + test "strips internal fields from history items in edited notes" do + now = DateTime.utc_now() + pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3))) + edit_date = DateTime.to_iso8601(now) + + local_user = insert(:user) + + create_data = %{ + "type" => "Create", + "id" => "http://mastodon.example.org/users/admin/statuses/2619539638/activity", + "actor" => "http://mastodon.example.org/users/admin", + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "object" => %{ + "type" => "Note", + "id" => "http://mastodon.example.org/users/admin/statuses/2619539638", + "attributedTo" => "http://mastodon.example.org/users/admin", + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "published" => pub_date, + "content" => "miaow", + "likes" => [], + "like_count" => 0 + } + } + + update_data = + create_data + |> Map.put("type", "Update") + |> Map.put("id", create_data["object"]["id"] <> "/update/1") + |> put_in(["object", "content"], "miaow :3") + |> put_in(["object", "updated"], edit_date) + |> put_in(["object", "formerRepresentations"], %{ + "type" => "OrderedCollection", + "totalItems" => 1, + "orderedItems" => [ + Map.merge(create_data["object"], %{ + "likes" => [local_user.ap_id], + "like_count" => 1, + "pleroma" => %{"internal_field" => "should_be_stripped"} + }) + ] + }) + + {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data) + %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]) + assert object.data["content"] == "miaow" + assert object.data["likes"] == [] + assert object.data["like_count"] == 0 + + {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data) + %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"]) + assert object.data["content"] == "miaow :3" + assert object.data["likes"] == [] + assert object.data["like_count"] == 0 + + # Check that internal fields are stripped from history items + history_item = List.first(object.data["formerRepresentations"]["orderedItems"]) + assert history_item["likes"] == [] + assert history_item["like_count"] == 0 + refute Map.has_key?(history_item, "pleroma") + end + + test "doesn't trip over remote likes in notes" do + now = DateTime.utc_now() + pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3))) + edit_date = DateTime.to_iso8601(now) + + create_data = %{ + "type" => "Create", + "id" => "http://mastodon.example.org/users/admin/statuses/3409297097/activity", + "actor" => "http://mastodon.example.org/users/admin", + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "object" => %{ + "type" => "Note", + "id" => "http://mastodon.example.org/users/admin/statuses/3409297097", + "attributedTo" => "http://mastodon.example.org/users/admin", + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "published" => pub_date, + "content" => "miaow", + "likes" => %{ + "id" => "http://mastodon.example.org/users/admin/statuses/3409297097/likes", + "totalItems" => 0, + "type" => "Collection" + } + } + } + + update_data = + create_data + |> Map.put("type", "Update") + |> Map.put("id", create_data["object"]["id"] <> "/update/1") + |> put_in(["object", "content"], "miaow :3") + |> put_in(["object", "updated"], edit_date) + |> put_in(["object", "likes", "totalItems"], 666) + |> put_in(["object", "formerRepresentations"], %{ + "type" => "OrderedCollection", + "totalItems" => 1, + "orderedItems" => [create_data["object"]] + }) + + {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data) + %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]) + assert object.data["content"] == "miaow" + assert object.data["likes"] == [] + assert object.data["like_count"] == 0 + + {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data) + %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"]) + assert object.data["content"] == "miaow :3" + assert object.data["likes"] == [] + # in the future this should retain remote likes, but for now: + assert object.data["like_count"] == 0 + end + + test "doesn't trip over remote likes in polls" do + now = DateTime.utc_now() + pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3))) + edit_date = DateTime.to_iso8601(now) + + create_data = %{ + "type" => "Create", + "id" => "http://mastodon.example.org/users/admin/statuses/2471790073/activity", + "actor" => "http://mastodon.example.org/users/admin", + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "object" => %{ + "type" => "Question", + "id" => "http://mastodon.example.org/users/admin/statuses/2471790073", + "attributedTo" => "http://mastodon.example.org/users/admin", + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "published" => pub_date, + "content" => "vote!", + "anyOf" => [ + %{ + "type" => "Note", + "name" => "a", + "replies" => %{ + "type" => "Collection", + "totalItems" => 3 + } + }, + %{ + "type" => "Note", + "name" => "b", + "replies" => %{ + "type" => "Collection", + "totalItems" => 1 + } + } + ], + "likes" => %{ + "id" => "http://mastodon.example.org/users/admin/statuses/2471790073/likes", + "totalItems" => 0, + "type" => "Collection" + } + } + } + + update_data = + create_data + |> Map.put("type", "Update") + |> Map.put("id", create_data["object"]["id"] <> "/update/1") + |> put_in(["object", "content"], "vote now!") + |> put_in(["object", "updated"], edit_date) + |> put_in(["object", "likes", "totalItems"], 666) + |> put_in(["object", "formerRepresentations"], %{ + "type" => "OrderedCollection", + "totalItems" => 1, + "orderedItems" => [create_data["object"]] + }) + + {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data) + %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]) + assert object.data["content"] == "vote!" + assert object.data["likes"] == [] + assert object.data["like_count"] == 0 + + {:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data) + %Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"]) + assert object.data["content"] == "vote now!" + assert object.data["likes"] == [] + # in the future this should retain remote likes, but for now: + assert object.data["like_count"] == 0 + end end describe "prepare outgoing" do @@ -384,6 +624,24 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do assert modified["object"]["quoteUrl"] == quote_id assert modified["object"]["quoteUri"] == quote_id end + + test "it adds language of the object to its json-ld context" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{status: "Cześć", language: "pl"}) + {:ok, modified} = Transmogrifier.prepare_outgoing(activity.object.data) + + assert [_, _, %{"@language" => "pl"}] = modified["@context"] + end + + test "it adds language of the object to Create activity json-ld context" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{status: "Cześć", language: "pl"}) + {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data) + + assert [_, _, %{"@language" => "pl"}] = modified["@context"] + end end describe "actor rewriting" do @@ -621,5 +879,14 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do processed = Transmogrifier.prepare_object(original) assert processed["formerRepresentations"] == original["formerRepresentations"] end + + test "it uses contentMap to specify post language" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{status: "Cześć", language: "pl"}) + object = Transmogrifier.prepare_object(activity.object.data) + + assert %{"contentMap" => %{"pl" => "Cześć"}} = object + end end end diff --git a/test/pleroma/web/activity_pub/utils_test.exs b/test/pleroma/web/activity_pub/utils_test.exs index 872a440cb..45fef154e 100644 --- a/test/pleroma/web/activity_pub/utils_test.exs +++ b/test/pleroma/web/activity_pub/utils_test.exs @@ -173,16 +173,30 @@ defmodule Pleroma.Web.ActivityPub.UtilsTest do end end - test "make_json_ld_header/0" do - assert Utils.make_json_ld_header() == %{ - "@context" => [ - "https://www.w3.org/ns/activitystreams", - "http://localhost:4001/schemas/litepub-0.1.jsonld", - %{ - "@language" => "und" - } - ] - } + describe "make_json_ld_header/1" do + test "makes jsonld header" do + assert Utils.make_json_ld_header() == %{ + "@context" => [ + "https://www.w3.org/ns/activitystreams", + "http://localhost:4001/schemas/litepub-0.1.jsonld", + %{ + "@language" => "und" + } + ] + } + end + + test "includes language if specified" do + assert Utils.make_json_ld_header(%{"language" => "pl"}) == %{ + "@context" => [ + "https://www.w3.org/ns/activitystreams", + "http://localhost:4001/schemas/litepub-0.1.jsonld", + %{ + "@language" => "pl" + } + ] + } + end end describe "get_existing_votes" do diff --git a/test/pleroma/web/admin_api/controllers/config_controller_test.exs b/test/pleroma/web/admin_api/controllers/config_controller_test.exs index dc12155f5..e12115ea1 100644 --- a/test/pleroma/web/admin_api/controllers/config_controller_test.exs +++ b/test/pleroma/web/admin_api/controllers/config_controller_test.exs @@ -1211,8 +1211,6 @@ defmodule Pleroma.Web.AdminAPI.ConfigControllerTest do end test "args for Pleroma.Upload.Filter.Mogrify with custom tuples", %{conn: conn} do - clear_config(Pleroma.Upload.Filter.Mogrify) - assert conn |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/config", %{ @@ -1240,7 +1238,8 @@ defmodule Pleroma.Web.AdminAPI.ConfigControllerTest do "need_reboot" => false } - assert Config.get(Pleroma.Upload.Filter.Mogrify) == [args: ["auto-orient", "strip"]] + config = Config.get(Pleroma.Upload.Filter.Mogrify) + assert {:args, ["auto-orient", "strip"]} in config assert conn |> put_req_header("content-type", "application/json") @@ -1289,9 +1288,9 @@ defmodule Pleroma.Web.AdminAPI.ConfigControllerTest do "need_reboot" => false } - assert Config.get(Pleroma.Upload.Filter.Mogrify) == [ - args: ["auto-orient", "strip", {"implode", "1"}, {"resize", "3840x1080>"}] - ] + config = Config.get(Pleroma.Upload.Filter.Mogrify) + + assert {:args, ["auto-orient", "strip", {"implode", "1"}, {"resize", "3840x1080>"}]} in config end test "enables the welcome messages", %{conn: conn} do diff --git a/test/pleroma/web/admin_api/controllers/user_controller_test.exs b/test/pleroma/web/admin_api/controllers/user_controller_test.exs index c8495c477..0e5650285 100644 --- a/test/pleroma/web/admin_api/controllers/user_controller_test.exs +++ b/test/pleroma/web/admin_api/controllers/user_controller_test.exs @@ -20,7 +20,7 @@ defmodule Pleroma.Web.AdminAPI.UserControllerTest do alias Pleroma.Web.MediaProxy setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/pleroma/web/mastodon_api/controllers/account_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/account_controller_test.exs index 54f6818bd..cd3107f32 100644 --- a/test/pleroma/web/mastodon_api/controllers/account_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/account_controller_test.exs @@ -19,7 +19,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountControllerTest do import Pleroma.Factory setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs index 3f696d94d..ae86078d7 100644 --- a/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs @@ -227,4 +227,93 @@ defmodule Pleroma.Web.MastodonAPI.MediaControllerTest do |> json_response_and_validate_schema(403) end end + + describe "Content-Type sanitization" do + setup do: oauth_access(["write:media", "read:media"]) + + setup do + ConfigMock + |> stub_with(Pleroma.Test.StaticConfig) + + config = + Pleroma.Config.get([Pleroma.Upload]) + |> Keyword.put(:uploader, Pleroma.Uploaders.Local) + + clear_config([Pleroma.Upload], config) + clear_config([Pleroma.Upload, :allowed_mime_types], ["image", "audio", "video"]) + + # Create a file with a malicious content type and dangerous extension + malicious_file = %Plug.Upload{ + content_type: "application/activity+json", + path: Path.absname("test/fixtures/image.jpg"), + # JSON extension to make MIME.from_path detect application/json + filename: "malicious.json" + } + + [malicious_file: malicious_file] + end + + test "sanitizes malicious content types when serving media", %{ + conn: conn, + malicious_file: malicious_file + } do + # First upload the file with the malicious content type + media = + conn + |> put_req_header("content-type", "multipart/form-data") + |> post("/api/v1/media", %{"file" => malicious_file}) + |> json_response_and_validate_schema(:ok) + + # Get the file URL from the response + url = media["url"] + + # Now make a direct request to the media URL and check the content-type header + response = + build_conn() + |> get(URI.parse(url).path) + + # Find the content-type header + content_type_header = + Enum.find(response.resp_headers, fn {name, _} -> name == "content-type" end) + + # The server should detect the application/json MIME type from the .json extension + # and replace it with application/octet-stream since it's not in allowed_mime_types + assert content_type_header == {"content-type", "application/octet-stream"} + + # Verify that the file was still served correctly + assert response.status == 200 + end + + test "allows safe content types", %{conn: conn} do + safe_image = %Plug.Upload{ + content_type: "image/jpeg", + path: Path.absname("test/fixtures/image.jpg"), + filename: "safe_image.jpg" + } + + # Upload a file with a safe content type + media = + conn + |> put_req_header("content-type", "multipart/form-data") + |> post("/api/v1/media", %{"file" => safe_image}) + |> json_response_and_validate_schema(:ok) + + # Get the file URL from the response + url = media["url"] + + # Make a direct request to the media URL and check the content-type header + response = + build_conn() + |> get(URI.parse(url).path) + + # The server should preserve the image/jpeg MIME type since it's allowed + content_type_header = + Enum.find(response.resp_headers, fn {name, _} -> name == "content-type" end) + + assert content_type_header == {"content-type", "image/jpeg"} + + # Verify that the file was served correctly + assert response.status == 200 + end + end end diff --git a/test/pleroma/web/mastodon_api/controllers/notification_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/notification_controller_test.exs index 8fc22dde1..88f2fb7af 100644 --- a/test/pleroma/web/mastodon_api/controllers/notification_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/notification_controller_test.exs @@ -13,7 +13,7 @@ defmodule Pleroma.Web.MastodonAPI.NotificationControllerTest do import Pleroma.Factory setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/pleroma/web/mastodon_api/controllers/search_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/search_controller_test.exs index d38767c96..d8263dfad 100644 --- a/test/pleroma/web/mastodon_api/controllers/search_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/search_controller_test.exs @@ -14,7 +14,7 @@ defmodule Pleroma.Web.MastodonAPI.SearchControllerTest do import Mock setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/pleroma/web/mastodon_api/controllers/tag_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/tag_controller_test.exs new file mode 100644 index 000000000..71c8e7fc0 --- /dev/null +++ b/test/pleroma/web/mastodon_api/controllers/tag_controller_test.exs @@ -0,0 +1,159 @@ +defmodule Pleroma.Web.MastodonAPI.TagControllerTest do + use Pleroma.Web.ConnCase + + import Pleroma.Factory + import Tesla.Mock + + alias Pleroma.User + + setup do + mock(fn env -> apply(HttpRequestMock, :request, [env]) end) + :ok + end + + describe "GET /api/v1/tags/:id" do + test "returns 200 with tag" do + %{user: user, conn: conn} = oauth_access(["read"]) + + tag = insert(:hashtag, name: "jubjub") + {:ok, _user} = User.follow_hashtag(user, tag) + + response = + conn + |> get("/api/v1/tags/jubjub") + |> json_response_and_validate_schema(200) + + assert %{ + "name" => "jubjub", + "url" => "http://localhost:4001/tags/jubjub", + "history" => [], + "following" => true + } = response + end + + test "returns 404 with unknown tag" do + %{conn: conn} = oauth_access(["read"]) + + conn + |> get("/api/v1/tags/jubjub") + |> json_response_and_validate_schema(404) + end + end + + describe "POST /api/v1/tags/:id/follow" do + test "should follow a hashtag" do + %{user: user, conn: conn} = oauth_access(["write:follows"]) + hashtag = insert(:hashtag, name: "jubjub") + + response = + conn + |> post("/api/v1/tags/jubjub/follow") + |> json_response_and_validate_schema(200) + + assert response["following"] == true + user = User.get_cached_by_ap_id(user.ap_id) + assert User.following_hashtag?(user, hashtag) + end + + test "should 404 if hashtag doesn't exist" do + %{conn: conn} = oauth_access(["write:follows"]) + + response = + conn + |> post("/api/v1/tags/rubrub/follow") + |> json_response_and_validate_schema(404) + + assert response["error"] == "Hashtag not found" + end + end + + describe "POST /api/v1/tags/:id/unfollow" do + test "should unfollow a hashtag" do + %{user: user, conn: conn} = oauth_access(["write:follows"]) + hashtag = insert(:hashtag, name: "jubjub") + {:ok, user} = User.follow_hashtag(user, hashtag) + + response = + conn + |> post("/api/v1/tags/jubjub/unfollow") + |> json_response_and_validate_schema(200) + + assert response["following"] == false + user = User.get_cached_by_ap_id(user.ap_id) + refute User.following_hashtag?(user, hashtag) + end + + test "should 404 if hashtag doesn't exist" do + %{conn: conn} = oauth_access(["write:follows"]) + + response = + conn + |> post("/api/v1/tags/rubrub/unfollow") + |> json_response_and_validate_schema(404) + + assert response["error"] == "Hashtag not found" + end + end + + describe "GET /api/v1/followed_tags" do + test "should list followed tags" do + %{user: user, conn: conn} = oauth_access(["read:follows"]) + + response = + conn + |> get("/api/v1/followed_tags") + |> json_response_and_validate_schema(200) + + assert Enum.empty?(response) + + hashtag = insert(:hashtag, name: "jubjub") + {:ok, _user} = User.follow_hashtag(user, hashtag) + + response = + conn + |> get("/api/v1/followed_tags") + |> json_response_and_validate_schema(200) + + assert [%{"name" => "jubjub"}] = response + end + + test "should include a link header to paginate" do + %{user: user, conn: conn} = oauth_access(["read:follows"]) + + for i <- 1..21 do + hashtag = insert(:hashtag, name: "jubjub#{i}}") + {:ok, _user} = User.follow_hashtag(user, hashtag) + end + + response = + conn + |> get("/api/v1/followed_tags") + + json = json_response_and_validate_schema(response, 200) + assert Enum.count(json) == 20 + assert [link_header] = get_resp_header(response, "link") + assert link_header =~ "rel=\"next\"" + next_link = extract_next_link_header(link_header) + + response = + conn + |> get(next_link) + |> json_response_and_validate_schema(200) + + assert Enum.count(response) == 1 + end + + test "should refuse access without read:follows scope" do + %{conn: conn} = oauth_access(["write"]) + + conn + |> get("/api/v1/followed_tags") + |> json_response_and_validate_schema(403) + end + end + + defp extract_next_link_header(header) do + [_, next_link] = Regex.run(~r{<(?.*)>; rel="next"}, header) + next_link + end +end diff --git a/test/pleroma/web/mastodon_api/views/notification_view_test.exs b/test/pleroma/web/mastodon_api/views/notification_view_test.exs index b1f3523ac..ce5ddd0fc 100644 --- a/test/pleroma/web/mastodon_api/views/notification_view_test.exs +++ b/test/pleroma/web/mastodon_api/views/notification_view_test.exs @@ -23,7 +23,7 @@ defmodule Pleroma.Web.MastodonAPI.NotificationViewTest do import Pleroma.Factory setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/pleroma/web/mastodon_api/views/status_view_test.exs b/test/pleroma/web/mastodon_api/views/status_view_test.exs index bc6dec32a..e6a164d72 100644 --- a/test/pleroma/web/mastodon_api/views/status_view_test.exs +++ b/test/pleroma/web/mastodon_api/views/status_view_test.exs @@ -951,6 +951,26 @@ defmodule Pleroma.Web.MastodonAPI.StatusViewTest do assert status.edited_at end + test "it shows post language" do + user = insert(:user) + + {:ok, post} = CommonAPI.post(user, %{status: "Szczęść Boże", language: "pl"}) + + status = StatusView.render("show.json", activity: post) + + assert status.language == "pl" + end + + test "doesn't show post language if it's 'und'" do + user = insert(:user) + + {:ok, post} = CommonAPI.post(user, %{status: "sdifjogijodfg", language: "und"}) + + status = StatusView.render("show.json", activity: post) + + assert status.language == nil + end + test "with a source object" do note = insert(:note, diff --git a/test/pleroma/web/media_proxy/media_proxy_controller_test.exs b/test/pleroma/web/media_proxy/media_proxy_controller_test.exs index f0c1dd640..f7e52483c 100644 --- a/test/pleroma/web/media_proxy/media_proxy_controller_test.exs +++ b/test/pleroma/web/media_proxy/media_proxy_controller_test.exs @@ -248,8 +248,8 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do response = get(conn, url) - assert response.status == 302 - assert redirected_to(response) == media_proxy_url + assert response.status == 301 + assert redirected_to(response, 301) == media_proxy_url end test "with `static` param and non-GIF image preview requested, " <> @@ -290,8 +290,8 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do response = get(conn, url) - assert response.status == 302 - assert redirected_to(response) == media_proxy_url + assert response.status == 301 + assert redirected_to(response, 301) == media_proxy_url end test "thumbnails PNG images into PNG", %{ @@ -356,5 +356,32 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do assert response.status == 302 assert redirected_to(response) == media_proxy_url end + + test "redirects to media proxy URI with 301 when image is too small for preview", %{ + conn: conn, + url: url, + media_proxy_url: media_proxy_url + } do + clear_config([:media_preview_proxy], + enabled: true, + min_content_length: 1000, + image_quality: 85, + thumbnail_max_width: 100, + thumbnail_max_height: 100 + ) + + Tesla.Mock.mock(fn + %{method: :head, url: ^media_proxy_url} -> + %Tesla.Env{ + status: 200, + body: "", + headers: [{"content-type", "image/png"}, {"content-length", "500"}] + } + end) + + response = get(conn, url) + assert response.status == 301 + assert redirected_to(response, 301) == media_proxy_url + end end end diff --git a/test/pleroma/web/metadata/providers/open_graph_test.exs b/test/pleroma/web/metadata/providers/open_graph_test.exs index 6a0fc9b10..29cc036ba 100644 --- a/test/pleroma/web/metadata/providers/open_graph_test.exs +++ b/test/pleroma/web/metadata/providers/open_graph_test.exs @@ -9,6 +9,7 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraphTest do alias Pleroma.UnstubbedConfigMock, as: ConfigMock alias Pleroma.Web.Metadata.Providers.OpenGraph + alias Pleroma.Web.Metadata.Utils setup do ConfigMock @@ -197,4 +198,58 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraphTest do "http://localhost:4001/proxy/preview/LzAnlke-l5oZbNzWsrHfprX1rGw/aHR0cHM6Ly9wbGVyb21hLmdvdi9hYm91dC9qdWNoZS53ZWJt/juche.webm" ], []} in result end + + test "meta tag ordering matches attachment order" do + user = insert(:user, name: "Jimmy Hendriks", bio: "born 19 March 1994") + + note = + insert(:note, %{ + data: %{ + "actor" => user.ap_id, + "tag" => [], + "id" => "https://pleroma.gov/objects/whatever", + "summary" => "", + "content" => "pleroma in a nutshell", + "attachment" => [ + %{ + "url" => [ + %{ + "mediaType" => "image/png", + "href" => "https://example.com/first.png", + "height" => 1024, + "width" => 1280 + } + ] + }, + %{ + "url" => [ + %{ + "mediaType" => "image/png", + "href" => "https://example.com/second.png", + "height" => 1024, + "width" => 1280 + } + ] + } + ] + } + }) + + result = OpenGraph.build_tags(%{object: note, url: note.data["id"], user: user}) + + assert [ + {:meta, [property: "og:title", content: Utils.user_name_string(user)], []}, + {:meta, [property: "og:url", content: "https://pleroma.gov/objects/whatever"], []}, + {:meta, [property: "og:description", content: "pleroma in a nutshell"], []}, + {:meta, [property: "og:type", content: "article"], []}, + {:meta, [property: "og:image", content: "https://example.com/first.png"], []}, + {:meta, [property: "og:image:alt", content: nil], []}, + {:meta, [property: "og:image:width", content: "1280"], []}, + {:meta, [property: "og:image:height", content: "1024"], []}, + {:meta, [property: "og:image", content: "https://example.com/second.png"], []}, + {:meta, [property: "og:image:alt", content: nil], []}, + {:meta, [property: "og:image:width", content: "1280"], []}, + {:meta, [property: "og:image:height", content: "1024"], []} + ] == result + end end diff --git a/test/pleroma/web/metadata/providers/twitter_card_test.exs b/test/pleroma/web/metadata/providers/twitter_card_test.exs index f8d01c5c8..f9e917719 100644 --- a/test/pleroma/web/metadata/providers/twitter_card_test.exs +++ b/test/pleroma/web/metadata/providers/twitter_card_test.exs @@ -202,4 +202,58 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCardTest do {:meta, [name: "twitter:player:stream:content_type", content: "video/webm"], []} ] == result end + + test "meta tag ordering matches attachment order" do + user = insert(:user, name: "Jimmy Hendriks", bio: "born 19 March 1994") + + note = + insert(:note, %{ + data: %{ + "actor" => user.ap_id, + "tag" => [], + "id" => "https://pleroma.gov/objects/whatever", + "summary" => "", + "content" => "pleroma in a nutshell", + "attachment" => [ + %{ + "url" => [ + %{ + "mediaType" => "image/png", + "href" => "https://example.com/first.png", + "height" => 1024, + "width" => 1280 + } + ] + }, + %{ + "url" => [ + %{ + "mediaType" => "image/png", + "href" => "https://example.com/second.png", + "height" => 1024, + "width" => 1280 + } + ] + } + ] + } + }) + + result = TwitterCard.build_tags(%{object: note, activity_id: note.data["id"], user: user}) + + assert [ + {:meta, [name: "twitter:title", content: Utils.user_name_string(user)], []}, + {:meta, [name: "twitter:description", content: "pleroma in a nutshell"], []}, + {:meta, [name: "twitter:card", content: "summary_large_image"], []}, + {:meta, [name: "twitter:image", content: "https://example.com/first.png"], []}, + {:meta, [name: "twitter:image:alt", content: ""], []}, + {:meta, [name: "twitter:player:width", content: "1280"], []}, + {:meta, [name: "twitter:player:height", content: "1024"], []}, + {:meta, [name: "twitter:card", content: "summary_large_image"], []}, + {:meta, [name: "twitter:image", content: "https://example.com/second.png"], []}, + {:meta, [name: "twitter:image:alt", content: ""], []}, + {:meta, [name: "twitter:player:width", content: "1280"], []}, + {:meta, [name: "twitter:player:height", content: "1024"], []} + ] == result + end end diff --git a/test/pleroma/web/o_auth/app_test.exs b/test/pleroma/web/o_auth/app_test.exs index 44219cf90..a69ba371e 100644 --- a/test/pleroma/web/o_auth/app_test.exs +++ b/test/pleroma/web/o_auth/app_test.exs @@ -58,16 +58,28 @@ defmodule Pleroma.Web.OAuth.AppTest do attrs = %{client_name: "Mastodon-Local", redirect_uris: "."} {:ok, %App{} = old_app} = App.get_or_make(attrs, ["write"]) + # backdate the old app so it's within the threshold for being cleaned up + one_hour_ago = DateTime.add(DateTime.utc_now(), -3600) + + {:ok, _} = + "UPDATE apps SET inserted_at = $1, updated_at = $1 WHERE id = $2" + |> Pleroma.Repo.query([one_hour_ago, old_app.id]) + + # Create the new app after backdating the old one attrs = %{client_name: "PleromaFE", redirect_uris: "."} {:ok, %App{} = app} = App.get_or_make(attrs, ["write"]) - # backdate the old app so it's within the threshold for being cleaned up + # Ensure the new app has a recent timestamp + now = DateTime.utc_now() + {:ok, _} = - "UPDATE apps SET inserted_at = now() - interval '1 hour' WHERE id = #{old_app.id}" - |> Pleroma.Repo.query() + "UPDATE apps SET inserted_at = $1, updated_at = $1 WHERE id = $2" + |> Pleroma.Repo.query([now, app.id]) App.remove_orphans() - assert [app] == Pleroma.Repo.all(App) + assert [returned_app] = Pleroma.Repo.all(App) + assert returned_app.client_name == "PleromaFE" + assert returned_app.id == app.id end end diff --git a/test/pleroma/web/pleroma_api/controllers/emoji_reaction_controller_test.exs b/test/pleroma/web/pleroma_api/controllers/emoji_reaction_controller_test.exs index 8c2dcc1bb..c1e452a1e 100644 --- a/test/pleroma/web/pleroma_api/controllers/emoji_reaction_controller_test.exs +++ b/test/pleroma/web/pleroma_api/controllers/emoji_reaction_controller_test.exs @@ -14,7 +14,7 @@ defmodule Pleroma.Web.PleromaAPI.EmojiReactionControllerTest do import Pleroma.Factory setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/pleroma/web/plugs/instance_static_test.exs b/test/pleroma/web/plugs/instance_static_test.exs index f91021a16..33b74dcf0 100644 --- a/test/pleroma/web/plugs/instance_static_test.exs +++ b/test/pleroma/web/plugs/instance_static_test.exs @@ -62,4 +62,79 @@ defmodule Pleroma.Web.Plugs.InstanceStaticTest do index = get(build_conn(), "/static/kaniini.html") assert html_response(index, 200) == "

rabbit hugs as a service

" end + + test "does not sanitize dangerous files in general, as there can be html and javascript files legitimately in this folder" do + # Create a file with a potentially dangerous extension (.json) + # This mimics an attacker trying to serve ActivityPub JSON with a static file + File.mkdir!(@dir <> "/static") + File.write!(@dir <> "/static/malicious.json", "{\"type\": \"ActivityPub\"}") + + conn = get(build_conn(), "/static/malicious.json") + + assert conn.status == 200 + + content_type = + Enum.find_value(conn.resp_headers, fn + {"content-type", value} -> value + _ -> nil + end) + + assert content_type == "application/json" + + File.write!(@dir <> "/static/safe.jpg", "fake image data") + + conn = get(build_conn(), "/static/safe.jpg") + + assert conn.status == 200 + + # Get the content-type + content_type = + Enum.find_value(conn.resp_headers, fn + {"content-type", value} -> value + _ -> nil + end) + + assert content_type == "image/jpeg" + end + + test "always sanitizes emojis to images" do + File.mkdir!(@dir <> "/emoji") + File.write!(@dir <> "/emoji/malicious.html", "") + + # Request the malicious file + conn = get(build_conn(), "/emoji/malicious.html") + + # Verify the file was served (status 200) + assert conn.status == 200 + + # The content should be served, but with a sanitized content-type + content_type = + Enum.find_value(conn.resp_headers, fn + {"content-type", value} -> value + _ -> nil + end) + + # It should have been sanitized to application/octet-stream because "application" + # is not in the allowed_mime_types list + assert content_type == "application/octet-stream" + + # Create a file with an allowed extension (.jpg) + File.write!(@dir <> "/emoji/safe.jpg", "fake image data") + + # Request the safe file + conn = get(build_conn(), "/emoji/safe.jpg") + + # Verify the file was served (status 200) + assert conn.status == 200 + + # Get the content-type + content_type = + Enum.find_value(conn.resp_headers, fn + {"content-type", value} -> value + _ -> nil + end) + + # It should be preserved because "image" is in the allowed_mime_types list + assert content_type == "image/jpeg" + end end diff --git a/test/pleroma/web/plugs/uploaded_media_test.exs b/test/pleroma/web/plugs/uploaded_media_test.exs new file mode 100644 index 000000000..69affa019 --- /dev/null +++ b/test/pleroma/web/plugs/uploaded_media_test.exs @@ -0,0 +1,53 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.Plugs.UploadedMediaTest do + use ExUnit.Case, async: true + + alias Pleroma.Web.Plugs.Utils + + describe "content-type sanitization with Utils.get_safe_mime_type/2" do + test "it allows safe MIME types" do + opts = %{allowed_mime_types: ["image", "audio", "video"]} + + assert Utils.get_safe_mime_type(opts, "image/jpeg") == "image/jpeg" + assert Utils.get_safe_mime_type(opts, "audio/mpeg") == "audio/mpeg" + assert Utils.get_safe_mime_type(opts, "video/mp4") == "video/mp4" + end + + test "it sanitizes potentially dangerous content-types" do + opts = %{allowed_mime_types: ["image", "audio", "video"]} + + assert Utils.get_safe_mime_type(opts, "application/activity+json") == + "application/octet-stream" + + assert Utils.get_safe_mime_type(opts, "text/html") == "application/octet-stream" + + assert Utils.get_safe_mime_type(opts, "application/javascript") == + "application/octet-stream" + end + + test "it sanitizes ActivityPub content types" do + opts = %{allowed_mime_types: ["image", "audio", "video"]} + + assert Utils.get_safe_mime_type(opts, "application/activity+json") == + "application/octet-stream" + + assert Utils.get_safe_mime_type(opts, "application/ld+json") == "application/octet-stream" + assert Utils.get_safe_mime_type(opts, "application/jrd+json") == "application/octet-stream" + end + + test "it sanitizes other potentially dangerous types" do + opts = %{allowed_mime_types: ["image", "audio", "video"]} + + assert Utils.get_safe_mime_type(opts, "text/html") == "application/octet-stream" + + assert Utils.get_safe_mime_type(opts, "application/javascript") == + "application/octet-stream" + + assert Utils.get_safe_mime_type(opts, "text/javascript") == "application/octet-stream" + assert Utils.get_safe_mime_type(opts, "application/xhtml+xml") == "application/octet-stream" + end + end +end diff --git a/test/pleroma/web/rich_media/card_test.exs b/test/pleroma/web/rich_media/card_test.exs index 387defc8c..c69f85323 100644 --- a/test/pleroma/web/rich_media/card_test.exs +++ b/test/pleroma/web/rich_media/card_test.exs @@ -83,4 +83,23 @@ defmodule Pleroma.Web.RichMedia.CardTest do Card.get_by_activity(activity) ) end + + test "refuses to crawl URL in activity from ignored host/domain" do + clear_config([:rich_media, :ignore_hosts], ["example.com"]) + + user = insert(:user) + + url = "https://example.com/ogp" + + {:ok, activity} = + CommonAPI.post(user, %{ + status: "[test](#{url})", + content_type: "text/markdown" + }) + + refute_enqueued( + worker: RichMediaWorker, + args: %{"url" => url, "activity_id" => activity.id} + ) + end end diff --git a/test/pleroma/web/rich_media/parser_test.exs b/test/pleroma/web/rich_media/parser_test.exs index 8fd75b57a..20f61badc 100644 --- a/test/pleroma/web/rich_media/parser_test.exs +++ b/test/pleroma/web/rich_media/parser_test.exs @@ -54,7 +54,6 @@ defmodule Pleroma.Web.RichMedia.ParserTest do {:ok, %{ "card" => "summary", - "site" => "@flickr", "image" => "https://farm6.staticflickr.com/5510/14338202952_93595258ff_z.jpg", "title" => "Small Island Developing States Photo Submission", "description" => "View the album on Flickr.", diff --git a/test/pleroma/web/rich_media/parsers/twitter_card_test.exs b/test/pleroma/web/rich_media/parsers/twitter_card_test.exs index e84a4e50a..15b272ff2 100644 --- a/test/pleroma/web/rich_media/parsers/twitter_card_test.exs +++ b/test/pleroma/web/rich_media/parsers/twitter_card_test.exs @@ -17,10 +17,6 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do assert TwitterCard.parse(html, %{}) == %{ - "app:id:googleplay" => "com.nytimes.android", - "app:name:googleplay" => "NYTimes", - "app:url:googleplay" => "nytimes://reader/id/100000006583622", - "site" => nil, "description" => "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.", "image" => @@ -44,7 +40,7 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do "description" => "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.", "image" => - "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg", + "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg", "image:alt" => "", "title" => "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.", @@ -61,16 +57,12 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do assert TwitterCard.parse(html, %{}) == %{ - "app:id:googleplay" => "com.nytimes.android", - "app:name:googleplay" => "NYTimes", - "app:url:googleplay" => "nytimes://reader/id/100000006583622", "card" => "summary_large_image", "description" => "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.", "image" => - "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg", + "https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg", "image:alt" => "", - "site" => nil, "title" => "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.", "url" => @@ -90,13 +82,11 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do assert TwitterCard.parse(html, %{}) == %{ - "site" => "@atlasobscura", "title" => "The Missing Grave of Margaret Corbin, Revolutionary War Veteran", "card" => "summary_large_image", "image" => image_path, "description" => "She's the only woman veteran honored with a monument at West Point. But where was she buried?", - "site_name" => "Atlas Obscura", "type" => "article", "url" => "http://www.atlasobscura.com/articles/margaret-corbin-grave-west-point" } @@ -109,12 +99,8 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do assert TwitterCard.parse(html, %{}) == %{ - "site" => nil, "title" => "She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.", - "app:id:googleplay" => "com.nytimes.android", - "app:name:googleplay" => "NYTimes", - "app:url:googleplay" => "nytimes://reader/id/100000006583622", "description" => "With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.", "image" => @@ -124,4 +110,23 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do "https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html" } end + + test "takes first image if multiple are specified" do + html = + File.read!("test/fixtures/fulmo.html") + |> Floki.parse_document!() + + assert TwitterCard.parse(html, %{}) == + %{ + "description" => "Pri feoj, kiuj devis ordigi falintan arbon.", + "image" => "https://tirifto.xwx.moe/r/ilustrajhoj/pinglordigado.png", + "title" => "Fulmo", + "type" => "website", + "url" => "https://tirifto.xwx.moe/eo/rakontoj/fulmo.html", + "image:alt" => + "Meze de arbaro kuŝas falinta trunko, sen pingloj kaj kun branĉoj derompitaj. Post ĝi videblas du feoj: florofeo maldekstre kaj nubofeo dekstre. La florofeo iom kaŝas sin post la trunko. La nubofeo staras kaj tenas amason da pigloj. Ili iom rigardas al si.", + "image:height" => "630", + "image:width" => "1200" + } + end end diff --git a/test/pleroma/web/streamer_test.exs b/test/pleroma/web/streamer_test.exs index 262ff11d2..85978e824 100644 --- a/test/pleroma/web/streamer_test.exs +++ b/test/pleroma/web/streamer_test.exs @@ -558,6 +558,36 @@ defmodule Pleroma.Web.StreamerTest do assert_receive {:render_with_user, _, "status_update.json", ^create, _} refute Streamer.filtered_by_user?(user, edited) end + + test "it streams posts containing followed hashtags on the 'user' stream", %{ + user: user, + token: oauth_token + } do + hashtag = insert(:hashtag, %{name: "tenshi"}) + other_user = insert(:user) + {:ok, user} = User.follow_hashtag(user, hashtag) + + Streamer.get_topic_and_add_socket("user", user, oauth_token) + {:ok, activity} = CommonAPI.post(other_user, %{status: "hey #tenshi"}) + + assert_receive {:render_with_user, _, "update.json", ^activity, _} + end + + test "should not stream private posts containing followed hashtags on the 'user' stream", %{ + user: user, + token: oauth_token + } do + hashtag = insert(:hashtag, %{name: "tenshi"}) + other_user = insert(:user) + {:ok, user} = User.follow_hashtag(user, hashtag) + + Streamer.get_topic_and_add_socket("user", user, oauth_token) + + {:ok, activity} = + CommonAPI.post(other_user, %{status: "hey #tenshi", visibility: "private"}) + + refute_receive {:render_with_user, _, "update.json", ^activity, _} + end end describe "public streams" do diff --git a/test/pleroma/workers/cron/digest_emails_worker_test.exs b/test/pleroma/workers/cron/digest_emails_worker_test.exs index e0bdf303e..46be82a4f 100644 --- a/test/pleroma/workers/cron/digest_emails_worker_test.exs +++ b/test/pleroma/workers/cron/digest_emails_worker_test.exs @@ -14,7 +14,7 @@ defmodule Pleroma.Workers.Cron.DigestEmailsWorkerTest do setup do: clear_config([:email_notifications, :digest]) setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/pleroma/workers/cron/new_users_digest_worker_test.exs b/test/pleroma/workers/cron/new_users_digest_worker_test.exs index 0e4234cc8..ca4139eac 100644 --- a/test/pleroma/workers/cron/new_users_digest_worker_test.exs +++ b/test/pleroma/workers/cron/new_users_digest_worker_test.exs @@ -11,7 +11,7 @@ defmodule Pleroma.Workers.Cron.NewUsersDigestWorkerTest do alias Pleroma.Workers.Cron.NewUsersDigestWorker setup do - Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config) + Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig) :ok end diff --git a/test/support/data_case.ex b/test/support/data_case.ex index 52d4bef1a..304bee5da 100644 --- a/test/support/data_case.ex +++ b/test/support/data_case.ex @@ -117,6 +117,8 @@ defmodule Pleroma.DataCase do Mox.stub_with(Pleroma.ConfigMock, Pleroma.Config) Mox.stub_with(Pleroma.StaticStubbedConfigMock, Pleroma.Test.StaticConfig) Mox.stub_with(Pleroma.StubbedHTTPSignaturesMock, Pleroma.Test.HTTPSignaturesProxy) + + Mox.stub_with(Pleroma.DateTimeMock, Pleroma.DateTime.Impl) end def ensure_local_uploader(context) do diff --git a/test/support/factory.ex b/test/support/factory.ex index 91e5805c8..88c4ed8e5 100644 --- a/test/support/factory.ex +++ b/test/support/factory.ex @@ -668,4 +668,11 @@ defmodule Pleroma.Factory do |> Map.merge(params) |> Pleroma.Announcement.add_rendered_properties() end + + def hashtag_factory(params \\ %{}) do + %Pleroma.Hashtag{ + name: "test #{sequence(:hashtag_name, & &1)}" + } + |> Map.merge(params) + end end diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex index ed044cf98..1c472fca9 100644 --- a/test/support/http_request_mock.ex +++ b/test/support/http_request_mock.ex @@ -955,7 +955,7 @@ defmodule HttpRequestMock do {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/ogp.html")}} end - def get("http://localhost:4001/users/masto_closed/followers", _, _, _) do + def get("https://remote.org/users/masto_closed/followers", _, _, _) do {:ok, %Tesla.Env{ status: 200, @@ -964,7 +964,7 @@ defmodule HttpRequestMock do }} end - def get("http://localhost:4001/users/masto_closed/followers?page=1", _, _, _) do + def get("https://remote.org/users/masto_closed/followers?page=1", _, _, _) do {:ok, %Tesla.Env{ status: 200, @@ -973,7 +973,7 @@ defmodule HttpRequestMock do }} end - def get("http://localhost:4001/users/masto_closed/following", _, _, _) do + def get("https://remote.org/users/masto_closed/following", _, _, _) do {:ok, %Tesla.Env{ status: 200, @@ -982,7 +982,7 @@ defmodule HttpRequestMock do }} end - def get("http://localhost:4001/users/masto_closed/following?page=1", _, _, _) do + def get("https://remote.org/users/masto_closed/following?page=1", _, _, _) do {:ok, %Tesla.Env{ status: 200, @@ -991,7 +991,7 @@ defmodule HttpRequestMock do }} end - def get("http://localhost:8080/followers/fuser3", _, _, _) do + def get("https://remote.org/followers/fuser3", _, _, _) do {:ok, %Tesla.Env{ status: 200, @@ -1000,7 +1000,7 @@ defmodule HttpRequestMock do }} end - def get("http://localhost:8080/following/fuser3", _, _, _) do + def get("https://remote.org/following/fuser3", _, _, _) do {:ok, %Tesla.Env{ status: 200, @@ -1009,7 +1009,7 @@ defmodule HttpRequestMock do }} end - def get("http://localhost:4001/users/fuser2/followers", _, _, _) do + def get("https://remote.org/users/fuser2/followers", _, _, _) do {:ok, %Tesla.Env{ status: 200, @@ -1018,7 +1018,7 @@ defmodule HttpRequestMock do }} end - def get("http://localhost:4001/users/fuser2/following", _, _, _) do + def get("https://remote.org/users/fuser2/following", _, _, _) do {:ok, %Tesla.Env{ status: 200, diff --git a/test/support/mocks.ex b/test/support/mocks.ex index d84958e15..ca2974504 100644 --- a/test/support/mocks.ex +++ b/test/support/mocks.ex @@ -33,3 +33,6 @@ Mox.defmock(Pleroma.StubbedHTTPSignaturesMock, for: Pleroma.HTTPSignaturesAPI) Mox.defmock(Pleroma.LoggerMock, for: Pleroma.Logging) Mox.defmock(Pleroma.Uploaders.S3.ExAwsMock, for: Pleroma.Uploaders.S3.ExAwsAPI) + +Mox.defmock(Pleroma.DateTimeMock, for: Pleroma.DateTime) +Mox.defmock(Pleroma.MogrifyMock, for: Pleroma.MogrifyBehaviour) diff --git a/test/test_helper.exs b/test/test_helper.exs index fed7ce8a7..94661353b 100644 --- a/test/test_helper.exs +++ b/test/test_helper.exs @@ -34,7 +34,13 @@ defmodule Pleroma.Test.StaticConfig do @behaviour Pleroma.Config.Getting @config Application.get_all_env(:pleroma) + @impl true def get(path, default \\ nil) do get_in(@config, path) || default end + + @impl true + def get!(path) do + get_in(@config, path) + end end